Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import csv
- import datetime
- import numpy
- import os
- import yaml
- import math
- import numpy as np
- from itertools import islice
- from nupic.algorithms.sdr_classifier_factory import SDRClassifierFactory
- from nupic.algorithms.spatial_pooler import SpatialPooler
- from nupic.algorithms.temporal_memory import TemporalMemory
- from nupic.algorithms.sdr_classifier import SDRClassifier
- from nupic.encoders.date import DateEncoder
- from nupic.encoders.random_distributed_scalar import \
- RandomDistributedScalarEncoder
- from nupic.encoders.multi import MultiEncoder
- from nupic.encoders.scalar import ScalarEncoder
- import capnp
- from experiment_save import classifier
- from experiment_save import classifier1
- from experiment_save import classifier2
- from nupic.proto.SpatialPoolerProto_capnp import SpatialPoolerProto
- from nupic.proto.TemporalMemoryProto_capnp import TemporalMemoryProto
- from nupic.algorithms.spatial_pooler import SpatialPooler
- from time import time
- import json
- import urllib2
- from pytz import timezone
# --- Module-level state shared between the CSV pre-scan below and
# --- runTesting(): prediction histories, plot series and raw signals.

# One-step prediction history per event channel; seeded with a single 0
# so index -1 is a valid "previous prediction" on the very first record.
result_testing = [0]
result_testing1 = [0]
result_testing2 = [0]
result_testing3 = [0]
result_testing4 = [0]
result_testing5 = [0]
result_testing7 = [0]

# Raw one-step predictions accumulated per channel.
results, results1, results2, results7 = [], [], [], []

# Ground-truth event columns copied straight out of the test CSV.
sum_event, sum_event2, sum_event3 = [], [], []
sum_event4, sum_event5, sum_event6, sum_event7 = [], [], [], []

# Scaled plot series written to the result CSV (predictions positive,
# ground truth negative, one amplitude level per channel).
res2, evnt2 = [], []
res5, evnt5 = [], []
res7, evnt7 = [], []
res3, evnt3 = [], []

# Raw signal columns from the test CSV.
flow_x = []
bezline = []       # CSV column 10
encoding_csv = []  # CSV column 9
sum_flow = []
flow_rate = []
pressure = []
# Model hyper-parameters live in an external YAML file so they can be
# tuned without editing the code.
PARAMS_PATH = ("model_iot.yaml")
with open(PARAMS_PATH, "r") as f:
    modelParams = yaml.safe_load(f)["modelParams"]
# enParams = modelParams["sensorParams"]["encoders"]
spParams = modelParams["spParams"]  # spatial pooler settings
tmParams = modelParams["tmParams"]  # temporal memory settings
clParams = modelParams["clParams"]  # SDR classifier settings
# Binary event encoders: a 0/1 value encoded as w=7 active bits out of
# n=14 (eventEncoder7 is currently unused by runTesting but is kept so
# the module interface stays unchanged).
eventEncoder = ScalarEncoder(
    name="event", w=7, n=14, minval=0, maxval=1, forced=True)
eventEncoder1 = ScalarEncoder(
    name="event1", w=7, n=14, minval=0, maxval=1, forced=True)
eventEncoder7 = ScalarEncoder(
    name="event7", w=7, n=14, minval=0, maxval=1, forced=True)
eventEncoder2 = ScalarEncoder(
    name="event2", w=7, n=14, minval=0, maxval=1, forced=True)
# Continuous signal encoders: baseline expected in [51, 63], flow in [0, 5].
baselineEncoder = ScalarEncoder(
    name="baseline", w=21, n=1365, minval=51, maxval=63, forced=True)
flowEncoder = ScalarEncoder(
    name="flow", w=15, n=900, minval=0, maxval=5, forced=True)

# Total width of the concatenated SDR fed to the spatial pooler
# (must match the layout the saved pooler was trained with).
encodingWidth = (eventEncoder.getWidth() + eventEncoder1.getWidth()
                 + eventEncoder2.getWidth() + flowEncoder.getWidth()
                 + baselineEncoder.getWidth())
# Width of the reduced (event1 + flow) encoding.
encodingWidth1 = eventEncoder1.getWidth() + flowEncoder.getWidth()
# Restore the trained model state serialized by the training script
# (experiment_save): spatial pooler, temporal memory and the three
# per-channel SDR classifiers.
# NOTE: f2 is reused for two different files; harmless because each
# `with` block closes its handle before the next one opens.
with open("out_sp.tmp", "rb") as f1:
    sp2 = SpatialPooler.readFromFile(f1)
with open("out_tm.tmp", "rb") as f2:
    tm2 = TemporalMemory.readFromFile(f2)
with open("out_classifier.tmp", "rb") as f2:
    classifier4 = SDRClassifier.readFromFile(f2)
with open("out_classifier1.tmp", "rb") as f3:
    classifier5 = SDRClassifier.readFromFile(f3)
with open("out_classifier2.tmp", "rb") as f4:
    classifier6 = SDRClassifier.readFromFile(f4)
# Timestamped prefix for the result CSV written by runTesting().
# NOTE(review): the ":" characters make this an invalid filename on
# Windows -- presumably this only runs on POSIX systems; confirm.
filename1 = datetime.datetime.strftime(datetime.datetime.now(), "%Y.%m.%d_%H:%M:%S")
# Pre-scan the whole test set once so the ground-truth event columns and
# raw signals are available when the result CSV is assembled later.
with open("test3.csv", "r") as fin:
    reader = csv.reader(fin)
    headers = reader.next()  # header row (Python 2 iterator protocol)
    reader.next()            # skip two additional non-data rows
    reader.next()
    for row in reader:
        sum_event.append(int(row[2]))    # ground-truth event channels
        sum_event2.append(int(row[3]))
        sum_event3.append(int(row[4]))
        sum_event4.append(int(row[5]))
        sum_event5.append(int(row[6]))
        sum_event6.append(int(row[7]))
        sum_event7.append(int(row[8]))
        encoding_csv.append(float(row[9]))  # raw signal columns
        bezline.append(float(row[10]))
        pressure.append(float(row[11]))
        flow_rate.append(float(row[0]))
- def runTesting(numRecords):
- testing_time = time()
- global result_testing,oneStep,result_testing1,oneStep1,result_testing7,oneStep7,result_testing2,oneStep2
- with open("test3.csv", "r") as fin:
- reader = csv.reader(fin)
- headers = reader.next()
- reader.next()
- reader.next()
- for count, record in enumerate(reader):
- print "Testing count",count
- if count >= numRecords: break
- # Convert data string into Python date object.
- #dateString = datetime.datetime.strptime(record[0], "%m/%d/%y %H:%M")
- # Convert data value string into float.
- priv = count
- event_value = result_testing[count-1]
- event_value_2 = result_testing2[count-1]
- event_value_3 = result_testing1[count-1]
- # event_value_7 = result_testing7[count]
- pres_data = float(record[11])
- bezline_all = float(record[10])
- flow_value = float(record[0])
- # bezline = float(record[10])
- # encoding = float[record[9]]
- # To encode, we need to provide zero-filled numpy arrays for the encoders
- # to populate.
- eventBits = numpy.zeros(eventEncoder.getWidth())
- eventBits_2 = numpy.zeros(eventEncoder2.getWidth())
- eventBits_3 = numpy.zeros(eventEncoder1.getWidth())
- # eventBits_7 = numpy.zeros(eventEncoder7.getWidth())
- flowBits = numpy.zeros(flowEncoder.getWidth())
- baseline_Bits = numpy.zeros(baselineEncoder.getWidth())
- # Now we call the encoders to create bit representations for each value.
- eventEncoder.encodeIntoArray(event_value, eventBits)
- eventEncoder2.encodeIntoArray(event_value_2, eventBits_2)
- eventEncoder1.encodeIntoArray(event_value_3,eventBits_3)
- # eventEncoder7.encodeIntoArray(event_value_7, eventBits_7)
- baselineEncoder.encodeIntoArray(bezline_all, baseline_Bits)
- flowEncoder.encodeIntoArray(flow_value, flowBits)
- # Concatenate all these encodings into one large encoding for Spatial
- # Pooling.
- encoding = numpy.concatenate(
- [eventBits,eventBits_2,eventBits_3,flowBits,baseline_Bits,]
- )
- # enc2 = numpy.concatenate([eventBits,eventBits_2,eventBits_3])
- # enc = numpy.concatenate(encoding, encoding)
- # Create an array to represent active columns, all initially zero. This
- # will be populated by the compute method below. It must have the same
- # dimensions as the Spatial Pooler.
- colum_count = sp2.getColumnDimensions()
- print "Column_cout:", colum_count
- activeColumns = numpy.zeros(colum_count)
- # Execute Spatial Pooling algorithm over input space.
- # sp2.compute(encoding, False, activeColumns)
- sp2.compute(encoding,True,activeColumns)
- activeColumnIndices = numpy.nonzero(activeColumns)[0]
- # Execute Temporal Memory algorithm over active mini-columns.
- tm2.compute(activeColumnIndices, learn=False)
- activeCells = tm2.getActiveCells()
- # Get the bucket info for this input value for classification.
- bucketIdx = eventEncoder.getBucketIndices(event_value)[0]
- bucketIdx_2 = eventEncoder2.getBucketIndices(event_value_2)[0]
- bucketIdx_3 = eventEncoder1.getBucketIndices(event_value_3)[0]
- # bucketIdx_7 = eventEncoder7.getBucketIndices(event_value_7)[0]r.getWidth()+eventEncoder1.getWidth()+ eventEncoder2.getWidth()
- # bucketIdx_7 = eventEncoder7.getBucketIndices(event_value_7)[0]
- # Run classifier to translate active cells back to scalar value.
- classifierResult = classifier4.compute(
- recordNum=count+30000,
- patternNZ=activeCells,
- classification={
- "bucketIdx": bucketIdx,
- "actValue": event_value
- },
- learn=False,
- infer=True
- )
- classifierResult1 = classifier5.compute(
- recordNum=count+30000,
- patternNZ= activeCells,
- classification={
- "bucketIdx": bucketIdx_3,
- "actValue": event_value_3
- },
- learn=False,
- infer=True
- )
- classifierResult2 = classifier6.compute(
- recordNum=count+30000,
- patternNZ= activeCells,
- classification={
- "bucketIdx": bucketIdx_2,
- "actValue": event_value_2
- },
- learn=False,
- infer=True
- )
- # Print the best prediction for 1 step out.
- oneStepConfidence, oneStep = sorted(
- zip(classifierResult[1], classifierResult["actualValues"]),
- reverse=True
- )[0]
- oneStepConfidence1, oneStep1 = sorted(
- zip(classifierResult1[1], classifierResult1["actualValues"]),
- reverse=True
- )[0]
- oneStepConfidence2, oneStep2 = sorted(
- zip(classifierResult2[1], classifierResult2["actualValues"]),
- reverse=True
- )[0]
- testing_time_end = time()
- print "OneStep",oneStep
- print "OneStep1",oneStep1
- print "OneStep2",oneStep2
- print "BucketIdx",bucketIdx
- print "BucketIdx_2",bucketIdx_2
- print "BucketIdx_3",bucketIdx_3
- print "Time testing", (testing_time_end - testing_time)
- results.append([oneStep])
- results1.append([oneStep1])
- results2.append([oneStep2])
- result_testing.append(oneStep)
- result_testing1.append(oneStep1)
- result_testing2.append(oneStep2)
- # result_testing7.append(oneStep7)
- with open(filename1+'_result_graphic1.csv', 'w') as csv_file:
- csv_writer = csv.writer(csv_file)
- headers = ("prediction_1","event_1","prediction_3","event_3","prediction2","event2","encoding","bezline","flow","pressure","id","time")
- csv_writer.writerow(headers)
- for l in range(len(result_testing)):
- if result_testing[l] == 1:
- res5.append(1)
- else :
- res5.append(0)
- if sum_event[l] == 1:
- evnt5.append(-1)
- else :
- evnt5.append(0)
- if result_testing1[l] == 1:
- res3.append(3)
- else:
- res3.append(0)
- if sum_event3[l] == 1:
- evnt3.append(-3)
- else:
- evnt3.append(0)
- if result_testing2[l] == 1:
- res2.append(2)
- else:
- res2.append(0)
- if sum_event2[l] == 1:
- evnt2.append(-2)
- else:
- evnt2.append(0)
- csv_writer.writerow([res5[l],evnt5[l],res3[l],evnt3[l],res2[l],evnt2[l],encoding_csv[l],bezline[l],flow_rate[l],pressure[l]])
- testing_time_end = time()
- print "Time testingL",(testing_time_end - testing_time)
- return results
if __name__ == "__main__":
    # Evaluate the restored models on the first 5000 test records.
    runTesting(5000)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement