Advertisement
Sergey91

Untitled

Jul 19th, 2018
105
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 10.85 KB | None | 0 0
  1. import csv
  2. import datetime
  3. import numpy
  4. import os
  5. import yaml
  6. import math
  7.  
  8. import numpy as np
  9. from itertools import islice
  10. from nupic.algorithms.sdr_classifier_factory import SDRClassifierFactory
  11. from nupic.algorithms.spatial_pooler import SpatialPooler
  12. from nupic.algorithms.temporal_memory import TemporalMemory
  13. from nupic.algorithms.sdr_classifier import  SDRClassifier
  14. from nupic.encoders.date import DateEncoder
  15. from nupic.encoders.random_distributed_scalar import \
  16.   RandomDistributedScalarEncoder
  17. from nupic.encoders.multi import  MultiEncoder
  18. from  nupic.encoders.scalar import ScalarEncoder
  19. import capnp
  20. from experiment_save  import classifier
  21. from experiment_save  import classifier1
  22. from experiment_save  import classifier2
  23. from nupic.proto.SpatialPoolerProto_capnp import  SpatialPoolerProto
  24. from nupic.proto.TemporalMemoryProto_capnp import TemporalMemoryProto
  25. from nupic.algorithms.spatial_pooler import SpatialPooler
  26. from time import  time
  27. import json
  28. import urllib2
  29.  
  30. from pytz import timezone
  31.  
  32. result_testing =  [0]
  33. result_testing1 = [0]
  34. result_testing2 = [0]
  35. result_testing3 = [0]
  36. result_testing4 = [0]
  37. result_testing5 = [0]
  38. result_testing7 = [0]
  39.  
  40. results = []
  41. results1 = []
  42. results2 = []
  43. results7 = []
  44.  
  45. sum_event  = []
  46. sum_event2 = []
  47. sum_event3 = []
  48. sum_event4 = []
  49. sum_event5 = []
  50. sum_event6 = []
  51. sum_event7 = []
  52. res2 = []
  53. evnt2 = []
  54. res5 = []
  55. res7 = []
  56. evnt7 = []
  57. evnt5 =[]
  58. res3 = []
  59. evnt3  = []
  60. flow_x = []
  61. bezline = [] # row 10
  62. encoding_csv = [] # row 9
  63. sum_flow = []
  64. flow_rate = []
  65. pressure = []
# Load model hyper-parameters from the YAML file next to this script.
# Only the pooler / temporal-memory / classifier sections are extracted;
# the encoders are configured in code below instead.
PARAMS_PATH =  ("model_iot.yaml")
with open(PARAMS_PATH, "r") as f:
    modelParams = yaml.safe_load(f)["modelParams"]
    # enParams = modelParams["sensorParams"]["encoders"]
    spParams = modelParams["spParams"]  # spatial pooler parameters
    tmParams = modelParams["tmParams"]  # temporal memory parameters
    clParams = modelParams["clParams"]  # SDR classifier parameters
  73.  
# Scalar encoders for the model inputs.  The event encoders are binary
# (minval=0, maxval=1); baseline and flow cover their sensor ranges.
# eventEncoder7 is constructed but every use of it below is commented out.
eventEncoder = ScalarEncoder(name="event", w=7, n=14, minval=0, maxval=1,forced=True)
eventEncoder1 = ScalarEncoder(name="event1", w=7, n=14, minval=0, maxval=1,forced=True)
eventEncoder7 = ScalarEncoder(name="event7", w=7, n=14, minval=0, maxval=1,forced=True)
eventEncoder2 = ScalarEncoder(name="event2", w=7, n=14, minval=0, maxval=1,forced=True)
#eventEncoder2 = ScalarEncoder(name="event2", w=9, n=18, minval=0, maxval=1,forced=True)
baselineEncoder = ScalarEncoder(name = "baseline",w = 21, n=1365,minval= 51,maxval=63,forced= True)
flowEncoder = ScalarEncoder(name="flow", w=15, n = 900, minval=0, maxval=5,forced = True)
# Total width of the concatenated encoding fed to the spatial pooler.
# NOTE(review): neither width variable is referenced elsewhere in this
# file's visible code — presumably kept for documentation; confirm.
encodingWidth = (eventEncoder.getWidth()+eventEncoder1.getWidth()+eventEncoder2.getWidth()+flowEncoder.getWidth()+baselineEncoder.getWidth())
encodingWidth1 =(eventEncoder1.getWidth()
                 + flowEncoder.getWidth())
# Fresh classifiers are not built here; serialized ones are restored from
# the out_classifier*.tmp snapshots below instead.
# classifier = SDRClassifier(
#   steps  = [1],alpha=clParams["alpha"], verbosity= clParams["verbosity"]
# )
# classifier1 = SDRClassifier(
#   steps  = [1],alpha=clParams["alpha"], verbosity= clParams["verbosity"]
# )
# classifier2 = SDRClassifier(
#   steps  = [1],alpha=clParams["alpha"], verbosity= clParams["verbosity"]
# )
  93.  
# Restore the serialized model components (Cap'n Proto snapshots) —
# presumably written by a prior training run (experiment_save is imported
# above); confirm against that script.
with open("out_sp.tmp", "rb") as f1:
  sp2 = SpatialPooler.readFromFile(f1)
with open("out_tm.tmp", "rb") as f2:
  tm2 = TemporalMemory.readFromFile(f2)
# Note: the handle name f2 is reused here; harmless since each `with`
# closes its own file.
with open("out_classifier.tmp", "rb") as f2:
      classifier4 = SDRClassifier.readFromFile(f2)
with open("out_classifier1.tmp", "rb") as f3:
    classifier5= SDRClassifier.readFromFile(f3)
with open("out_classifier2.tmp", "rb") as f4:
    classifier6= SDRClassifier.readFromFile(f4)
  104.  
  105.  
  106.  
# Timestamp used to name the result CSV written at the end of runTesting().
# NOTE(review): the ":" characters make this filename invalid on Windows.
filename1  = datetime.datetime.strftime(datetime.datetime.now(), "%Y.%m.%d_%H:%M:%S")
# Pre-load the whole test set: ground-truth event flags (columns 2-8) and
# the auxiliary sensor columns used when writing the result CSV.
with open("test3.csv", "r") as fin:
    reader = csv.reader(fin)
    # Python 2 csv iterator; skip the header row plus two extra leading
    # rows (runTesting() skips the same three).
    headers = reader.next()
    reader.next()
    reader.next()
    for row in reader:

         sum_event.append(int(row[2]))
         sum_event2.append(int(row[3]))
         sum_event3.append(int(row[4]))
         sum_event4.append(int(row[5]))
         sum_event5.append(int(row[6]))
         sum_event6.append(int(row[7]))
         sum_event7.append(int(row[8]))
         encoding_csv.append(float(row[9]))
         bezline.append(float(row[10]))
         pressure.append(float(row[11]))
         flow_rate.append(float(row[0]))
  126.  
def runTesting(numRecords):
  """Run an inference pass of the restored HTM model over test3.csv.

  For each row, the previous step's own predictions are encoded together
  with the flow and baseline readings, pushed through the spatial pooler
  and temporal memory, and decoded back into one-step event predictions
  by the three restored SDR classifiers.  Predictions and ground truth
  are finally written to a timestamped result CSV.

  :param numRecords: maximum number of CSV data rows to process.
  :returns: list of one-step predictions for event stream 1, each
            wrapped in a single-element list.
  """
  testing_time = time()
  # result_testing* persist across calls and feed each step's encoder
  # input; oneStep* escape to module scope as well.
  global  result_testing,oneStep,result_testing1,oneStep1,result_testing7,oneStep7,result_testing2,oneStep2
  with open("test3.csv", "r") as fin:
    reader = csv.reader(fin)
    # Python 2 csv iterator: consume the header plus two extra leading
    # rows, matching the pre-load pass at module level.
    headers = reader.next()
    reader.next()
    reader.next()

    for count, record in enumerate(reader):
      print "Testing count",count
      if count >= numRecords: break

      # Convert data string into Python date object.
      #dateString = datetime.datetime.strptime(record[0], "%m/%d/%y %H:%M")
      # The model's own previous predictions become the current event
      # inputs; on the first row index count-1 == -1 reads the seed 0.
      priv = count  # unused; apparently left over from an earlier revision
      event_value = result_testing[count-1]
      event_value_2 = result_testing2[count-1]
      event_value_3 = result_testing1[count-1]
      # event_value_7 = result_testing7[count]
      pres_data = float(record[11])  # read but never used below
      bezline_all = float(record[10])
      flow_value  = float(record[0])

      # bezline = float(record[10])
      # encoding = float[record[9]]
      # To encode, we need to provide zero-filled numpy arrays for the
      # encoders to populate.
      eventBits = numpy.zeros(eventEncoder.getWidth())
      eventBits_2 = numpy.zeros(eventEncoder2.getWidth())
      eventBits_3 = numpy.zeros(eventEncoder1.getWidth())
      # eventBits_7 = numpy.zeros(eventEncoder7.getWidth())

      flowBits = numpy.zeros(flowEncoder.getWidth())
      baseline_Bits = numpy.zeros(baselineEncoder.getWidth())

      # Now we call the encoders to create bit representations for each value.
      eventEncoder.encodeIntoArray(event_value, eventBits)
      eventEncoder2.encodeIntoArray(event_value_2, eventBits_2)
      eventEncoder1.encodeIntoArray(event_value_3,eventBits_3)
      # eventEncoder7.encodeIntoArray(event_value_7, eventBits_7)

      baselineEncoder.encodeIntoArray(bezline_all, baseline_Bits)
      flowEncoder.encodeIntoArray(flow_value, flowBits)


      # Concatenate all these encodings into one large encoding for Spatial
      # Pooling.
      encoding = numpy.concatenate(
        [eventBits,eventBits_2,eventBits_3,flowBits,baseline_Bits,]
      )
      # enc2 = numpy.concatenate([eventBits,eventBits_2,eventBits_3])


      # enc = numpy.concatenate(encoding, encoding)
      # Create an array to represent active columns, all initially zero. This
      # will be populated by the compute method below. It must have the same
      # dimensions as the Spatial Pooler.


      colum_count = sp2.getColumnDimensions()

      print "Column_cout:", colum_count
      activeColumns = numpy.zeros(colum_count)


      # Execute Spatial Pooling algorithm over input space.
      # NOTE(review): learn=True here even though this is the testing pass
      # (and TM/classifiers run with learn=False) — confirm the pooler is
      # really meant to keep adapting during evaluation.
      # sp2.compute(encoding, False, activeColumns)
      sp2.compute(encoding,True,activeColumns)


      activeColumnIndices = numpy.nonzero(activeColumns)[0]


      # Execute Temporal Memory algorithm over active mini-columns.
      tm2.compute(activeColumnIndices, learn=False)

      activeCells = tm2.getActiveCells()

      # Get the bucket info for this input value for classification.
      bucketIdx  =  eventEncoder.getBucketIndices(event_value)[0]
      bucketIdx_2 = eventEncoder2.getBucketIndices(event_value_2)[0]
      bucketIdx_3 = eventEncoder1.getBucketIndices(event_value_3)[0]
      # bucketIdx_7 = eventEncoder7.getBucketIndices(event_value_7)[0]

      # Run classifiers to translate active cells back to scalar values.
      # recordNum is offset by 30000 — presumably continuing the record
      # numbering of the training run that produced these classifiers;
      # verify against the training script.
      classifierResult = classifier4.compute(
        recordNum=count+30000,
        patternNZ=activeCells,
        classification={
          "bucketIdx": bucketIdx,
          "actValue": event_value
        },
        learn=False,
        infer=True
      )
      classifierResult1 = classifier5.compute(
        recordNum=count+30000,
        patternNZ= activeCells,
        classification={
          "bucketIdx": bucketIdx_3,
          "actValue": event_value_3
        },
        learn=False,
        infer=True
      )

      classifierResult2 = classifier6.compute(
        recordNum=count+30000,
        patternNZ= activeCells,
        classification={
          "bucketIdx": bucketIdx_2,
          "actValue": event_value_2
        },
        learn=False,
        infer=True
      )
      # Take the highest-likelihood prediction for 1 step out from each
      # classifier (classifierResult[1] holds the 1-step likelihoods).
      oneStepConfidence, oneStep = sorted(
        zip(classifierResult[1], classifierResult["actualValues"]),
        reverse=True
      )[0]
      oneStepConfidence1, oneStep1 = sorted(
        zip(classifierResult1[1], classifierResult1["actualValues"]),
        reverse=True
      )[0]

      oneStepConfidence2, oneStep2 = sorted(
        zip(classifierResult2[1], classifierResult2["actualValues"]),
        reverse=True
      )[0]
      testing_time_end = time()
      print "OneStep",oneStep
      print "OneStep1",oneStep1
      print "OneStep2",oneStep2
      print "BucketIdx",bucketIdx
      print "BucketIdx_2",bucketIdx_2
      print "BucketIdx_3",bucketIdx_3
      print "Time testing", (testing_time_end - testing_time)
      # Record predictions; result_testing* also feed the next iteration.
      results.append([oneStep])
      results1.append([oneStep1])

      results2.append([oneStep2])
      result_testing.append(oneStep)
      result_testing1.append(oneStep1)
      result_testing2.append(oneStep2)
      # result_testing7.append(oneStep7)

    # Dump predictions next to ground-truth events.  Predictions are
    # mapped to positive plot levels (1/3/2), events to matching negative
    # levels (-1/-3/-2), so the pairs can be charted together.
    # NOTE(review): the header tuple lists 12 columns but each data row
    # below contains only 10 values — "id"/"time" are never written.
    with open(filename1+'_result_graphic1.csv', 'w') as csv_file:
        csv_writer = csv.writer(csv_file)
        headers = ("prediction_1","event_1","prediction_3","event_3","prediction2","event2","encoding","bezline","flow","pressure","id","time")
        csv_writer.writerow(headers)

        # NOTE(review): result_testing holds one seed entry plus one entry
        # per processed row, so when numRecords >= the file's row count
        # this loop runs one step past the sum_event* lists (IndexError).
        for l in range(len(result_testing)):
            if result_testing[l] == 1:
                   res5.append(1)
            else :
                    res5.append(0)
            if sum_event[l] == 1:
                 evnt5.append(-1)
            else :
                evnt5.append(0)
            if result_testing1[l] == 1:
                res3.append(3)
            else:
                res3.append(0)
            if sum_event3[l] == 1:
                evnt3.append(-3)
            else:
                evnt3.append(0)
            if result_testing2[l] == 1:
                res2.append(2)
            else:
                res2.append(0)
            if sum_event2[l] == 1:
                evnt2.append(-2)
            else:
                evnt2.append(0)

            csv_writer.writerow([res5[l],evnt5[l],res3[l],evnt3[l],res2[l],evnt2[l],encoding_csv[l],bezline[l],flow_rate[l],pressure[l]])
    testing_time_end = time()
    print "Time testingL",(testing_time_end - testing_time)
    return results
  312.  
if __name__ == "__main__":
  # Inference pass over (at most) the first 5000 test rows.
  runTesting(5000)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement