Sergey91

learning_stage

Jul 19th, 2018
81
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 17.12 KB | None | 0 0
  1. import csv
  2. import datetime
  3. import numpy
  4. import os
  5. import yaml
  6. import math
  7. import numpy as np
  8. from itertools import islice
  9. from nupic.algorithms.sdr_classifier_factory import SDRClassifierFactory
  10. from nupic.algorithms.spatial_pooler import SpatialPooler
  11. from nupic.algorithms.temporal_memory import TemporalMemory
  12.  
  13. from nupic.encoders.date import DateEncoder
  14. from nupic.encoders.random_distributed_scalar import \
  15.   RandomDistributedScalarEncoder
  16. from nupic.encoders.multi import  MultiEncoder
  17. from  nupic.encoders.scalar import ScalarEncoder
  18. from nupic.algorithms.sdr_classifier import  SDRClassifier
  19. from time import  time
  20. import json
  21. import urllib2
  22. import capnp
  23. from nupic.proto.SpatialPoolerProto_capnp import  SpatialPoolerProto
  24. from nupic.proto.TemporalMemoryProto_capnp import TemporalMemoryProto
  25. from nupic.algorithms.spatial_pooler import SpatialPooler
  26.  
  27. from pytz import timezone
  28.  
  29. _INPUT_FILE_PATH =  ("test1.csv")
  30. PARAMS_PATH =  ("model_iot.yaml")
  31. result_testing =  [0]
  32. result_testing1 = [0]
  33. result_testing2 = [0]
  34. result_testing3 = [0]
  35. result_testing4 = [0]
  36. result_testing5 = [0]
  37. result_testing7 = [0]
  38.  
  39. sum_event  = []
  40. sum_event2 = []
  41. sum_event3 = []
  42. sum_event4 = []
  43. sum_event5 = []
  44. sum_event6 = []
  45. sum_event7 = []
  46. res2 = []
  47. evnt2 = []
  48. res5 = []
  49. res7 = []
  50. evnt7 = []
  51. evnt5 =[]
  52. res3 = []
  53. evnt3  = []
  54. flow_x = []
  55. bezline = [] # row 10
  56. encoding_csv = [] # row 9
  57. sum_flow = []
  58. # parameter file
  59. PARAMS_PATH =  ("model_iot.yaml")
  60. #base dataset
  61. INPUT = ("real_dataset.csv")
  62. #Work dataset
  63. PREPARE = ("prepare_model.csv")
  64. TEST =("test.csv")
  65. pressure = []
  66. flow_rate =[]
  67. event = []
  68. # Massive metrics
  69. metric_massive = []
  70. #Massive with results of learning
  71. result_learining = [0]
  72. #Massive with results of testing
  73.  
  74. flow = []
  75. pres = []
  76. ev = []
  77. sred = []
  78. p=[]
  79. tr =[]
  80. l =[]
  81. pl=[]
  82. dr=[]
  83. fl = []
  84. fl_r = []
  85. tnt =[]
  86. v1 = []
  87. v =[]
  88. switch_event = [] # switch event
  89. encod_value = 0
  90. val = 0
  91. ev1 = 0
  92. ev2 = 0
  93. ev3 = 0
  94. ev4 = 0
  95. ev5 = 0
  96. ev6 = 0
  97. ev7 = 0
  98.  
  99. event1 = []
  100. event2 = []
  101. event3 = []
  102. event4 = []
  103. event5 = []
  104. event6 = []
  105. event7 = []
  106. event10 = []
  107.  
  108. event11 = []
  109. event12 = []
  110. event13 = []
  111. event14 = []
  112. event15 = []
  113. event16 = []
  114. event17 = []
  115. event18 = []
  116.  
  117. even = 0
  118. pr = []
  119. dr = []
  120. dx = 0
  121. kr = 0
  122. event18 = []
  123.  
  124. with open("real_dataset3.csv") as fin:
  125.  
  126.     reader = csv.reader(fin, delimiter=',')
  127.     headers = reader.next()
  128.     reader.next()
  129.     reader.next()
  130.     for row in reader:
  131.          pressure.append(float(row[1]))
  132.          flow_rate.append(float(row[2]))
  133.          event.append(row[3])
  134.     for tt in range(0,29900):
  135.         event10.append(event[tt])
  136.  
  137.  
  138. for cur in xrange(len(event10)): # cur - current index in massive event
  139.      kr = map(int,event[cur].split(",")) #  Parse string
  140.  
  141.      for tir in xrange(len(kr)):
  142.  
  143.         if kr[tir] == 0:
  144.             ev1 = 0
  145.             ev2 = 0
  146.             ev3 = 0
  147.             ev4 = 0
  148.             ev5 = 0
  149.             ev6 = 0
  150.             ev7 = 0
  151.         if kr[tir] == 5:
  152.             ev1 = 0
  153.             ev2 = 0
  154.             ev3 = 0
  155.             ev4 = 0
  156.             ev5 = 1
  157.             ev6 = 0
  158.             ev7 = 0
  159.         if kr[tir] == 7:
  160.             ev1 = 0
  161.             ev2 = 0
  162.             ev3 = 0
  163.             ev4 = 0
  164.             ev5 = 0
  165.             ev6 = 0
  166.             ev7 = 1
  167.         if kr[tir] == 2 :
  168.             ev1 = 0
  169.             ev2 = 1
  170.             ev3 = 0
  171.             ev4 = 0
  172.             ev5 = 0
  173.             ev6 = 0
  174.             ev7 = 0
  175.         if kr[tir] == 3:
  176.             ev1 = 0
  177.             ev2 = 0
  178.             ev3 = 1
  179.             ev4 = 0
  180.             ev5 = 0
  181.             ev6 = 0
  182.             ev7 = 0
  183.         if kr[tir] == 1:
  184.             ev1 = 1
  185.             ev2 = 0
  186.             ev3 = 0
  187.             ev4 = 0
  188.             ev5 = 0
  189.             ev6 = 0
  190.             ev7 = 0
  191.         if kr[tir] == 4:
  192.             ev1 = 0
  193.             ev2 = 0
  194.             ev3 = 0
  195.             ev4 = 1
  196.             ev5 = 0
  197.             ev6 = 0
  198.             ev7 = 0
  199.         if kr[tir] == 6:
  200.             ev1 = 0
  201.             ev2 = 0
  202.             ev3 = 0
  203.             ev4 = 0
  204.             ev5 = 0
  205.             ev6 = 1
  206.             ev7 = 0
  207.         event1.append(ev1)
  208.  
  209.         event2.append(ev2)
  210.  
  211.         event3.append(ev3)
  212.  
  213.         event4.append(ev4)
  214.  
  215.         event5.append(ev5)
  216.  
  217.         event6.append(ev6)
  218.  
  219.         event7.append(ev7)
  220.  
  221.         if tir == 0:
  222.          event11.append(ev1)
  223.          event12.append(ev2)
  224.          event13.append(ev3)
  225.          event14.append(ev4)
  226.          event15.append(ev5)
  227.          event16.append(ev6)
  228.          event17.append(ev7)
  229.  
  230.         if len(kr)>1 and tir != 0:
  231.                 event11[cur] += event1[tir]
  232.                 event12[cur] += event2[tir]
  233.                 event13[cur] += event3[tir]
  234.                 event14[cur] += event4[tir]
  235.                 event15[cur] += event5[tir]
  236.                 event16[cur] += event6[tir]
  237.                 event17[cur] += event7[tir]
  238.  
  239.      event1 =[]
  240.      event2 =[]
  241.      event3 =[]
  242.      event4 =[]
  243.      event5 =[]
  244.      event6 =[]
  245.      event7 =[]
  246.  
  247.  
  248. with open("real_dataset3.csv") as tes1:
  249.   # global max_square_10
  250.   reader = csv.reader(tes1, delimiter=',')
  251.   headers = reader.next()
  252.   reader.next()
  253.   reader.next()
  254.   encoding = []
  255.   enc = []
  256. vr = []
  257. pr = []
  258. dr =[]
  259. bezline = []
  260. sum_el = 0
  261.  
  262. val_massive =[]
  263. val_massive_permanent = []
def diff(left_r):
    """Scan pressure[] from index ``left_r`` while the flow rate stays flat
    (|delta| < 0.2 between consecutive samples), then emit one rounded
    (baseline - pressure) delta into ``encoding`` and the segment baseline
    into ``bezline`` for every sample of the flat segment.

    Side effects: sets global ``index`` to where the scan stopped (the
    driver loop below uses it to resume) and clears ``val_massive``.
    """
    global val_massive,sum_sred,index
    val_massive.append(pressure[left_r])
    # NOTE(review): this branch can only be taken if val_massive was left
    # non-empty by a previous call; inside it the window never grows, so the
    # loop just prints until the flow jumps.  Confirm this is intended.
    if(len(val_massive)>1):

        #val_massive.append(srw[xs+1])
        for i in range(2,len(pressure)):
            # Running baseline = ceil(mean of window); sq (= |enc_value|)
            # is computed but never used.
            sum_sred = math.ceil(sum(val_massive)/len(val_massive))
            enc_value = (sum_sred - val_massive[-1])
            sq = math.sqrt(enc_value**2)

            # (a-b) < 0.2 and (b-a) < 0.2  ==  |a-b| < 0.2 (flow is "flat").
            if ((flow_rate[left_r + i-1] - flow_rate[left_r + i - 2])<0.2 and (flow_rate[left_r + i-2] - flow_rate[left_r + i - 1])<0.2):

                    print "Flow_rate-1", flow_rate[left_r + i - 1] - flow_rate[left_r + i - 2]
            else:
                            # Flow jumped: drop the sample that broke the
                            # segment, then emit deltas against the mean.
                            del val_massive[-1]
                            # print "Lenny:",len(val_massive)
                            try:
                                sum_sred_1 = sum(val_massive) / len(val_massive)
                            except ZeroDivisionError:
                                # NOTE(review): if the window is empty,
                                # sum_sred_1 is unbound below -> NameError.
                                pass
                            for vts in range(0, len(val_massive)):
                                positive_value = sum_sred_1 - val_massive[vts]

                                encoding.append(round(positive_value, 2))
                                bezline.append(round(sum_sred_1, 2))
                            break

    else:
        # Normal path: grow the window with the next pressure sample while
        # the flow rate stays flat, then flush the segment on the first jump.
        val_massive.append(pressure[left_r+1])

        for i in xrange(2,len(pressure)):
            sum_sred = math.ceil(sum(val_massive) / len(val_massive))
            enc_value = sum_sred - val_massive[-1]

            sq = math.sqrt(enc_value**2)

            if ((flow_rate[left_r + i - 1] - flow_rate[left_r + i - 2]) < 0.2 and (flow_rate[left_r + i - 2] - flow_rate[left_r + i - 1]) < 0.2):

                    val_massive.append(pressure[left_r+i])

            else:
                            del val_massive[-1]
                            # print "Lenny:",len(val_massive)
                            try:
                                sum_sred_1 = sum(val_massive) / len(val_massive)
                            except ZeroDivisionError:
                                pass

                            for vts in range(0, len(val_massive)):
                                positive_value = sum_sred_1 - val_massive[vts]
                                encoding.append(round(positive_value, 2))
                                bezline.append(round(sum_sred_1, 2))
                            break
    # Resume point for the caller; ``i`` is the loop index where we broke.
    index = i +left_r

    val_massive = []
  321.  
# Drive diff() across the whole series: each call consumes one flat pressure
# segment and leaves the global ``index`` at the point where it stopped.
ty = 1
diff(ty-1)

# NOTE(review): termination relies on diff() advancing ``index`` past the
# break point on every call; if a segment ever yields zero progress this
# loop spins forever.  Confirm against the dataset.
while (index <29900):
     diff(index-1)
vrs = []
  328.  
# Sanity prints for the extracted series and the computed baseline.
print "Min flow:",min(flow_rate)
print "Max flow:",max(flow_rate)
print "Baseline",len(bezline)
print "Bezline min:",min(bezline)
print "Bezline max:" ,max(bezline)
  334.  
  335. headers = ("flow","event","event1","event2","event3","event4","event5","event6","event7","encoding","bezline","pressure")
  336. type = ("float","int","int","int","int","int","int","int","int","float","float","float")
  337. meta = (" "," "," "," "," ")
  338. with open('test3.csv', 'w') as csv_file:
  339.    csv_writer = csv.writer(csv_file)
  340.    csv_writer.writerow(headers)
  341.    csv_writer.writerow(type)
  342.    csv_writer.writerow(meta)
  343.    for pot in range(0, 29899):
  344.        csv_writer.writerow([flow_rate[pot],event[pot],event11[pot],event12[pot],event13[pot],event14[pot],event15[pot],event16[pot],event17[pot],encoding[pot],bezline[pot],pressure[pot]])
  345.  
  346. with open("test3.csv", "r") as fin:
  347.     reader = csv.reader(fin)
  348.     headers = reader.next()
  349.     reader.next()
  350.     reader.next()
  351.     for row in reader:
  352.  
  353.          sum_event.append(int(row[2]))
  354.          sum_event2.append(int(row[3]))
  355.          sum_event3.append(int(row[4]))
  356.          sum_event4.append(int(row[5]))
  357.          sum_event5.append(int(row[6]))
  358.          sum_event6.append(int(row[7]))
  359.          sum_event7.append(int(row[8]))
  360.          encoding_csv.append(float(row[9]))
  361.          bezline.append(float(row[10]))
  362.  
  363.          sum_flow.append(float(row[0]))
  364.  
  365.  
  366. with open(PARAMS_PATH, "r") as f:
  367.     modelParams = yaml.safe_load(f)["modelParams"]
  368.     # enParams = modelParams["sensorParams"]["encoders"]
  369.     spParams = modelParams["spParams"]
  370.     tmParams = modelParams["tmParams"]
  371.     clParams = modelParams["clParams"]
  372. eventEncoder = ScalarEncoder(name="event", w=7, n=14, minval=0, maxval=1,forced=True)
  373. eventEncoder1 = ScalarEncoder(name="event1", w=7, n=14, minval=0, maxval=1,forced=True)
  374. eventEncoder7 = ScalarEncoder(name="event7", w=7, n=14, minval=0, maxval=1,forced=True)
  375. eventEncoder2 = ScalarEncoder(name="event2", w=7, n=14, minval=0, maxval=1,forced=True)
  376. #eventEncoder2 = ScalarEncoder(name="event2", w=9, n=18, minval=0, maxval=1,forced=True)
  377. baselineEncoder = ScalarEncoder(name = "baseline",w = 21, n=1365,minval= 51,maxval=63,forced= True)
  378. flowEncoder = ScalarEncoder(name="flow", w=15, n = 900, minval=0, maxval=5,forced = True)
  379. encodingWidth = (eventEncoder.getWidth()+eventEncoder1.getWidth()+eventEncoder2.getWidth()+flowEncoder.getWidth()+baselineEncoder.getWidth())
  380.  
  381. encodingWidth1 =(eventEncoder1.getWidth()
  382.                  + flowEncoder.getWidth())
  383.  
# Spatial pooler over the concatenated encoder output (one input bit per
# encoder bit, one column per spParams["columnCount"]).
sp = SpatialPooler(
    inputDimensions=(encodingWidth,),
    columnDimensions=(spParams["columnCount"],),
    potentialPct=spParams["potentialPct"],
    potentialRadius=encodingWidth,
    globalInhibition=spParams["globalInhibition"],
    localAreaDensity=spParams["localAreaDensity"],
    numActiveColumnsPerInhArea=spParams["numActiveColumnsPerInhArea"],
    synPermInactiveDec=spParams["synPermInactiveDec"],
    synPermActiveInc=spParams["synPermActiveInc"],
    synPermConnected=spParams["synPermConnected"],
    boostStrength=spParams["boostStrength"],
    seed=spParams["seed"],
    wrapAround=True
)


# Temporal memory over the pooler's columns.  Note connectedPermanence is
# deliberately taken from the SP section (synPermConnected).
tm = TemporalMemory(
    columnDimensions=(tmParams["columnCount"],),
    cellsPerColumn=tmParams["cellsPerColumn"],
    activationThreshold=tmParams["activationThreshold"],
    initialPermanence=tmParams["initialPerm"],
    connectedPermanence=spParams["synPermConnected"],
    minThreshold=tmParams["minThreshold"],
    maxNewSynapseCount=tmParams["newSynapseCount"],
    permanenceIncrement=tmParams["permanenceInc"],
    permanenceDecrement=tmParams["permanenceDec"],
    predictedSegmentDecrement=tmParams["predictedSegmentDecrement"],
    maxSegmentsPerCell=tmParams["maxSegmentsPerCell"],
    maxSynapsesPerSegment=tmParams["maxSynapsesPerSegment"],
    seed=tmParams["seed"]
)

# Three single-step SDR classifiers, one per predicted device signal
# (devices 1, 3 and 2 respectively; see runLearning()).
classifier = SDRClassifier(
    steps  = [1],alpha=clParams["alpha"], verbosity= clParams["verbosity"]
)
classifier1 = SDRClassifier(
    steps  = [1],alpha=clParams["alpha"], verbosity= clParams["verbosity"]
)
classifier2 = SDRClassifier(
    steps  = [1],alpha=clParams["alpha"], verbosity= clParams["verbosity"]
)
  427.  
  428. with open("real_dataset3.csv", "r") as fin:
  429.     reader = csv.reader(fin)
  430.     headers = reader.next()
  431.     reader.next()
  432.     reader.next()
  433.     for row in reader:
  434.         pressure.append(float(row[1]))
  435.  
  436. print "Columcount",spParams["columnCount"]
  437. def runLearning(numRecords):
  438.  # global  activeCells,encoding
  439.   learning_time = time()
  440.   with open("test3.csv", "r") as fin:
  441.     reader = csv.reader(fin)
  442.     headers = reader.next()
  443.     reader.next()
  444.     reader.next()
  445.  
  446.     for count, record in enumerate(reader):
  447.       print "Count",count
  448.       if count >= numRecords: break
  449.  
  450.       # Convert data string into Python date object.
  451.       #dateString = datetime.datetime.strptime(record[0], "%m/%d/%y %H:%M")
  452.       # Convert data value string into float.
  453.       event_value = float(record[2]) # device 1
  454.       event_value_3 = float(record[4]) # device 3
  455.       event_value_2 = float(record[3]) #device 2
  456.       # event_value_7 = float(record[8]) # device 7
  457.       bezline_all = float(record[10])
  458.       pres_data    = float(record[11])
  459.       flow_value  = float(record[0])
  460.       # To encode, we need to provide zero-filled numpy arrays for the encoders
  461.       # to populate.
  462.       eventBits = numpy.zeros(eventEncoder.getWidth())
  463.       eventBits_2 = numpy.zeros(eventEncoder2.getWidth())
  464.       eventBits_3 = numpy.zeros(eventEncoder1.getWidth())
  465.  
  466.  
  467.       baseline_Bits = numpy.zeros(baselineEncoder.getWidth())
  468.       flowBits = numpy.zeros(flowEncoder.getWidth())
  469.  
  470.  
  471.       # Now we call the encoders to create bit representations for each value.
  472.       eventEncoder.encodeIntoArray(event_value, eventBits)
  473.       eventEncoder1.encodeIntoArray(event_value_3,eventBits_3)
  474.       eventEncoder2.encodeIntoArray(event_value_2,eventBits_2)
  475.  
  476.  
  477.       baselineEncoder.encodeIntoArray(bezline_all,baseline_Bits)
  478.       flowEncoder.encodeIntoArray(flow_value, flowBits)
  479.  
  480.  
  481.       # Concatenate all these encodings into one large encoding for Spatial
  482.       # Pooling.
  483.       encoding = numpy.concatenate(
  484.         [eventBits,eventBits_2,eventBits_3,flowBits,baseline_Bits,]
  485.       )
  486.  
  487.       # enc2 = numpy.concatenate([eventBits,eventBits_2,eventBits_3])
  488.       # enc2 = np.pad(enc2, (0, encodingWidth - len(enc2)), 'constant')
  489.       # Create an array to represent active columns, all initially zero. This
  490.       # will be populated by the compute method below. It must have the same
  491.       # dimensions as the Spatial Pooler.
  492.  
  493.       activeColumns = numpy.zeros(spParams["columnCount"])
  494.       # activeColumns1 = numpy.zeros(spParams["columnCount"])
  495.  
  496.  
  497.       # Execute Spatial Pooling algorithm over input space.
  498.  
  499.       sp.compute(encoding,True,activeColumns)
  500.  
  501.       # sp.compute(enc2,True,activeColumns)
  502.      # sp.compute(encoding1, True, activeColumns)
  503.  
  504.       activeColumnIndices = numpy.nonzero(activeColumns)[0]
  505.  
  506.       # Execute Temporal Memory algorithm over active mini-columns.
  507.       tm.compute(activeColumnIndices, learn=True)
  508.  
  509.       activeCells = tm.getActiveCells()
  510.  
  511.       # Get the bucket info for this input value for classification.
  512.     #   bucketIdx = eventEncoder.getBucketIndices(event_value)[0]
  513.       bucketIdx  =  eventEncoder.getBucketIndices(event_value)[0]
  514.       bucketIdx_2 = eventEncoder2.getBucketIndices(event_value_2)[0]
  515.       bucketIdx_3 = eventEncoder1.getBucketIndices(event_value_3)[0]
  516.       # bucketIdx_7 = eventEncoder7.getBucketIndices(event_value_7)[0]r.getWidth()+eventEncoder1.getWidth()+ eventEncoder2.getWidth()
  517.  
  518.  
  519.       print "BucketIdx",bucketIdx
  520.       print "BucketIdx_2",bucketIdx_2
  521.       print "BucketIdx_3",bucketIdx_3
  522.       # Run classifier to translate active cells back to scalar value.
  523.       classifierResult = classifier.compute(
  524.         recordNum=count,
  525.         patternNZ=activeCells,
  526.         classification={
  527.           "bucketIdx": bucketIdx,
  528.           "actValue": event_value
  529.         },
  530.         learn=True,
  531.         infer=False
  532.       )
  533.       classifierResult1 = classifier1.compute(
  534.         recordNum=count,
  535.         patternNZ=activeCells,
  536.         classification={
  537.           "bucketIdx": bucketIdx_3,
  538.           "actValue": event_value_3
  539.         },
  540.         learn=True,
  541.         infer=False
  542.       )
  543.  
  544.       classifierResult2 = classifier2.compute(
  545.         recordNum=count,
  546.         patternNZ=activeCells,
  547.         classification={
  548.           "bucketIdx": bucketIdx_2,
  549.           "actValue": event_value_2
  550.         },
  551.         learn=True,
  552.         infer=False
  553.       )
  554.       learning_time_end = time()
  555.       print "Time",(learning_time - learning_time_end)
  556.       if (count%100) == 0:
  557.         with open("out_sp.tmp", "w") as f1:
  558.             sp.writeToFile(f1)
  559.         with open("out_tm.tmp", "w") as f2:
  560.             tm.writeToFile(f2)
  561.         with open("out_classifier.tmp", "w") as f3:
  562.             classifier.writeToFile(f3)
  563.         with open("out_classifier1.tmp", "w") as f4:
  564.             classifier1.writeToFile(f4)
  565.         with open("out_classifier2.tmp", "w") as f5:
  566.             classifier2.writeToFile(f5)
  567.   # builder = SpatialPoolerProto.new_message()
  568.   # sp.write(builder)
  569.   # serializedMessage = builder.to_bytes_packed()
  570.   # builder1 = TemporalMemoryProto.new_message()
  571.   # tm.write(builder1)
  572.   # serializedMessage = builder1.to_bytes_packed()
  573. if __name__ == "__main__":
  574.   runLearning(20000)
Add Comment
Please, Sign In to add comment