# normalizacija (normalization)
from math import exp
import random


def initialize_network(n_inputs, n_hidden, n_outputs):
    network = list()
    hidden_layer = [{'weights': [random.random() for i in range(n_inputs + 1)]} for i in range(n_hidden)]
    network.append(hidden_layer)
    output_layer = [{'weights': [random.random() for i in range(n_hidden + 1)]} for i in range(n_outputs)]
    network.append(output_layer)
    return network
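# Example: initialize_network(2, 1, 2) yields one hidden layer containing a
# single neuron with 3 weights (2 inputs + bias) and an output layer of two
# neurons with 2 weights each (1 hidden output + bias).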


def activate(weights, inputs):
    activation = weights[-1]
    for i in range(len(weights) - 1):
        activation += weights[i] * inputs[i]
    return activation
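# The bias is stored as the last weight, so e.g. activate([0.2, 0.4, 0.5], [1.0, 2.0])
# returns 0.5 + 0.2*1.0 + 0.4*2.0 = 1.5.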


def transfer(activation):
    return 1.0 / (1.0 + exp(-activation))
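# Sigmoid transfer: sigma(x) = 1 / (1 + e^-x), squashing any activation into
# the interval (0, 1); transfer(0.0) == 0.5.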


def forward_propagate(network, row):
    inputs = row
    for layer in network:
        new_inputs = []
        for neuron in layer:
            activation = activate(neuron['weights'], inputs)
            neuron['output'] = transfer(activation)
            new_inputs.append(neuron['output'])
        inputs = new_inputs
    return inputs
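# A minimal usage sketch (hypothetical 2-feature row; the actual numbers depend
# on the random initial weights):
#   net = initialize_network(2, 1, 2)
#   outputs = forward_propagate(net, [0.5, 0.1])  # -> two values in (0, 1)
# Passing a full data row (features + label) is harmless: activate() only reads
# the first len(weights) - 1 inputs, so the trailing class label is ignored.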


def transfer_derivative(output):
    return output * (1.0 - output)
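# For the sigmoid, sigma'(x) = sigma(x) * (1 - sigma(x)), so the derivative is
# computed directly from the stored neuron output; transfer_derivative(0.5) == 0.25.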


def backward_propagate_error(network, expected):
    for i in reversed(range(len(network))):
        layer = network[i]
        errors = list()
        if i != len(network) - 1:
            for j in range(len(layer)):
                error = 0.0
                for neuron in network[i + 1]:
                    error += (neuron['weights'][j] * neuron['delta'])
                errors.append(error)
        else:
            for j in range(len(layer)):
                neuron = layer[j]
                errors.append(expected[j] - neuron['output'])
        for j in range(len(layer)):
            neuron = layer[j]
            neuron['delta'] = errors[j] * transfer_derivative(neuron['output'])
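# The deltas follow the standard gradient rules for a sigmoid network:
#   output layer:  delta_j = (expected_j - output_j) * output_j * (1 - output_j)
#   hidden layer:  delta_j = (sum_k w_kj * delta_k) * output_j * (1 - output_j)
# where w_kj is the weight from hidden neuron j into downstream neuron k.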


def update_weights(network, row, l_rate):
    for i in range(len(network)):
        inputs = row[:-1]
        if i != 0:
            inputs = [neuron['output'] for neuron in network[i - 1]]
        for neuron in network[i]:
            for j in range(len(inputs)):
                neuron['weights'][j] += l_rate * neuron['delta'] * inputs[j]
            neuron['weights'][-1] += l_rate * neuron['delta']
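# Each weight moves by l_rate * delta * input; the bias (last weight) is
# updated with an implicit input of 1.0.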


def train_network(network, train, l_rate, n_epoch, n_outputs):
    for epoch in range(n_epoch):
        sum_error = 0
        for row in train:
            outputs = forward_propagate(network, row)
            expected = [0 for i in range(n_outputs)]
            expected[row[-1]] = 1
            sum_error += sum([(expected[i] - outputs[i]) ** 2 for i in range(len(expected))])
            backward_propagate_error(network, expected)
            update_weights(network, row, l_rate)
        # print('>epoch=%d, lrate=%.3f, error=%.3f' % (epoch, l_rate, sum_error))
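# The class label in row[-1] is one-hot encoded before computing the error,
# e.g. label 1 with n_outputs=2 gives expected = [0, 1].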


def predict(network, row):
    outputs = forward_propagate(network, row)
    return outputs.index(max(outputs))
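# The predicted class is the index of the largest output activation (argmax).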


random.seed(1)
training_data = [
    [3.6216, 8.6661, -2.8073, -0.44699, 0],
    [4.5459, 8.1674, -2.4586, -1.4621, 0],
    [3.866, -2.6383, 1.9242, 0.10645, 0],
    [3.4566, 9.5228, -4.0112, -3.5944, 0],
    [0.32924, -4.4552, 4.5718, -0.9888, 0],
    [4.3684, 9.6718, -3.9606, -3.1625, 0],
    [3.5912, 3.0129, 0.72888, 0.56421, 0],
    [2.0922, -6.81, 8.4636, -0.60216, 0],
    [3.2032, 5.7588, -0.75345, -0.61251, 0],
    [1.5356, 9.1772, -2.2718, -0.73535, 0],
    [1.2247, 8.7779, -2.2135, -0.80647, 0],
    [3.9899, -2.7066, 2.3946, 0.86291, 0],
    [1.8993, 7.6625, 0.15394, -3.1108, 0],
    [-1.5768, 10.843, 2.5462, -2.9362, 0],
    [3.404, 8.7261, -2.9915, -0.57242, 0],
    [4.6765, -3.3895, 3.4896, 1.4771, 0],
    [2.6719, 3.0646, 0.37158, 0.58619, 0],
    [0.80355, 2.8473, 4.3439, 0.6017, 0],
    [1.4479, -4.8794, 8.3428, -2.1086, 0],
    [5.2423, 11.0272, -4.353, -4.1013, 0],
    [5.7867, 7.8902, -2.6196, -0.48708, 0],
    [0.3292, -4.4552, 4.5718, -0.9888, 0],
    [3.9362, 10.1622, -3.8235, -4.0172, 0],
    [0.93584, 8.8855, -1.6831, -1.6599, 0],
    [4.4338, 9.887, -4.6795, -3.7483, 0],
    [0.7057, -5.4981, 8.3368, -2.8715, 0],
    [1.1432, -3.7413, 5.5777, -0.63578, 0],
    [-0.38214, 8.3909, 2.1624, -3.7405, 0],
    [6.5633, 9.8187, -4.4113, -3.2258, 0],
    [4.8906, -3.3584, 3.4202, 1.0905, 0],
    [-0.24811, -0.17797, 4.9068, 0.15429, 0],
    [1.4884, 3.6274, 3.308, 0.48921, 0],
    [4.2969, 7.617, -2.3874, -0.96164, 0],
    [-0.96511, 9.4111, 1.7305, -4.8629, 0],
    [-1.6162, 0.80908, 8.1628, 0.60817, 0],
    [2.4391, 6.4417, -0.80743, -0.69139, 0],
    [2.6881, 6.0195, -0.46641, -0.69268, 0],
    [3.6289, 0.81322, 1.6277, 0.77627, 0],
    [4.5679, 3.1929, -2.1055, 0.29653, 0],
    [3.4805, 9.7008, -3.7541, -3.4379, 0],
    [4.1711, 8.722, -3.0224, -0.59699, 0],
    [-0.2062, 9.2207, -3.7044, -6.8103, 0],
    [-0.0068919, 9.2931, -0.41243, -1.9638, 0],
    [0.96441, 5.8395, 2.3235, 0.066365, 0],
    [2.8561, 6.9176, -0.79372, 0.48403, 0],
    [-0.7869, 9.5663, -3.7867, -7.5034, 0],
    [2.0843, 6.6258, 0.48382, -2.2134, 0],
    [-0.7869, 9.5663, -3.7867, -7.5034, 0],
    [3.9102, 6.065, -2.4534, -0.68234, 0],
    [1.6349, 3.286, 2.8753, 0.087054, 0],
    [4.3239, -4.8835, 3.4356, -0.5776, 0],
    [5.262, 3.9834, -1.5572, 1.0103, 0],
    [3.1452, 5.825, -0.51439, -1.4944, 0],
    [2.549, 6.1499, -1.1605, -1.2371, 0],
    [4.9264, 5.496, -2.4774, -0.50648, 0],
    [4.8265, 0.80287, 1.6371, 1.1875, 0],
    [2.5635, 6.7769, -0.61979, 0.38576, 0],
    [5.807, 5.0097, -2.2384, 0.43878, 0],
    [3.1377, -4.1096, 4.5701, 0.98963, 0],
    [-0.78289, 11.3603, -0.37644, -7.0495, 0],
    [-1.3971, 3.3191, -1.3927, -1.9948, 1],
    [0.39012, -0.14279, -0.031994, 0.35084, 1],
    [-1.6677, -7.1535, 7.8929, 0.96765, 1],
    [-3.8483, -12.8047, 15.6824, -1.281, 1],
    [-3.5681, -8.213, 10.083, 0.96765, 1],
    [-2.2804, -0.30626, 1.3347, 1.3763, 1],
    [-1.7582, 2.7397, -2.5323, -2.234, 1],
    [-0.89409, 3.1991, -1.8219, -2.9452, 1],
    [0.3434, 0.12415, -0.28733, 0.14654, 1],
    [-0.9854, -6.661, 5.8245, 0.5461, 1],
    [-2.4115, -9.1359, 9.3444, -0.65259, 1],
    [-1.5252, -6.2534, 5.3524, 0.59912, 1],
    [-0.61442, -0.091058, -0.31818, 0.50214, 1],
    [-0.36506, 2.8928, -3.6461, -3.0603, 1],
    [-5.9034, 6.5679, 0.67661, -6.6797, 1],
    [-1.8215, 2.7521, -0.72261, -2.353, 1],
    [-0.77461, -1.8768, 2.4023, 1.1319, 1],
    [-1.8187, -9.0366, 9.0162, -0.12243, 1],
    [-3.5801, -12.9309, 13.1779, -2.5677, 1],
    [-1.8219, -6.8824, 5.4681, 0.057313, 1],
    [-0.3481, -0.38696, -0.47841, 0.62627, 1],
    [0.47368, 3.3605, -4.5064, -4.0431, 1],
    [-3.4083, 4.8587, -0.76888, -4.8668, 1],
    [-1.6662, -0.30005, 1.4238, 0.024986, 1],
    [-2.0962, -7.1059, 6.6188, -0.33708, 1],
    [-2.6685, -10.4519, 9.1139, -1.7323, 1],
    [-0.47465, -4.3496, 1.9901, 0.7517, 1],
    [1.0552, 1.1857, -2.6411, 0.11033, 1],
    [1.1644, 3.8095, -4.9408, -4.0909, 1],
    [-4.4779, 7.3708, -0.31218, -6.7754, 1],
    [-2.7338, 0.45523, 2.4391, 0.21766, 1],
    [-2.286, -5.4484, 5.8039, 0.88231, 1],
    [-1.6244, -6.3444, 4.6575, 0.16981, 1],
    [0.50813, 0.47799, -1.9804, 0.57714, 1],
    [1.6408, 4.2503, -4.9023, -2.6621, 1],
    [0.81583, 4.84, -5.2613, -6.0823, 1],
    [-5.4901, 9.1048, -0.38758, -5.9763, 1],
    [-3.2238, 2.7935, 0.32274, -0.86078, 1],
    [-2.0631, -1.5147, 1.219, 0.44524, 1],
    [-0.91318, -2.0113, -0.19565, 0.066365, 1],
    [0.6005, 1.9327, -3.2888, -0.32415, 1],
    [0.91315, 3.3377, -4.0557, -1.6741, 1],
    [-0.28015, 3.0729, -3.3857, -2.9155, 1],
    [-3.6085, 3.3253, -0.51954, -3.5737, 1],
    [-6.2003, 8.6806, 0.0091344, -3.703, 1],
    [-4.2932, 3.3419, 0.77258, -0.99785, 1],
    [-3.0265, -0.062088, 0.68604, -0.055186, 1],
    [-1.7015, -0.010356, -0.99337, -0.53104, 1],
    [-0.64326, 2.4748, -2.9452, -1.0276, 1],
    [-0.86339, 1.9348, -2.3729, -1.0897, 1],
    [-2.0659, 1.0512, -0.46298, -1.0974, 1],
    [-2.1333, 1.5685, -0.084261, -1.7453, 1],
    [-1.2568, -1.4733, 2.8718, 0.44653, 1],
    [-3.1128, -6.841, 10.7402, -1.0172, 1],
    [-4.8554, -5.9037, 10.9818, -0.82199, 1],
    [-2.588, 3.8654, -0.3336, -1.2797, 1],
    [0.24394, 1.4733, -1.4192, -0.58535, 1],
    [-1.5322, -5.0966, 6.6779, 0.17498, 1],
    [-4.0025, -13.4979, 17.6772, -3.3202, 1],
    [-4.0173, -8.3123, 12.4547, -1.4375, 1]
]

testing_data = [
    [2.888, 0.44696, 4.5907, -0.24398, 0],
    [0.49665, 5.527, 1.7785, -0.47156, 0],
    [4.2586, 11.2962, -4.0943, -4.3457, 0],
    [1.7939, -1.1174, 1.5454, -0.26079, 0],
    [5.4021, 3.1039, -1.1536, 1.5651, 0],
    [2.5367, 2.599, 2.0938, 0.20085, 0],
    [4.6054, -4.0765, 2.7587, 0.31981, 0],
    [-1.979, 3.2301, -1.3575, -2.5819, 1],
    [-0.4294, -0.14693, 0.044265, -0.15605, 1],
    [-2.234, -7.0314, 7.4936, 0.61334, 1],
    [-4.211, -12.4736, 14.9704, -1.3884, 1],
    [-3.8073, -8.0971, 10.1772, 0.65084, 1],
    [-2.5912, -0.10554, 1.2798, 1.0414, 1],
    [-2.2482, 3.0915, -2.3969, -2.6711, 1]
]


# class labels: 0 = genuine, 1 = counterfeit

# min-max normalization function
def minMaxNorm(training_data, testing_data):
    # your code here
    new_data = [[] for i in range(len(training_data))]  # a list of lists, so the new elements can be appended column by column
    new_testing_data = [[] for i in range(len(testing_data))]  # same for the test set

    for kolona in range(0, len(training_data[0]) - 1):
        col_min = float('inf')
        col_max = float('-inf')
        for row in training_data:
            if row[kolona] > col_max:
                col_max = row[kolona]
            if row[kolona] < col_min:
                col_min = row[kolona]
        index = 0
        for row in training_data:
            nov = (row[kolona] - col_min) / (col_max - col_min)
            new_data[index].append(nov)  # values are appended one after another to the same row
            index += 1
    # append the answers, i.e. the correct class labels, at the end
    index = -1
    for row in new_data:
        index += 1
        row.append(training_data[index][len(training_data[index]) - 1])

    for kolona in range(0, len(testing_data[0]) - 1):
        col_min = float('inf')
        col_max = float('-inf')
        for row in testing_data:
            if row[kolona] > col_max:
                col_max = row[kolona]
            if row[kolona] < col_min:
                col_min = row[kolona]
        index = 0
        for row in testing_data:
            nov = (row[kolona] - col_min) / (col_max - col_min)
            new_testing_data[index].append(nov)  # values are appended one after another to the same row
            index += 1
    index = -1
    for row in new_testing_data:
        index += 1
        row.append(testing_data[index][len(testing_data[index]) - 1])

    return new_data, new_testing_data
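

# A more concise equivalent of the column-wise scaling above (a sketch, not part
# of the original assignment; the helper name min_max_norm_compact is hypothetical).
# Like the original, it scales each set with its *own* per-column min/max (rather
# than the training statistics) and assumes every column has max > min.
def min_max_norm_compact(dataset):
    cols = list(zip(*dataset))[:-1]           # feature columns, label column dropped
    stats = [(min(c), max(c)) for c in cols]  # (min, max) per feature column
    return [
        [(v - lo) / (hi - lo) for v, (lo, hi) in zip(row[:-1], stats)] + [row[-1]]
        for row in dataset
    ]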


if __name__ == "__main__":
    # your code here
    neural_network1 = initialize_network(4, 3, 2)
    neural_network2 = initialize_network(4, 3, 2)
    normalized_training_data, normalized_testing_data = minMaxNorm(training_data, testing_data)
    for row in normalized_testing_data:
        print(row)
    train_network(neural_network1, training_data, 0.3, 50, 2)
    train_network(neural_network2, normalized_training_data, 0.3, 50, 2)

    normal = 0
    for row in testing_data:
        predvideno = predict(neural_network1, row)
        if predvideno == row[-1]:
            normal += 1
    print("First network:")
    print(normal)

    normalized = 0
    for row in normalized_testing_data:
        predvideno = predict(neural_network2, row)
        if predvideno == row[-1]:
            normalized += 1
    print("Second network:")
    print(normalized)
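
# Both counts are correct predictions out of len(testing_data) == 14 rows; the
# comparison indicates whether min-max scaling helps the sigmoid network here.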