import numpy as np
import math
import matplotlib.pyplot as plt
import random

dataset_size = 1000

# Two classes of points in polar coordinates (radius, angle):
# class 0 has radius ~N(3, 0.5), class 1 has radius ~N(6, 0.5).
data_0 = np.array([(max(0, np.random.normal(3, 0.5)), np.random.uniform(0, math.pi * 2)) for x in range(dataset_size)])
data_1 = np.array([(max(0, np.random.normal(6, 0.5)), np.random.uniform(0, math.pi * 2)) for x in range(dataset_size)])
train_y = np.concatenate([np.zeros(dataset_size), np.ones(dataset_size)])
train_x = np.concatenate([data_0, data_1], axis=0)
together = list(zip(train_x, train_y))
print('?', together[0])
#print(together)
random.shuffle(together)
# Convert each (radius, angle) pair to Cartesian (x, y) coordinates.
together = [(np.array([math.cos(x[0][1]) * x[0][0], math.sin(x[0][1]) * x[0][0]]), x[1]) for x in together]
tmp = np.array(together, dtype=object)  # dtype=object: the pairs are (array, scalar), not a rectangular array
print('!', tmp.shape)
print('?', together[0])
train_x = np.array([x[0] for x in together])
train_y = np.array([x[1] for x in together])
print(train_y.shape)
print(train_x.shape)
train_y = train_y.astype(dtype=np.int32)
print(train_y)
print(max(train_y))
print(train_y.dtype)


# Color each point by its class label for the (optional) scatter plot.
color_name = 'rb'
color_letter = [color_name[x] for x in train_y]
#plt.scatter(train_x[:, 0], train_x[:, 1], color=color_letter)
#plt.show()

# Network: 2 inputs -> two hidden layers of 20 units -> 1 output.
num_units = [2, 20, 20, 1]
num_layers = len(num_units) - 1

# Weights and biases for each dense layer, keyed as 'w_<layer>' / 'b_<layer>'.
var_dict = {}
for i in range(num_layers):
    var_dict['w_' + str(i)] = np.random.normal(0, 0.05, (num_units[i], num_units[i + 1]))
    var_dict['b_' + str(i)] = np.zeros((num_units[i + 1],))


#for key, value in var_dict.items():
#    print(key, value.shape)

def dense_forward(x, layer_id):
    # Affine layer: look up this layer's weights/bias and keep the input as
    # the memory needed for gradient computation later.
    w = var_dict['w_' + str(layer_id)]
    b = var_dict['b_' + str(layer_id)]
    out = np.matmul(x, w) + b
    return out, x

def relu_forward(x, layer_id):
    # Element-wise ReLU (np.maximum, not np.max, which would reduce the array);
    # the pre-activation input is the memory for the backward pass.
    out = np.maximum(x, 0)
    return out, x

def sigmoid_forward(x):
    # Logistic sigmoid; its own output is enough to reconstruct its gradient.
    out = np.exp(-x)
    out = 1.0 / (1 + out)
    return out, out

batch_size = 32


# Training loop skeleton: sample a batch, run the forward pass, and keep the
# per-layer memories for the (not yet implemented) backward pass.
for step in range(1000):
    idx = np.random.randint(0, train_x.shape[0], batch_size)
    batch_x = train_x[idx]
    batch_y = train_y[idx]
    backprop_dense = []
    backprop_relu = []
    backprop_sigmoid = None
    for layer in range(num_layers):
        batch_x, mem = dense_forward(batch_x, layer)
        backprop_dense.append(mem)
        if layer + 1 != num_layers:
            batch_x, mem = relu_forward(batch_x, layer)
            backprop_relu.append(mem)

    batch_x, mem = sigmoid_forward(batch_x)
    backprop_sigmoid = mem


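# --- Hedged sketch (not in the original paste): one possible way to consume the
# memories collected above. The helper names (sigmoid_backward, relu_backward,
# dense_backward), the mean-squared-error loss in the usage comment, and the
# learning_rate value are assumptions for illustration, not part of the script.

def sigmoid_backward(grad, saved_out):
    # d(sigmoid)/dx = sigmoid(x) * (1 - sigmoid(x)), using the saved output.
    return grad * saved_out * (1.0 - saved_out)

def relu_backward(grad, saved_in):
    # Gradient passes only where the pre-activation input was positive.
    return grad * (saved_in > 0)

def dense_backward(grad, saved_in, layer_id, learning_rate=0.01):
    # Gradients of np.matmul(x, w) + b, with an in-place SGD update.
    w = var_dict['w_' + str(layer_id)]
    grad_w = np.matmul(saved_in.T, grad)
    grad_b = np.sum(grad, axis=0)
    grad_in = np.matmul(grad, w.T)
    var_dict['w_' + str(layer_id)] = w - learning_rate * grad_w
    var_dict['b_' + str(layer_id)] = var_dict['b_' + str(layer_id)] - learning_rate * grad_b
    return grad_in

# Example use inside the training loop (sketch), for a loss like
# 0.5 * mean((prediction - label)^2), walking the layers in reverse order:
#
#   labels = batch_y.reshape(-1, 1)
#   grad = (batch_x - labels) / batch_size   # batch_x holds the predictions here
#   grad = sigmoid_backward(grad, backprop_sigmoid)
#   for layer in reversed(range(num_layers)):
#       if layer + 1 != num_layers:
#           grad = relu_backward(grad, backprop_relu[layer])
#       grad = dense_backward(grad, backprop_dense[layer], layer)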
print(data_0)