# Untitled

a guest
Jan 11th, 2021
75
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
1. import numpy as np
2. import matplotlib.pyplot as plt
3. import math
4. from matplotlib import cm
5. from mpl_toolkits.mplot3d import Axes3D
6.
7.
def get_alphas(nr):
    """Return `nr` evenly spaced bump directions covering [0, 2*pi).

    The endpoint is excluded: with the default ``endpoint=True`` the first
    and last angles both point in the same direction (cos/sin of 0 equal
    those of 2*pi), so two bump pairs of the tower coincided and that
    direction was double-weighted while another was missing.
    """
    return np.linspace(0.0, 2 * np.pi, nr, endpoint=False)
10.
11.
def list_to_nparray(list_to_convert):
    """Wrap a Python list in a 1-D object ndarray, one element per entry.

    A plain ``np.array(list_to_convert)`` would try to broadcast/stack the
    elements into a multi-dimensional array; pre-allocating an object array
    and filling it keeps each element intact.
    # https://stackoverflow.com/questions/62994636/numpy-stop-numpy-array-from-trying-to-reconcile-elements-create-ndarry-from
    """
    wrapped = np.empty(len(list_to_convert), dtype=object)
    for position, element in enumerate(list_to_convert):
        wrapped[position] = element
    return wrapped
17.
18.
def clipped_sig(z):
    """Numerically safe logistic sigmoid.

    The argument is clamped to [-700, 700] before exponentiation so that
    ``np.exp`` never overflows a float64 (exp(709) is roughly the limit).
    """
    safe = np.clip(z, -700, 700)
    return 1.0 / (1.0 + np.exp(-safe))
21.
22.
def generate_network_weights_and_biases_for_single_tower(number_of_neurons=20, width=0.1, x0=0.5, y0=0.5):
    """Build weights/biases for a 2-input, one-hidden-layer network whose
    output approximates a circular "tower" of radius `width` at (x0, y0).

    We need two hidden neurons per bump: the first neuron creates a step,
    then the second creates a step slightly further out which is subtracted
    from the first, yielding a thin bump. Bumps are rotated around the full
    circle and summed. Since there are 2 input neurons, each hidden neuron
    has two incoming weights.

    Parameters
    ----------
    number_of_neurons : int
        Hidden-layer size; rounded up to the next even number because each
        bump consumes two neurons.
    width : float
        Tower radius; also controls step steepness (smaller width ->
        steeper steps).
    x0, y0 : float
        Centre of the tower in the input plane.

    Returns
    -------
    (weights, biases)
        Two 1-D object arrays: ``weights[0]`` is (n, 2), ``weights[1]`` is
        (1, n); ``biases[0]`` is (n, 1) and ``biases[1]`` is the (1, 1)
        zero output bias.
    """
    if number_of_neurons % 2:
        # Two neurons per bump, so the count must be even.
        number_of_neurons += 1

    weights_layer_0 = np.zeros((number_of_neurons, 2))
    weights_layer_1 = np.zeros((1, number_of_neurons))
    biases = np.zeros((number_of_neurons, 1))

    # Constant multiplier for the steepness of the steps; divide by the
    # width because the smaller the width, the steeper the neurons must
    # be.  Loop-invariant, so hoisted out of the loop.
    c = 10000 / width

    for index, alpha in enumerate(get_alphas(number_of_neurons // 2)):
        # The incoming weights depend only on the bump direction alpha
        # (wx1 == wx2 and wy1 == wy2 for the pair).
        wx = math.cos(alpha) * c
        wy = math.sin(alpha) * c

        # First step passes through the circle around (x0, y0)...
        bias = (-math.cos(alpha) * x0 - math.sin(alpha) * y0) * c
        # ...and the second step is shifted outward by `width`.
        bias2 = bias - width * c

        first, second = index * 2, index * 2 + 1

        weights_layer_0[first, 0] = wx
        weights_layer_0[first, 1] = wy
        biases[first, 0] = bias
        weights_layer_1[0, first] = 4.0 / number_of_neurons

        weights_layer_0[second, 0] = wx
        weights_layer_0[second, 1] = wy
        biases[second, 0] = bias2
        weights_layer_1[0, second] = -4.0 / number_of_neurons

    # The output-layer bias is zero; the original spelled this as
    # `float(number_of_neurons) * -0.0`, which is just (negative) zero.
    output_bias = np.zeros((1, 1))
    return (list_to_nparray([weights_layer_0, weights_layer_1]),
            list_to_nparray([biases, output_bias]))
64.
65.
def compute_z_network(xs, ys, tower):
    """Evaluate the tower network over the grid ``xs`` x ``ys``.

    Parameters
    ----------
    xs, ys : 1-D sequences of floats
        Grid coordinates; ``z[i][j]`` corresponds to the point
        (xs[i], ys[j]).
    tower : dict
        Keyword arguments forwarded to
        ``generate_network_weights_and_biases_for_single_tower``.

    Returns
    -------
    ndarray of shape (len(xs), len(ys)) with the network output.
    """
    weights, biases = generate_network_weights_and_biases_for_single_tower(**tower)

    ys_arr = np.asarray(ys, dtype=float)
    z = np.zeros((len(xs), len(ys_arr)))

    # Vectorise the inner (y) loop: one (n, len(ys)) matrix product per x
    # instead of a Python-level matmul per grid point — same results,
    # orders of magnitude less interpreter overhead on large grids.
    for index_x, x in enumerate(xs):
        # Column k of `inputs` is the point (x, ys[k]).
        inputs = np.vstack((np.full(ys_arr.shape, x), ys_arr))
        activations_layer_1 = clipped_sig(np.dot(weights[0], inputs) + biases[0])
        # weights[1] is (1, n), so the product is one output per column.
        z[index_x, :] = np.dot(weights[1], activations_layer_1).ravel()
    return z
75.
76.
def main():
    """Render the single-tower network output as a 3-D surface plot."""
    start = 0.4999995
    end = 0.5000005
    xy_resolution = 300  # the performance required grows with the square. Consider tuning start and end instead.
    xs = np.linspace(start, end, num=xy_resolution)
    ys = np.linspace(start, end, num=xy_resolution)

    tower = {"width": 0.0000001, "x0": 0.5, "y0": 0.5, "number_of_neurons": 1000}

    z_surface = compute_z_network(xs, ys, tower)

    # NOTE(review): z_surface is indexed z[ix][iy] while meshgrid returns
    # (len(ys), len(xs))-shaped grids; this only lines up because the grid
    # is square and the tower symmetric — confirm before making the grid
    # non-square.
    x_grid, y_grid = np.meshgrid(xs, ys)
    fig = plt.figure()
    fig.suptitle("Step Width: %s (radius)\nNumber of Neurons: %s" % (tower["width"], tower["number_of_neurons"]))
    # fig.gca(projection=...) was deprecated in Matplotlib 3.4 and removed
    # in 3.7; create the 3-D axes explicitly instead.
    ax = fig.add_subplot(projection="3d")
    ax.plot_surface(x_grid, y_grid, z_surface, cmap=cm.coolwarm)
    plt.show()
94.
95.
# Run the demo only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
98.
RAW Paste Data