Advertisement
Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
- import matplotlib.pyplot as plt
- import numpy as py
- import numpy as np
- from numpy import random
class Neuron:
    """A single-weight neuron with a tanh activation, trained by gradient steps.

    The weight is a (1, 1) matrix so that inputs shaped (n, 1) produce
    outputs shaped (n, 1).
    """

    def __init__(self, weight_count):
        # Bug fix: the original misspelled __init__ as __int__, so Python never
        # ran it and self.weights was never created (AttributeError in step()).
        # `weight_count` is accepted for interface compatibility but the model
        # uses a single (1, 1) weight.
        random.seed(1)  # reproducible initial weight
        # Uniform initial weight in [-1, 1). The original dead assignment
        # `weight_count = 0` and the chained `weights = weight_count = ...`
        # are removed.
        self.weights = 2 * random.random((1, 1)) - 1

    def tanh_derivative(self, x):
        """Return d/dx tanh(x) = 1 - tanh(x)^2, evaluated elementwise."""
        return 1 - np.tanh(x) ** 2

    def step(self, x):
        """Forward pass: tanh of the weighted input.

        x is expected to be shaped (n, 1) so the dot product with the
        (1, 1) weight matrix is well defined.
        """
        dot_product = np.dot(x, self.weights)
        return np.tanh(dot_product)

    def train(self, iteration, train_inputs, train_outputs):
        """Run `iteration` full-batch gradient-style weight updates.

        Bug fixes vs. the original:
        - the loop referenced an undefined name `iterations` (NameError);
        - the forward pass used `train_outputs` instead of `train_inputs`.
        """
        for _ in range(iteration):
            output = self.step(train_inputs)
            error = train_outputs - output
            # Gradient of the squared error through tanh, accumulated over
            # the batch via the transposed-input dot product.
            adjustment = np.dot(train_inputs.T, error * self.tanh_derivative(output))
            self.weights += adjustment

    @staticmethod
    def function(x):
        """Target function the neuron is meant to learn: y = 2x.

        Declared a staticmethod because callers invoke it as
        `Neuron.function(value)` and it uses no instance state.
        """
        return 2 * x
# Sample the target line y = 2x on [0, 3) with step 0.01.
x = [i / 100 for i in range(300)]
y = [Neuron.function(i / 100) for i in range(300)]

# Noisy observations of the target, for visual comparison against the fit.
# (Comprehension replaces the original manual append loop.)
data = [Neuron.function(i / 100) + random.randint(1, 100) / 50 for i in range(300)]
plt.plot(data, "b.")
plt.show()

# Squash the samples by a further factor of 100 so they sit in tanh's
# near-linear regime, and shape both arrays as (300, 1) column vectors.
x = np.asarray(x).reshape(300, 1) / 100
y = np.asarray(y).reshape(300, 1) / 100

# Bug fix: the original created `neurObject` but then called methods on an
# undefined name `neuron`; one consistent name is used throughout.
neuron = Neuron(300)
neuron.train(10000, x, y)

# Bug fix: the original assigned `consant` but printed `constant` (NameError).
# The learned weight approximates the slope of y = 2x in the squashed space.
constant = neuron.weights[0][0]
print(constant)

# Undo the extra /100 scaling so the fitted line is in the original units.
test_data = [float(xi) * 100 * constant for xi in x]

plt.plot(data, "bo")
plt.plot(test_data, "r-")
plt.show()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement