Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import math
- import numpy as np
# Transfer function (tanh) and its derivative.
def sigmoid(x):
    """Activation function: the hyperbolic tangent of x."""
    activated = np.tanh(x)
    return activated
def dsigmoid(x):
    """Derivative of tanh, expressed in terms of its output.

    If x = tanh(z), then d/dz tanh(z) = 1 - tanh(z)^2 = 1 - x^2,
    which is why this takes the already-activated value.
    """
    return 1.0 - x * x
class Reseau_Neurones(object):
    """Simple multi-layer perceptron with tanh activations.

    Trained by backpropagation with a momentum term. The input layer
    carries one extra constant unit acting as the bias; hidden and
    output layers do not (as in the original design).
    """

    def __init__(self, *args):
        """Build the network; each positional arg is a layer size."""
        # self.layers holds the activation values after propagation.
        # Only the input layer (i == 0) gets the extra bias unit.
        n = len(args)
        self.layers = [np.ones(args[i] + (i == 0)) for i in range(n)]
        # Weight matrices W_i, initialized uniformly in [-0.2, 0.2).
        self.weights = []
        for i in range(n - 1):
            R = np.random.random((self.layers[i].size, self.layers[i + 1].size))
            self.weights.append((2 * R - 1) * 0.20)
        # Previous weight updates — used as the momentum term in retroPropag.
        # (Original comment asked "Qu'es-ce ?" — this is the momentum store.)
        self.m = [0 for _ in range(len(self.weights))]

    def propag(self, inputs):
        """Forward-propagate `inputs` through the network.

        Returns the output layer's activation array.
        """
        # Fill the input layer, preserving the trailing "1" (bias unit).
        self.layers[0][:-1] = inputs
        for i in range(1, len(self.layers)):
            # tanh of the matrix product X . W_(i-1)
            self.layers[i] = sigmoid(np.dot(self.layers[i - 1], self.weights[i - 1]))
        return self.layers[-1]

    def retroPropag(self, inputs, outputs, a=0.1, m=0.1):
        """One backpropagation step toward the target `outputs`.

        a -- learning rate; m -- momentum coefficient.
        Runs a forward pass first, then updates every weight matrix.
        """
        error = outputs - self.propag(inputs)
        # Delta of the output layer.
        de = error * dsigmoid(self.layers[-1])
        deltas = [de]
        # Backpropagate deltas through the hidden layers (output -> input).
        for i in range(len(self.layers) - 2, 0, -1):
            deh = np.dot(deltas[-1], self.weights[i].T) * dsigmoid(self.layers[i])
            deltas.append(deh)
        deltas.reverse()
        # Gradient step plus momentum for each weight matrix.
        # (Fixed: the original iterated `for i, j in enumerate(self.weights)`
        # with `j` unused.)
        for i in range(len(self.weights)):
            layer = np.atleast_2d(self.layers[i])
            delta = np.atleast_2d(deltas[i])
            dw = np.dot(layer.T, delta)
            self.weights[i] += a * dw + m * self.m[i]
            self.m[i] = dw
# Instantiate a 2-3-1 network, plus sample regression data
# (X, y are defined but not used by the training loop below).
RN = Reseau_Neurones(2, 3, 1)
X = np.array(([3, 5], [5, 1], [10, 2]), dtype=float)
y = np.array(([75], [82], [93]), dtype=float)
# XOR truth table: ((inputs), target).
pat = (((0, 0), 0),
       ((0, 1), 1),
       ((1, 0), 1),
       ((1, 1), 0))
# BUG FIX: the original did `n = RN(2, 3, 1)` (calling an *instance*,
# a TypeError) and used nonexistent methods `backPropagate`/`update`.
# Construct from the class and call the methods actually defined:
# retroPropag (training step) and propag (forward pass).
n = Reseau_Neurones(2, 3, 1)
# Train for 1000 epochs over the XOR patterns.
for i in range(1000):
    for p in pat:
        n.retroPropag(p[0], p[1])
# Show the learned outputs for each pattern (print() replaces the
# Python-2 print statement).
for p in pat:
    print(n.propag(p[0]))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement