Neural network

a guest | Dec 22nd, 2018 | Python
import numpy as np
from sklearn.utils import shuffle

x = 64   # input size: 64 pixels (a flattened 8x8 image)
Y = 10   # hidden layer size
z = 1    # output layer size
i = 0
q = 0
X = np.array([[0]*x])   # placeholder row, removed after the loading loop
y = np.array([0])       # placeholder label, removed after the loading loop
nb = 20  # number of samples to read from test.txt

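# Preprocessing: each pass of the loop below rewrites test.txt in place,
# turning RGBA/RGB pixel lists into single 0/1 digits and stripping brackets
# and commas, then reads the first 65 numbers (label + 64 pixels) as one
# training sample and removes them from the file.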
while q != nb:
    def lirecompter():
        # Replace every occurrence of the global texttofind with the global
        # texttoreplace in test.txt, rewriting the file in place.
        inputfile = "test.txt"
        with open(inputfile, 'r') as infile:
            filedata = infile.read()
        filedata = filedata.replace(texttofind, texttoreplace)
        destinationpath = "test.txt"
        with open(destinationpath, 'w') as outfile:
            outfile.write(filedata)

    # change every [0, 0, 0, 255] (RGBA black pixel) into 0
    texttofind = "[0, 0, 0, 255]"
    texttoreplace = "0"
    lirecompter()

    # change every [255, 255, 255, 255] (RGBA white pixel) into 1
    texttofind = "[255, 255, 255, 255]"
    texttoreplace = "1"
    lirecompter()

    # same for RGB triplets
    texttofind = "[255, 255, 255]"
    texttoreplace = "1"
    lirecompter()

    texttofind = "[0, 0, 0]"
    texttoreplace = "0"
    lirecompter()

    # strip the remaining brackets and commas
    texttofind = "[["
    texttoreplace = " "
    lirecompter()

    texttofind = "]]"
    texttoreplace = " "
    lirecompter()

    texttofind = "["
    texttoreplace = ""
    lirecompter()

    texttofind = "]"
    texttoreplace = ""
    lirecompter()

    texttofind = ","
    texttoreplace = ""
    lirecompter()

    # Read the cleaned file: the first value is the label, the next 64 are pixels.
    with open("test.txt", "r") as datafile:
        l = datafile.read().split(" ")
    a = int(l[0])
    b = [a]
    Sortie = [0]*10   # one-hot version of the label (built but not used below)
    Sortie[a] = 1

    # Take the first 65 values (label + 64 pixels) as this sample, then delete
    # them from test.txt so the next iteration reads the following sample.
    listes = [0]*65
    for loop in range(65):
        listes[loop] = l[loop]
    texttofind = ' '.join(map(str, listes)) + ' '
    texttoreplace = ""
    del listes[0]        # drop the label, keeping the 64 pixel values
    lirecompter()

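    # Append the sample: the label goes into y, the 64 pixel values into X.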
    y = np.insert(y, q, b)

    # X = one row of 64 binary pixel values per sample, y = the digit label
    E = listes
    X = np.insert(X, i, E)

    i = i + 1
    q = q + 1
    X = X.reshape(-1, 64)
    y = y.reshape(-1, 1)

# drop the placeholder row/label added at initialisation
y = np.delete(y, nb, 0)
X = np.delete(X, nb, 0)
# scale the labels (digits 0-9) into [0, 1]
y = y/10

X, y = shuffle(X, y)

# input used for the final prediction: a flattened 8x8 binary image
xPredicted = np.array([0,1,1,1,1,1,1,0,0,1,1,1,1,1,1,0,0,1,1,1,1,1,1,0,0,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,0])

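# Fully connected network: 64 inputs -> 10 hidden units -> 1 output, sigmoid
# activation on both layers, trained by full-batch gradient descent on the
# squared error with an implicit learning rate of 1.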
class Neural_Network(object):
  def __init__(self):
    # parameters
    self.inputSize = x    # 64
    self.outputSize = z   # 1
    self.hiddenSize = Y   # 10

    # weights
    self.W1 = np.random.randn(self.inputSize, self.hiddenSize) # (64x10) weight matrix from input to hidden layer
    self.W2 = np.random.randn(self.hiddenSize, self.outputSize) # (10x1) weight matrix from hidden to output layer

  def forward(self, X):
    # forward propagation through the network
    self.z = np.dot(X, self.W1) # dot product of X (input) and the first set of (64x10) weights
    self.z2 = self.sigmoid(self.z) # activation function
    self.z3 = np.dot(self.z2, self.W2) # dot product of the hidden layer (z2) and the second set of (10x1) weights
    o = self.sigmoid(self.z3) # final activation function
    return o

  def sigmoid(self, s):
    # activation function
    return 1/(1+np.exp(-s))

  def sigmoidPrime(self, s):
    # derivative of the sigmoid, expressed in terms of the sigmoid output s
    return s * (1 - s)

  def backward(self, X, y, o):
    # backward propagation through the network
    self.o_error = y - o # error in output
    self.o_delta = self.o_error*self.sigmoidPrime(o) # apply derivative of sigmoid to the output error
    self.z2_error = self.o_delta.dot(self.W2.T) # z2 error: how much the hidden layer contributed to the output error
    self.z2_delta = self.z2_error*self.sigmoidPrime(self.z2) # apply derivative of sigmoid to the z2 error
    self.W1 += X.T.dot(self.z2_delta) # adjust first set (input --> hidden) of weights
    self.W2 += self.z2.T.dot(self.o_delta) # adjust second set (hidden --> output) of weights

  def train(self, X, y):
    o = self.forward(X)
    self.backward(X, y, o)

  def saveWeights(self):
    np.savetxt("w1.txt", self.W1, fmt="%s")
    np.savetxt("w2.txt", self.W2, fmt="%s")

  def predict(self):
    print("Predicted data based on trained weights:")
    print("Input: \n" + str(xPredicted))
    print("Output: \n" + str(self.forward(xPredicted)))

NN = Neural_Network()
for i in range(1000): # train the network for 1,000 full-batch iterations
  print("# " + str(i) + "\n")
  print("Input: \n" + str(X))
  print("Actual Output: \n" + str(y))
  print("Predicted Output: \n" + str(NN.forward(X)))
  print("Loss: \n" + str(np.mean(np.square(y - NN.forward(X))))) # mean squared error
  print("\n")
  NN.train(X, y)

NN.saveWeights()
NN.predict()
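
The weight files written by saveWeights() can be read back with np.loadtxt for later use. A minimal sketch of reloading them and running a single forward pass; the sigmoid helper and the all-zero sample below are illustrative assumptions, not part of the paste:

import numpy as np

def sigmoid(s):
    return 1/(1+np.exp(-s))

W1 = np.loadtxt("w1.txt")                  # (64x10) weights written by saveWeights()
W2 = np.loadtxt("w2.txt").reshape(-1, 1)   # (10x1) weights, reshaped back to a column

sample = np.zeros(64)                      # placeholder: a flattened 8x8 binary image
hidden = sigmoid(sample.dot(W1))           # input -> hidden
output = sigmoid(hidden.dot(W2))           # hidden -> output
print(output)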