  1. """
  2. NeuralNet.py
  3.  
  4. CS460/660 Spring 2017
  5.  
  6. Programming Assignment 1
  7.  
  8. Team Members: Adonica Camp, Anthony Inzero, Haley Pereira
  9. """

import numpy as np
import matplotlib.pyplot as plt


# 8 by 8 digit images give 64 inputs (and 10 output classes);
# the linear & nonlinear data sets have 2 inputs and 2 classes

class NeuralNet:
    # This class implements a neural net with one hidden layer.

    def __init__(self, input_dim, output_dim):
        """
        Initializes the parameters of the neural network to random values.

        args:
            input_dim: Number of dimensions of the input data
            output_dim: Number of classes
        """
        self.hiddenlayer = 2  # The number of nodes in the hidden layer

        # create a matrix of random weights between the input and hidden layers
        self.w0 = np.random.randn(input_dim, self.hiddenlayer)

        # create a matrix of random weights between the hidden and output layers
        self.w1 = np.random.randn(self.hiddenlayer, output_dim)

        # the bias would need dimensions that make it addable to the other matrices
        #self.bias = np.zeros((1, output_dim))  # random bias

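        # A possible bias setup (an addition, not part of the original assignment
        # code), kept commented out like the line above: one bias vector per layer,
        # added to the pre-activations in forward_prop, e.g.
        #
        #   self.b0 = np.zeros((1, self.hiddenlayer))   # hidden-layer bias
        #   self.b1 = np.zeros((1, output_dim))         # output-layer bias
        #   ...
        #   z = np.dot(X, self.w0) + self.b0            # inside forward_prop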
    # --------------------------------------------------------------------------

    def forward_prop(self, X, y):
        """
        Implements forward propagation and returns the softmax scores
        for every input sample.
        """
        z = np.dot(X, self.w0)        # pre-activations of the hidden layer
        sig = self.sigmoid(z)         # hidden-layer activations

        k = np.dot(sig, self.w1)      # pre-activations of the output layer
        foroutput = self.softmax(k)   # softmax (not sigmoid), so the outputs form a probability distribution over the classes

        return foroutput

    # -------------------------------------------------------------------------

    def sigmoid(self, X):
        """
        Returns the element-wise sigmoid of X.
        """
        s = 1 / (1 + np.exp(-X))
        return s

    # --------------------------------------------------------------------------

    def softmax(self, X):
        """
        Returns the softmax probability array, row by row.
        """
        exp_z = np.exp(X)
        softmax_scores = exp_z / np.sum(exp_z, axis=1, keepdims=True)
        return softmax_scores
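
    # A numerically stable variant (an addition, not part of the original
    # assignment code): subtracting the row-wise maximum before exponentiating
    # avoids overflow in np.exp for large scores without changing the
    # resulting probabilities.
    def softmax_stable(self, X):
        shifted = X - np.max(X, axis=1, keepdims=True)   # largest entry per row becomes 0
        exp_z = np.exp(shifted)
        return exp_z / np.sum(exp_z, axis=1, keepdims=True)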

    # -------------------------------------------------------------------------
    def predict(self, X, input_dim):
        # input arg X should be the softmax matrix from forward propagation;
        # each prediction is the index of the largest score in that row
        predictions = np.zeros(len(X))

        for i in range(len(X)):
            predictions[i] = np.argmax(X[i])

        return predictions
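
    # A vectorized alternative (an addition, not part of the original assignment
    # code): np.argmax with axis=1 picks the column index of the largest softmax
    # score in every row at once.
    def predict_vectorized(self, X):
        return np.argmax(np.asarray(X), axis=1)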

    # -------------------------------------------------------------------------

    def compute_cost(self, X, y):
        """
        Computes the average cross-entropy cost on the dataset.

        args:
            X: Data array
            y: Labels corresponding to input data

        returns:
            cost: average cost per data sample
        """
        softmax_scores = self.forward_prop(X, y)
        cost_mean = 0

        for i in range(len(X)):
            # one-hot encode the ground-truth label for this sample
            hot_y = np.array([0] * self.w1.shape[1])
            hot_y[int(y[i])] = 1

            # cross-entropy for this sample
            cost_for_sample = -np.sum(hot_y * np.log(softmax_scores[i]))
            cost_mean += cost_for_sample

        return cost_mean / len(X)
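
    # A vectorized form of the same computation (an addition, not part of the
    # original assignment code): np.eye builds all the one-hot rows at once and
    # the cross-entropy is averaged in a single expression.
    def compute_cost_vectorized(self, X, y):
        softmax_scores = self.forward_prop(X, y)
        hot_y = np.eye(self.w1.shape[1])[y.astype(int)]   # (n_samples, output_dim) one-hot labels
        return -np.mean(np.sum(hot_y * np.log(softmax_scores), axis=1))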

    # --------------------------------------------------------------------------

    def fit(self, X, y):
        # Backward propagation / gradient descent

        # Set learning rate
        learning_rate = 0.05

        cost = self.compute_cost(X, y)

        while cost > 0.10:
            first = self.forward_prop(X, y)  # get a matrix/array of softmax scores

            # one-hot encode the ground-truth labels so they line up with the softmax scores
            hot_y = np.eye(self.w1.shape[1])[y.astype(int)]

            # error at the output layer: softmax score minus ground-truth value
            costArray = first - hot_y

            k = np.dot(costArray, self.w1.T)  # back-propagate the output error through the hidden->output weights

            z = np.dot(X, self.w0)
            sig = self.sigmoid(z)             # regenerates the output of the hidden layer
            deriveR = sig * (1 - sig)         # derivative of the sigmoid at the hidden layer

            l = np.multiply(k, deriveR)       # error at the hidden layer

            deltaW = learning_rate * np.dot(X.T, l) / len(X)             # change for the weights b/w input & hidden layers
            deltaW2 = learning_rate * np.dot(sig.T, costArray) / len(X)  # change for the weights b/w hidden & output layers

            # NOTE: update all weights simultaneously
            self.w0 = self.w0 - deltaW
            self.w1 = self.w1 - deltaW2

            # check the updated cost
            cost = self.compute_cost(X, y)


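
# A small gradient-checking sketch (an addition, not part of the original
# assignment code). It numerically estimates the cost gradient for one weight
# by finite differences, which is a common way to sanity-check a
# backpropagation update like the one in NeuralNet.fit above.
def numerical_gradient(net, X, y, layer, i, j, eps=1e-5):
    """Finite-difference estimate of d(cost)/d(weight[i, j]) for layer 'w0' or 'w1'."""
    W = getattr(net, layer)
    original = W[i, j]
    W[i, j] = original + eps
    cost_plus = net.compute_cost(X, y)
    W[i, j] = original - eps
    cost_minus = net.compute_cost(X, y)
    W[i, j] = original                     # restore the original weight
    return (cost_plus - cost_minus) / (2 * eps)
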
# --------------------------------------------------------------------------
"""
def plot_decision_boundary(model, X, y, output_dim):

    Function to print the decision boundary given by model.

    args:
        model: model, whose parameters are used to plot the decision boundary.
        X: input data
        y: input labels


    x1_array, x2_array = np.meshgrid(np.arange(-4, 4, 0.01), np.arange(-4, 4, 0.01))
    grid_coordinates = np.c_[x1_array.ravel(), x2_array.ravel()]
    Z = model.predict(grid_coordinates, output_dim)
    Z = Z.reshape(x1_array.shape)
    plt.contourf(x1_array, x2_array, Z, cmap=plt.cm.bwr)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.bwr)
    plt.show()
"""


################################################################################

def main():
    # 1. Load data

    X = np.genfromtxt('DATA/Linear/X.csv',
                      delimiter=',')  # https://docs.scipy.org/doc/numpy/reference/generated/numpy.genfromtxt.html
    y = np.genfromtxt('DATA/Linear/y.csv', delimiter=',')

    # 2. Plot data
    # plt.scatter(X[:,0], X[:,1], c=y, cmap=plt.cm.bwr) #http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.scatter
    # plt.show()

    # 3. Initialize the NeuralNet object
    # For the Linear and NonLinear data sets, input and output dimensions are 2.
    # For the Digit Classification data set, input_dim is 64 and output_dim is 10.
    input_dim = 2
    output_dim = 2

    NN = NeuralNet(input_dim, output_dim)

    #plot_decision_boundary(NN, X, y, output_dim)

    NN.fit(X, y)

    #plot_decision_boundary(NN, X, y, output_dim)


if __name__ == '__main__':
    main()
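
# A sketch of the digit-classification configuration mentioned in the comments
# above (an addition, not in the original paste; the CSV paths are placeholders
# and would need to match the actual assignment data layout):
#
#   X = np.genfromtxt('DATA/Digits/X.csv', delimiter=',')   # 8x8 images -> 64 features per row
#   y = np.genfromtxt('DATA/Digits/y.csv', delimiter=',')   # labels 0-9
#   NN = NeuralNet(input_dim=64, output_dim=10)
#   NN.fit(X, y)
#   scores = NN.forward_prop(X, y)
#   accuracy = np.mean(NN.predict(scores, len(X)) == y)     # fraction of training samples classified correctly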