- """
- NeuralNet.py
- CS460/660 Spring 2017
- Programming Assignment 1
- Team Members: Adonica Camp, Anthony Inzero, Haley Pereira
- """
- import numpy as np
- import matplotlib.pyplot as plt
- import math
- # 8 by 8 number images is 64 inputes
- # linear & nonlinear have
class NeuralNet:
    """This class implements a Neural Net with one hidden layer."""

    def __init__(self, input_dim, output_dim):
        """
        Initializes the parameters of the neural network to random values.

        args:
            input_dim: Number of dimensions of the input data
            output_dim: Number of classes
        """
        self.hiddenlayer = 2  # The number of nodes in the hidden layer
        # weights between the input layer and the hidden layer:
        # one row per input dimension, one column per hidden node
        self.w0 = np.random.randn(input_dim, self.hiddenlayer)
        # weights between the hidden layer and the output layer:
        # one row per hidden node, one column per output class
        self.w1 = np.random.randn(self.hiddenlayer, output_dim)
        # a bias would need dimensions that broadcast against the layer outputs
        # self.bias = np.zeros((1, output_dim))
    # --------------------------------------------------------------------------
    def forward_prop(self, X, y):
        """
        Implements forward propagation and returns the softmax class
        probabilities for every sample in X (y is accepted but not
        used during the forward pass).
        """
        z = np.dot(X, self.w0)       # pre-activation of the hidden layer
        sig = self.sigmoid(z)        # hidden layer activations
        k = np.dot(sig, self.w1)     # pre-activation of the output layer
        foroutput = self.softmax(k)  # softmax turns scores into class probabilities
        return foroutput
    # -------------------------------------------------------------------------
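    # A quick shape walkthrough of forward_prop -- a sketch, assuming the
    # 2-D linear data set with input_dim = 2, hiddenlayer = 2, output_dim = 2
    # and n samples:
    #   X   : (n, 2)               input data
    #   z   : (n, 2) = X.dot(w0)   since w0 is (2, 2)
    #   sig : (n, 2)               sigmoid is applied elementwise
    #   k   : (n, 2) = sig.dot(w1) since w1 is (2, 2)
    #   out : (n, 2)               each row sums to 1 after softmax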
    def sigmoid(self, X):
        """
        Returns the elementwise sigmoid of X
        """
        s = 1 / (1 + np.exp(-X))
        return s
    # --------------------------------------------------------------------------
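    # Example values (rounded): sigmoid(0) = 0.5, sigmoid(2) ~ 0.881,
    # sigmoid(-2) ~ 0.119. Outputs always lie in (0, 1), which is why the
    # derivative used during training can be written as s * (1 - s).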
    def softmax(self, X):
        """
        Returns the softmax probability array, one row of class
        probabilities per sample
        """
        # subtracting the row-wise max keeps np.exp from overflowing
        # without changing the resulting probabilities
        exp_z = np.exp(X - np.max(X, axis=1, keepdims=True))
        softmax_scores = exp_z / np.sum(exp_z, axis=1, keepdims=True)
        return softmax_scores
    # -------------------------------------------------------------------------
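    # Worked example (rounded): for one sample with scores [1, 2], exp gives
    # [2.718, 7.389], the row sum is 10.107, and the softmax probabilities
    # are [0.269, 0.731] -- so argmax would pick class 1.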
    def predict(self, X, input_dim):
        """
        Returns the predicted class for each row of X, where X should be
        the softmax matrix from forward propagation.
        """
        # input_dim is kept for interface compatibility; the number of
        # predictions is taken from the rows of X
        num_samples = X.shape[0]
        predictions = np.zeros(num_samples)
        for i in range(num_samples):
            predictions[i] = np.argmax(X[i])  # most probable class for sample i
        return predictions
    # -------------------------------------------------------------------------
    def compute_cost(self, X, y):
        """
        Computes the total cross-entropy cost on the dataset.

        args:
            X: Data array
            y: Labels corresponding to input data
        returns:
            cost: average cost per data sample
        """
        softmax_scores = self.forward_prop(X, y)
        num_classes = self.w1.shape[1]
        cost_mean = 0
        for i in range(len(X)):
            # one-hot encode the ground-truth label for sample i
            hot_y = np.zeros(num_classes)
            hot_y[int(y[i])] = 1
            # cross-entropy: only the log-probability of the true class survives
            cost_for_sample = -np.sum(hot_y * np.log(softmax_scores[i]))
            cost_mean += cost_for_sample
        return cost_mean / len(X)
    # --------------------------------------------------------------------------
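    # Worked example (rounded): with 2 classes, a sample whose true class
    # gets probability 0.5 costs -log(0.5) ~ 0.693, while probability 0.9
    # costs -log(0.9) ~ 0.105 -- so the 0.10 stopping threshold in fit()
    # roughly means "about 90% probability on the true class, on average".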
    def fit(self, X, y):
        """
        Trains the network with backward propagation (gradient descent)
        until the average cost drops below 0.10.
        """
        # Set learning rate
        learning_rate = 0.05
        num_samples = len(X)
        num_classes = self.w1.shape[1]
        # one-hot encode every ground-truth label once, up front
        hot_y = np.zeros((num_samples, num_classes))
        hot_y[np.arange(num_samples), y.astype(int)] = 1
        cost = self.compute_cost(X, y)
        epochs = 0
        while cost > 0.10 and epochs < 10000:  # cap epochs so training always terminates
            # forward pass, kept explicit so the hidden activations are available
            z = np.dot(X, self.w0)
            sig = self.sigmoid(z)  # output of the hidden layer
            probs = self.softmax(np.dot(sig, self.w1))
            # error at the output layer: prediction minus ground truth,
            # averaged over the samples
            output_error = (probs - hot_y) / num_samples
            # gradient for the weights b/w hidden & output layers
            deltaW2 = np.dot(sig.T, output_error)
            # propagate the error back through w1 and the sigmoid derivative
            hidden_error = np.dot(output_error, self.w1.T) * sig * (1 - sig)
            # gradient for the weights b/w input & hidden layer
            deltaW = np.dot(X.T, hidden_error)
            # NOTE: update all weights simultaneously
            self.w0 = self.w0 - learning_rate * deltaW
            self.w1 = self.w1 - learning_rate * deltaW2
            # check the updated cost
            cost = self.compute_cost(X, y)
            epochs += 1
    # --------------------------------------------------------------------------
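# A sketch of the math behind the output_error term in fit(): for softmax
# probabilities p trained with cross-entropy loss L = -sum(hot_y * log(p)),
# the chain rule collapses to dL/dz = p - hot_y at the output
# pre-activations z, which is the standard softmax + cross-entropy identity.
# The 10000-epoch cap is an assumed safeguard against non-convergence, not
# part of the assignment's stopping criterion.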
- """
- def plot_decision_boundary(model, X, y, output_dim):
- Function to print the decision boundary given by model.
- args:
- model: model, whose parameters are used to plot the decision boundary.
- X: input data
- y: input labels
- x1_array, x2_array = np.meshgrid(np.arange(-4, 4, 0.01), np.arange(-4, 4, 0.01))
- grid_coordinates = np.c_[x1_array.ravel(), x2_array.ravel()]
- Z = model.predict(grid_coordinates, output_dim)
- Z = Z.reshape(x1_array.shape)
- plt.contourf(x1_array, x2_array, Z, cmap=plt.cm.bwr)
- plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.bwr)
- plt.show()
- """
- ################################################################################
def main():
    # 1. Load data
    X = np.genfromtxt('DATA/Linear/X.csv',
                      delimiter=',')  # https://docs.scipy.org/doc/numpy/reference/generated/numpy.genfromtxt.html
    y = np.genfromtxt('DATA/Linear/y.csv', delimiter=',')

    # 2. Plot data
    # plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.bwr)  # http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.scatter
    # plt.show()

    # 3. Initialize the NeuralNet object
    # For the Linear and NonLinear data sets, input and output dimensions are 2
    # For Digit Classification, input_dim is 64 and output_dim is 10
    input_dim = 2
    output_dim = 2
    NN = NeuralNet(input_dim, output_dim)
    # plot_decision_boundary(NN, X, y, output_dim)
    NN.fit(X, y)
    # plot_decision_boundary(NN, X, y, output_dim)


if __name__ == '__main__':
    main()