Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- #
- ## -*- coding: utf-8 -*-
- #"""
- #Created on Sun Mar 11 15:41:00 2018
- #
- #@author: UIMCC-Staff
- #"""
- #
- ##Libraries needed to run the tool
- #import numpy as np
- #import pandas as pd
- #from sklearn.neural_network import MLPRegressor
- #from sklearn.neural_network import MLPClassifier
- #from sklearn import preprocessing #to normalize the values
- #from sklearn.model_selection import train_test_split
- #from sklearn.model_selection import cross_val_score
- #import matplotlib.pyplot as plt
- #
- ##Ask for file name and read the file
- ##file_name = raw_input("Name of file:")
- #file_name = 'forestfiresw8'
- #data = pd.read_csv(file_name + '.csv', header=0)
- #
- #analysis_type = input("Analysis Type 'R' or 'C': ")
- #
- ##Print number of rows and columns read
- #print("{0} rows and {1} columns".format(len(data.index), len(data.columns)))
- #print("")
- #
- #data["month"] = data["month"].replace("jan",1)
- #data["month"] = data["month"].replace("feb",2)
- #data["month"] = data["month"].replace("mar",3)
- #data["month"] = data["month"].replace("apr",4)
- #data["month"] = data["month"].replace("may",5)
- #data["month"] = data["month"].replace("jun",6)
- #data["month"] = data["month"].replace("jul",7)
- #data["month"] = data["month"].replace("aug",8)
- #data["month"] = data["month"].replace("sep",9)
- #data["month"] = data["month"].replace("oct",10)
- #data["month"] = data["month"].replace("nov",11)
- #data["month"] = data["month"].replace("dec",12)
- #
- #
- #
- ##Defining X1, X2, and all the data X
- #X1 = data.FFMC
- #X2 = data.DMC
- #X3 = data.DC
- #X4 = data.ISI
- #X5 = data.temp
- #X6 = data.RH
- #X7 = data.wind
- #X8 = data.rain
- #X_raw = np.column_stack((X1, X2, X3, X4, X5, X6, X7, X8))
- #
- ##Normalizing or not the data
- #min_max_scaler = preprocessing.MinMaxScaler()
- #X = min_max_scaler.fit_transform(X_raw)
- ##X = X_raw
- ##print(X)
- #
- ##Defining Y variables depending on whether we have a regression or classification problem
- #if analysis_type == 'R':
- # Y = data.month
- #
- #
- ##Using Built in train test split function in sklearn
- #X_train, X_test, Y_train, Y_test = train_test_split(X, Y, train_size = 0.8)
- #
- #if analysis_type == 'R':
- # neuron_count = []
- # for i in range (1,201):
- #
- # #Fit the neural network for Regression purposes (i.e., you expect a continuous variable out)
- # #Note that 'sgd' and 'adam' require a batch_size and the function is not as clear
- # acti = ['logistic', 'tanh', 'relu', 'identity']
- # algo = ['lbfgs', 'sgd', 'adam']
- # learn = ['constant', 'invscaling', 'adaptive']
- # neural = MLPRegressor(activation=acti[0], solver=algo[0], batch_size = 1, learning_rate = learn[0], hidden_layer_sizes=(i,))
- #
- # #Cross validation
- # neural_scores = cross_val_score(neural, X_train, Y_train, cv=5)
- ## print("Cross Validation Accuracy: {0} (+/- {1})".format(neural_scores.mean().round(2), (neural_scores.std() * 2).round(2)))
- ## print("")
- # rounded = neural_scores.mean().round(2)
- # neuron_count.append(rounded)
- #
- # #Cross validation
- # neural_scores = cross_val_score(neural, X_train, Y_train, cv=15)
- # print("Cross Validation Accuracy: {0} (+/- {1})".format(neural_scores.mean().round(2), (neural_scores.std() * 2).round(2)))
- ## print("")
- #
- # #Fitting final neural network
- # neural.fit(X_train, Y_train)
- # neural_score = neural.score(X_test, Y_test)
- ## print("Shape of neural network: {0}".format([coef.shape for coef in neural.coefs_]))
- ## print("Coefs: ")
- ## print("")
- ## print(neural.coefs_[0].round(2))
- ## print("")
- ## print(neural.coefs_[1].round(2))
- ## print("")
- ## print("Intercepts: {0}".format(neural.intercepts_))
- ## print("")
- ## print("Loss: {0}".format(neural.loss_))
- ## print("")
- ## print("Iteration: {0}".format(neural.n_iter_))
- ## print("")
- ## print("Layers: {0}".format(neural.n_layers_))
- ## print("")
- ## print("Outputs: {0}".format(neural.n_outputs_))
- ## print("")
- ## print("Output Activation: {0}".format(neural.out_activation_)) #identity because we are looking for a value
- ## print("")
- #
- # #Assess the fitted Neural Network
- # print("Y test and predicted")
- # print(Y_test.values)
- # print(neural.predict(X_test).round(1))
- # print("")
- # print("Accuracy as Pearson's R2: {0}".format(neural_score.round(4)))
- # print("")
- #
- ##fig, ax = plt.subplots()
- ##ax.scatter(Y_test.values, neural.predict(X_test), edgecolors=(0, 0, 0))
- ##ax.plot([Y.min(0), Y.max(100)], [X.min(0), X.max(100)], 'k--', lw=3)
- ##ax.set_xlabel('Measured Grade')
- ##ax.set_ylabel('Predicted Grade')
- #
- #
- #fig, ax = plt.subplots()
- #ax.scatter(neural.predict(X_test), Y_test.values)
- #plt.axis([0, 100, 0, 100])
- #plt.grid(True)
- #ax.set_xlabel('Number of Neurons')
- #ax.set_ylabel('Accuracy')
- ##else:
- ## #Fit the neural network for Classification purposes (i.e., you don't expect a continuous variable out).
- ## #Note that 'sgd' and 'adam' require a batch_size and the function is not as clear
- ## acti = ['logistic', 'tanh', 'relu', 'identity']
- ## algo = ['lbfgs', 'sgd', 'adam']
- ## learn = ['constant', 'invscaling', 'adaptive']
- ## neural = MLPClassifier(activation=acti[2], solver=algo[2], batch_size = 1, learning_rate = learn[2], hidden_layer_sizes=(7,500))
- ##
- ## #Cross validation
- ## neural_scores = cross_val_score(neural, X_train, Y_train, cv=5)
- ## print("Cross Validation Accuracy: {0} (+/- {1})".format(neural_scores.mean().round(2), (neural_scores.std() * 2).round(2)))
- ## print("")
- ##
- ## #Fitting final neural network
- ## neural.fit(X_train, Y_train)
- ## neural_score = neural.score(X_test, Y_test)
- ## print("Classes: {0}".format(neural.classes_))
- ## print("")
- ## print("Shape of neural network: {0}".format([coef.shape for coef in neural.coefs_]))
- ## print("")
- ## print("Coefs: ")
- ## print(neural.coefs_[0].round(2))
- ## print("")
- ## print(neural.coefs_[1].round(2))
- ## print("")
- ## print("Intercepts: {0}".format(neural.intercepts_))
- ## print("")
- ## print("Loss: {0}".format(neural.loss_))
- ## print("")
- ## print("Iteration: {0}".format(neural.n_iter_))
- ## print("")
- ## print("Layers: {0}".format(neural.n_layers_))
- ## print("")
- ## print("Outputs: {0}".format(neural.n_outputs_))
- ## print("")
- ## print("Output Activation: {0}".format(neural.out_activation_)) #softmax to get a probability between 0 and 1
- ## print("")
- ##
- ## #Assess the fitted Neural Network
- ## print("Y test and predicted")
- ## print(Y_test.values)
- ## print(neural.predict(X_test))
- ## print("")
- ## print("Mean Accuracy: {0}".format(neural_score.round(4)))
- ## print("")
- ##
- ###fix, ax = plt.subplots()
- ###ax.scatter(Y_test, X_test, edgecolors=(0, 0, 0))
- ###ax.plot([y.min(0), y.max(100)], [x.min(0), x.max(100)], 'k--', lw=4)
- ###ax.set_xlabel('Measured')
- ###ax.set_ylabel('Predicted')
- ###
- ####fig, ax =plt.subplots
- ####ax.scatter(y_predicted, edgecolors=(0, 0, 0))
- ###ax.plot
- ###plt.xlabel('X values')
- ###plt.ylabel('Area')
- ###plt.savefig(file_name, dpi=300)
- ###plt.show()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement