Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- using System;
- using System.Collections.Generic;
- using System.ComponentModel;
- using System.Data;
- using System.Drawing;
- using System.Linq;
- using System.Text;
- using System.Windows.Forms;
- namespace MPApplication
- {
- public partial class Form1 : Form
- {
        // Constructs the form; InitializeComponent is the designer-generated
        // control setup (defined in the partial-class counterpart file).
        public Form1()
        {
            InitializeComponent();
        }
        //// Data dependent settings ////
        const int numInputs = 238;   // pixels read from each letter bitmap (see initData)
        const int numOutputs = 6;    // braille dots per character (columns of brailChars)
        const int numPatterns = 1;   // how many letters are actually trained on
        const int numHidden = 20;    // hidden-layer size
        const int numEpochs = 100;   // training epochs (Go_Click loop bound)
        //// User definable settings ////
        const double LR_IH = 0.7;    // learning rate, input->hidden weights
        const double LR_HO = 0.07;   // learning rate, hidden->output weights
- private void Go_Click(object sender, EventArgs e)
- {
- // initiate the weights
- initWeights();
- // load in the data
- initData();
- // train the network
- for (int j = 0; j <= numEpochs; j++)
- {
- /*
- richTextBox1.Text += "\n";
- for (int k = 0; k < 20; k++)
- {
- richTextBox1.Text += ", " + weightsIH[0, k];
- }
- richTextBox1.Text += "\n";
- */
- for (int i = 0; i < numPatterns; i++)
- {
- //select a pattern at random
- //patNum = randNum.Next();
- patNum = i;
- //calculate the current network output and error for this pattern
- calcNet();
- //change network weights
- WeightChangesHO();
- WeightChangesIH();
- }
- //display the overall network error after each epoch
- calcOverallError();
- //printf("epoch = %d RMS Error = %f\n",j,RMSerror);
- //if(j % 50 == 0)
- this.richTextBox1.Text +="\r\nepo = " + j.ToString() + " RMS error = " + RMSerror.ToString();
- /*this.richTextBox1.Text += "\r\n";
- for (int i = 0; i < numHidden; i++)
- {
- this.richTextBox1.Text += hiddenVal[i] + ", ";
- }*/
- if (RMSerror < 0.0001)
- break;
- }
- //training has finished //display the results
- displayResults();
- }
- /*
- * Q - 111110 - 62
- * R - 101110 - 46
- * S - 011010 - 26
- * T - 011110 - 30
- * U - 100011 - 35
- * V - 101011 - 43
- * W - 011101 - 29
- * X - 110011 - 51
- * 3 - 110000 - 48
- * 6 - 111000 - 56
- */
- string[] letters = { "Q", "R", "S", "T", "U", "V", "w", "X", "3", "6" };
- //double[] brailChars = { 0.10, 0.20, 0.30, 0.40, 0.50, 0.60, 0.70, 0.80, 0.90, 1.00 };
- //double[] brailChars = { 0.62, 0.46, 0.26, 0.30, 0.35, 0.43, 0.29, 0.51, 0.48, 0.56 };
- //int[] brailChars = { 62, 46, 26, 30, 35, 43, 29, 51, 48, 56 };
- double[,] brailChars = { {1.0, 1.0, 1.0, 1.0, 1.0, 0.0}, {1.0, 0.0, 1.0, 1.0, 1.0, 0.0},
- {0.0, 1.0, 1.0, 0.0, 1.0, 0.0}, {0.0, 1.0, 1.0, 1.0, 1.0, 0.0},
- {1.0,0.0, 0.0, 0.0, 1.0, 1.0,}, {1.0, 0.0, 1.0, 0.0, 1.0, 1.0},
- {0.0, 1.0, 1.0, 1.0, 0.0, 1.0}, {1.0, 1.0, 0.0, 0.0, 1.0, 1.0},
- {1.0, 1.0, 0.0, 0.0, 0.0, 0.0}, {1.0, 1.0, 1.0, 0.0, 0.0, 0.0}
- };
        //// variables ////
        int patNum = 0;                                  // index of the pattern currently fed through the net
        double[] errThisPat = new double[numOutputs];    // raw per-output error (target - prediction) for patNum
        double[] errThisPatFi = new double[numOutputs];  // error scaled by outPred*(1-outPred) (see calcNet)
        double RMSerror = 0.0;                           // overall error, updated by calcOverallError
        double[] outPred = new double[numOutputs];       // network outputs for the current pattern
        // the outputs of the hidden neurons
        double[] hiddenVal = new double[numHidden];
        double[] outVal = new double[numOutputs];        // NOTE(review): never read or written in this file — candidate for removal
        // the weights
        double[,] weightsIH = new double[numInputs, numHidden];   // input->hidden weights, indexed [input, hidden]
        double[,] weightsHO = new double[numHidden, numOutputs];  // hidden->output weights, indexed [hidden, output]
        // the data
        int[,] trainInputs = new int[numPatterns, numInputs];       // pixel vectors, one row per pattern
        double[,] trainOutput = new double[numPatterns, numOutputs]; // target braille dot patterns, one row per pattern
        Random randNum = new Random();                   // RNG for weight initialisation
- //***********************************
- // calculates the network output
- private void calcNet()
- {
- //calculate the outputs of the hidden neurons
- //the hidden neurons are tanh
- int i = 0;
- for (i = 0; i < numHidden; i++)
- {
- hiddenVal[i] = 0.0;
- for (int j = 0; j < numInputs; j++)
- {
- hiddenVal[i] = hiddenVal[i] + (trainInputs[patNum, j] * weightsIH[j, i]);
- }
- hiddenVal[i] = Math.Tanh(hiddenVal[i]);
- // hiddenVal[i] = logistic(hiddenVal[i]);
- }
- //calculate the output of the network
- //the output neuron is linear
- for (int j = 0; j < numOutputs; j++)
- {
- outPred[j] = 0.0;
- for (i = 0; i < numHidden; i++)
- {
- outPred[j] = outPred[j] + hiddenVal[i] * weightsHO[i,j];
- }
- //calculate the error
- errThisPat[j] = trainOutput[patNum, j] - outPred[j];
- errThisPatFi[j] = outPred[j] * (1.0 - outPred[j]) * (trainOutput[patNum, j] - outPred[j]);
- }
- }
- //************************************
- //adjust the weights hidden-output
- private void WeightChangesHO()
- {
- for (int j = 0; j < numOutputs; j++)
- {
- for (int k = 0; k < numHidden; k++)
- {
- //double weightChange = LR_HO * errThisPat[j] * hiddenVal[k];
- double weightChange = LR_HO * errThisPatFi[j];
- weightsHO[k,j] = weightsHO[k,j] + weightChange;
- //regularisation on the output weights
- if (weightsHO[k,j] < -5)
- {
- weightsHO[k,j] = -5;
- }
- else if (weightsHO[k,j] > 5)
- {
- weightsHO[k,j] = 5;
- }
- }
- }
- }
- //************************************
- // adjust the weights input-hidden
- void WeightChangesIH()
- {
- for (int j = 0; j < numOutputs; j++)
- {
- for (int i = 0; i < numHidden; i++)
- {
- for (int k = 0; k < numInputs; k++)
- {
- //double x = 1 - (hiddenVal[i] * hiddenVal[i]);
- double weightChange = weightsHO[i, j] * errThisPatFi[j] * hiddenVal[i] * (1.0 - hiddenVal[i]);
- weightChange *= trainInputs[patNum, k] * LR_IH;
- weightsIH[k, i] += weightChange;
- }
- }
- }
- }
- //************************************
- // set weights to random numbers
- void initWeights()
- {
- for (int j = 0; j < numHidden; j++)
- {
- for (int k = 0; k < numOutputs; k++)
- {
- weightsHO[j, k] = (randNum.NextDouble() - 0.5) / 2;
- //weightsHO[j, k] = randNum.NextDouble();
- //if (randNum.NextDouble() > 0.5)
- /// weightsHO[j, k] = -weightsHO[j, k];
- }
- for (int i = 0; i < numInputs; i++)
- {
- weightsIH[i, j] = (randNum.NextDouble() - 0.5) / 5;
- //weightsIH[i, j] = randNum.NextDouble();
- //if (randNum.NextDouble() > 0.5)
- // weightsIH[i, j] = -weightsIH[i, j];
- //printf("Weight = %f\n", weightsIH[i, j]);
- //this.richTextBox1.Text +="\r\nWeight = " + weightsIH[i, j].ToString();
- }
- }
- }
- //************************************
- // read in the data
- void initData()
- {
- //printf("initialising data\n");
- //this.richTextBox1.Text +="\r\ninitialising data";
- // the data here is the XOR data
- // it has been rescaled to the range [-1, 1]
- // an extra input valued 1 is also added to act as the bias
- // the output must lie in the range -1 to 1
- for (int i = 0; i < numPatterns; i++)
- {
- string extension = ".bmp";
- string path = "res/";
- LetterImage l = new LetterImage(path + letters[i] + extension);
- int[] vec = l.getPixelVector();
- for (int j = 0; j < numInputs; j++ )
- trainInputs[i, j] = vec[j];
- for (int j = 0; j < numOutputs; j++)
- {
- trainOutput[i,j] = brailChars[i,j];
- }
- }
- }
- //************************************
- // display results
- void displayResults()
- {
- for (int i = 0; i < numPatterns; i++)
- {
- patNum = i;
- calcNet();
- //printf("pat = %d actual = %d neural model = %f\n",patNum+1,trainOutput[patNum],outPred);
- this.richTextBox1.Text += "\r\npat = " + (patNum + 1) + " actual = ";
- String finalText = "";
- for (int j = 0; j < numOutputs; j++)
- {
- this.richTextBox1.Text += trainOutput[patNum, j].ToString() + ", ";
- finalText += outPred[j].ToString() + ", ";
- }
- this.richTextBox1.Text += " neural model = " + finalText;
- }
- }
- //************************************
- // calculate the overall error
- void calcOverallError()
- {
- RMSerror = 0.0;
- for (int j = 0; j < numOutputs; j++)
- {
- for (int i = 0; i < numPatterns; i++)
- {
- patNum = i;
- calcNet();
- RMSerror = RMSerror + (errThisPat[j] * errThisPat[j]);
- }
- }
- RMSerror = RMSerror / numPatterns;
- RMSerror = Math.Sqrt(RMSerror);
- }
- }
- }
Add Comment
Please, Sign In to add comment