using System;
using System.Collections.Generic;
using System.Collections;
using System.Linq;
using System.Text;
using System.Threading.Tasks;

namespace nNetwork
{
    public class NeuralNetwork
    {
        private int inpCount { get; set; }
        private int hiddenCount { get; set; }
        private int outCount { get; set; }
        private int layersCount { get; set; }

        // Scaling factor (0.7 * hiddenCount^(1 / inpCount)) handed to every layer.
        public double factor { get; set; }

        private NeuralLayer[] layers;
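
        // Builds the layer chain: layers[0] is the input layer, the last element is the
        // output layer, and everything in between is hidden. hiddenCount holds the total
        // number of hidden neurons across all hidden layers.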
        public NeuralNetwork(int layers_count, int input_count, int output_count, int hidden_count)
        {
            layers = new NeuralLayer[layers_count];
            layersCount = layers_count;
            inpCount = input_count;
            outCount = output_count;
            hiddenCount = (layersCount - 2) * hidden_count;

            // Weight-initialization scale factor, 0.7 * h^(1/n), where h is the total
            // hidden neuron count and n the input count (as in Nguyen-Widrow-style initialization).
            factor = 0.7 * Math.Pow((double)hiddenCount, 1 / (double)inpCount);

            layers[0] = new NeuralLayer(input_count, input_count, hidden_count, factor);
            layers[layers.Length - 1] = new NeuralLayer(output_count, hidden_count, output_count, factor);
            layers[layers.Length - 2] = new NeuralLayer(hidden_count, hidden_count, output_count, factor);

            // Fill the remaining hidden layers (indices 1 .. Length - 3) when there is
            // more than one hidden layer.
            if (layers_count > 3)
            {
                for (int i = 1; i < layers.Length - 2; i++)
                {
                    layers[i] = new NeuralLayer(hidden_count, layers[i - 1].outCount, hidden_count, factor);
                }
            }

            layers[0].isInput = true;
            layers[layers.Length - 1].isOutput = true;
        }
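
        // Forward pass: writes the input vector into the input layer, propagates the
        // weighted, biased signals through each subsequent layer, and returns the
        // activation of the single output neuron.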
        public double Run(double[] data)
        {
            this.Clean();
            for (int i = 0; i < layers[0].neurons.Length; i++)
            {
                layers[0].neurons[i].data = data[i];
            }
            for (int i = 1; i < layers.Length; i++)
            {
                NeuralLayer temp = layers[i];
                NeuralLayer prev = layers[i - 1];
                for (int j = 0; j < temp.nCount; j++)
                {
                    for (int k = 0; k < prev.nCount; k++)
                    {
                        if (prev.isInput)
                        {
                            temp.neurons[j].input(prev.neurons[k].output(false) * prev.weight[k, j] + prev.bias[k, j]);
                        }
                        else
                        {
                            temp.neurons[j].input(prev.neurons[k].output() * prev.weight[k, j] + prev.bias[k, j]);
                        }
                    }
                }
            }
            return layers[layers.Length - 1].neurons[0].output();
        }
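
        // Mean squared error between the expected values (res) and the produced values (data).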
        public double Err(double[] data, double[] res)
        {
            double summ = 0;
            for (int i = 0; i < data.Length; i++)
            {
                summ += Math.Pow(res[i] - data[i], 2);
            }
            return summ / (double)res.Length;
        }

        /* Earlier, unfinished draft of train() (note the incomplete ErrSumm assignment);
           kept commented out for reference.
        public double train(double[] data, double[] res)
        {
            double result = Run(data);
            double[] delta = new double[res.Length];
            for (int i = 0; i < outCount; i++)
            {
                delta[i] = res[i] - layers[layersCount - 1].neurons[i].activation_function(layers[layersCount - 1].neurons[i].data);
            }
            for (int i = layers.Length - 1; i > 1; i--)
            {
                NeuralLayer temp = layers[i];
                NeuralLayer prev = layers[i - 1];
                for (int j = 0; j < temp.nCount; j++)
                    for (int k = 0; k < prev.nCount; k++)
                    {
                        double ErrSumm = 0;
                        for (int t = 0; t < temp.neurons.Length; t++)
                        {
                            ErrSumm +=
                        }
                        for (int n = 0; n < prev.neurons.Length; n++)
                            for (int m = 0; m < temp.neurons.Length; m++)
                            {
                                double learning_speed = 1;
                                double data_delta = delta * temp.neurons[m].derivative_af();
                                double weight_delta = learning_speed * prev.neurons[n].output() * data_delta;
                                prev.weight[n, m] += weight_delta;
                                prev.bias[n, m] += weight_delta;
                            }
                    }
            }
            return 0;
        }*/
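
        // Backpropagation-style training step for a single sample: runs a forward pass,
        // computes the output delta (target - actual, scaled by the activation derivative),
        // then walks the layers backwards adjusting weights and biases. The learning rate
        // is currently hard-coded inside the loop.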
        public void train(double[] data, double[] res)
        {
            double result = Run(data);
            double outDelta = res[0] - result;
            layers[layersCount - 1].neurons[0].delta = outDelta * layers[layersCount - 1].neurons[0].derivative_af();

            for (int i = layers.Length - 1; i > 0; i--)
            {
                NeuralLayer temp = layers[i];
                NeuralLayer prev = layers[i - 1];
                for (int n = 0; n < prev.nCount; n++)
                {
                    for (int m = 0; m < temp.nCount; m++)
                    {
                        double weight_delta = 0;
                        double learning_speed = 4;
                        if (temp.isOutput)
                        {
                            // Output layer: use the delta stored on the output neuron directly.
                            double derivative_delta = temp.neurons[m].delta;
                            weight_delta = learning_speed * prev.neurons[n].output() * derivative_delta;
                            prev.weight[n, m] += weight_delta;
                            prev.bias[n, m] += learning_speed * derivative_delta;
                            prev.neurons[n].delta += prev.neurons[n].output() * derivative_delta;
                        }
                        else
                        {
                            // Hidden layers: the delta is the error summed over the next layer,
                            // scaled by this neuron's activation derivative.
                            double summ_derivative_delta = getNextLayoutErrorSumm(n, prev) * prev.neurons[n].derivative_af();
                            if (prev.isInput)
                                weight_delta = learning_speed * prev.neurons[n].output(false) * summ_derivative_delta;
                            else
                                weight_delta = learning_speed * prev.neurons[n].output() * summ_derivative_delta;
                            prev.weight[n, m] += weight_delta;
                            prev.bias[n, m] += learning_speed * summ_derivative_delta;
                            prev.neurons[n].delta += prev.neurons[n].output() * getNextLayoutErrorSumm(n, prev);
                        }
                    }
                }
            }
        }
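
        // Sum of the deltas of the layer that follows 'layer', each weighted by the
        // connection from the given neuron to that next-layer neuron.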
        private double getNextLayoutErrorSumm(int neuronIndex, NeuralLayer layer)
        {
            double result = 0;
            NeuralLayer next = null;
            for (int i = 0; i < layers.Length; i++)
            {
                if (layers[i] == layer)
                {
                    next = layers[i + 1];
                    break;
                }
            }
            for (int m = 0; m < next.nCount; m++)
            {
                result += next.neurons[m].delta * layer.weight[neuronIndex, m];
            }
            return result;
        }

        // Persistence stubs, not implemented yet ("SafeToFile" is presumably meant to be SaveToFile).
        public void SafeToFile()
        {
        }

        public void LoadFromFile()
        {
        }
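
        // Resets every neuron (via Neuron.Clean) before a new forward pass.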
        private void Clean()
        {
            foreach (NeuralLayer layer in layers)
            {
                foreach (Neuron neuron in layer.neurons)
                {
                    neuron.Clean();
                }
            }
        }

        private int getCountOfNeurons()
        {
            return inpCount + hiddenCount + outCount;
        }
    }
}
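
// Usage sketch (not part of the original paste): a minimal example of how this
// NeuralNetwork might be driven, assuming the NeuralLayer and Neuron types it
// references (not shown here) compile as used. The 3-layer topology, the XOR data
// and the epoch count are arbitrary choices for illustration only.
namespace nNetwork
{
    using System;

    public static class Example
    {
        public static void Main()
        {
            // 3 layers (input, one hidden, output), 2 inputs, 1 output, 4 hidden neurons.
            var net = new NeuralNetwork(3, 2, 1, 4);

            double[][] inputs =
            {
                new double[] { 0, 0 },
                new double[] { 0, 1 },
                new double[] { 1, 0 },
                new double[] { 1, 1 }
            };
            double[][] targets =
            {
                new double[] { 0 },
                new double[] { 1 },
                new double[] { 1 },
                new double[] { 0 }
            };

            // Repeatedly present each sample; train() performs one forward/backward pass.
            for (int epoch = 0; epoch < 10000; epoch++)
            {
                for (int i = 0; i < inputs.Length; i++)
                {
                    net.train(inputs[i], targets[i]);
                }
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                Console.WriteLine("{0} XOR {1} -> {2:F3}", inputs[i][0], inputs[i][1], net.Run(inputs[i]));
            }
        }
    }
}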