Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
// Simple fully-connected feed-forward neural network with sigmoid activations,
// trained one sample at a time by gradient descent (backpropagation).
// Matrix types come from MathNet.Numerics.LinearAlgebra.
class NeuralNetwork
{
    // w[i]: weights mapping layer i activations to layer i+1
    // (rows = layers[i + 1], cols = layers[i]).
    List<Matrix<float>> w;
    // b[i]: bias column vector for layer i+1.
    List<Matrix<float>> b;

    /// <summary>
    /// Builds a network with the given layer sizes, e.g. (2, 3, 1) creates a
    /// 2-input network with one hidden layer of 3 units and 1 output.
    /// Weights and biases start from random values.
    /// </summary>
    /// <param name="layers">Unit counts per layer; at least two entries.</param>
    /// <exception cref="ArgumentException">Fewer than two layers given.</exception>
    public NeuralNetwork(params int[] layers)
    {
        if (layers == null || layers.Length < 2)
            throw new ArgumentException("At least an input and an output layer are required.", nameof(layers));

        w = new List<Matrix<float>>();
        b = new List<Matrix<float>>();
        // Length, not the LINQ Count() extension — layers is a plain array.
        for (int i = 0; i < layers.Length - 1; i++)
        {
            w.Add(CreateMatrix.Random<float>(layers[i + 1], layers[i]));
            b.Add(CreateMatrix.Random<float>(layers[i + 1], 1));
        }
    }

    /// <summary>Propagates <paramref name="input"/> (a column vector) through every layer.</summary>
    /// <returns>The output layer activations.</returns>
    public Matrix<float> Forward(Matrix<float> input)
    {
        Matrix<float> r = input;
        for (int i = 0; i < w.Count; i++)
            r = Sigmoid(w[i] * r + b[i]);
        return r;
    }

    /// <summary>
    /// Like <see cref="Forward"/>, but also returns the activations of each
    /// hidden layer. The final output is NOT included in the list.
    /// </summary>
    public (Matrix<float> Output, List<Matrix<float>> Layers) ForwardWithLayers(Matrix<float> input)
    {
        Matrix<float> r = input;
        var hidden = new List<Matrix<float>>();
        for (int i = 0; i < w.Count - 1; i++)
        {
            r = Sigmoid(w[i] * r + b[i]);
            hidden.Add(r);
        }
        r = Sigmoid(w[w.Count - 1] * r + b[b.Count - 1]);
        return (r, hidden);
    }

    /// <summary>
    /// One backpropagation / gradient-descent step for a single sample.
    /// </summary>
    /// <param name="target">Desired output column vector.</param>
    /// <param name="input">Input column vector.</param>
    /// <param name="learning_rate">Step size for the parameter update.</param>
    /// <returns>Frobenius norm of (output - target) measured BEFORE the update.</returns>
    public double Backprop(Matrix<float> target, Matrix<float> input, float learning_rate = .01f)
    {
        var (xL, x) = ForwardWithLayers(input);
        var error = (xL - target).FrobeniusNorm();

        int last = w.Count - 1;
        var delta = new Dictionary<int, Matrix<float>>();

        // BUG FIX: the sigmoid derivative is a*(1-a) where a = sigma(z); the
        // original passed the raw pre-activation z = w*x + b to
        // Sigmoid(..., true), computing z*(1-z). xL and x[] already hold the
        // activations, so use them directly (also avoids recomputing w*x + b).
        delta[last] = (xL - target).PointwiseMultiply(Sigmoid(xL, true));

        // BUG FIX: running this loop down to i >= 0 removes the special-cased
        // delta[0] line, which indexed w[1] unconditionally and crashed on
        // two-layer networks (w.Count == 1).
        for (int i = last - 1; i >= 0; i--)
        {
            delta[i] = w[i + 1].TransposeThisAndMultiply(delta[i + 1])
                               .PointwiseMultiply(Sigmoid(x[i], true));
        }

        for (int i = 0; i <= last; i++)
        {
            var activationIn = i == 0 ? input : x[i - 1];
            w[i] -= learning_rate * delta[i] * activationIn.Transpose();
            // BUG FIX: biases were computed into delta but never updated in
            // the original, so they stayed at their random initial values.
            b[i] -= learning_rate * delta[i];
        }
        return error;
    }

    /// <summary>
    /// Element-wise logistic function. When <paramref name="derivative"/> is
    /// true, <paramref name="x"/> is assumed to already be a sigmoid OUTPUT a,
    /// and a*(1-a) is returned.
    /// </summary>
    private Matrix<float> Sigmoid(Matrix<float> x, bool derivative = false)
    {
        if (derivative)
            return x.PointwiseMultiply(1 - x);
        return x.Map<float>(f => 1 / (1 + (float)Math.Exp(-f)));
    }
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement