Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
unit Unit1;
// Minimal feed-forward neural network (3 inputs, 2 hidden neurons, 1 output)
// trained by per-sample gradient descent on a small hard-coded truth table.

interface

uses
  System.Math;

type
  // Length-2 vector: one slot per hidden neuron.
  TNN1DArray = Array [0 .. 1] of Double;
  // NOTE(review): the two types below are declared but never used in this unit.
  T2DTNN1DArray = Array [0 .. 1] of TNN1DArray;
  TNN2DTNN1DArray = Array [0 .. 1] of Array [0 .. 1] of TNN1DArray;
  // Length-3 vector: one slot per network input.
  TNN2DArray = Array [0 .. 2] of Double;
  // 2x3 weight matrix: [hidden neuron, input].
  TNN2DMemory = Array [0 .. 1] of TNN2DArray;
  // One training sample: input vector plus the expected output.
  TNNInputs = packed record
    NNInputs: TNN2DArray;
    State : Double; // Expected output for this input row
  end;

const
  // Full 8-row truth table used for both training and prediction tests.
  TrainingData: packed array [0 .. 7] of TNNInputs = (
    (NNInputs : (0, 0, 0); State : 0),
    (NNInputs : (0, 0, 1); State : 1),
    (NNInputs : (0, 1, 0); State : 0),
    (NNInputs : (0, 1, 1); State : 0),
    (NNInputs : (1, 0, 0); State : 1),
    (NNInputs : (1, 0, 1); State : 1),
    (NNInputs : (1, 1, 0); State : 0),
    (NNInputs : (1, 1, 1); State : 1)
  );

type
  NNTestClass = class
  private
    weights_0_1: TNN2DMemory;  // input -> hidden weights (2x3)
    weights_1_2: TNN1DArray;   // hidden -> output weights (1x2)
    learning_rate: Double;     // gradient-descent step size
    function RandomRangeF(min, max: Double): Double;
    function Sigmoid(X: Double): Double;
    function Dot(InArray: TNN2DMemory; InputArr: TNN2DArray): TNN1DArray; overload;
    function Dot(A, B: TNN1DArray): Double; overload;
  public
    procedure init;
    function Predict (InputData: TNNInputs): Double;
    function Train(Inputs: TNN2DArray; Expected_predict: Double): TNN2DMemory;
  end;

var
  NN: NNTestClass; // shared singleton, created in TForm5.FormCreate (Unit5)

implementation
function NNTestClass.Dot(InArray: TNN2DMemory; InputArr: TNN2DArray): TNN1DArray;
var
  Row, Col: Integer;
begin
  // Matrix-vector product: weighted input sum for each of the two hidden
  // neurons. Summation runs left-to-right over the three inputs, matching
  // the original unrolled expression exactly.
  for Row := 0 to 1 do
  begin
    Result[Row] := 0.0;
    for Col := 0 to 2 do
      Result[Row] := Result[Row] + (InArray[Row, Col] * InputArr[Col]);
  end;
end;
function NNTestClass.Dot(A, B: TNN1DArray): Double;
var
  I: Integer;
begin
  // Inner product of two length-2 vectors (hidden activations x weights).
  Result := 0.0;
  for I := 0 to 1 do
    Result := Result + (A[I] * B[I]);
end;
function NNTestClass.Train(Inputs: TNN2DArray; Expected_predict: Double): TNN2DMemory;
var
  inputs_1, outputs_1: TNN1DArray;   // hidden pre-activations / activations
  inputs_2, outputs_2: Double;       // output pre-activation / activation
  Actual_predict: Double;
  K, E: Integer;
  error_layer_1: TNN1DArray;
  weights_delta_layer_1: Double;
  error_layer_2, gradient_layer_2, weights_delta_layer_2, gradient_layer_1 : Double;
begin
  // One stochastic-gradient-descent step on a single sample.
  // Side effects: updates both weights_1_2 and weights_0_1 in place.
  // Returns the updated input->hidden matrix (weights_0_1).
  // --- Forward pass ---
  inputs_1 := Dot(weights_0_1, Inputs);
  for K := 0 to 1 do
    outputs_1[K] := Sigmoid(inputs_1[K]);
  inputs_2 := Dot(weights_1_2, outputs_1); //To Output
  outputs_2 := Sigmoid(inputs_2);
  Actual_predict := outputs_2;
  // --- Backward pass: output layer ---
  // error * sigmoid derivative o*(1-o) gives the output-layer delta.
  error_layer_2 := actual_predict - expected_predict;
  gradient_layer_2 := actual_predict * (1 - actual_predict);
  weights_delta_layer_2 := error_layer_2 * gradient_layer_2;
  for K := 0 to 1 do
    weights_1_2[K] := weights_1_2[K] - (outputs_1[K]* weights_delta_layer_2 * learning_rate);
  // --- Backward pass: hidden layer ---
  // NOTE(review): error_layer_1 is computed from weights_1_2 AFTER the loop
  // above has already updated it; textbook backprop propagates error through
  // the PRE-update weights. Left untouched to preserve existing behavior —
  // confirm whether this ordering is intentional.
  for K := 0 to 1 do
  begin
    error_layer_1[K] := weights_1_2[K] * weights_delta_layer_2;
    gradient_layer_1 := (outputs_1[K] * (1 - outputs_1[K])); //5 * 4 un 4 * 3
    weights_delta_layer_1 := error_layer_1[K] * gradient_layer_1;
    for E := 0 to 2 do
      Weights_0_1[K][E] := weights_0_1[K][E] - (Inputs[E] * weights_delta_layer_1 * learning_rate);
  end;
  Result := weights_0_1;
end;
function NNTestClass.Predict(InputData: TNNInputs): Double;
var
  HiddenRaw, HiddenOut: TNN1DArray;
  K: Integer;
begin
  // Forward pass only: inputs -> sigmoid hidden layer -> sigmoid output.
  // Returns the raw output in (0, 1); thresholding is left to the caller.
  HiddenRaw := Dot(weights_0_1, InputData.NNInputs);
  for K := 0 to 1 do
    HiddenOut[K] := Sigmoid(HiddenRaw[K]);
  Result := Sigmoid(Dot(weights_1_2, HiddenOut));
end;
procedure NNTestClass.init;
var
  Neuron, Input: Integer;
begin
  // Randomise both weight layers and fix the learning rate.
  // Loop order (and hence the RNG call sequence) matches the original exactly.
  for Neuron := 0 to 1 do
    for Input := 0 to 2 do
      weights_0_1[Neuron][Input] := RandomRangeF(-0.1, 1.0);
  // Difference of two uniforms: symmetric around zero, roughly in (-1, 1).
  for Neuron := 0 to 1 do
    weights_1_2[Neuron] := Random - Random;
  learning_rate := 0.1;
end;
function NNTestClass.RandomRangeF(min, max: Double): Double;
begin
  // Uniform random Double in [min, max).
  //
  // Fix 1: the original returned (min + Random) * (max - min), which for the
  // caller's arguments (-0.1, 1.0) produces values in [-0.11, 0.99) instead
  // of [-0.1, 1.0). The correct affine map is min + Random * (max - min).
  //
  // Fix 2: removed the per-call Randomize. Reseeding from the system clock on
  // every call can return the SAME value for rapid successive calls (as in
  // init's weight loops) and destroys reproducibility. Call Randomize once at
  // program start-up if a non-deterministic seed is wanted.
  Result := min + (Random * (max - min));
end;
function NNTestClass.Sigmoid(X: Double): Double;
var
  Denominator: Double;
begin
  // Logistic activation: maps any real input into the open interval (0, 1).
  Denominator := 1 + Exp(-X);
  Result := 1 / Denominator;
end;
- end.
- #-------
unit Unit5;
// Demo form for the toy network in Unit1:
// Button1 trains on the truth table, Button2 prints predictions for it.

interface

uses
  Winapi.Windows, Winapi.Messages, System.SysUtils, System.Variants, System.Classes, Vcl.Graphics,
  Vcl.Controls, Vcl.Forms, Vcl.Dialogs, Vcl.StdCtrls, Unit1;

type
  TForm5 = class(TForm)
    Memo1: TMemo;     // output log for training results and predictions
    Button1: TButton; // "train" (see Button1Click)
    Button2: TButton; // "predict" (see Button2Click)
    Button3: TButton; // NOTE(review): declared but has no handler in this unit
    procedure Button1Click(Sender: TObject);
    procedure Button2Click(Sender: TObject);
    procedure FormCreate(Sender: TObject);
  private
  public
  end;

var
  Form5: TForm5;

implementation

{$R *.dfm}
function Activation (X: Double): Double;
begin
  // Hard threshold at 0.5: rounds a sigmoid output to a 0/1 class label.
  if X >= 0.50 then
    Result := 1.0
  else
    Result := 0;
end;
procedure TForm5.Button1Click(Sender: TObject);
var
  Weights: TNN2DMemory;
  Sample, Epoch: Integer;
begin
  // Train the network for 10001 epochs over the full truth table.
  // Train returns the updated input->hidden matrix; only the last is kept.
  for Epoch := 0 to 10000 do
  begin
    for Sample := Low(TrainingData) to High(TrainingData) do
      Weights := NN.Train(TrainingData[Sample].NNInputs, TrainingData[Sample].State);
  end;
  // Fixed: use the implicit Self's Memo1 rather than the global Form5
  // variable — the global is nil until Application.CreateForm completes and
  // points at the wrong object for any second form instance.
  Memo1.Lines.Add(Format('Brain: %f, %f, %f, - %f, %f, %f,',
    [Weights[0][0], Weights[0][1], Weights[0][2], Weights[1][0], Weights[1][1], Weights[1][2]]));
end;
procedure TForm5.Button2Click(Sender: TObject);
var
  Sample: Integer;
  Raw: Double;
begin
  // Print expected vs. predicted (thresholded and raw) for every training row.
  // Fixed: user-visible typo 'Excepted' -> 'Expected' in the log line, and
  // the global Form5.Memo1 reference replaced with the implicit Self's Memo1
  // (the global is nil/wrong outside the designer-created instance).
  for Sample := Low(TrainingData) to High(TrainingData) do
  begin
    Raw := NN.Predict(TrainingData[Sample]);
    Memo1.Lines.Add(Format('Expected: %f Predicted : %f (%f)',
      [TrainingData[Sample].State, Activation(Raw), Raw]));
  end;
end;
procedure TForm5.FormCreate(Sender: TObject);
begin
  // Build and randomly initialise the shared network singleton from Unit1.
  // NOTE(review): NN is never freed — add a FormDestroy handler with NN.Free.
  // Harmless in practice (it lives for the whole program) but it is a leak
  // that memory-leak reporting (ReportMemoryLeaksOnShutdown) will flag.
  NN := NNTestClass.create;
  NN.init;
end;
- end.
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement