#include <math.h>

/* Assumes an NN struct with l layers, n[k] neurons per layer,
 * weights w[k][i][j] (from neuron j in layer k to neuron i in layer k+1),
 * outputs y[k][i], deltas d[k][i], and constants LAMBDA and ETA. */

void feedforward( NN * nn )
{
    // Loop over layers; layer 0 holds the inputs, so start at 1
    for (int k = 1; k < nn->l; k++) {
        // Loop over neurons in the layer
        for (int i = 0; i < nn->n[k]; i++) {
            // Weighted sum of the previous layer's outputs
            double s_k_i = 0.0;
            for (int j = 0; j < nn->n[k - 1]; j++) {
                s_k_i += nn->w[k - 1][i][j] * nn->y[k - 1][j];
            }
            // Sigmoid activation with steepness LAMBDA
            nn->y[k][i] = 1.0 / (1.0 + exp(-LAMBDA * s_k_i));
        }
    }
}
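For reference, here is a sketch of what the loop above computes in standard notation (lambda is the LAMBDA steepness constant, and w_{ij}^{(k-1)} corresponds to nn->w[k-1][i][j]):

    s_i^{(k)} = \sum_j w_{ij}^{(k-1)} \, y_j^{(k-1)}, \qquad
    y_i^{(k)} = \frac{1}{1 + e^{-\lambda s_i^{(k)}}}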
double backpropagation( NN * nn, double * t )
{
    int output_k = nn->l - 1;

    // Squared error over the output layer: E = 1/2 * sum_i (t_i - y_i)^2
    double error = 0.0;
    for (int i = 0; i < nn->n[output_k]; i++) {
        error += pow(t[i] - nn->y[output_k][i], 2);
    }
    error *= 0.5;

    // Calculate deltas for the weight correction, from the output layer
    // backwards; the input layer (k = 0) needs no deltas
    for (int k = output_k; k >= 1; k--) {
        for (int i = 0; i < nn->n[k]; i++) {
            double d_k_i = 0.0;
            if (k == output_k) {
                // Output layer: (t_i - y_i) times the sigmoid derivative
                // sigma'(s) = LAMBDA * y * (1 - y)
                d_k_i = (t[i] - nn->y[k][i]) * LAMBDA * nn->y[k][i] * (1.0 - nn->y[k][i]);
            } else {
                // Hidden layer: propagate the deltas of layer k+1 back
                // through the weights, then apply the sigmoid derivative
                for (int j = 0; j < nn->n[k + 1]; j++) {
                    d_k_i += nn->d[k + 1][j] * nn->w[k][j][i];
                }
                d_k_i *= LAMBDA * nn->y[k][i] * (1.0 - nn->y[k][i]);
            }
            nn->d[k][i] = d_k_i;
        }
    }

    // Update the weights with the computed deltas:
    // w[k][i][j] feeds neuron i in layer k+1, hence the delta index [k + 1]
    for (int k = 0; k < nn->l - 1; k++) {
        for (int i = 0; i < nn->n[k + 1]; i++) {
            for (int j = 0; j < nn->n[k]; j++) {
                nn->w[k][i][j] += ETA * nn->d[k + 1][i] * nn->y[k][j];
            }
        }
    }
    return error;
}
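In the same notation, the deltas above follow the usual chain rule for the sigmoid, whose derivative is \lambda y (1 - y); for the output layer L = nn->l - 1 and a hidden layer k:

    \delta_i^{(L)} = \lambda \, y_i^{(L)} \bigl(1 - y_i^{(L)}\bigr) \bigl(t_i - y_i^{(L)}\bigr), \qquad
    \delta_i^{(k)} = \lambda \, y_i^{(k)} \bigl(1 - y_i^{(k)}\bigr) \sum_j \delta_j^{(k+1)} w_{ji}^{(k)}

and each weight is then moved along its delta with learning rate eta (ETA):

    w_{ij}^{(k)} \leftarrow w_{ij}^{(k)} + \eta \, \delta_i^{(k+1)} \, y_j^{(k)}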
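A minimal sketch of how the two functions might be driven end to end. The NN layout, the make_nn() allocator, the LAMBDA/ETA values, and the XOR training data are assumptions for illustration, not part of the original paste:

#include <stdio.h>
#include <stdlib.h>
#include <math.h>

#define LAMBDA 1.0
#define ETA    0.5

typedef struct {
    int l;          /* number of layers                  */
    int *n;         /* neurons per layer                 */
    double ***w;    /* w[k][i][j]: layer k -> layer k+1  */
    double **y;     /* y[k][i]: neuron outputs           */
    double **d;     /* d[k][i]: deltas                   */
} NN;

/* hypothetical allocator: random weights in [-1, 1] */
static NN *make_nn(int l, const int *layers) {
    NN *nn = malloc(sizeof *nn);
    nn->l = l;
    nn->n = malloc(l * sizeof *nn->n);
    nn->y = malloc(l * sizeof *nn->y);
    nn->d = malloc(l * sizeof *nn->d);
    nn->w = malloc((l - 1) * sizeof *nn->w);
    for (int k = 0; k < l; k++) {
        nn->n[k] = layers[k];
        nn->y[k] = calloc(layers[k], sizeof **nn->y);
        nn->d[k] = calloc(layers[k], sizeof **nn->d);
    }
    for (int k = 0; k < l - 1; k++) {
        nn->w[k] = malloc(layers[k + 1] * sizeof **nn->w);
        for (int i = 0; i < layers[k + 1]; i++) {
            nn->w[k][i] = malloc(layers[k] * sizeof ***nn->w);
            for (int j = 0; j < layers[k]; j++)
                nn->w[k][i][j] = 2.0 * rand() / RAND_MAX - 1.0;
        }
    }
    return nn;
}

int main(void) {
    int layers[] = { 2, 3, 1 };            /* 2 inputs, 3 hidden, 1 output */
    NN *nn = make_nn(3, layers);
    double in[4][2]  = { {0,0}, {0,1}, {1,0}, {1,1} };
    double out[4][1] = { {0},   {1},   {1},   {0}   };   /* XOR */
    for (int epoch = 0; epoch < 10000; epoch++) {
        double error = 0.0;
        for (int s = 0; s < 4; s++) {
            nn->y[0][0] = in[s][0];        /* layer 0 holds the inputs */
            nn->y[0][1] = in[s][1];
            feedforward(nn);
            error += backpropagation(nn, out[s]);
        }
        if (epoch % 1000 == 0) printf("epoch %d, error %f\n", epoch, error);
    }
    return 0;
}

Each epoch runs one forward and one backward pass per training sample; the summed squared error returned by backpropagation should shrink toward zero as the weights converge.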