Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
- #include <stdlib.h>
- #include <math.h>
- #include <stdint.h>
- #include <vector>
struct Network {
// A minimal fully-connected feed-forward network with tanh hidden
// activations, a linear output layer, and plain SGD backpropagation.
#ifdef TINYNN_USE_DOUBLE
    typedef double tinynnf;
#else
    typedef float tinynnf;
#endif
    // Hidden-layer weights/biases are clamped to [-limit, +limit] after each
    // training update; 0 disables clamping.
    const tinynnf limit = 3;

    // One layer of nodes: per-node bias, activation output, and the error
    // term filled in during backpropagation.
    struct Layer {
        std::vector<tinynnf> biases;
        std::vector<tinynnf> outputs;
        std::vector<tinynnf> errors;
        Layer(const uint64_t & n) {
            biases.resize(n);
            outputs.resize(n);
            errors.resize(n);
        }
    };

    std::vector<Layer> layers;
    // connections[i] holds the weight matrix between layers[i] and
    // layers[i+1]; the weight from left node l to right node r is stored at
    // connections[i][r * left_size + l].
    std::vector<std::vector<tinynnf>> connections;

    // Build a network whose layer sizes are given by `spec` (e.g. {2,3,1}).
    // Weights and biases are initialised uniformly in [-0.5, +0.5].
    // NOTE: uses rand() — seed with srand() for varying initialisations.
    Network(const std::vector<uint64_t> & spec) {
        for(const uint64_t & size : spec)
            layers.push_back(Layer(size));
        for(size_t i = 0; i+1 < layers.size(); i++)
            connections.push_back(std::vector<tinynnf>(layers[i].outputs.size() * layers[i+1].outputs.size()));
        // Keep this rand() consumption order (all weights, then all biases)
        // so seeded runs reproduce the original initialisation exactly.
        for(auto & c : connections)
            for(auto & w : c)
                w = ((tinynnf)rand())/RAND_MAX-0.5; // -0.5 ~ +0.5
        for(auto & l : layers)
            for(auto & b : l.biases)
                b = ((tinynnf)rand())/RAND_MAX-0.5;
    }

    // Clamp a weight or bias into [-limit, +limit] in place (replaces the
    // duplicated four-line clamp the original flagged in comments).
    void clamp_value(tinynnf & v) const {
        if(v > limit)
            v = limit;
        if(v < -limit)
            v = -limit;
    }

    // Propagate activations from `left` into `right` through the weight
    // vector `connectors`. The output (last) layer stays linear; every other
    // layer is squashed with tanh.
    void pair_forwards(const std::vector<tinynnf> & connectors, const Layer & left, Layer & right, bool is_last) {
        for(size_t r = 0; r < right.outputs.size(); r++) {
            right.outputs[r] = right.biases[r];
            for(size_t l = 0; l < left.outputs.size(); l++)
                right.outputs[r] += left.outputs[l] * connectors[r*left.outputs.size() + l];
            if(!is_last)
                right.outputs[r] = tanh(right.outputs[r]);
        }
    }

    // Run one forward pass and return the output layer's activations.
    // Returns an empty vector when `inputs` does not match the input layer's
    // size (previously a mismatched input silently resized the input layer,
    // corrupting the weight-matrix dimensions).
    std::vector<tinynnf> forwards(const std::vector<tinynnf> & inputs) {
        if(layers.empty() || inputs.size() != layers[0].outputs.size())
            return std::vector<tinynnf>();
        layers[0].outputs = inputs;
        for(size_t i = 0; i+1 < layers.size(); i++)
            pair_forwards(connections[i], layers[i], layers[i+1], i+2 == layers.size());
        return layers.back().outputs;
    }

    // One SGD step: backpropagate `output_errors` (d(loss)/d(output), as
    // seeded into the output layer) toward the input, updating weights and
    // biases with learning rate `rate`. No-op for an empty network or a
    // mismatched error-vector size.
    void train(const std::vector<tinynnf> & output_errors, tinynnf rate) {
        if(layers.size() == 0 || output_errors.size() != layers.back().errors.size())
            return; // invalid network or output_errors
        layers.back().errors = output_errors;
        for(size_t i = layers.size()-1; i > 0; i--) {
            auto & right = layers[i];
            auto & left = layers[i-1];
            auto & connectors = connections[i-1];
            // Loop-invariant: only non-output parameters are clamped (the
            // i == layers.size()-1 pass updates the output layer's incoming
            // weights and biases, which the original left unclamped).
            const bool do_clamp = limit != 0 && i+1 < layers.size();
            for(size_t l = 0; l < left.outputs.size(); l++) {
                // Accumulate this left node's error from the right layer...
                left.errors[l] = 0;
                for(size_t r = 0; r < right.outputs.size(); r++)
                    left.errors[l] += right.errors[r] * connectors[r*left.outputs.size() + l];
                // ...then scale by the tanh derivative (1 - output^2).
                left.errors[l] *= 1.0f - left.outputs[l]*left.outputs[l];
                // Gradient step on every weight from this left node into the
                // right layer.
                for(size_t r = 0; r < right.outputs.size(); r++) {
                    auto & w = connectors[r*left.outputs.size() + l];
                    w -= left.outputs[l] * right.errors[r] * rate;
                    if(do_clamp)
                        clamp_value(w);
                }
            }
            // Adjust each right node's bias.
            // NOTE(review): the (1 - bias) factor is not a standard
            // activation derivative and the original author flagged it as
            // unverified; kept byte-for-byte to preserve training behavior.
            for(size_t r = 0; r < right.outputs.size(); r++) {
                tinynnf error = right.errors[r];
                error *= 1.0f - right.biases[r];
                right.biases[r] -= error * rate;
                if(do_clamp)
                    clamp_value(right.biases[r]);
            }
        }
    }
};
Advertisement
Add Comment
Please sign in to add a comment.
Advertisement