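- // brain.h -- a small layered neural network ("brain") with tanh activation,
- // backpropagation with momentum (nn_eta / nn_alpha), and optional per-neuron
- // memory links. Neurons are addressed by (layer, index) coords.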
- #ifndef BRAIN_H
- #define BRAIN_H
- #pragma once
- #include <vector>
- #include <cmath>
- #include <iostream> // cout / endl in the Print* and debug routines
- #include <cstdlib> // rand() / RAND_MAX for weight initialisation
- using namespace std;
- #define nn_eta 0.15
- #define nn_alpha 0.5
- #define nn_weight 0.1
- namespace nn
- {
- namespace nt
- {
- enum
- {
- hidden = 1,
- input,
- output,
- bias,
- memory,
- };
- }
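- // Type tags: a neuron's `type` holds one of the values above while it is a
- // plain neuron, and is negated (e.g. -nt::hidden) once a memory neuron has
- // been attached to it via connectmem(), switching its payload to a "*2" struct.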
- class neuron;
- class coord
- {
- public:
- unsigned short x; // neuron
- unsigned short y; // layer
- coord(unsigned short _y, unsigned short _x)
- {
- x = _x;
- y = _y;
- }
- coord() {}
- ~coord() {}
- };
- struct layers
- {
- vector<vector<neuron *>> map;
- };
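- // axon: a weighted, directed connection. `x` points at the downstream
- // (receiving) neuron; `delta` keeps the previous weight change so
- // ActivateW() can apply momentum. Weights start at random values in [0, nn_weight].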
- class axon
- {
- public:
- neuron *x;
- double weight;
- double delta;
- axon( neuron *In ) {
- x = In;
- weight = ((double)rand() / double(RAND_MAX)) * nn_weight;
- delta = 0.0;
- }
- axon() {
- x = 0;
- weight = ((double)rand() / double(RAND_MAX)) * nn_weight;
- delta = 0.0;
- }
- ~axon() {}
- };
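- // Per-type payload structs. The "*2" variants mirror the plain ones below but
- // add a `mem` pointer to the memory neuron that snapshots this neuron's value
- // on every forward pass. `x` is the activation, `xsum` the incoming weighted
- // sum, `y` the backpropagated gradient; `target` is the training target on
- // outputs and a snapshot-mode flag on memory neurons.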
- struct nHidden2
- {
- vector<axon> *axons;
- double x;
- double y;
- double xsum;
- neuron *mem;
- };
- struct nInput2
- {
- vector<axon> *axons;
- double x;
- neuron *mem;
- };
- struct nOutput2
- {
- double x;
- double y;
- double target;
- double xsum;
- neuron *mem;
- };
- struct nMemory2
- {
- vector<axon> *axons;
- double x;
- bool target;
- neuron *mem;
- };
- struct nHidden
- {
- vector<axon> *axons;
- double x;
- double y;
- double xsum;
- };
- struct nInput
- {
- vector<axon> *axons;
- double x;
- };
- struct nOutput
- {
- double x;
- double y;
- double target;
- double xsum;
- };
- struct nBias
- {
- vector<axon> *axons;
- };
- struct nMemory
- {
- vector<axon> *axons;
- double x;
- bool target;
- };
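- // neuron: a tagged union over the payload structs above. `type` selects which
- // struct `n` points to, and every member function switches on it.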
- class neuron
- {
- public:
- void *n;
- char type;
- // release the payload struct and the axon vector it owns (outputs have none)
- void free()
- {
- switch (type)
- {
- case nt::input:
- delete ((nInput *)n)->axons;
- delete ((nInput *)n);
- break;
- case nt::hidden:
- delete ((nHidden *)n)->axons;
- delete ((nHidden *)n);
- break;
- case nt::output:
- delete ((nOutput *)n);
- break;
- case -nt::input:
- delete ((nInput2 *)n)->axons;
- delete ((nInput2 *)n);
- break;
- case -nt::hidden:
- delete ((nHidden2 *)n)->axons;
- delete ((nHidden2 *)n);
- break;
- case -nt::output:
- delete ((nOutput2 *)n);
- break;
- case nt::bias:
- delete ((nBias *)n)->axons;
- delete ((nBias *)n);
- break;
- case nt::memory:
- delete ((nMemory *)n)->axons;
- delete ((nMemory *)n);
- break;
- case -nt::memory:
- delete ((nMemory2 *)n)->axons;
- delete ((nMemory2 *)n);
- break;
- }
- n = 0;
- type = 0;
- }
- void setX(double in)
- {
- switch (type)
- {
- case nt::input:
- ((nInput *)n)->x = in;
- break;
- case nt::hidden:
- ((nHidden *)n)->x = in;
- break;
- case nt::output:
- ((nOutput *)n)->x = in;
- break;
- case -nt::input:
- ((nInput2 *)n)->x = in;
- break;
- case -nt::hidden:
- ((nHidden2 *)n)->x = in;
- break;
- case -nt::output:
- ((nOutput2 *)n)->x = in;
- break;
- case nt::memory:
- ((nMemory *)n)->x = in;
- break;
- case -nt::memory:
- ((nMemory2 *)n)->x = in;
- break;
- }
- }
- void SetTarget(double in) {
- if(type == -nt::output) ((nOutput2 *)n)->target = in;
- else if(type == nt::output) ((nOutput *)n)->target = in;
- }
- double GetTarget() {
- if (type == nt::output) return ((nOutput *)n)->target;
- else if (type == -nt::output) return ((nOutput2 *)n)->target;
- return 0.0;
- }
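- // connectmem(): called on a memory neuron to attach it to `with`. The
- // partner's payload is swapped for its "*2" counterpart (type negated) so it
- // knows which memory neuron to update during the forward pass. For output
- // partners, the memory's `target` flag selects whether it snapshots the
- // output's activation or its training target.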
- void connectmem(neuron *with, bool target)
- {
- void *p;
- switch (type)
- {
- case nt::memory:
- ((nMemory *)n)->target = target;
- break;
- case -nt::memory:
- ((nMemory2 *)n)->target = target;
- break;
- default:
- return;
- }
- switch (with->type)
- {
- case nt::bias:
- return;
- case nt::input:
- p = with->n;
- with->n = new nInput2;
- ((nInput2 *)with->n)->axons = ((nInput *)p)->axons;
- ((nInput2 *)with->n)->mem = this;
- ((nInput2 *)with->n)->x = ((nInput *)p)->x;
- with->type = -with->type;
- delete (nInput *)p; // cast back before deleting: deleting a void* is undefined behaviour
- break;
- case nt::output:
- p = with->n;
- with->n = new nOutput2;
- ((nOutput2*)with->n)->mem = this;
- ((nOutput2*)with->n)->target = ((nOutput *)p)->target;
- ((nOutput2*)with->n)->x = ((nOutput *)p)->x;
- ((nOutput2*)with->n)->xsum = ((nOutput *)p)->xsum;
- ((nOutput2*)with->n)->y = ((nOutput *)p)->y;
- with->type = -with->type;
- delete (nOutput *)p;
- break;
- case nt::hidden:
- p = with->n;
- with->n = new nHidden2;
- ((nHidden2 *)with->n)->mem = this;
- ((nHidden2 *)with->n)->axons = ((nHidden *)p)->axons;
- ((nHidden2 *)with->n)->x = ((nHidden *)p)->x;
- ((nHidden2 *)with->n)->xsum = ((nHidden *)p)->xsum;
- ((nHidden2 *)with->n)->y = ((nHidden *)p)->y;
- with->type = -with->type;
- delete (nHidden *)p;
- break;
- case nt::memory:
- p = with->n;
- with->n = new nMemory2;
- ((nMemory2 *)with->n)->mem = this;
- ((nMemory2 *)with->n)->target = ((nMemory *)p)->target;
- ((nMemory2 *)with->n)->x = ((nMemory *)p)->x;
- ((nMemory2 *)with->n)->axons = ((nMemory *)p)->axons;
- with->type = -with->type;
- delete (nMemory *)p;
- break;
- case -nt::input:
- ((nInput2 *)with->n)->mem = this;
- break;
- case -nt::hidden:
- ((nHidden2 *)with->n)->mem = this;
- break;
- case -nt::output:
- ((nOutput2 *)with->n)->mem = this;
- break;
- case -nt::memory:
- ((nMemory2 *)with->n)->mem = this;
- break;
- }
- }
- void connect(neuron *with)
- {
- switch (with->type)
- {
- case nt::bias:
- return;
- case nt::input:
- return;
- case nt::memory:
- return;
- case -nt::input:
- return;
- case -nt::memory:
- return;
- }
- switch (type)
- {
- case nt::input:
- ((nInput*)n)->axons->push_back(axon(with));
- break;
- case nt::hidden:
- ((nHidden*)n)->axons->push_back(axon(with));
- break;
- case nt::bias:
- ((nBias*)n)->axons->push_back(axon(with));
- break;
- case nt::memory:
- ((nMemory*)n)->axons->push_back(axon(with));
- break;
- case -nt::input:
- ((nInput2*)n)->axons->push_back(axon(with));
- break;
- case -nt::hidden:
- ((nHidden2*)n)->axons->push_back(axon(with));
- break;
- case -nt::memory:
- ((nMemory2*)n)->axons->push_back(axon(with));
- break;
- }
- }
- void PrintNeuronName()
- {
- switch (type)
- {
- case nt::input:
- cout << "input";
- break;
- case nt::hidden:
- cout << "hidden";
- break;
- case nt::bias:
- cout << "bias";
- break;
- case nt::output:
- cout << "output";
- break;
- case -nt::output:
- cout << "-output";
- break;
- case nt::memory:
- cout << "memory";
- break;
- case -nt::input:
- cout << "-input";
- break;
- case -nt::hidden:
- cout << "-hidden";
- break;
- case -nt::memory:
- cout << "-memory";
- break;
- }
- }
- neuron(char Type)
- {
- n = 0;
- type = Type;
- switch (type)
- {
- case nt::input:
- n = new nInput;
- ((nInput *)n)->x = 0.0;
- ((nInput *)n)->axons = new vector<axon>();
- break;
- case nt::hidden:
- n = new nHidden;
- ((nHidden *)n)->x = 0.0;
- ((nHidden *)n)->xsum = 0.0;
- ((nHidden *)n)->y = 0.0;
- ((nHidden *)n)->axons = new vector<axon>();
- break;
- case nt::output:
- n = new nOutput;
- ((nOutput *)n)->target = 0.0;
- ((nOutput *)n)->x = 0.0;
- ((nOutput *)n)->xsum = 0.0;
- ((nOutput *)n)->y = 0.0;
- break;
- case nt::bias:
- n = new nBias;
- ((nBias *)n)->axons = new vector<axon>();
- break;
- case nt::memory:
- n = new nMemory;
- ((nMemory*)n)->target = 0;
- ((nMemory*)n)->x = 0;
- ((nMemory *)n)->axons = new vector<axon>();
- break;
- }
- }
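- // ActivateW(): weight update for every outgoing axon, i.e. backprop with
- // momentum: delta = nn_eta * x * downstream_gradient + nn_alpha * old_delta,
- // then weight += delta. For example, with eta=0.15, alpha=0.5, x=1.0, a
- // downstream gradient of 0.2 and an old delta of 0.1, the new delta is
- // 0.15*1.0*0.2 + 0.5*0.1 = 0.08. Output neurons (either variant) have no
- // outgoing axons and are skipped; the bias case also prints debug output.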
- void ActivateW()
- {
- switch (type)
- {
- case nt::input:
- for (int a = 0; a < ((nInput *)n)->axons->size(); a++)
- {
- (* ((nInput *)n)->axons )[a].delta = nn_eta * ((nInput *)n)->x * (*((nInput *)n)->axons)[a].x->GetY() + (*((nInput *)n)->axons)[a].delta * nn_alpha;
- (*((nInput *)n)->axons)[a].weight += (*((nInput *)n)->axons)[a].delta;
- }
- break;
- case nt::hidden:
- for (int a = 0; a < ((nHidden *)n)->axons->size(); a++)
- {
- (*((nHidden *)n)->axons)[a].delta = nn_eta * ((nHidden*)n)->x * (*((nHidden *)n)->axons)[a].x->GetY() + (*((nHidden *)n)->axons)[a].delta * nn_alpha;
- (*((nHidden *)n)->axons)[a].weight += (*((nHidden *)n)->axons)[a].delta;
- }
- break;
- case nt::output:
- return;
- case nt::bias:
- for (int a = 0; a < ((nBias *)n)->axons->size(); a++)
- {
- cout << "target:"; (*((nBias *)n)->axons)[a].x->PrintNeuronName(); cout << endl;
- cout << " gradient:" << (*((nBias *)n)->axons)[a].x->GetY() << " ";
- cout << "old delta:" << (*((nBias *)n)->axons)[a].delta << endl;
- cout << " x:" << GetX() << endl;
- (*((nBias *)n)->axons)[a].delta = GetX() * nn_eta * (*((nBias *)n)->axons)[a].x->GetY() + (*((nBias *)n)->axons)[a].delta * nn_alpha;
- cout << "detla:" << (*((nBias *)n)->axons)[a].delta << " ";
- (*((nBias *)n)->axons)[a].weight += (*((nBias *)n)->axons)[a].delta;
- cout << "weight:" << (*((nBias *)n)->axons)[a].weight << endl;
- }
- break;
- case nt::memory:
- for (int a = 0; a < ((nMemory *)n)->axons->size(); a++)
- {
- (*((nMemory *)n)->axons)[a].delta = nn_eta * ((nMemory *)n)->x * (*((nMemory *)n)->axons)[a].x->GetY() + (*((nMemory *)n)->axons)[a].delta * nn_alpha;
- (*((nMemory *)n)->axons)[a].weight += (*((nMemory *)n)->axons)[a].delta;
- }
- break;
- case -nt::input:
- for (int a = 0; a < ((nInput2 *)n)->axons->size(); a++)
- {
- (*((nInput2 *)n)->axons)[a].delta = nn_eta * ((nInput2 *)n)->x * (*((nInput2 *)n)->axons)[a].x->GetY() + (*((nInput2 *)n)->axons)[a].delta * nn_alpha;
- (*((nInput2 *)n)->axons)[a].weight += (*((nInput2 *)n)->axons)[a].delta;
- }
- break;
- case -nt::hidden:
- for (int a = 0; a < ((nHidden2 *)n)->axons->size(); a++)
- {
- (*((nHidden2 *)n)->axons)[a].delta = nn_eta * ((nHidden2*)n)->x * (*((nHidden2 *)n)->axons)[a].x->GetY() + (*((nHidden2 *)n)->axons)[a].delta * nn_alpha;
- (*((nHidden2 *)n)->axons)[a].weight += (*((nHidden2 *)n)->axons)[a].delta;
- }
- break;
- case -nt::memory:
- for (int a = 0; a < ((nMemory2 *)n)->axons->size(); a++)
- {
- (*((nMemory2 *)n)->axons)[a].delta = nn_eta * ((nMemory2 *)n)->x * (*((nMemory2 *)n)->axons)[a].x->GetY() + (*((nMemory2 *)n)->axons)[a].delta * nn_alpha;
- (*((nMemory2 *)n)->axons)[a].weight += (*((nMemory2 *)n)->axons)[a].delta;
- }
- break;
- }
- }
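- // ActivateY(): gradient pass. Outputs compute (target - x) * (1 - x*x), i.e.
- // the error scaled by the tanh derivative; hidden neurons sum weight *
- // downstream gradient over their axons and scale by the same derivative.
- // brain::ActivateY() drives this from the last layer backwards.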
- void ActivateY()
- {
- double sumY = 0.0;
- switch (type)
- {
- case nt::hidden:
- for (int a = 0; a < ((nHidden *)n)->axons->size(); a++) {
- sumY += (*((nHidden *)n)->axons)[a].weight * (*((nHidden *)n)->axons)[a].x->GetY();
- }
- ((nHidden *)n)->y = sumY * (1.0 - ((nHidden *)n)->x * ((nHidden *)n)->x);
- break;
- case nt::output:
- ((nOutput *)n)->y = (((nOutput *)n)->target - ((nOutput *)n)->x) * (1.0 - ((nOutput *)n)->x * ((nOutput *)n)->x);
- break;
- case -nt::hidden:
- for (int a = 0; a < ((nHidden2 *)n)->axons->size(); a++) {
- sumY += (*((nHidden2 *)n)->axons)[a].weight * (*((nHidden2 *)n)->axons)[a].x->GetY();
- }
- ((nHidden2 *)n)->y = sumY * (1.0 - ((nHidden2 *)n)->x * ((nHidden2 *)n)->x);
- break;
- case -nt::output:
- ((nOutput2 *)n)->y = (((nOutput2 *)n)->target - ((nOutput2 *)n)->x) * (1.0 - ((nOutput2 *)n)->x * ((nOutput2 *)n)->x);
- break;
- }
- }
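- // ActivateX(): forward pass. Each neuron pushes x * weight into every
- // downstream neuron's xsum via fireX(); hidden and output neurons first fold
- // their accumulated xsum through tanh. Bias neurons send their weight alone
- // (their x reads as a constant 1.0), and the "*2" input/hidden/output variants
- // additionally copy their value into the attached memory neuron via memSet().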
- void ActivateX()
- {
- switch (type)
- {
- case nt::input:
- for (int a = 0; a < ((nInput *)n)->axons->size(); a++) {
- (*((nInput *)n)->axons)[a].x->fireX(GetX() * (*((nInput *)n)->axons)[a].weight);
- }
- break;
- case nt::hidden:
- ((nHidden *)n)->x = tanh(((nHidden *)n)->xsum);
- ((nHidden *)n)->xsum = 0.0;
- for (int a = 0; a < ((nHidden *)n)->axons->size(); a++) {
- (*((nHidden *)n)->axons)[a].x->fireX(((nHidden *)n)->x * (*((nHidden *)n)->axons)[a].weight);
- }
- break;
- case nt::output:
- ((nOutput *)n)->x = tanh(((nOutput *)n)->xsum);
- ((nOutput *)n)->xsum = 0.0;
- return;
- case nt::bias:
- for (int a = 0; a < ((nBias *)n)->axons->size(); a++) {
- (*((nBias *)n)->axons)[a].x->fireX( (*((nBias *)n)->axons)[a].weight);
- }
- break;
- case nt::memory:
- for (int a = 0; a < ((nMemory *)n)->axons->size(); a++) {
- (*((nMemory *)n)->axons)[a].x->fireX(((nMemory *)n)->x * (*((nMemory *)n)->axons)[a].weight);
- }
- break;
- case -nt::input:
- memSet(((nInput2 *)n)->mem, GetX());
- for (int a = 0; a < ((nInput2 *)n)->axons->size(); a++) {
- (*((nInput2 *)n)->axons)[a].x->fireX(GetX() * (*((nInput2 *)n)->axons)[a].weight);
- }
- break;
- case -nt::hidden:
- memSet(((nHidden2 *)n)->mem, GetX());
- ((nHidden2 *)n)->x = tanh(((nHidden2 *)n)->xsum);
- ((nHidden2 *)n)->xsum = 0.0;
- for (int a = 0; a < ((nHidden2 *)n)->axons->size(); a++) {
- (*((nHidden2 *)n)->axons)[a].x->fireX(((nHidden2 *)n)->x * (*((nHidden2 *)n)->axons)[a].weight);
- }
- break;
- case -nt::output:
- if (((nOutput2 *)n)->mem->type == nt::memory || ((nOutput2 *)n)->mem->type == -nt::memory)
- {
- if (((nMemory2 *)(((nOutput2 *)n)->mem->n))->target) memSet(((nOutput2 *)n)->mem, GetX_TargetEdition());
- else memSet(((nOutput2 *)n)->mem, GetX());
- }
- ((nOutput2 *)n)->x = tanh(((nOutput2 *)n)->xsum);
- ((nOutput2 *)n)->xsum = 0.0;
- return;
- case -nt::memory:
- for (int a = 0; a < ((nMemory2 *)n)->axons->size(); a++) {
- (*((nMemory2 *)n)->axons)[a].x->fireX(((nMemory2 *)n)->x * (*((nMemory2 *)n)->axons)[a].weight);
- }
- break;
- default:
- return;
- }
- }
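- // memSet(): store `x` into a memory neuron. Chained memories (-nt::memory)
- // first push their own current value one link further, so a chain behaves
- // roughly like a delay line across successive forward passes.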
- void memSet(neuron * target, double x)
- {
- if (!target) return;
- switch (target->type)
- {
- case nt::memory:
- ((nMemory *)target->n)->x = x;
- break;
- case -nt::memory:
- target->memSet(((nMemory2 *)target->n)->mem, target->GetX());
- ((nMemory2 *)target->n)->x = x;
- break;
- }
- }
- vector <axon> *GetAxons()
- {
- switch (type)
- {
- case nt::input:
- return ((nInput *)n)->axons;
- case nt::bias:
- return ((nBias *)n)->axons;
- case nt::hidden:
- return ((nHidden *)n)->axons;
- case nt::memory:
- return ((nMemory *)n)->axons;
- case -nt::input:
- return ((nInput2 *)n)->axons;
- case -nt::hidden:
- return ((nHidden2 *)n)->axons;
- case -nt::memory:
- return ((nMemory2 *)n)->axons;
- }
- return 0;
- }
- double GetX()
- {
- switch (type)
- {
- case nt::input:
- return ((nInput *)n)->x;
- case nt::hidden:
- return ((nHidden *)n)->x;
- case nt::output:
- return ((nOutput *)n)->x;
- case nt::memory:
- return ((nMemory *)n)->x;
- case -nt::input:
- return ((nInput2 *)n)->x;
- case -nt::hidden:
- return ((nHidden2 *)n)->x;
- case -nt::output:
- return ((nOutput2 *)n)->x;
- case -nt::memory:
- return ((nMemory2 *)n)->x;
- }
- return 1.0; // only bias neurons fall through to here: a bias reads as a constant 1.0
- }
- double GetX_TargetEdition()
- {
- switch (type)
- {
- case nt::input:
- return ((nInput *)n)->x;
- case nt::hidden:
- return ((nHidden *)n)->x;
- case nt::output:
- return ((nOutput *)n)->target;
- case nt::memory:
- return ((nMemory *)n)->x;
- case -nt::input:
- return ((nInput2 *)n)->x;
- case -nt::hidden:
- return ((nHidden2 *)n)->x;
- case -nt::output:
- return ((nOutput2 *)n)->target;
- case -nt::memory:
- return ((nMemory2 *)n)->x;
- }
- return 1.0; // bias neurons fall through to here and read as a constant 1.0
- }
- double GetY()
- {
- switch (type)
- {
- case nt::hidden:
- return ((nHidden *)n)->y;
- case nt::output:
- return ((nOutput *)n)->y;
- case -nt::hidden:
- return ((nHidden2 *)n)->y;
- case -nt::output:
- return ((nOutput2 *)n)->y;
- }
- return 0.0;
- }
- void fireX(double signal)
- {
- switch (type)
- {
- case nt::hidden:
- ((nHidden *)n)->xsum += signal;
- break;
- case nt::output:
- ((nOutput*)n)->xsum += signal;
- break;
- case -nt::hidden:
- ((nHidden2 *)n)->xsum += signal;
- break;
- case -nt::output:
- ((nOutput2*)n)->xsum += signal;
- break;
- }
- }
- neuron() { n = 0; type = 0; } // default-constructed neurons carry no payload
- ~neuron() {}
- };
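- // brain: owns the layer map and exposes coord-based helpers for wiring
- // (connect / connectmem), I/O (setX / getX / getTarget) and the two network
- // passes (ActivateX forward, ActivateY gradients plus weight update).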
- class brain
- {
- public:
- layers layers;
- void PrintFull()
- {
- for (int y = 0; y < layers.map.size(); y++)
- {
- for (int x = 0; x < layers.map[y].size(); x++)
- {
- cout << "neuron" << x << ":";
- switch (layers.map[y][x]->type)
- {
- case nt::bias:
- cout << "bias";
- break;
- case nt::input:
- cout << "input";
- break;
- case nt::hidden:
- cout << "hidden";
- break;
- case nt::output:
- cout << "output";
- break;
- case nt::memory:
- cout << "memory";
- break;
- case -nt::input:
- cout << "-input";
- break;
- case -nt::hidden:
- cout << "-hidden";
- break;
- case -nt::output:
- cout << "-output";
- break;
- case -nt::memory:
- cout << "-memory";
- break;
- }
- cout << " Y:" << layers.map[y][x]->GetY() << " X:" << layers.map[y][x]->GetX() << endl;
- if (layers.map[y][x]->GetAxons())
- {
- for (int a = 0; a < layers.map[y][x]->GetAxons()->size(); a++)
- {
- cout << " Delta:" << (*layers.map[y][x]->GetAxons())[a].delta << " weight:" << (*layers.map[y][x]->GetAxons())[a].weight;
- cout << " TYPE:";
- (*layers.map[y][x]->GetAxons())[a].x->PrintNeuronName();
- cout << endl;
- }
- }
- }
- cout << endl << endl;
- }
- }
- void PrintTypes()
- {
- for (int y = 0; y < layers.map.size(); y++)
- {
- cout << "layer:" << y << endl;
- for (int x = 0; x < layers.map[y].size(); x++)
- {
- cout << "neuron" << x << ":";
- switch (layers.map[y][x]->type)
- {
- case nt::bias:
- cout << "bias";
- break;
- case nt::input:
- cout << "input";
- break;
- case nt::hidden:
- cout << "hidden";
- break;
- case nt::output:
- cout << "output";
- break;
- case nt::memory:
- cout << "memory";
- break;
- case -nt::input:
- cout << "-input";
- break;
- case -nt::hidden:
- cout << "-hidden";
- break;
- case -nt::output:
- cout << "-output";
- break;
- case -nt::memory:
- cout << "-memory";
- break;
- }
- cout << endl;
- }
- cout << endl << endl;
- }
- }
- double getTarget(coord n)
- {
- return GetNeuron(n)->GetTarget();
- }
- double getX(coord n){
- return GetNeuron(n)->GetX();
- }
- void setX(coord n, double in)
- {
- switch (GetNeuron(n)->type)
- {
- case nt::output:
- case -nt::output:
- // output neurons (either variant) take the value as their training target
- GetNeuron(n)->SetTarget(in);
- break;
- default:
- GetNeuron(n)->setX(in);
- break;
- }
- }
- neuron *GetNeuron(coord coords)
- {
- return layers.map[coords.y][coords.x];
- }
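- // connectlayers(): fully connect every neuron in layerwhat to every neuron in
- // layerwith; connections into bias, input and memory neurons are silently
- // skipped by neuron::connect().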
- void connectlayers(unsigned short layerwhat, unsigned short layerwith)
- {
- for (int a = 0; a < layers.map[layerwhat].size(); a++)
- {
- connect(layers.map[layerwhat][a], layerwith);
- }
- }
- void connect(neuron *what, unsigned short layer)
- {
- for (int a = 0; a < layers.map[layer].size(); a++)
- {
- connect(what, layers.map[layer][a]);
- }
- }
- void connect(coord what, unsigned short layer)
- {
- for (int a = 0; a < layers.map[layer].size(); a++)
- {
- connect(what, layers.map[layer][a]);
- }
- }
- void connectmemTarget(coord what, unsigned short ywith, unsigned short xwith)
- {
- coord b;
- b.x = xwith;
- b.y = ywith;
- connectmem(what, b, 1);
- }
- void connectmemTarget(unsigned short ywhat, unsigned short xwhat, coord with)
- {
- coord a;
- a.x = xwhat;
- a.y = ywhat;
- connectmem(a, with, 1);
- }
- void connectmem(coord what, unsigned short ywith, unsigned short xwith)
- {
- coord b;
- b.x = xwith;
- b.y = ywith;
- connectmem(what, b);
- }
- void connectmem(unsigned short ywhat, unsigned short xwhat, coord with)
- {
- coord a;
- a.x = xwhat;
- a.y = ywhat;
- connectmem(a, with);
- }
- void connect(coord what, unsigned short ywith, unsigned short xwith)
- {
- coord b;
- b.x = xwith;
- b.y = ywith;
- GetNeuron(what)->connect(GetNeuron(b));
- }
- void connect(unsigned short ywhat, unsigned short xwhat, coord with)
- {
- coord a;
- a.x = xwhat;
- a.y = ywhat;
- GetNeuron(a)->connect(GetNeuron(with));
- }
- void connect(unsigned short ywhat, unsigned short xwhat, unsigned short ywith, unsigned short xwith)
- {
- coord a;
- coord b;
- a.x = xwhat;
- a.y = ywhat;
- b.x = xwith;
- b.y = ywith;
- GetNeuron(a)->connect(GetNeuron(b));
- }
- void connect( coord what, coord with)
- {
- GetNeuron(what)->connect(GetNeuron(with));
- }
- void connectmem(coord what, coord with, bool target = false)
- {
- GetNeuron(what)->connectmem(GetNeuron(with), target);
- }
- void connect(neuron *what, neuron *with)
- {
- what->connect(with);
- }
- void connect(coord what, neuron *with)
- {
- GetNeuron(what)->connect(with);
- }
- void AddNeuron(unsigned short layer, char type, unsigned short count)
- {
- for (int a = 0; a < count; a++) AddNeuron(layer, type);
- }
- coord AddNeuron(unsigned short layer, char type)
- {
- if (layer >= layers.map.size())
- {
- unsigned short needed = (layer - layers.map.size()) + 1;
- for (int a = 0; a < needed; a++) layers.map.push_back(vector<neuron *>());
- }
- layers.map[layer].push_back(new neuron(type));
- coord out( layer, layers.map[layer].size() - 1 );
- return out;
- }
- void ActivateX()
- {
- for (int y = 0; y < layers.map.size(); y++) {
- for (int x = 0; x < layers.map[y].size(); x++) {
- layers.map[y][x]->ActivateX();
- }
- }
- }
- void ActivateY()
- {
- for (int y = layers.map.size() - 1; y >= 0; y--) {
- for (int x = 0; x < layers.map[y].size(); x++) {
- layers.map[y][x]->ActivateY();
- }
- }
- // PrintFull();
- for (int y = layers.map.size() - 1; y >= 0; y--) {
- for (int x = 0; x < layers.map[y].size(); x++) {
- layers.map[y][x]->ActivateW();
- }
- }
- }
- brain() {}
- ~brain() {}
- };
- }
- #endif
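- // ---------------------------------------------------------------------------
- // Usage sketch (not part of the header). A minimal, hedged example of how the
- // API above appears intended to be wired together: a 2-3-1 network trained on
- // XOR-style samples. The topology, sample data, iteration count and the
- // "brain.h" filename are illustrative assumptions, not prescribed by this code,
- // and convergence is not guaranteed.
- #if 0
- #include <cstdlib>
- #include <iostream>
- #include "brain.h" // assumed filename for this header
- int main()
- {
-     srand(42);                                  // axon weights are rand()-initialised
-     nn::brain b;
-     nn::coord in0 = b.AddNeuron(0, nn::nt::input);
-     nn::coord in1 = b.AddNeuron(0, nn::nt::input);
-     b.AddNeuron(0, nn::nt::bias);
-     b.AddNeuron(1, nn::nt::hidden, 3);
-     b.AddNeuron(1, nn::nt::bias);
-     nn::coord out = b.AddNeuron(2, nn::nt::output);
-     b.connectlayers(0, 1);                      // fully connect layer 0 -> 1
-     b.connectlayers(1, 2);                      // fully connect layer 1 -> 2
-     double samples[4][3] = { {0,0,0}, {0,1,1}, {1,0,1}, {1,1,0} };
-     for (int epoch = 0; epoch < 2000; epoch++)
-     {
-         for (int s = 0; s < 4; s++)
-         {
-             b.setX(in0, samples[s][0]);
-             b.setX(in1, samples[s][1]);
-             b.setX(out, samples[s][2]);         // outputs store this as their target
-             b.ActivateX();                      // forward pass
-             b.ActivateY();                      // gradients + weight update
-         }
-     }
-     b.setX(in0, 1.0); b.setX(in1, 0.0);
-     b.ActivateX();
-     std::cout << "1 XOR 0 -> " << b.getX(out) << std::endl;
-     return 0;
- }
- #endif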