Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
- #include <iostream>
- #include <vector>
- #include <cstdlib>
- #include <cassert>
- #include <cmath>
- #include <fstream>
- #include <string>
- using namespace std;
// One weighted connection from a neuron to a neuron in the next layer.
// Members are default-initialized to zero so a plain `Connection c;` is
// safe (previously only value-initialization zeroed them).
struct Connection
{
    double weight{0.0};       // current connection weight
    double deltaWeight{0.0};  // last applied change, used as momentum term
};
// One training sample parsed from the data file: the integer input
// fields followed by the integer expected-output fields.
struct Sample
{
    vector<int> inputs;   // first inputL fields of a data line
    vector<int> outputs;  // remaining fields of the line
};
// Prints the elements of `vec` separated by single spaces, followed by a
// newline, to std::cout. Works for any streamable element type.
//
// Fixes: the original `vector<auto>` parameter is not valid C++ (it does
// not compile); the vector was also copied by value and a local string
// `buff` was declared but never used.
template <typename T>
void printVector(const std::vector<T> &vec)
{
    for (std::size_t i = 0; i < vec.size(); ++i) {
        std::cout << vec[i] << " ";
    }
    std::cout << std::endl;
}
- Sample makeSample(string line, int inputL, int outputL)
- {
- //cout << "Input L " << inputL << endl;
- Sample temp;
- int index = 1;
- string curr;
- for (int i = 0; i < line.length(); ++i){
- curr = line[i];
- if (curr != ";") {
- //cout << "At index " << index << " is char " << curr << endl;
- if (index <= inputL) {
- temp.inputs.push_back(atoi(curr.c_str()));
- } else {
- temp.outputs.push_back(atoi(curr.c_str()));
- }
- index += 1;
- }
- }
- //cout << "Inputs: ";
- //printVector(temp.inputs);
- //cout << "Outputs: ";
- //printVector(temp.outputs);
- return temp;
- }
- typedef vector<Sample> Samples;
- Samples parseFile(string path)
- {
- int length;
- string sTopology;
- ifstream myFile;
- myFile.open(path.c_str());
- getline(myFile, sTopology);
- cout << "Topology: " << sTopology << endl;
- string lBuff;
- int index = 0;
- while (sTopology.at(index) != ';'){
- lBuff += sTopology.at(index);
- index += 1;
- }
- string last;
- string curr;
- for (int i = 0; i < sTopology.length(); ++i){
- curr = sTopology[i];sTopology.at(index);
- if (curr == ";") {
- last = "";
- } else {
- last += curr;
- }
- }
- int inputL = atoi(lBuff.c_str());
- int outputL = atoi(last.c_str());
- length = inputL + outputL;
- vector <Sample> samples;
- string line;
- while (getline(myFile, line)) {
- samples.push_back( makeSample(line, inputL, outputL) );
- }
- myFile.close();
- return samples;
- }
class Neuron;                  // forward declaration so Layer can be defined first
typedef vector<Neuron> Layer;  // a layer is a row of neurons (Net appends one extra bias neuron)
- // ++++++++++++++++++++++++ Class Neuron +++++++++++++++++++++++++//
// A single neuron. It stores its own output value and gradient, plus the
// weights of its connections TO the next layer, indexed by the receiving
// neuron's position in that layer.
class Neuron
{
public:
    // numOutputs: number of connections to the next layer (0 for output layer);
    // myIndex: this neuron's position within its own layer.
    Neuron(unsigned numOutputs, unsigned myIndex);
    void setOutputVal(double val) {m_outputVal = val; }
    double getOutputVal(void) const {return m_outputVal; }
    // Computes this neuron's output from the previous layer's outputs.
    void feedForward(const Layer &prevlayer);
    // Gradient for an output-layer neuron, from the known target value.
    void calcOutputGradients(double targetVal);
    // Gradient for a hidden-layer neuron, from the next layer's gradients.
    void calculateGradients(const Layer &nextLayer);
    // Gradient-descent-with-momentum update of the weights feeding this neuron.
    void updateInputWeights(Layer &prevLayer);
private:
    static double transferFunction(double x);
    static double transferFunctionDerivative(double x);
    // Uniform pseudo-random weight in [0, 1].
    static double randomWeight(void) {return rand() / double(RAND_MAX); }
    double m_outputVal;
    vector<Connection> m_outputWeights;  // one Connection per next-layer neuron
    unsigned m_myIndex;                  // index of this neuron within its layer
    double m_gradient;
    // Sum of (outgoing weight * next-layer gradient), bias neuron excluded.
    double sumDOW(const Layer &nextLayer) const;
    static double eta;    // learning rate (shared by all neurons)
    static double alpha;  // momentum factor (shared by all neurons)
};
double Neuron::eta = 0.15;  // overall net learning rate, [0.0 .. 1.0]
double Neuron::alpha = 0.5; // momentum: multiplier of the previous deltaWeight, [0.0 .. 1.0]
- void Neuron::updateInputWeights(Layer &prevLayer)
- {
- for (unsigned n = 0; n < prevLayer.size(); ++n) {
- Neuron &neuron = prevLayer[n];
- double oldDeltaWeight = neuron.m_outputWeights[m_myIndex].deltaWeight;
- double newDeltaWeight = eta * neuron.getOutputVal() * m_gradient
- + alpha * oldDeltaWeight;
- neuron.m_outputWeights[m_myIndex].deltaWeight = newDeltaWeight;
- neuron.m_outputWeights[m_myIndex].weight += newDeltaWeight;
- }
- }
- double Neuron::sumDOW(const Layer &nextLayer) const
- {
- double sum = 0.0;
- for (unsigned n=0; n < nextLayer.size() - 1; ++n){
- sum += m_outputWeights[n].weight * nextLayer[n].m_gradient;
- }
- return sum;
- }
- void Neuron::calculateGradients(const Layer &nextLayer)
- {
- double dow = sumDOW(nextLayer);
- m_gradient = dow * Neuron::transferFunctionDerivative(m_outputVal);
- }
- void Neuron::calcOutputGradients(double targetVal)
- {
- double delta = targetVal - m_outputVal;
- m_gradient = delta * Neuron::transferFunctionDerivative(m_outputVal);
- }
- double Neuron::transferFunction(double x)
- {
- //tanh [-1; 1]
- return tanh(x);
- }
// Derivative of the tanh activation, expressed in terms of the OUTPUT
// value rather than the input: d/dz tanh(z) = 1 - tanh(z)^2, and callers
// pass x = m_outputVal, which is already tanh(sum) (see feedForward).
double Neuron::transferFunctionDerivative(double x)
{
    return 1.0 - x*x;
}
- void Neuron::feedForward(const Layer &prevlayer)
- {
- double sum = 0.0;
- for (unsigned n = 0; n < prevlayer.size(); ++n) {
- sum += prevlayer[n].getOutputVal() * prevlayer[n].m_outputWeights[m_myIndex].weight;
- }
- m_outputVal = Neuron::transferFunction(sum);
- }
- Neuron::Neuron(unsigned numOutputs, unsigned myIndex)
- {
- for (unsigned c = 0; c < numOutputs; ++c){
- m_outputWeights.push_back(Connection());
- m_outputWeights.back().weight = randomWeight();
- }
- m_myIndex = myIndex;
- };
- // ++++++++++++++++++++++++ Class Net +++++++++++++++++++++++++//
// Feed-forward neural network: a stack of Layers sized by a topology
// vector (neurons per layer); the constructor appends one bias neuron
// to every layer.
class Net
{
public:
    Net(const vector<unsigned> &topology);
    // Loads inputVals into the input layer and propagates forward.
    void feedForward(const vector<double> &inputVals);
    // Backpropagation pass against the expected targetVals.
    void backProp(const vector<double> &targetVals);
    // Copies the output layer's values (bias excluded) into resultVals.
    void getResults(vector<double> &resultVals) const;
private:
    vector<Layer> m_layers;  // m_layers[layerNum][neuronNum]
    double m_error;                         // RMS error from the latest backProp
    double m_recentAverageError;            // running average of m_error
    double m_recentAverageSmoothingFactor;  // NOTE(review): never initialized anywhere visible — confirm
};
- void Net::getResults(vector<double> &resultVals) const
- {
- resultVals.clear();
- for (unsigned n = 0; n < m_layers.back().size() - 1; ++n) {
- cout << m_layers.back()[n].getOutputVal() << endl;
- resultVals.push_back(m_layers.back()[n].getOutputVal());
- }
- }
- void Net::backProp(const vector<double> &targetVals)
- {
- // calc net error (RMS)
- Layer &outputLayer = m_layers.back();
- m_error = 0.0;
- for (unsigned n = 0; n < outputLayer.size() - 1; ++n){
- double delta = targetVals[n] - outputLayer[n].getOutputVal();
- m_error += delta * delta;
- }
- m_error /= outputLayer.size() -1;
- m_error = sqrt(m_error);
- m_recentAverageError = (m_recentAverageError * m_recentAverageSmoothingFactor) / (m_recentAverageSmoothingFactor + 1.0);
- //calc output gradients
- for (unsigned n=0; n < outputLayer.size() - 1; ++n) {
- outputLayer[n].calcOutputGradients(targetVals[n]);
- }
- for (unsigned layerNum = m_layers.size() - 1; layerNum > 0; --layerNum) {
- Layer &layer = m_layers[layerNum];
- Layer &prevLayer = m_layers[layerNum - 1];
- for (unsigned n = 0; n < layer.size() - 1; ++n) {
- layer[n].updateInputWeights(prevLayer);
- }
- }
- //update weights
- for (unsigned layerNum = m_layers.size() - 1; layerNum > 0; --layerNum) {
- Layer &layer = m_layers[layerNum];
- Layer &prevLayer = m_layers[layerNum - 1];
- for (unsigned n = 0; n < layer.size() - 1; ++n){
- layer[n].updateInputWeights(prevLayer);
- }
- }
- // more stuff
- }
- void Net::feedForward(const vector<double> &inputVals)
- {
- assert(inputVals.size() == m_layers[0].size()-1 );
- for (unsigned i = 0; i < inputVals.size(); ++i){
- cout << inputVals[i] << endl;
- m_layers[0][i].setOutputVal(inputVals[i]);
- }
- for (unsigned layerNum = 1; layerNum < m_layers.size(); ++layerNum){
- Layer &prevLayer = m_layers[layerNum - 1];
- for (unsigned n=0; n < m_layers[layerNum].size() - 1; ++n ){
- m_layers[layerNum][n].feedForward(prevLayer);
- }
- }
- };
- Net::Net(const vector<unsigned> &topology){
- unsigned numLayers = topology.size();
- for (unsigned layerNum = 0; layerNum < numLayers; ++layerNum){
- m_layers.push_back(Layer());
- unsigned numOutputs = layerNum == topology.size() -1 ? 0 : topology[layerNum+1];
- //Layer Creation
- for (unsigned neuronNum = 0; neuronNum <= topology[layerNum]; ++neuronNum){
- m_layers.back().push_back(Neuron(numOutputs, neuronNum));
- cout << "Made a Neuron!" << endl;
- }
- m_layers.back().back().setOutputVal(1.0);
- }
- }
- int main()
- {
- Samples samples = parseFile("test.txt");
- return 0;
- }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement