Advertisement
programcreator

Monstruosity

Jun 13th, 2016
186
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
C++ 7.41 KB | None | 0 0
  1. #include <iostream>
  2. #include <vector>
  3. #include <cstdlib>
  4. #include <cassert>
  5. #include <cmath>
  6. #include <fstream>
  7. #include <string>
  8.  
  9. using namespace std;
  10.  
// One synapse: the current weight plus the most recent change applied to it
// (the old delta is reused as the momentum term in updateInputWeights).
struct Connection
{
    double weight;
    double deltaWeight;
};
  16.  
// One training example read from the data file: the input values followed
// by the expected output values (one int per ';'-separated character).
struct Sample
{
    vector<int> inputs;
    vector<int> outputs;
};
  22.  
  23. void printVector( vector<auto> vec)
  24. {
  25.     string buff;
  26.     for (int i = 0; i < vec.size(); ++i) {
  27.         cout << vec[i] << " ";
  28.     }
  29.     cout << "" << endl;
  30. }
  31.  
  32. Sample makeSample(string line, int inputL, int outputL)
  33. {
  34.     //cout << "Input L " << inputL << endl;
  35.     Sample temp;
  36.     int index = 1;
  37.     string curr;
  38.  
  39.     for (int i = 0; i < line.length(); ++i){
  40.         curr = line[i];
  41.         if (curr != ";") {
  42.             //cout << "At index " << index << " is char " << curr << endl;
  43.             if (index <= inputL) {
  44.                 temp.inputs.push_back(atoi(curr.c_str()));
  45.             } else {
  46.                 temp.outputs.push_back(atoi(curr.c_str()));
  47.             }
  48.         index += 1;
  49.         }
  50.     }
  51.  
  52.     //cout << "Inputs: ";
  53.     //printVector(temp.inputs);
  54.     //cout << "Outputs: ";
  55.     //printVector(temp.outputs);
  56.  
  57.     return temp;
  58. }
  59.  
  60. typedef vector<Sample> Samples;
  61.  
  62. Samples parseFile(string path)
  63. {
  64.     int length;
  65.     string sTopology;
  66.     ifstream myFile;
  67.     myFile.open(path.c_str());
  68.     getline(myFile, sTopology);
  69.     cout << "Topology: " << sTopology << endl;
  70.  
  71.     string lBuff;
  72.     int index = 0;
  73.  
  74.     while (sTopology.at(index) != ';'){
  75.         lBuff += sTopology.at(index);
  76.         index += 1;
  77.     }
  78.  
  79.     string last;
  80.     string curr;
  81.  
  82.     for (int i = 0; i < sTopology.length(); ++i){
  83.         curr = sTopology[i];sTopology.at(index);
  84.         if (curr == ";") {
  85.             last = "";
  86.         } else {
  87.             last += curr;
  88.         }
  89.     }
  90.  
  91.     int inputL = atoi(lBuff.c_str());
  92.     int outputL = atoi(last.c_str());
  93.  
  94.     length = inputL + outputL;
  95.  
  96.     vector <Sample> samples;
  97.  
  98.     string line;
  99.     while (getline(myFile, line)) {
  100.         samples.push_back( makeSample(line, inputL, outputL) );
  101.     }
  102.  
  103.     myFile.close();
  104.  
  105.     return samples;
  106. }
  107.  
  108. class Neuron;
  109.  
  110. typedef vector<Neuron> Layer;
  111.  
  112. // ++++++++++++++++++++++++ Class Neuron +++++++++++++++++++++++++//
  113.  
// A single neuron: holds its output value, its outgoing connection weights
// (one per neuron in the next layer), and the gradient used during
// backpropagation.
class Neuron
{
public:
    // numOutputs: neuron count of the NEXT layer (0 for the output layer);
    // myIndex: this neuron's position within its own layer.
    Neuron(unsigned numOutputs, unsigned myIndex);
    void setOutputVal(double val) {m_outputVal = val; }
    double getOutputVal(void) const {return m_outputVal; }
    // Computes this neuron's output from the previous layer's outputs.
    void feedForward(const Layer &prevlayer);
    // Gradient for an output-layer neuron (target vs. actual output).
    void calcOutputGradients(double targetVal);
    // Gradient for a hidden-layer neuron (from the next layer's gradients).
    void calculateGradients(const Layer &nextLayer);
    // Applies this neuron's gradient to the weights feeding into it.
    void updateInputWeights(Layer &prevLayer);

private:
    static double transferFunction(double x);
    static double transferFunctionDerivative(double x);
    // Uniform pseudo-random weight in [0, 1].
    static double randomWeight(void) {return rand() / double(RAND_MAX); }
    double m_outputVal;
    vector<Connection> m_outputWeights;   // one Connection per next-layer neuron
    unsigned m_myIndex;                   // index within the owning layer
    double m_gradient;
    // Sum of (outgoing weight * downstream gradient), bias neuron excluded.
    double sumDOW(const Layer &nextLayer) const;
    static double eta;    // overall learning rate
    static double alpha;  // momentum factor
};
  137.  
// Learning rate: scales the gradient term of each weight update.
double Neuron::eta = 0.15;

// Momentum: fraction of the previous delta weight carried into the next one.
double Neuron::alpha = 0.5;
  141.  
  142. void Neuron::updateInputWeights(Layer &prevLayer)
  143. {
  144.     for (unsigned n = 0; n < prevLayer.size(); ++n) {
  145.         Neuron &neuron = prevLayer[n];
  146.         double oldDeltaWeight = neuron.m_outputWeights[m_myIndex].deltaWeight;
  147.  
  148.         double newDeltaWeight = eta * neuron.getOutputVal() * m_gradient
  149.             + alpha * oldDeltaWeight;
  150.  
  151.         neuron.m_outputWeights[m_myIndex].deltaWeight = newDeltaWeight;
  152.         neuron.m_outputWeights[m_myIndex].weight += newDeltaWeight;
  153.     }
  154. }
  155.  
  156. double Neuron::sumDOW(const Layer &nextLayer) const
  157. {
  158.     double sum = 0.0;
  159.  
  160.     for (unsigned n=0; n < nextLayer.size() - 1; ++n){
  161.         sum += m_outputWeights[n].weight * nextLayer[n].m_gradient;
  162.     }
  163.  
  164.     return sum;
  165. }
  166.  
  167. void Neuron::calculateGradients(const Layer &nextLayer)
  168. {
  169.     double dow = sumDOW(nextLayer);
  170.     m_gradient = dow * Neuron::transferFunctionDerivative(m_outputVal);
  171. }
  172.  
  173. void Neuron::calcOutputGradients(double targetVal)
  174. {
  175.     double delta = targetVal - m_outputVal;
  176.     m_gradient = delta * Neuron::transferFunctionDerivative(m_outputVal);
  177. }
  178.  
  179. double Neuron::transferFunction(double x)
  180. {
  181.     //tanh [-1; 1]
  182.     return tanh(x);
  183. }
  184.  
// Derivative of tanh expressed in terms of the neuron's OUTPUT value:
// d/dz tanh(z) = 1 - tanh(z)^2, and x here is already tanh(input),
// so 1 - x*x is the exact derivative, not an approximation.
double Neuron::transferFunctionDerivative(double x)
{
    return 1.0 - x*x;
}
  189.  
  190. void Neuron::feedForward(const Layer &prevlayer)
  191. {
  192.     double sum = 0.0;
  193.  
  194.     for (unsigned n = 0; n < prevlayer.size(); ++n) {
  195.         sum += prevlayer[n].getOutputVal() * prevlayer[n].m_outputWeights[m_myIndex].weight;
  196.     }
  197.  
  198.     m_outputVal = Neuron::transferFunction(sum);
  199. }
  200.  
  201. Neuron::Neuron(unsigned numOutputs, unsigned myIndex)
  202. {
  203.     for (unsigned c = 0; c < numOutputs; ++c){
  204.         m_outputWeights.push_back(Connection());
  205.         m_outputWeights.back().weight = randomWeight();
  206.     }
  207.  
  208.     m_myIndex = myIndex;
  209. };
  210.  
  211. // ++++++++++++++++++++++++ Class Net +++++++++++++++++++++++++//
// A feed-forward neural network: a vector of layers, each layer a vector of
// neurons with one extra bias neuron appended.
class Net
{
public:
    // topology[i] = neuron count of layer i (bias neuron not included).
    Net(const vector<unsigned> &topology);
    // Loads inputVals into the input layer and propagates values forward.
    void feedForward(const vector<double> &inputVals);
    // Computes error and gradients, then updates connection weights.
    void backProp(const vector<double> &targetVals);
    // Copies the output layer's values (bias excluded) into resultVals.
    void getResults(vector<double> &resultVals) const;
private:
    vector<Layer> m_layers;               // m_layers[layerNum][neuronNum]
    double m_error;                       // RMS error of the last backProp
    double m_recentAverageError;          // running average of m_error
    double m_recentAverageSmoothingFactor; // NOTE(review): never initialized anywhere visible — confirm it is set before use
};
  225.  
  226. void Net::getResults(vector<double> &resultVals) const
  227. {
  228.     resultVals.clear();
  229.     for (unsigned n = 0; n < m_layers.back().size() - 1; ++n) {
  230.         cout << m_layers.back()[n].getOutputVal() << endl;
  231.         resultVals.push_back(m_layers.back()[n].getOutputVal());
  232.     }
  233. }
  234.  
  235. void Net::backProp(const vector<double> &targetVals)
  236. {
  237.     // calc net error (RMS)
  238.     Layer &outputLayer = m_layers.back();
  239.     m_error = 0.0;
  240.     for (unsigned n = 0; n < outputLayer.size() - 1; ++n){
  241.         double delta = targetVals[n] - outputLayer[n].getOutputVal();
  242.         m_error += delta * delta;
  243.     }
  244.     m_error /= outputLayer.size() -1;
  245.     m_error = sqrt(m_error);
  246.  
  247.     m_recentAverageError = (m_recentAverageError * m_recentAverageSmoothingFactor) / (m_recentAverageSmoothingFactor + 1.0);
  248.  
  249.     //calc output gradients
  250.  
  251.     for (unsigned n=0; n < outputLayer.size() - 1; ++n) {
  252.         outputLayer[n].calcOutputGradients(targetVals[n]);
  253.     }
  254.  
  255.     for (unsigned layerNum = m_layers.size() - 1; layerNum > 0; --layerNum) {
  256.         Layer &layer = m_layers[layerNum];
  257.         Layer &prevLayer = m_layers[layerNum - 1];
  258.  
  259.         for (unsigned n = 0; n < layer.size() - 1; ++n) {
  260.             layer[n].updateInputWeights(prevLayer);
  261.         }
  262.     }
  263.  
  264.  
  265.     //update weights
  266.  
  267.     for (unsigned layerNum = m_layers.size() - 1; layerNum > 0; --layerNum) {
  268.         Layer &layer = m_layers[layerNum];
  269.         Layer &prevLayer = m_layers[layerNum - 1];
  270.  
  271.         for (unsigned n = 0; n < layer.size() - 1; ++n){
  272.             layer[n].updateInputWeights(prevLayer);
  273.         }
  274.     }
  275.  
  276.     // more stuff
  277. }
  278.  
  279. void Net::feedForward(const vector<double> &inputVals)
  280. {
  281.     assert(inputVals.size() == m_layers[0].size()-1 );
  282.  
  283.     for (unsigned i = 0; i < inputVals.size(); ++i){
  284.         cout << inputVals[i] << endl;
  285.         m_layers[0][i].setOutputVal(inputVals[i]);
  286.     }
  287.     for (unsigned layerNum = 1; layerNum < m_layers.size(); ++layerNum){
  288.         Layer &prevLayer = m_layers[layerNum - 1];
  289.         for (unsigned n=0; n < m_layers[layerNum].size() - 1; ++n ){
  290.             m_layers[layerNum][n].feedForward(prevLayer);
  291.         }
  292.     }
  293. };
  294.  
  295. Net::Net(const vector<unsigned> &topology){
  296.     unsigned numLayers = topology.size();
  297.     for (unsigned layerNum = 0; layerNum < numLayers; ++layerNum){
  298.         m_layers.push_back(Layer());
  299.         unsigned numOutputs = layerNum == topology.size() -1 ? 0 : topology[layerNum+1];
  300.         //Layer Creation
  301.         for (unsigned neuronNum = 0; neuronNum <= topology[layerNum]; ++neuronNum){
  302.             m_layers.back().push_back(Neuron(numOutputs, neuronNum));
  303.             cout << "Made a Neuron!" << endl;
  304.         }
  305.  
  306.         m_layers.back().back().setOutputVal(1.0);
  307.     }
  308. }
  309.  
// Entry point: parses the training samples from test.txt.
// TODO(review): samples are parsed but never used to build or train a Net.
int main()
{
    Samples samples = parseFile("test.txt");



    return 0;
}
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement