Guest User

Untitled

a guest
May 27th, 2018
72
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 3.23 KB | None | 0 0
  1. #include <stdio.h>
  2. #include <stdlib.h>
  3.  
/* Precision: leave uncommented which precision to use */
/*#define PRECISION_SINGLE*/
#define PRECISION_DOUBLE

/* Select the network's floating-point type `fp` from the chosen precision.
 * PRECISION_S / PRECISION_D expand to true/false so runtime code can branch
 * on the active precision (used by main's scanf to pick "%f" vs "%lf"). */
#if defined PRECISION_SINGLE
#define PRECISION_S true
#define PRECISION_D false
typedef float fp;
#elif defined PRECISION_DOUBLE
#define PRECISION_S false
#define PRECISION_D true
typedef double fp;
#endif
  17.  
/* Kinds of neuron activation functions.
 * NOTE(review): not referenced anywhere in this paste — presumably intended
 * for a factory/serialization layer elsewhere; confirm before removing.
 * Only Linear and BinaryThreshold have corresponding classes below. */
enum NeuronType{
NType_Linear,           /* affine: m*input + b (see Neuron_Linear) */
NType_BinaryThreshold,  /* step function (see Neuron_BinaryThreshold) */
NType_Rectifier,        /* no implementation in this paste */
NType_Sigmoid,          /* no implementation in this paste */
NType_StochasticBinary  /* no implementation in this paste */
};
  25.  
  26. class Neuron{
  27. public:
  28. Neuron(){}
  29. ~Neuron(){}
  30. virtual fp CalculateOutput(){return outputValue = inputValue;}
  31. fp inputValue;
  32. fp outputValue;
  33.  
  34. };
  35.  
  36. class NeuronHiddenLayer : public Neuron{
  37. public:
  38. struct Connection{
  39. Neuron* PrevLayerNeuronIndex;
  40. fp Weight;
  41. };
  42.  
  43. const unsigned int connectionAmount;
  44. Connection* connections;
  45.  
  46. private:
  47. bool isConnectionListAllocated;
  48.  
  49. public:
  50. NeuronHiddenLayer(unsigned int ConAmount, Connection* Cons = 0) : connectionAmount(ConAmount), connections(Cons){
  51. if(isConnectionListAllocated = (!connections)){
  52. connections = (Connection*)malloc(sizeof(Connection) * connectionAmount);
  53. }
  54. }
  55. ~NeuronHiddenLayer(){
  56. if(isConnectionListAllocated){
  57. free((void*)connections);
  58. }
  59. }
  60. fp CalculateInput(){
  61. inputValue = 0;
  62. for (int i = 0; i < connectionAmount; ++i){
  63. inputValue += connections[i].PrevLayerNeuronIndex->outputValue * connections[i].Weight;
  64. }
  65. return inputValue;
  66. }
  67. };
  68.  
  69. class Neuron_Linear : public NeuronHiddenLayer{
  70. public:
  71. Neuron_Linear(unsigned int ConAmount, Connection* Cons, fp M = 1, fp B = 1) : NeuronHiddenLayer(ConAmount, Cons), m(M), b(B){}
  72. fp m; //slope
  73. fp b; //bias
  74. fp CalculateOutput(){return outputValue = inputValue*m + b;}
  75. };
  76.  
  77. class Neuron_BinaryThreshold : public NeuronHiddenLayer{
  78. public:
  79. Neuron_BinaryThreshold(unsigned int ConAmount, Connection* Cons, fp threshold = 1) : NeuronHiddenLayer(ConAmount, Cons), Threshold(threshold){}
  80. fp Threshold;
  81. fp CalculateOutput(){return outputValue = (fp)(inputValue >= Threshold);}
  82. };
  83.  
  84.  
/* Hand-wired 2-2-1 network computing XOR.
 * With inputs A, B in {0,1}:
 *   layer1[0] fires when  A + B >= 0.5   (logical OR)
 *   layer1[1] fires when -A - B >= -1.5  i.e. A + B <= 1.5  (logical NAND)
 *   layer2[0] fires when  layer1[0] + layer1[1] >= 1.5  (logical AND)
 * OR AND NAND == XOR. */
Neuron input[2] = { //2 input neurons; main writes their outputValue directly
Neuron(), Neuron()
};
NeuronHiddenLayer::Connection inputToLayer1Neuron0[2] = { //2 connections between input layers and first neuron of first hidden layer (weights +1,+1 -> OR)
{&input[0], 1}, {&input[1], 1}
};
NeuronHiddenLayer::Connection inputToLayer1Neuron1[2] = { //2 connections between input layers and second neuron of first hidden layer (weights -1,-1 -> NAND)
{&input[0], -1}, {&input[1], -1}
};
Neuron_BinaryThreshold layer1[2] = { //first hidden layer: OR (threshold 0.5) and NAND (threshold -1.5)
Neuron_BinaryThreshold(2, inputToLayer1Neuron0, 0.5),
Neuron_BinaryThreshold(2, inputToLayer1Neuron1, -1.5)
};
NeuronHiddenLayer::Connection inputToLayer2[2] = { //connections between first and second hidden layers (weights +1,+1)
{&layer1[0], 1},
{&layer1[1], 1}
};
Neuron_BinaryThreshold layer2[1] = { //second hidden layer: AND (threshold 1.5), final XOR output
Neuron_BinaryThreshold(2, inputToLayer2, 1.5)
};
  105.  
  106.  
  107.  
  108. int main(int argc, char const *argv[]){
  109. printf("xor (input A and B, will output A^B):\n");
  110. while(1 != 2){
  111. scanf((PRECISION_D)?"%lf %lf":"%f %f", &input[0].outputValue, &input[1].outputValue);
  112.  
  113. layer1[0].CalculateInput();
  114. layer1[0].CalculateOutput();
  115.  
  116. layer1[1].CalculateInput();
  117. layer1[1].CalculateOutput();
  118.  
  119. layer2[0].CalculateInput();
  120. layer2[0].CalculateOutput();
  121.  
  122. printf("%d\n", (char)layer2[0].outputValue);
  123. }
  124. return 0;
  125. }
Add Comment
Please, Sign In to add comment