Guest User

Untitled

a guest
Feb 23rd, 2016
179
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 21.79 KB | None | 0 0
  1.  
  2. #ifndef BRAIN_H
  3. #define BRAIN_H
  4. #pragma once
  5.  
#include <vector>
#include <cmath>
#include <cstdlib>  // rand(), RAND_MAX — used by axon's weight initialisation
#include <iostream> // cout/endl — used by the debug-printing routines
using namespace std;
  9.  
  10. #define nn_eta 0.15
  11. #define nn_alpha 0.5
  12. #define nn_weight 0.1
  13.  
  14. namespace nn
  15. {
// Neuron-type tags stored in neuron::type.  The plain (positive) value
// selects the ordinary payload struct; the NEGATED value (-nt::input, ...)
// selects the "*2" variant of that payload, i.e. a neuron that is paired
// with a memory neuron.  (A plain enum, not enum class, because the code
// relies on negating the values.)
namespace nt
{
enum
{
hidden = 1,
input,
output,
bias,
memory,
};
}
  27.  
  28. class neuron;
  29.  
  30. class coord
  31. {
  32. public:
  33. unsigned short x; // neuron
  34. unsigned short y; // layer
  35. coord(unsigned short _y, unsigned short _x)
  36. {
  37. x = _x;
  38. y = _y;
  39. }
  40. coord() {}
  41. ~coord() {}
  42. };
  43.  
// Jagged 2-D grid of neuron pointers: map[layer][index-in-layer].
struct layers
{
vector<vector<neuron *>> map;
};
  48.  
  49. class axon
  50. {
  51. public:
  52. neuron *x;
  53. double weight;
  54. double delta;
  55. axon( neuron *In ) {
  56. x = In;
  57. weight = ((double)rand() / double(RAND_MAX)) * nn_weight;
  58. delta = 0.0;
  59. }
  60.  
  61. axon() {
  62. x = 0;
  63. weight = ((double)rand() / double(RAND_MAX)) * nn_weight;
  64. delta = 0.0;
  65. }
  66. ~axon() {}
  67.  
  68. };
  69.  
// Hidden-neuron payload, "*2" (memory-linked) variant: same fields as
// nHidden plus a link to the memory neuron that snapshots this activation.
struct nHidden2
{
vector<axon> *axons; // outgoing connections (heap-allocated by neuron's ctor)
double x;            // activation (tanh of xsum)
double y;            // back-propagated gradient
double xsum;         // accumulated weighted input for the next forward pass
neuron *mem;         // paired memory neuron
};
  78.  
// Input-neuron payload, "*2" (memory-linked) variant.
struct nInput2
{
vector<axon> *axons; // outgoing connections
double x;            // input value (set externally via setX)
neuron *mem;         // paired memory neuron
};
  85.  
// Output-neuron payload, "*2" (memory-linked) variant.  No axon list:
// outputs never source connections.
struct nOutput2
{
double x;      // activation (tanh of xsum)
double y;      // error gradient: (target - x) * (1 - x*x)
double target; // training target
double xsum;   // accumulated weighted input
neuron *mem;   // paired memory neuron
};
  94.  
// Memory-neuron payload, "*2" variant: a memory neuron that itself feeds
// another memory neuron (chained memory).
struct nMemory2
{
vector<axon> *axons; // outgoing connections
double x;            // remembered value from the paired neuron
bool target;         // if true, record an output neuron's target instead of its x
neuron *mem;         // next memory neuron in the chain
};
  102.  
// Hidden-neuron payload (plain variant).
struct nHidden
{
vector<axon> *axons; // outgoing connections (heap-allocated by neuron's ctor)
double x;            // activation (tanh of xsum)
double y;            // back-propagated gradient
double xsum;         // accumulated weighted input for the next forward pass
};
  110.  
// Input-neuron payload (plain variant).
struct nInput
{
vector<axon> *axons; // outgoing connections
double x;            // input value (set externally via setX)
};
  116.  
// Output-neuron payload (plain variant).  No axon list: outputs never
// source connections.
struct nOutput
{
double x;      // activation (tanh of xsum)
double y;      // error gradient: (target - x) * (1 - x*x)
double target; // training target
double xsum;   // accumulated weighted input
};
  124.  
// Bias-neuron payload: only outgoing connections; its effective activation
// is the 1.0 fallback returned by neuron::GetX for types without an x field.
struct nBias
{
vector<axon> *axons;
};
  129.  
// Memory-neuron payload (plain variant): re-emits a remembered value on
// the next forward pass.
struct nMemory
{
vector<axon> *axons; // outgoing connections
double x;            // remembered value from the paired neuron
bool target;         // if true, record an output neuron's target instead of its x
};
  136.  
  137.  
// Tagged-union neuron.  `n` points at one of the payload structs
// (nInput/nHidden/nOutput/nBias/nMemory or their *2 variants) and `type`
// says which: a positive nt:: tag selects the plain payload, the NEGATED
// tag (-nt::input, ...) selects the *2 variant, i.e. a neuron that is
// additionally paired with a memory neuron.
class neuron
{
public:
void *n;   // heap-allocated payload struct; concrete type given by `type`
char type; // nt:: tag (negated for the memory-linked *2 variants)

// Destroy the payload struct for the current tag and clear the tag.
// NOTE(review): the `axons` vector allocated in the constructor is never
// deleted here (leak), and `n` is left dangling — confirm intended
// ownership.  Also note the class destructor does not call free().
void free()
{
switch (type)
{
case nt::input:
delete ((nInput *)n);
break;
case nt::hidden:
delete ((nHidden *)n);
break;
case nt::output:
delete ((nOutput *)n);
break;
case -nt::input:
delete ((nInput2 *)n);
break;
case -nt::hidden:
delete ((nHidden2 *)n);
break;
case -nt::output:
delete ((nOutput2 *)n);
break;
case nt::bias:
delete ((nBias *)n);
break;
case nt::memory:
delete ((nMemory *)n);
break;
case -nt::memory:
delete ((nMemory2 *)n);
break;
}
type = 0;
}

// Store activation value `in` in the payload.  No-op for bias neurons
// (no x field) and for unknown tags.
void setX(double in)
{

switch (type)
{
case nt::input:
((nInput *)n)->x = in;
break;
case nt::hidden:
((nHidden *)n)->x = in;
break;
case nt::output:
((nOutput *)n)->x = in;
break;
case -nt::input:
((nInput2 *)n)->x = in;
break;
case -nt::hidden:
((nHidden2 *)n)->x = in;
break;
case -nt::output:
((nOutput2 *)n)->x = in;
break;
case nt::memory:
((nMemory *)n)->x = in;
break;
case -nt::memory:
((nMemory2 *)n)->x = in;
break;
}
}

// Set the training target (output neurons only; both variants).
void SetTarget(double in) {
if(type == -nt::output) ((nOutput2 *)n)->target = in;
else if(type == nt::output) ((nOutput *)n)->target = in;
}

// Read the training target; 0.0 for non-output neurons.
double GetTarget() {
if (type == nt::output) return ((nOutput *)n)->target;
else if (type == -nt::output) return ((nOutput2 *)n)->target;
return 0.0;
}

// Pair `with` to this memory neuron (this must be nt::memory/-nt::memory,
// otherwise the call is a no-op).  `target` selects what the memory
// records from an output neuron: its target (true) or its activation
// (false).  If `with` still carries a plain payload it is converted in
// place to the *2 variant: fields copied, tag negated, old payload freed.
// NOTE(review): `delete p` deletes through a void* — formally undefined
// behaviour; it only "works" because the payloads are trivially
// destructible.  Confirm/replace with typed deletes.
void connectmem(neuron *with, bool target)
{
void *p;
switch (type)
{
case nt::memory:
((nMemory *)n)->target = target;
break;
case -nt::memory:
((nMemory2 *)n)->target = target;
break;
default:
return;
}

switch (with->type)
{
case nt::bias:
// Bias neurons cannot be memorised.
return;
case nt::input:
// Convert nInput -> nInput2 (keep the same axons vector).
p = with->n;
with->n = new nInput2;
((nInput2 *)with->n)->axons = ((nInput *)p)->axons;
((nInput2 *)with->n)->mem = this;
((nInput2 *)with->n)->x = ((nInput *)p)->x;
with->type = -with->type;
delete p;
break;
case nt::output:
// Convert nOutput -> nOutput2.
p = with->n;
with->n = new nOutput2;
((nOutput2*)with->n)->mem = this;
((nOutput2*)with->n)->target = ((nOutput *)p)->target;
((nOutput2*)with->n)->x = ((nOutput *)p)->x;
((nOutput2*)with->n)->xsum = ((nOutput *)p)->xsum;
((nOutput2*)with->n)->y = ((nOutput *)p)->y;
with->type = -with->type;
delete p;
break;
case nt::hidden:
// Convert nHidden -> nHidden2 (keep the same axons vector).
p = with->n;
with->n = new nHidden2;
((nHidden2 *)with->n)->mem = this;
((nHidden2 *)with->n)->axons = ((nHidden *)p)->axons;
((nHidden2 *)with->n)->x = ((nHidden *)p)->x;
((nHidden2 *)with->n)->xsum = ((nHidden *)p)->xsum;
((nHidden2 *)with->n)->y = ((nHidden *)p)->y;
with->type = -with->type;
delete p;
break;
case nt::memory:
// Convert nMemory -> nMemory2 (chained memory).
p = with->n;
with->n = new nMemory2;
((nMemory2 *)with->n)->mem = this;
((nMemory2 *)with->n)->target = ((nMemory *)p)->target;
((nMemory2 *)with->n)->x = ((nMemory *)p)->x;
((nMemory2 *)with->n)->axons = ((nMemory *)p)->axons;
with->type = -with->type;
delete p;
break;
// Already a *2 variant: just repoint its memory link at us.
case -nt::input:
((nInput2 *)with->n)->mem = this;
break;
case -nt::hidden:
((nHidden2 *)with->n)->mem = this;
break;
case -nt::output:
((nOutput2 *)with->n)->mem = this;
break;
case -nt::memory:
((nMemory2 *)with->n)->mem = this;
break;
}
}

// Append a weighted connection from this neuron to `with`.  Only hidden
// and output neurons (either variant) are valid targets; only neuron
// kinds that own an axon list can be sources (outputs are skipped by the
// second switch having no case for them).
void connect(neuron *with)
{
switch (with->type)
{
case nt::bias:
return;
case nt::input:
return;
case nt::memory:
return;
case -nt::input:
return;
case -nt::memory:
return;
}

switch (type)
{
case nt::input:
((nInput*)n)->axons->push_back(axon(with));
break;
case nt::hidden:
((nHidden*)n)->axons->push_back(axon(with));
break;
case nt::bias:
((nBias*)n)->axons->push_back(axon(with));
break;
case nt::memory:
((nMemory*)n)->axons->push_back(axon(with));
break;
case -nt::input:
((nInput2*)n)->axons->push_back(axon(with));
break;
case -nt::hidden:
((nHidden2*)n)->axons->push_back(axon(with));
break;
case -nt::memory:
((nMemory2*)n)->axons->push_back(axon(with));
break;
}

}

// Print this neuron's type label to cout (no trailing newline).
void PrintNeuronName()
{
switch (type)
{
case nt::input:
cout << "input";
break;
case nt::hidden:
cout << "hidden";
break;
case nt::bias:
cout << "bias";
break;
case nt::output:
cout << "output";
break;
case -nt::output:
cout << "-output";
break;
case nt::memory:
cout << "memory";
break;
case -nt::input:
cout << "-input";
break;
case -nt::hidden:
cout << "-hidden";
break;
case -nt::memory:
cout << "-memory";
break;
}
}


// Allocate and zero-initialise the payload struct for `Type`.  Only the
// plain (positive) tags are handled here; *2 variants are produced later
// by connectmem().  An unknown tag leaves n == 0.
neuron(char Type)
{
n = 0;
type = Type;
switch (type)
{
case nt::input:
n = new nInput;
((nInput *)n)->x = 0.0;
((nInput *)n)->axons = new vector<axon>();
break;
case nt::hidden:
n = new nHidden;
((nHidden *)n)->x = 0.0;
((nHidden *)n)->xsum = 0.0;
((nHidden *)n)->y = 0.0;
((nHidden *)n)->axons = new vector<axon>();
break;
case nt::output:
n = new nOutput;
((nOutput *)n)->target = 0.0;
((nOutput *)n)->x = 0.0;
((nOutput *)n)->xsum = 0.0;
((nOutput *)n)->y = 0.0;
break;
case nt::bias:
n = new nBias;
((nBias *)n)->axons = new vector<axon>();
break;

case nt::memory:
n = new nMemory;
((nMemory*)n)->target = 0;
((nMemory*)n)->x = 0;
((nMemory *)n)->axons = new vector<axon>();
break;
}

}

// Weight-update step.  For each outgoing axon:
//   delta = nn_eta * x * gradient(target) + nn_alpha * old delta
// (learning-rate step with momentum), then weight += delta.
// Output neurons have no outgoing axons and return immediately.
// The bias case additionally dumps debug information to cout.
void ActivateW()
{
switch (type)
{
case nt::input:
for (int a = 0; a < ((nInput *)n)->axons->size(); a++)
{
(* ((nInput *)n)->axons )[a].delta = nn_eta * ((nInput *)n)->x * (*((nInput *)n)->axons)[a].x->GetY() + (*((nInput *)n)->axons)[a].delta * nn_alpha;
(*((nInput *)n)->axons)[a].weight += (*((nInput *)n)->axons)[a].delta;
}
break;
case nt::hidden:
for (int a = 0; a < ((nHidden *)n)->axons->size(); a++)
{
(*((nHidden *)n)->axons)[a].delta = nn_eta * ((nHidden*)n)->x * (*((nHidden *)n)->axons)[a].x->GetY() + (*((nHidden *)n)->axons)[a].delta * nn_alpha;
(*((nHidden *)n)->axons)[a].weight += (*((nHidden *)n)->axons)[a].delta;
}
break;
case nt::output:
return;
case nt::bias:
// Bias has no stored x; GetX() falls through to its 1.0 default.
for (int a = 0; a < ((nBias *)n)->axons->size(); a++)
{
cout << "target:"; (*((nBias *)n)->axons)[a].x->PrintNeuronName(); cout << endl;
cout << " gradient:" << (*((nBias *)n)->axons)[a].x->GetY() << " ";
cout << "old delta:" << (*((nBias *)n)->axons)[a].delta << endl;
cout << " x:" << GetX() << endl;
(*((nBias *)n)->axons)[a].delta = GetX() * nn_eta * (*((nBias *)n)->axons)[a].x->GetY() + (*((nBias *)n)->axons)[a].delta * nn_alpha;
cout << "detla:" << (*((nBias *)n)->axons)[a].delta << " ";
(*((nBias *)n)->axons)[a].weight += (*((nBias *)n)->axons)[a].delta;
cout << "weight:" << (*((nBias *)n)->axons)[a].weight << endl;
}
break;

case nt::memory:
for (int a = 0; a < ((nMemory *)n)->axons->size(); a++)
{
(*((nMemory *)n)->axons)[a].delta = nn_eta * ((nMemory *)n)->x * (*((nMemory *)n)->axons)[a].x->GetY() + (*((nMemory *)n)->axons)[a].delta * nn_alpha;
(*((nMemory *)n)->axons)[a].weight += (*((nMemory *)n)->axons)[a].delta;
}
break;


case -nt::input:
for (int a = 0; a < ((nInput2 *)n)->axons->size(); a++)
{
(*((nInput2 *)n)->axons)[a].delta = nn_eta * ((nInput2 *)n)->x * (*((nInput2 *)n)->axons)[a].x->GetY() + (*((nInput2 *)n)->axons)[a].delta * nn_alpha;
(*((nInput2 *)n)->axons)[a].weight += (*((nInput2 *)n)->axons)[a].delta;
}
break;
case -nt::hidden:
for (int a = 0; a < ((nHidden2 *)n)->axons->size(); a++)
{
(*((nHidden2 *)n)->axons)[a].delta = nn_eta * ((nHidden2*)n)->x * (*((nHidden2 *)n)->axons)[a].x->GetY() + (*((nHidden2 *)n)->axons)[a].delta * nn_alpha;
(*((nHidden2 *)n)->axons)[a].weight += (*((nHidden2 *)n)->axons)[a].delta;
}
break;
case -nt::memory:
for (int a = 0; a < ((nMemory2 *)n)->axons->size(); a++)
{
(*((nMemory2 *)n)->axons)[a].delta = nn_eta * ((nMemory2 *)n)->x * (*((nMemory2 *)n)->axons)[a].x->GetY() + (*((nMemory2 *)n)->axons)[a].delta * nn_alpha;
(*((nMemory2 *)n)->axons)[a].weight += (*((nMemory2 *)n)->axons)[a].delta;
}
break;

}
}

// Backward pass for one neuron.  (1.0 - x*x) is the tanh derivative
// expressed in terms of the activation.  Hidden: sum of downstream
// weight*gradient, times the derivative.  Output: (target - x) times
// the derivative.  Other types carry no gradient.
void ActivateY()
{
double sumY = 0.0;
switch (type)
{
case nt::hidden:
for (int a = 0; a < ((nHidden *)n)->axons->size(); a++) {
sumY += (*((nHidden *)n)->axons)[a].weight * (*((nHidden *)n)->axons)[a].x->GetY();
}
((nHidden *)n)->y = sumY * (1.0 - ((nHidden *)n)->x * ((nHidden *)n)->x);
break;
case nt::output:
((nOutput *)n)->y = (((nOutput *)n)->target - ((nOutput *)n)->x) * (1.0 - ((nOutput *)n)->x * ((nOutput *)n)->x);
break;
case -nt::hidden:
for (int a = 0; a < ((nHidden2 *)n)->axons->size(); a++) {
sumY += (*((nHidden2 *)n)->axons)[a].weight * (*((nHidden2 *)n)->axons)[a].x->GetY();
}
((nHidden2 *)n)->y = sumY * (1.0 - ((nHidden2 *)n)->x * ((nHidden2 *)n)->x);
break;
case -nt::output:
((nOutput2 *)n)->y = (((nOutput2 *)n)->target - ((nOutput2 *)n)->x) * (1.0 - ((nOutput2 *)n)->x * ((nOutput2 *)n)->x);
break;
}
}

// Forward pass for one neuron: hidden/output squash their accumulated
// xsum through tanh (and reset it); neurons with axon lists then fire
// activation*weight into each target's xsum.  *2 variants additionally
// snapshot their previous activation into the paired memory neuron.
void ActivateX()
{
switch (type)
{
case nt::input:
for (int a = 0; a < ((nInput *)n)->axons->size(); a++) {
(*((nInput *)n)->axons)[a].x->fireX(GetX() * (*((nInput *)n)->axons)[a].weight);
}
break;
case nt::hidden:
((nHidden *)n)->x = tanh(((nHidden *)n)->xsum);
((nHidden *)n)->xsum = 0.0;
for (int a = 0; a < ((nHidden *)n)->axons->size(); a++) {
(*((nHidden *)n)->axons)[a].x->fireX(((nHidden *)n)->x * (*((nHidden *)n)->axons)[a].weight);
}
break;
case nt::output:
((nOutput *)n)->x = tanh(((nOutput *)n)->xsum);
((nOutput *)n)->xsum = 0.0;
return;
case nt::bias:
// Bias fires its raw weight (implicit activation of 1).
for (int a = 0; a < ((nBias *)n)->axons->size(); a++) {
(*((nBias *)n)->axons)[a].x->fireX( (*((nBias *)n)->axons)[a].weight);
}
break;

case nt::memory:
for (int a = 0; a < ((nMemory *)n)->axons->size(); a++) {
(*((nMemory *)n)->axons)[a].x->fireX(((nMemory *)n)->x * (*((nMemory *)n)->axons)[a].weight);
}
break;

case -nt::input:
memSet(((nInput2 *)n)->mem, GetX());
for (int a = 0; a < ((nInput2 *)n)->axons->size(); a++) {
(*((nInput2 *)n)->axons)[a].x->fireX(GetX() * (*((nInput2 *)n)->axons)[a].weight);
}
break;
case -nt::hidden:
memSet(((nHidden2 *)n)->mem, GetX());
((nHidden2 *)n)->x = tanh(((nHidden2 *)n)->xsum);
((nHidden2 *)n)->xsum = 0.0;
for (int a = 0; a < ((nHidden2 *)n)->axons->size(); a++) {
(*((nHidden2 *)n)->axons)[a].x->fireX(((nHidden2 *)n)->x * (*((nHidden2 *)n)->axons)[a].weight);
}
break;
case -nt::output:
// NOTE(review): casts the mem payload to nMemory2* even when the mem
// neuron is a plain nt::memory — works only because nMemory and
// nMemory2 share the same leading field layout; confirm.
if (((nOutput2 *)n)->mem->type == nt::memory || ((nOutput2 *)n)->mem->type == -nt::memory)
{
if (((nMemory2 *)(((nOutput2 *)n)->mem->n))->target) memSet(((nOutput2 *)n)->mem, GetX_TargetEdition());
else memSet(((nOutput2 *)n)->mem, GetX());
}

((nOutput2 *)n)->x = tanh(((nOutput2 *)n)->xsum);
((nOutput2 *)n)->xsum = 0.0;
return;

case -nt::memory:
for (int a = 0; a < ((nMemory2 *)n)->axons->size(); a++) {
(*((nMemory2 *)n)->axons)[a].x->fireX(((nMemory2 *)n)->x * (*((nMemory2 *)n)->axons)[a].weight);
}
break;

default:
return;
}

}

// Store `x` into the memory neuron `target`.  A chained memory (nMemory2)
// first cascades its current value further down its own chain.
void memSet(neuron * target, double x)
{
if (!target) return;
switch (target->type)
{
case nt::memory:
((nMemory *)target->n)->x = x;
break;
case -nt::memory:
target->memSet(((nMemory2 *)target->n)->mem, target->GetX());
((nMemory2 *)target->n)->x = x;
break;

}
}

// Outgoing axon list, or 0 for types without one (outputs, unknown tags).
vector <axon> *GetAxons()
{
switch (type)
{
case nt::input:
return ((nInput *)n)->axons;
case nt::bias:
return ((nBias *)n)->axons;
case nt::hidden:
return ((nHidden *)n)->axons;
case nt::memory:
return ((nMemory *)n)->axons;
case -nt::input:
return ((nInput2 *)n)->axons;
case -nt::hidden:
return ((nHidden2 *)n)->axons;
case -nt::memory:
return ((nMemory2 *)n)->axons;
}
return 0;
}

// Current activation; 1.0 for types without an x field (notably bias).
double GetX()
{
double x; // unused
switch (type)
{
case nt::input:
return ((nInput *)n)->x;
case nt::hidden:
return ((nHidden *)n)->x;
case nt::output:
return ((nOutput *)n)->x;
case nt::memory:
return ((nMemory *)n)->x;
case -nt::input:
return ((nInput2 *)n)->x;
case -nt::hidden:
return ((nHidden2 *)n)->x;
case -nt::output:
return ((nOutput2 *)n)->x;
case -nt::memory:
return ((nMemory2 *)n)->x;
}
return 1.0;
}

// Like GetX(), but output neurons yield their TARGET instead of their
// activation (used when a memory neuron is set to record targets).
double GetX_TargetEdition()
{
double x; // unused
switch (type)
{
case nt::input:
return ((nInput *)n)->x;
case nt::hidden:
return ((nHidden *)n)->x;
case nt::output:
return ((nOutput *)n)->target;
case nt::memory:
return ((nMemory *)n)->x;
case -nt::input:
return ((nInput2 *)n)->x;
case -nt::hidden:
return ((nHidden2 *)n)->x;
case -nt::output:
return ((nOutput2 *)n)->target;
case -nt::memory:
return ((nMemory2 *)n)->x;
}
return 1.0;
}

// Back-propagated gradient; 0.0 for types that carry none.
double GetY()
{
switch (type)
{
case nt::hidden:
return ((nHidden *)n)->y;
case nt::output:
return ((nOutput *)n)->y;
case -nt::hidden:
return ((nHidden2 *)n)->y;
case -nt::output:
return ((nOutput2 *)n)->y;
}
return 0.0;
}

// Accumulate an incoming weighted signal into xsum (hidden/output only;
// other types silently ignore it).
void fireX(double signal)
{
switch (type)
{
case nt::hidden:
((nHidden *)n)->xsum += signal;
break;
case nt::output:
((nOutput*)n)->xsum += signal;
break;
case -nt::hidden:
((nHidden2 *)n)->xsum += signal;
break;
case -nt::output:
((nOutput2*)n)->xsum += signal;
break;
}
}

// Default-constructed neuron: n and type are uninitialised; callers are
// expected to use neuron(char Type) instead.
neuron() {}
~neuron() {}
};
  705.  
  706. class brain
  707. {
  708. public:
  709. layers layers;
  710. void PrintFull()
  711. {
  712. for (int y = 0; y < layers.map.size(); y++)
  713. {
  714. for (int x = 0; x < layers.map[y].size(); x++)
  715. {
  716. cout << "neuron" << x << ":";
  717. switch (layers.map[y][x]->type)
  718. {
  719. case nt::bias:
  720. cout << "bias";
  721. break;
  722. case nt::input:
  723. cout << "input";
  724. break;
  725. case nt::hidden:
  726. cout << "hidden";
  727. break;
  728. case nt::output:
  729. cout << "output";
  730. break;
  731. case nt::memory:
  732. cout << "memory";
  733. break;
  734. case -nt::input:
  735. cout << "-input";
  736. break;
  737. case -nt::hidden:
  738. cout << "-hidden";
  739. break;
  740. case -nt::output:
  741. cout << "-output";
  742. break;
  743. case -nt::memory:
  744. cout << "-memory";
  745. break;
  746. }
  747. cout << " Y:" << layers.map[y][x]->GetY() << " X:" << layers.map[y][x]->GetX() << endl;
  748. if (layers.map[y][x]->GetAxons())
  749. {
  750. for (int a = 0; a < layers.map[y][x]->GetAxons()->size(); a++)
  751. {
  752. cout << " Delta:" << (*layers.map[y][x]->GetAxons())[a].delta << " weight:" << (*layers.map[y][x]->GetAxons())[a].weight;
  753. cout << " TYPE:";
  754. (*layers.map[y][x]->GetAxons())[a].x->PrintNeuronName();
  755. cout << endl;
  756. }
  757. }
  758. }
  759. cout << endl << endl;
  760. }
  761. }
  762.  
  763. void PrintTypes()
  764. {
  765. for (int y = 0; y < layers.map.size(); y++)
  766. {
  767. cout << "layer:" << y << endl;
  768. for (int x = 0; x < layers.map[y].size(); x++)
  769. {
  770. cout << "neuron" << x << ":";
  771. switch (layers.map[y][x]->type)
  772. {
  773. case nt::bias:
  774. cout << "bias";
  775. break;
  776. case nt::input:
  777. cout << "input";
  778. break;
  779. case nt::hidden:
  780. cout << "hidden";
  781. break;
  782. case nt::output:
  783. cout << "output";
  784. break;
  785. case nt::memory:
  786. cout << "memory";
  787. break;
  788. case -nt::input:
  789. cout << "-input";
  790. break;
  791. case -nt::hidden:
  792. cout << "-hidden";
  793. break;
  794. case -nt::output:
  795. cout << "-output";
  796. break;
  797. case -nt::memory:
  798. cout << "-memory";
  799. break;
  800. }
  801. cout << endl;
  802. }
  803. cout << endl << endl;
  804. }
  805. }
  806.  
  807.  
  808. double getTarget(coord n)
  809. {
  810. return GetNeuron(n)->GetTarget();
  811. }
  812.  
  813. double getX(coord n){
  814. return GetNeuron(n)->GetX();
  815. }
  816.  
  817. void setX(coord n, double in)
  818. {
  819. switch (GetNeuron(n)->type)
  820. {
  821. case nt::input:
  822. GetNeuron(n)->setX(in);
  823. break;
  824. case nt::output:
  825. GetNeuron(n)->SetTarget(in);
  826. break;
  827. default:
  828. GetNeuron(n)->setX(in);
  829. break;
  830. }
  831. }
  832.  
  833. neuron *GetNeuron(coord coords)
  834. {
  835. return layers.map[coords.y][coords.x];
  836. }
  837.  
  838. void connectlayers(unsigned short layerwhat, unsigned short layerwith)
  839. {
  840. for (int a = 0; a < layers.map[layerwhat].size(); a++)
  841. {
  842. connect(layers.map[layerwhat][a], layerwith);
  843. }
  844. }
  845.  
  846. void connect(neuron *what, unsigned short layer)
  847. {
  848. for (int a = 0; a < layers.map[layer].size(); a++)
  849. {
  850. connect(what, layers.map[layer][a]);
  851. }
  852. }
  853.  
  854. void connect(coord what, unsigned short layer)
  855. {
  856. for (int a = 0; a < layers.map[layer].size(); a++)
  857. {
  858. connect(what, layers.map[layer][a]);
  859. }
  860. }
  861.  
  862. void connectmemTarget(coord what, unsigned short ywith, unsigned short xwith)
  863. {
  864. coord b;
  865. b.x = xwith;
  866. b.y = ywith;
  867. connectmem(what, b, 1);
  868. }
  869.  
  870. void connectmemTarget(unsigned short ywhat, unsigned short xwhat, coord with)
  871. {
  872. coord a;
  873. a.x = xwhat;
  874. a.y = ywhat;
  875. connectmem(a, with, 1);
  876. }
  877.  
  878. void connectmem(coord what, unsigned short ywith, unsigned short xwith)
  879. {
  880. coord b;
  881. b.x = xwith;
  882. b.y = ywith;
  883. connectmem(what, b);
  884. }
  885.  
  886. void connectmem(unsigned short ywhat, unsigned short xwhat, coord with)
  887. {
  888. coord a;
  889. a.x = xwhat;
  890. a.y = ywhat;
  891. connectmem(a, with);
  892. }
  893.  
  894. void connect(coord what, unsigned short ywith, unsigned short xwith)
  895. {
  896. coord b;
  897. b.x = xwith;
  898. b.y = ywith;
  899. GetNeuron(what)->connect(GetNeuron(b));
  900. }
  901.  
  902. void connect(unsigned short ywhat, unsigned short xwhat, coord with)
  903. {
  904. coord a;
  905. a.x = xwhat;
  906. a.y = ywhat;
  907. GetNeuron(a)->connect(GetNeuron(with));
  908. }
  909.  
  910. void connect(unsigned short ywhat, unsigned short xwhat, unsigned short ywith, unsigned short xwith)
  911. {
  912. coord a;
  913. coord b;
  914. a.x = xwhat;
  915. a.y = ywhat;
  916. b.x = xwith;
  917. b.y = ywith;
  918. GetNeuron(a)->connect(GetNeuron(b));
  919. }
  920.  
  921. void connect( coord what, coord with)
  922. {
  923. GetNeuron(what)->connect(GetNeuron(with));
  924. }
  925.  
  926. void connectmem(coord what, coord with, bool target = false)
  927. {
  928.  
  929. GetNeuron(what)->connectmem(GetNeuron(with), target);
  930. }
  931.  
  932. void connect(neuron *what, neuron *with)
  933. {
  934. what->connect(with);
  935. }
  936.  
  937. void connect(coord what, neuron *with)
  938. {
  939. GetNeuron(what)->connect(with);
  940. }
  941.  
  942. void AddNeuron(unsigned short layer, char type, unsigned short count)
  943. {
  944. for (int a = 0; a < count; a++) AddNeuron(layer, type);
  945. }
  946.  
  947. coord AddNeuron(unsigned short layer, char type)
  948. {
  949. if (layer >= layers.map.size())
  950. {
  951. unsigned short needed = (layer - layers.map.size()) + 1;
  952. for (int a = 0; a < needed; a++) layers.map.push_back(vector<neuron *>());
  953. }
  954.  
  955. layers.map[layer].push_back(new neuron(type));
  956. coord out( layer, layers.map[layer].size() - 1 );
  957. return out;
  958. }
  959.  
  960. void ActivateX()
  961. {
  962. for (int y = 0; y < layers.map.size(); y++) {
  963. for (int x = 0; x < layers.map[y].size(); x++) {
  964. layers.map[y][x]->ActivateX();
  965. }
  966. }
  967. }
  968.  
  969. void ActivateY()
  970. {
  971. for (int y = layers.map.size() - 1; y >= 0; y--) {
  972. for (int x = 0; x < layers.map[y].size(); x++) {
  973. layers.map[y][x]->ActivateY();
  974. }
  975. }
  976. // PrintFull();
  977. for (int y = layers.map.size() - 1; y >= 0; y--) {
  978. for (int x = 0; x < layers.map[y].size(); x++) {
  979. layers.map[y][x]->ActivateW();
  980. }
  981. }
  982. }
  983.  
  984. brain() {}
  985. ~brain() {}
  986. };
  987.  
  988.  
  989.  
  990. }
  991. #endif
Advertisement
Add Comment
Please, Sign In to add comment