  1. test and training data loaded
  2. ('train input: ', 100)
  3. ('train output: ', 100)
  4. ('trains_length: ', 100)
  5. ('sequence: ', array([[ 1.50000000e+01],
  6. [ 1.69000000e+02],
  7. [ 1.97500000e+03],
  8. [ 2.30450000e+04],
  9. [ 2.68983000e+05],
  10. [ 3.13952900e+06],
  11. [ 3.66442430e+07],
  12. [ 4.27707523e+08],
  13. [ 4.99215480e+09],
  14. [ 5.82678772e+10],
  15. [ 6.80096202e+11],
  16. [ 7.93800746e+12],
  17. [ 9.26515428e+13],
  18. [ 1.08141853e+15],
  19. [ 1.26221972e+16],
  20. [ 1.47324886e+17],
  21. [ 0.00000000e+00],
  22. [ 0.00000000e+00],
  23. [ 0.00000000e+00],
  24. [ 0.00000000e+00],
  25. [ 0.00000000e+00],
  26. [ 0.00000000e+00],
  27. [ 0.00000000e+00],
  28. [ 0.00000000e+00],
  29. [ 0.00000000e+00],
  30. [ 0.00000000e+00],
  31. [ 0.00000000e+00],
  32. [ 0.00000000e+00],
  33. [ 0.00000000e+00],
  34. [ 0.00000000e+00],
  35. [ 0.00000000e+00],
  36. [ 0.00000000e+00],
  37. [ 0.00000000e+00],
  38. [ 0.00000000e+00],
  39. [ 0.00000000e+00],
  40. [ 0.00000000e+00],
  41. [ 0.00000000e+00],
  42. [ 0.00000000e+00],
  43. [ 0.00000000e+00],
  44. [ 0.00000000e+00],
  45. [ 0.00000000e+00],
  46. [ 0.00000000e+00],
  47. [ 0.00000000e+00],
  48. [ 0.00000000e+00],
  49. [ 0.00000000e+00],
  50. [ 0.00000000e+00],
  51. [ 0.00000000e+00],
  52. [ 0.00000000e+00],
  53. [ 0.00000000e+00],
  54. [ 0.00000000e+00],
  55. [ 0.00000000e+00],
  56. [ 0.00000000e+00],
  57. [ 0.00000000e+00],
  58. [ 0.00000000e+00],
  59. [ 0.00000000e+00],
  60. [ 0.00000000e+00],
  61. [ 0.00000000e+00],
  62. [ 0.00000000e+00],
  63. [ 0.00000000e+00],
  64. [ 0.00000000e+00],
  65. [ 0.00000000e+00],
  66. [ 0.00000000e+00],
  67. [ 0.00000000e+00],
  68. [ 0.00000000e+00],
  69. [ 0.00000000e+00],
  70. [ 0.00000000e+00],
  71. [ 0.00000000e+00],
  72. [ 0.00000000e+00],
  73. [ 0.00000000e+00],
  74. [ 0.00000000e+00],
  75. [ 0.00000000e+00],
  76. [ 0.00000000e+00],
  77. [ 0.00000000e+00],
  78. [ 0.00000000e+00],
  79. [ 0.00000000e+00],
  80. [ 0.00000000e+00],
  81. [ 0.00000000e+00],
  82. [ 0.00000000e+00],
  83. [ 0.00000000e+00],
  84. [ 0.00000000e+00],
  85. [ 0.00000000e+00],
  86. [ 0.00000000e+00],
  87. [ 0.00000000e+00],
  88. [ 0.00000000e+00],
  89. [ 0.00000000e+00],
  90. [ 0.00000000e+00],
  91. [ 0.00000000e+00],
  92. [ 0.00000000e+00],
  93. [ 0.00000000e+00],
  94. [ 0.00000000e+00],
  95. [ 0.00000000e+00],
  96. [ 0.00000000e+00],
  97. [ 0.00000000e+00],
  98. [ 0.00000000e+00],
  99. [ 0.00000000e+00],
  100. [ 0.00000000e+00],
  101. [ 0.00000000e+00],
  102. [ 0.00000000e+00],
  103. [ 0.00000000e+00],
  104. [ 0.00000000e+00],
  105. [ 0.00000000e+00],
  106. [ 0.00000000e+00],
  107. [ 0.00000000e+00],
  108. [ 0.00000000e+00],
  109. [ 0.00000000e+00],
  110. [ 0.00000000e+00],
  111. [ 0.00000000e+00],
  112. [ 0.00000000e+00],
  113. [ 0.00000000e+00],
  114. [ 0.00000000e+00],
  115. [ 0.00000000e+00],
  116. [ 0.00000000e+00],
  117. [ 0.00000000e+00],
  118. [ 0.00000000e+00],
  119. [ 0.00000000e+00],
  120. [ 0.00000000e+00],
  121. [ 0.00000000e+00],
  122. [ 0.00000000e+00],
  123. [ 0.00000000e+00],
  124. [ 0.00000000e+00],
  125. [ 0.00000000e+00],
  126. [ 0.00000000e+00],
  127. [ 0.00000000e+00],
  128. [ 0.00000000e+00],
  129. [ 0.00000000e+00],
  130. [ 0.00000000e+00],
  131. [ 0.00000000e+00],
  132. [ 0.00000000e+00],
  133. [ 0.00000000e+00],
  134. [ 0.00000000e+00],
  135. [ 0.00000000e+00],
  136. [ 0.00000000e+00],
  137. [ 0.00000000e+00],
  138. [ 0.00000000e+00],
  139. [ 0.00000000e+00],
  140. [ 0.00000000e+00],
  141. [ 0.00000000e+00],
  142. [ 0.00000000e+00],
  143. [ 0.00000000e+00],
  144. [ 0.00000000e+00],
  145. [ 0.00000000e+00],
  146. [ 0.00000000e+00],
  147. [ 0.00000000e+00],
  148. [ 0.00000000e+00],
  149. [ 0.00000000e+00],
  150. [ 0.00000000e+00],
  151. [ 0.00000000e+00],
  152. [ 0.00000000e+00],
  153. [ 0.00000000e+00],
  154. [ 0.00000000e+00],
  155. [ 0.00000000e+00],
  156. [ 0.00000000e+00],
  157. [ 0.00000000e+00],
  158. [ 0.00000000e+00],
  159. [ 0.00000000e+00],
  160. [ 0.00000000e+00],
  161. [ 0.00000000e+00],
  162. [ 0.00000000e+00],
  163. [ 0.00000000e+00],
  164. [ 0.00000000e+00],
  165. [ 0.00000000e+00],
  166. [ 0.00000000e+00],
  167. [ 0.00000000e+00],
  168. [ 0.00000000e+00],
  169. [ 0.00000000e+00],
  170. [ 0.00000000e+00],
  171. [ 0.00000000e+00],
  172. [ 0.00000000e+00],
  173. [ 0.00000000e+00],
  174. [ 0.00000000e+00],
  175. [ 0.00000000e+00],
  176. [ 0.00000000e+00],
  177. [ 0.00000000e+00],
  178. [ 0.00000000e+00],
  179. [ 0.00000000e+00],
  180. [ 0.00000000e+00],
  181. [ 0.00000000e+00],
  182. [ 0.00000000e+00],
  183. [ 0.00000000e+00],
  184. [ 0.00000000e+00],
  185. [ 0.00000000e+00],
  186. [ 0.00000000e+00],
  187. [ 0.00000000e+00],
  188. [ 0.00000000e+00],
  189. [ 0.00000000e+00],
  190. [ 0.00000000e+00],
  191. [ 0.00000000e+00],
  192. [ 0.00000000e+00],
  193. [ 0.00000000e+00],
  194. [ 0.00000000e+00],
  195. [ 0.00000000e+00],
  196. [ 0.00000000e+00],
  197. [ 0.00000000e+00],
  198. [ 0.00000000e+00],
  199. [ 0.00000000e+00],
  200. [ 0.00000000e+00],
  201. [ 0.00000000e+00],
  202. [ 0.00000000e+00],
  203. [ 0.00000000e+00]]))
  204. ('sequence next: ', array([ 1.71955974e+18]))
  205. ('bias: ', TensorShape([Dimension(10)]))
  206. ('weight: ', TensorShape([Dimension(10), Dimension(24), Dimension(1)]))
  207. ('targets ', TensorShape([Dimension(10), Dimension(1)]))
  208. ('RNN input ', <type 'list'>)
  209. ('RNN input len()', 10)
  210. ('RNN input[0] ', TensorShape([Dimension(1), Dimension(199)]))
  211. ('prediction ', TensorShape([Dimension(10), Dimension(1), Dimension(10)]))
  212. ('train input: ', (100, 199, 1))
  213. ('train output: ', (100, 1))
  214. ('test input: ', (100, 199, 1))
  215. ('test output: ', (100, 1))
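
Editor's note: the training sequence printed above runs from 1.5e1 up to ~1.5e17 (with a target of ~1.7e18) and is zero-padded out to length 199, yet it is fed to the network as raw float32 values. Magnitudes that large overflow float32 activations within a few matrix multiplies, which matches the weight blow-up later in this log. Below is a minimal sketch of one common workaround, log-scaling the values before training and inverting the transform on the way out; the names are illustrative, not taken from the original script:

import numpy as np

def log_scale(seq, eps=1.0):
    # Map raw magnitudes (1e1 .. 1e17) onto a small, well-conditioned range.
    return np.log10(np.asarray(seq, dtype=np.float64) + eps)

def inverse_log_scale(scaled, eps=1.0):
    # Undo log_scale so predictions are comparable to the raw targets again.
    return np.power(10.0, scaled) - eps

raw = [1.5e1, 1.69e2, 1.975e3, 2.3045e4]   # first few values from the log above
scaled = log_scale(raw)                    # roughly 1.2 .. 4.4
assert np.allclose(inverse_log_scale(scaled), raw)
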
  216. ('count ',)
  217. ('eval w: ', array([[[ 0.6780529 ],
  218. [-0.62772405],
  219. [ 1.43263149],
  220. [-0.06046852],
  221. [-0.40063885],
  222. [-0.63365704],
  223. [ 0.32900822],
  224. [-1.56359458],
  225. [-1.4543438 ],
  226. [ 0.29543656],
  227. [ 0.11557844],
  228. [ 0.06433676],
  229. [-0.93673533],
  230. [ 0.37765288],
  231. [ 1.85943401],
  232. [ 0.69051743],
  233. [ 1.48547053],
  234. [ 1.21398771],
  235. [ 1.16389644],
  236. [-0.47306699],
  237. [-1.04250693],
  238. [ 0.30758113],
  239. [ 1.44136548],
  240. [-0.16404566]],
  241.  
  242. [[ 1.09186888],
  243. [ 0.61953568],
  244. [ 0.47149807],
  245. [ 1.05650818],
  246. [ 0.1497826 ],
  247. [ 0.21729505],
  248. [-0.23989114],
  249. [ 0.49522603],
  250. [-0.05506746],
  251. [-0.76100421],
  252. [-0.03572866],
  253. [-0.77177012],
  254. [ 0.14625576],
  255. [ 0.26402348],
  256. [ 0.15476602],
  257. [-0.45487222],
  258. [-0.81681985],
  259. [ 0.95228022],
  260. [ 0.75505209],
  261. [ 0.24045894],
  262. [-1.99094367],
  263. [ 0.86331189],
  264. [-1.25572407],
  265. [-0.15769085]],
  266.  
  267. [[-0.2170978 ],
  268. [ 0.99359363],
  269. [ 0.08329104],
  270. [-0.44589433],
  271. [-0.0074301 ],
  272. [ 0.43931434],
  273. [-0.89878929],
  274. [-0.00597486],
  275. [-1.0550462 ],
  276. [ 1.26670015],
  277. [-1.49008381],
  278. [ 0.25823355],
  279. [ 0.91044253],
  280. [ 1.30052471],
  281. [-0.07370567],
  282. [-1.59677112],
  283. [-0.24343117],
  284. [ 0.55244446],
  285. [-0.47574499],
  286. [-0.75210196],
  287. [-1.11500525],
  288. [ 1.14440048],
  289. [-0.16253868],
  290. [ 0.81475759]],
  291.  
  292. [[-0.52553755],
  293. [-0.25045747],
  294. [ 0.51028144],
  295. [ 0.85520238],
  296. [ 0.24492969],
  297. [ 0.24745861],
  298. [-0.49923328],
  299. [-0.32793853],
  300. [ 0.4918271 ],
  301. [ 0.94357324],
  302. [-1.43645036],
  303. [ 1.28429878],
  304. [-0.02717163],
  305. [ 0.45429832],
  306. [-0.3894603 ],
  307. [-0.84764457],
  308. [ 0.58401638],
  309. [ 0.63909745],
  310. [ 0.91186684],
  311. [-0.26117355],
  312. [-0.35472623],
  313. [ 0.30447119],
  314. [-0.28932446],
  315. [-0.29642314]],
  316.  
  317. [[-0.62272668],
  318. [ 0.17077157],
  319. [-1.15703702],
  320. [-0.34692514],
  321. [-0.49770269],
  322. [ 1.3127079 ],
  323. [ 0.33837658],
  324. [ 0.11421093],
  325. [ 0.09630407],
  326. [-0.61225772],
  327. [-0.42042875],
  328. [-0.66095632],
  329. [ 1.74599814],
  330. [-0.39623511],
  331. [-1.1621114 ],
  332. [ 0.3944903 ],
  333. [-0.52521384],
  334. [ 1.85385013],
  335. [-0.20027749],
  336. [-0.12721874],
  337. [ 0.30783302],
  338. [ 0.66958702],
  339. [ 0.80242592],
  340. [-0.19398098]],
  341.  
  342. [[ 0.68718916],
  343. [-0.89191109],
  344. [-1.01574671],
  345. [ 0.46177045],
  346. [-0.76428401],
  347. [-1.6139276 ],
  348. [ 0.25495371],
  349. [-0.66821688],
  350. [ 0.92095137],
  351. [-1.24905705],
  352. [ 0.23799349],
  353. [ 0.11558618],
  354. [ 0.55710983],
  355. [ 0.28203514],
  356. [ 1.57006788],
  357. [ 0.30440497],
  358. [-0.02641625],
  359. [-0.10577686],
  360. [-1.97824872],
  361. [ 0.57219768],
  362. [ 0.31783292],
  363. [-0.68342268],
  364. [ 0.39676824],
  365. [-0.61900085]],
  366.  
  367. [[-1.51109111],
  368. [ 1.20024538],
  369. [-1.06665683],
  370. [-0.57676172],
  371. [ 0.48849913],
  372. [ 1.16882837],
  373. [-0.4061963 ],
  374. [-0.10978858],
  375. [ 0.01942817],
  376. [-1.05497432],
  377. [-1.13054359],
  378. [-1.91514218],
  379. [-0.74483579],
  380. [ 0.97409779],
  381. [ 1.83105016],
  382. [-1.1888237 ],
  383. [ 0.06629396],
  384. [-0.32451561],
  385. [ 0.66307342],
  386. [ 1.58056164],
  387. [-0.59118527],
  388. [ 0.48737472],
  389. [-0.45447165],
  390. [ 1.16384733]],
  391.  
  392. [[-0.19243515],
  393. [ 0.8973335 ],
  394. [-0.24837333],
  395. [-0.57183516],
  396. [-0.43124622],
  397. [-0.30850297],
  398. [ 0.27571136],
  399. [ 0.1440938 ],
  400. [ 0.87737834],
  401. [-0.46938181],
  402. [ 1.76197755],
  403. [-1.02972758],
  404. [ 0.97680008],
  405. [ 0.57078886],
  406. [ 1.75641572],
  407. [ 1.71081495],
  408. [-0.50339961],
  409. [ 0.47870159],
  410. [-0.12870139],
  411. [-0.36684793],
  412. [ 1.26416111],
  413. [-0.58092874],
  414. [-1.2828002 ],
  415. [-0.10664451]],
  416.  
  417. [[ 0.1369158 ],
  418. [-0.20918509],
  419. [ 0.41935548],
  420. [-0.34970853],
  421. [-1.23928714],
  422. [ 0.68425214],
  423. [ 0.44757187],
  424. [-0.51022202],
  425. [ 0.83621401],
  426. [ 0.69877982],
  427. [-0.19111311],
  428. [-0.23446049],
  429. [-0.25964534],
  430. [ 0.58278507],
  431. [-0.48400652],
  432. [-1.74262011],
  433. [-0.29251724],
  434. [ 0.44853559],
  435. [-0.1703219 ],
  436. [-0.50700468],
  437. [ 1.12362754],
  438. [-0.37175286],
  439. [ 0.73058009],
  440. [ 0.85413861]],
  441.  
  442. [[ 0.40156969],
  443. [ 1.70107245],
  444. [ 0.39195645],
  445. [-1.35192084],
  446. [-0.36767882],
  447. [-1.11448956],
  448. [-1.23878682],
  449. [ 1.13503706],
  450. [ 0.01929384],
  451. [-0.3215695 ],
  452. [-0.20471331],
  453. [ 1.01068878],
  454. [-1.00549078],
  455. [ 0.42239895],
  456. [-1.01143742],
  457. [-0.58622301],
  458. [ 1.82871997],
  459. [ 1.44895315],
  460. [ 0.31908533],
  461. [ 0.56530863],
  462. [ 0.82414061],
  463. [ 1.25701332],
  464. [-0.40722558],
  465. [-0.94708872]]], dtype=float32))
  466. ('count ',)
  467. ('result: ', <tf.Tensor 'OutputData:0' shape=(10, 1) dtype=float32>)
  468. ('result len: ', TensorShape([Dimension(10), Dimension(1)]))
  469. ('prediction: ', <tf.Tensor 'Print_3:0' shape=(10, 1, 10) dtype=float32>)
  470. ('prediction len: ', TensorShape([Dimension(10), Dimension(1), Dimension(10)]))
  471. Validation cost: 0.0, on Epoch 0
  472. ('eval w: ', array([[[ -1.76794586e+36],
  473. [ -6.27724051e-01],
  474. [ 1.43263149e+00],
  475. [ -6.04685172e-02],
  476. [ -4.00638849e-01],
  477. [ -1.76794586e+36],
  478. [ 3.29008222e-01],
  479. [ -1.56359458e+00],
  480. [ -1.45434380e+00],
  481. [ -1.76794586e+36],
  482. [ 1.15578435e-01],
  483. [ 6.43367618e-02],
  484. [ -1.76794586e+36],
  485. [ 3.77652884e-01],
  486. [ 1.85943401e+00],
  487. [ 6.90517426e-01],
  488. [ 1.48547053e+00],
  489. [ 1.03630001e+36],
  490. [ 7.56502506e+22],
  491. [ -4.73066986e-01],
  492. [ -1.76794586e+36],
  493. [ 1.76794586e+36],
  494. [ 1.44136548e+00],
  495. [ -1.64045662e-01]],
  496.  
  497. [[ -2.23786980e+36],
  498. [ 6.19535685e-01],
  499. [ 4.71498072e-01],
  500. [ 1.05650818e+00],
  501. [ 1.49782598e-01],
  502. [ -2.23786980e+36],
  503. [ -2.39891142e-01],
  504. [ 1.76794586e+36],
  505. [ -5.50674610e-02],
  506. [ -1.76794586e+36],
  507. [ -3.57286558e-02],
  508. [ -7.71770120e-01],
  509. [ -2.23786980e+36],
  510. [ 2.64023483e-01],
  511. [ 1.54766023e-01],
  512. [ -4.54872221e-01],
  513. [ -8.16819847e-01],
  514. [ 9.52280223e-01],
  515. [ 1.76794586e+36],
  516. [ 2.40458935e-01],
  517. [ 1.76794586e+36],
  518. [ 8.63311887e-01],
  519. [ 1.76794586e+36],
  520. [ -1.76794586e+36]],
  521.  
  522. [[ -2.17097804e-01],
  523. [ 1.76794586e+36],
  524. [ -1.76794586e+36],
  525. [ -4.45894331e-01],
  526. [ -7.43009849e-03],
  527. [ -2.23786980e+36],
  528. [ 1.76794586e+36],
  529. [ -5.97485714e-03],
  530. [ -1.76794586e+36],
  531. [ 1.26670015e+00],
  532. [ -1.76794586e+36],
  533. [ 2.58233547e-01],
  534. [ -2.23786980e+36],
  535. [ 1.30052471e+00],
  536. [ -7.37056658e-02],
  537. [ -1.59677112e+00],
  538. [ -2.43431166e-01],
  539. [ 5.52444458e-01],
  540. [ -4.75744992e-01],
  541. [ -7.52101958e-01],
  542. [ -1.11500525e+00],
  543. [ 1.14440048e+00],
  544. [ 1.76794586e+36],
  545. [ 8.14757586e-01]],
  546.  
  547. [[ -5.25537550e-01],
  548. [ 2.23786980e+36],
  549. [ -2.23786980e+36],
  550. [ 8.55202377e-01],
  551. [ 2.44929686e-01],
  552. [ -1.76794586e+36],
  553. [ -4.99233276e-01],
  554. [ 2.23786980e+36],
  555. [ 4.91827101e-01],
  556. [ 1.76794586e+36],
  557. [ -1.76794586e+36],
  558. [ 1.28429878e+00],
  559. [ -2.23786980e+36],
  560. [ 4.54298317e-01],
  561. [ -3.89460295e-01],
  562. [ 1.76794586e+36],
  563. [ 5.84016383e-01],
  564. [ 6.39097452e-01],
  565. [ 9.11866844e-01],
  566. [ -2.61173546e-01],
  567. [ -3.54726225e-01],
  568. [ 3.04471195e-01],
  569. [ -2.89324462e-01],
  570. [ 1.76794586e+36]],
  571.  
  572. [[ 4.44404945e+35],
  573. [ -1.38045421e+32],
  574. [ -1.71466857e+36],
  575. [ -6.72560396e+31],
  576. [ -4.00305049e+32],
  577. [ -1.21160016e+36],
  578. [ -8.43221135e+35],
  579. [ 2.24263356e+35],
  580. [ -8.71221892e+34],
  581. [ -1.75529107e+36],
  582. [ 7.43133385e+35],
  583. [ 1.02741544e+36],
  584. [ -2.22468862e+36],
  585. [ -4.22656715e+32],
  586. [ 3.70073293e+33],
  587. [ 4.92692965e+35],
  588. [ -6.96827931e+35],
  589. [ 2.21541924e+36],
  590. [ 2.11090170e+32],
  591. [ 8.80682824e+34],
  592. [ -1.37917438e+36],
  593. [ 1.29158147e+35],
  594. [ 3.69604256e+34],
  595. [ -1.55385141e+32]],
  596.  
  597. [[ 1.93985323e+36],
  598. [ -8.91911089e-01],
  599. [ -1.01574671e+00],
  600. [ -7.77630527e+32],
  601. [ -7.64284015e-01],
  602. [ -1.76794586e+36],
  603. [ 2.54953712e-01],
  604. [ -6.68216884e-01],
  605. [ 1.76794586e+36],
  606. [ -1.24905705e+00],
  607. [ 2.37993494e-01],
  608. [ 1.02776262e+36],
  609. [ 5.57109833e-01],
  610. [ 2.82035142e-01],
  611. [ -3.01635116e+31],
  612. [ 3.04404974e-01],
  613. [ -1.13939502e+36],
  614. [ 1.76794586e+36],
  615. [ -1.97824872e+00],
  616. [ 2.23705375e+36],
  617. [ -2.16687123e+36],
  618. [ 1.95276077e+36],
  619. [ 3.96768242e-01],
  620. [ -6.19000852e-01]],
  621.  
  622. [[ -1.51109111e+00],
  623. [ -1.76794586e+36],
  624. [ -1.06665683e+00],
  625. [ -5.76761723e-01],
  626. [ 4.88499135e-01],
  627. [ 1.16882837e+00],
  628. [ -4.06196296e-01],
  629. [ -1.09788582e-01],
  630. [ 1.94281694e-02],
  631. [ -1.76794586e+36],
  632. [ -1.13054359e+00],
  633. [ 1.02776262e+36],
  634. [ -7.44835794e-01],
  635. [ 9.74097788e-01],
  636. [ 1.76794586e+36],
  637. [ -1.76794586e+36],
  638. [ 6.62939623e-02],
  639. [ -3.24515611e-01],
  640. [ 6.63073421e-01],
  641. [ -1.76794586e+36],
  642. [ -2.29984603e+36],
  643. [ 5.14316708e+35],
  644. [ -4.54471648e-01],
  645. [ 1.16384733e+00]],
  646.  
  647. [[ 1.25484313e+36],
  648. [ 5.47351078e+34],
  649. [ 1.54362626e+35],
  650. [ 6.12608632e+35],
  651. [ 1.40672633e+35],
  652. [ -5.31255729e+34],
  653. [ -9.57406030e+35],
  654. [ -7.89830850e+34],
  655. [ 1.55279820e+34],
  656. [ -4.08534434e+35],
  657. [ -3.83596989e+34],
  658. [ -3.52012113e+35],
  659. [ -5.37084426e+35],
  660. [ 1.13723421e+35],
  661. [ 8.45152004e+35],
  662. [ -1.56734034e+35],
  663. [ 5.01022014e+34],
  664. [ 1.12598700e+36],
  665. [ 1.44813025e+34],
  666. [ -5.56660675e+35],
  667. [ -2.84954545e+35],
  668. [ -2.82103341e+35],
  669. [ 2.84528733e+34],
  670. [ 2.71685709e+35]],
  671.  
  672. [[ -1.76794586e+36],
  673. [ -2.09185094e-01],
  674. [ -1.69298176e+36],
  675. [ -6.86711408e+24],
  676. [ -1.23928714e+00],
  677. [ -1.05993331e+33],
  678. [ 4.47571874e-01],
  679. [ 1.48852984e+36],
  680. [ 8.36214006e-01],
  681. [ -1.69664893e+03],
  682. [ -7.57821202e-02],
  683. [ -4.38656506e+35],
  684. [ -1.76794586e+36],
  685. [ 5.82785070e-01],
  686. [ -4.84006524e-01],
  687. [ -1.74262011e+00],
  688. [ 1.76794586e+36],
  689. [ 3.10740086e+30],
  690. [ -1.70321897e-01],
  691. [ -2.49787914e+20],
  692. [ 1.12362754e+00],
  693. [ -5.07394741e+35],
  694. [ 7.30580091e-01],
  695. [ 8.54138613e-01]],
  696.  
  697. [[ 4.01569694e-01],
  698. [ 3.81988312e+02],
  699. [ -1.69298176e+36],
  700. [ 1.76794586e+36],
  701. [ -3.67678821e-01],
  702. [ -1.76794586e+36],
  703. [ -1.23878682e+00],
  704. [ 1.13503706e+00],
  705. [ 1.92938428e-02],
  706. [ -3.21569502e-01],
  707. [ -2.04713315e-01],
  708. [ 1.01068878e+00],
  709. [ 1.76794586e+36],
  710. [ 4.22398955e-01],
  711. [ -1.01143742e+00],
  712. [ -1.76794586e+36],
  713. [ 1.76794586e+36],
  714. [ -1.76794586e+36],
  715. [ 3.19085330e-01],
  716. [ 5.65308630e-01],
  717. [ -2.23786980e+36],
  718. [ 1.25701332e+00],
  719. [ -4.07225579e-01],
  720. [ -1.76794586e+36]]], dtype=float32))
  721. ('count ',)
  722. ('result: ', <tf.Tensor 'OutputData:0' shape=(10, 1) dtype=float32>)
  723. ('result len: ', TensorShape([Dimension(10), Dimension(1)]))
  724. ('prediction: ', <tf.Tensor 'Print_3:0' shape=(10, 1, 10) dtype=float32>)
  725. ('prediction len: ', TensorShape([Dimension(10), Dimension(1), Dimension(10)]))
  726. Validation cost: 0.0, on Epoch 0
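
Editor's note: the weights that were O(1) at the first evaluation have reached roughly +/-1.8e36 by this one, while the status line still reads "Validation cost: 0.0, on Epoch 0", so the printed cost may not come from the evaluated loss tensor (note also that the prediction tensor is (10, 1, 10) while the targets are (10, 1)). A minimal sketch of gradient-norm clipping, the usual way to keep individual updates bounded; the 'cost' tensor and the hyperparameters here are assumptions, not taken from the original code:

import tensorflow as tf

optimizer = tf.train.AdamOptimizer(learning_rate=1e-3)
params = tf.trainable_variables()
grads = tf.gradients(cost, params)                    # 'cost' is the training loss tensor (assumed)
clipped, _ = tf.clip_by_global_norm(grads, clip_norm=5.0)
train_op = optimizer.apply_gradients(zip(clipped, params))
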
  727. ('eval w: ', array([[[ nan],
  728. [ nan],
  729. [ nan],
  730. [ nan],
  731. [ nan],
  732. [ nan],
  733. [ nan],
  734. [ nan],
  735. [ nan],
  736. [ nan],
  737. [ nan],
  738. [ nan],
  739. [ nan],
  740. [ nan],
  741. [ nan],
  742. [ nan],
  743. [ nan],
  744. [ nan],
  745. [ nan],
  746. [ nan],
  747. [ nan],
  748. [ nan],
  749. [ nan],
  750. [ nan]],
  751.  
  752. [[ nan],
  753. [ nan],
  754. [ nan],
  755. [ nan],
  756. [ nan],
  757. [ nan],
  758. [ nan],
  759. [ nan],
  760. [ nan],
  761. [ nan],
  762. [ nan],
  763. [ nan],
  764. [ nan],
  765. [ nan],
  766. [ nan],
  767. [ nan],
  768. [ nan],
  769. [ nan],
  770. [ nan],
  771. [ nan],
  772. [ nan],
  773. [ nan],
  774. [ nan],
  775. [ nan]],
  776.  
  777. [[ nan],
  778. [ nan],
  779. [ nan],
  780. [ nan],
  781. [ nan],
  782. [ nan],
  783. [ nan],
  784. [ nan],
  785. [ nan],
  786. [ nan],
  787. [ nan],
  788. [ nan],
  789. [ nan],
  790. [ nan],
  791. [ nan],
  792. [ nan],
  793. [ nan],
  794. [ nan],
  795. [ nan],
  796. [ nan],
  797. [ nan],
  798. [ nan],
  799. [ nan],
  800. [ nan]],
  801.  
  802. [[ nan],
  803. [ nan],
  804. [ nan],
  805. [ nan],
  806. [ nan],
  807. [ nan],
  808. [ nan],
  809. [ nan],
  810. [ nan],
  811. [ nan],
  812. [ nan],
  813. [ nan],
  814. [ nan],
  815. [ nan],
  816. [ nan],
  817. [ nan],
  818. [ nan],
  819. [ nan],
  820. [ nan],
  821. [ nan],
  822. [ nan],
  823. [ nan],
  824. [ nan],
  825. [ nan]],
  826.  
  827. [[ nan],
  828. [ nan],
  829. [ nan],
  830. [ nan],
  831. [ nan],
  832. [ nan],
  833. [ nan],
  834. [ nan],
  835. [ nan],
  836. [ nan],
  837. [ nan],
  838. [ nan],
  839. [ nan],
  840. [ nan],
  841. [ nan],
  842. [ nan],
  843. [ nan],
  844. [ nan],
  845. [ nan],
  846. [ nan],
  847. [ nan],
  848. [ nan],
  849. [ nan],
  850. [ nan]],
  851.  
  852. [[ nan],
  853. [ nan],
  854. [ nan],
  855. [ nan],
  856. [ nan],
  857. [ nan],
  858. [ nan],
  859. [ nan],
  860. [ nan],
  861. [ nan],
  862. [ nan],
  863. [ nan],
  864. [ nan],
  865. [ nan],
  866. [ nan],
  867. [ nan],
  868. [ nan],
  869. [ nan],
  870. [ nan],
  871. [ nan],
  872. [ nan],
  873. [ nan],
  874. [ nan],
  875. [ nan]],
  876.  
  877. [[ nan],
  878. [ nan],
  879. [ nan],
  880. [ nan],
  881. [ nan],
  882. [ nan],
  883. [ nan],
  884. [ nan],
  885. [ nan],
  886. [ nan],
  887. [ nan],
  888. [ nan],
  889. [ nan],
  890. [ nan],
  891. [ nan],
  892. [ nan],
  893. [ nan],
  894. [ nan],
  895. [ nan],
  896. [ nan],
  897. [ nan],
  898. [ nan],
  899. [ nan],
  900. [ nan]],
  901.  
  902. [[ nan],
  903. [ nan],
  904. [ nan],
  905. [ nan],
  906. [ nan],
  907. [ nan],
  908. [ nan],
  909. [ nan],
  910. [ nan],
  911. [ nan],
  912. [ nan],
  913. [ nan],
  914. [ nan],
  915. [ nan],
  916. [ nan],
  917. [ nan],
  918. [ nan],
  919. [ nan],
  920. [ nan],
  921. [ nan],
  922. [ nan],
  923. [ nan],
  924. [ nan],
  925. [ nan]],
  926.  
  927. [[ nan],
  928. [ nan],
  929. [ nan],
  930. [ nan],
  931. [ nan],
  932. [ nan],
  933. [ nan],
  934. [ nan],
  935. [ nan],
  936. [ nan],
  937. [ nan],
  938. [ nan],
  939. [ nan],
  940. [ nan],
  941. [ nan],
  942. [ nan],
  943. [ nan],
  944. [ nan],
  945. [ nan],
  946. [ nan],
  947. [ nan],
  948. [ nan],
  949. [ nan],
  950. [ nan]],
  951.  
  952. [[ nan],
  953. [ nan],
  954. [ nan],
  955. [ nan],
  956. [ nan],
  957. [ nan],
  958. [ nan],
  959. [ nan],
  960. [ nan],
  961. [ nan],
  962. [ nan],
  963. [ nan],
  964. [ nan],
  965. [ nan],
  966. [ nan],
  967. [ nan],
  968. [ nan],
  969. [ nan],
  970. [ nan],
  971. [ nan],
  972. [ nan],
  973. [ nan],
  974. [ nan],
  975. [ nan]]], dtype=float32))
  976. ('count ',)
  977. ('result: ', <tf.Tensor 'OutputData:0' shape=(10, 1) dtype=float32>)
  978. ('result len: ', TensorShape([Dimension(10), Dimension(1)]))
  979. ('prediction: ', <tf.Tensor 'Print_3:0' shape=(10, 1, 10) dtype=float32>)
  980. ('prediction len: ', TensorShape([Dimension(10), Dimension(1), Dimension(10)]))
  981. Validation cost: 0.0, on Epoch 0
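
Editor's note: from this point on every fetched weight is NaN and never recovers; once one update overflows, all later updates are NaN as well. A small, purely illustrative guard that fails fast instead of continuing to train on NaN weights (the helper and where it is called are assumptions, not part of the original script):

import numpy as np

def assert_finite(name, value):
    # Raise early instead of silently continuing with NaN/Inf parameters.
    arr = np.asarray(value)
    if not np.all(np.isfinite(arr)):
        raise FloatingPointError("%s contains NaN or Inf after update" % name)

# e.g. right after the sess.run(...) call that fetches the weights:
# assert_finite("eval w", w_value)
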
  982. ('eval w: ', array([[[ nan],
  983. [ nan],
  984. [ nan],
  985. [ nan],
  986. [ nan],
  987. [ nan],
  988. [ nan],
  989. [ nan],
  990. [ nan],
  991. [ nan],
  992. [ nan],
  993. [ nan],
  994. [ nan],
  995. [ nan],
  996. [ nan],
  997. [ nan],
  998. [ nan],
  999. [ nan],
  1000. [ nan],
  1001. [ nan],
  1002. [ nan],
  1003. [ nan],
  1004. [ nan],
  1005. [ nan]],
  1006.  
  1007. [[ nan],
  1008. [ nan],
  1009. [ nan],
  1010. [ nan],
  1011. [ nan],
  1012. [ nan],
  1013. [ nan],
  1014. [ nan],
  1015. [ nan],
  1016. [ nan],
  1017. [ nan],
  1018. [ nan],
  1019. [ nan],
  1020. [ nan],
  1021. [ nan],
  1022. [ nan],
  1023. [ nan],
  1024. [ nan],
  1025. [ nan],
  1026. [ nan],
  1027. [ nan],
  1028. [ nan],
  1029. [ nan],
  1030. [ nan]],
  1031.  
  1032. [[ nan],
  1033. [ nan],
  1034. [ nan],
  1035. [ nan],
  1036. [ nan],
  1037. [ nan],
  1038. [ nan],
  1039. [ nan],
  1040. [ nan],
  1041. [ nan],
  1042. [ nan],
  1043. [ nan],
  1044. [ nan],
  1045. [ nan],
  1046. [ nan],
  1047. [ nan],
  1048. [ nan],
  1049. [ nan],
  1050. [ nan],
  1051. [ nan],
  1052. [ nan],
  1053. [ nan],
  1054. [ nan],
  1055. [ nan]],
  1056.  
  1057. [[ nan],
  1058. [ nan],
  1059. [ nan],
  1060. [ nan],
  1061. [ nan],
  1062. [ nan],
  1063. [ nan],
  1064. [ nan],
  1065. [ nan],
  1066. [ nan],
  1067. [ nan],
  1068. [ nan],
  1069. [ nan],
  1070. [ nan],
  1071. [ nan],
  1072. [ nan],
  1073. [ nan],
  1074. [ nan],
  1075. [ nan],
  1076. [ nan],
  1077. [ nan],
  1078. [ nan],
  1079. [ nan],
  1080. [ nan]],
  1081.  
  1082. [[ nan],
  1083. [ nan],
  1084. [ nan],
  1085. [ nan],
  1086. [ nan],
  1087. [ nan],
  1088. [ nan],
  1089. [ nan],
  1090. [ nan],
  1091. [ nan],
  1092. [ nan],
  1093. [ nan],
  1094. [ nan],
  1095. [ nan],
  1096. [ nan],
  1097. [ nan],
  1098. [ nan],
  1099. [ nan],
  1100. [ nan],
  1101. [ nan],
  1102. [ nan],
  1103. [ nan],
  1104. [ nan],
  1105. [ nan]],
  1106.  
  1107. [[ nan],
  1108. [ nan],
  1109. [ nan],
  1110. [ nan],
  1111. [ nan],
  1112. [ nan],
  1113. [ nan],
  1114. [ nan],
  1115. [ nan],
  1116. [ nan],
  1117. [ nan],
  1118. [ nan],
  1119. [ nan],
  1120. [ nan],
  1121. [ nan],
  1122. [ nan],
  1123. [ nan],
  1124. [ nan],
  1125. [ nan],
  1126. [ nan],
  1127. [ nan],
  1128. [ nan],
  1129. [ nan],
  1130. [ nan]],
  1131.  
  1132. [[ nan],
  1133. [ nan],
  1134. [ nan],
  1135. [ nan],
  1136. [ nan],
  1137. [ nan],
  1138. [ nan],
  1139. [ nan],
  1140. [ nan],
  1141. [ nan],
  1142. [ nan],
  1143. [ nan],
  1144. [ nan],
  1145. [ nan],
  1146. [ nan],
  1147. [ nan],
  1148. [ nan],
  1149. [ nan],
  1150. [ nan],
  1151. [ nan],
  1152. [ nan],
  1153. [ nan],
  1154. [ nan],
  1155. [ nan]],
  1156.  
  1157. [[ nan],
  1158. [ nan],
  1159. [ nan],
  1160. [ nan],
  1161. [ nan],
  1162. [ nan],
  1163. [ nan],
  1164. [ nan],
  1165. [ nan],
  1166. [ nan],
  1167. [ nan],
  1168. [ nan],
  1169. [ nan],
  1170. [ nan],
  1171. [ nan],
  1172. [ nan],
  1173. [ nan],
  1174. [ nan],
  1175. [ nan],
  1176. [ nan],
  1177. [ nan],
  1178. [ nan],
  1179. [ nan],
  1180. [ nan]],
  1181.  
  1182. [[ nan],
  1183. [ nan],
  1184. [ nan],
  1185. [ nan],
  1186. [ nan],
  1187. [ nan],
  1188. [ nan],
  1189. [ nan],
  1190. [ nan],
  1191. [ nan],
  1192. [ nan],
  1193. [ nan],
  1194. [ nan],
  1195. [ nan],
  1196. [ nan],
  1197. [ nan],
  1198. [ nan],
  1199. [ nan],
  1200. [ nan],
  1201. [ nan],
  1202. [ nan],
  1203. [ nan],
  1204. [ nan],
  1205. [ nan]],
  1206.  
  1207. [[ nan],
  1208. [ nan],
  1209. [ nan],
  1210. [ nan],
  1211. [ nan],
  1212. [ nan],
  1213. [ nan],
  1214. [ nan],
  1215. [ nan],
  1216. [ nan],
  1217. [ nan],
  1218. [ nan],
  1219. [ nan],
  1220. [ nan],
  1221. [ nan],
  1222. [ nan],
  1223. [ nan],
  1224. [ nan],
  1225. [ nan],
  1226. [ nan],
  1227. [ nan],
  1228. [ nan],
  1229. [ nan],
  1230. [ nan]]], dtype=float32))
  1231. ('count ',)
  1232. ('result: ', <tf.Tensor 'OutputData:0' shape=(10, 1) dtype=float32>)
  1233. ('result len: ', TensorShape([Dimension(10), Dimension(1)]))
  1234. ('prediction: ', <tf.Tensor 'Print_3:0' shape=(10, 1, 10) dtype=float32>)
  1235. ('prediction len: ', TensorShape([Dimension(10), Dimension(1), Dimension(10)]))
  1236. Validation cost: 0.0, on Epoch 0
  1237. ('eval w: ', array([[[ nan],