# -*- coding: utf-8 -*-
# import the required libraries

import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt

ngds = [[0.688162116230982, 0.732446489604642, 0.755601910225769, 0.705199494796785, 0.708428954296666, 0.711153359228192],
        [0.743378431417018, 0.751733027569939, 0.76350367355135, 0.702446939846307, 0.75887792524199, 0.783269315256947],
        [0.780217962816102, 0.735379587624859, 0.76003529197603, 0.731215574077459, 0.747571121018975, 0.711153359228192],
        [0.701079639493405, 0.708435709059543, 0.758715313961785, 0.698413660213797, 0.777344044733561, 0.752003206548303],
        [0.724923492940796, 0.732386820013346, 0.745191287095998, 0.693565122949664, 0.748130261331821, 0.786480502809536],
        [0.731987413663261, 0.726794691209169, 0.736127616512208, 0.674452635990363, 0.765265625257208, 0.798694934767121],
        [0.740212784348503, 0.758805659863885, 0.769903094573739, 0.761743475480735, 0.75896388295821, 0.77290888226539],
        [0.672536610824651, 0.751062942798772, 0.715666021063673, 0.657377394501527, 0.754812757012904, 0.738785095114754],
        [0.704560445384988, 0.690220116005732, 0.7409698673091, 0.674085689724403, 0.710298729463551, 0.72652697049242],
        [0.753402891690361, 0.707679203673663, 0.771169733382919, 0.694956503389763, 0.806928070025996, 0.842085881717505],
        [0.698912550958784, 0.733400541521229, 0.724168309730765, 0.697691715102669, 0.730556702588771, 0.747308229047929],
        [0.720563312980938, 0.747951259624833, 0.729612310203481, 0.698464354996372, 0.761703949760201, 0.796767603558889],
        [0.771892110261537, 0.728899692958041, 0.742306489674558, 0.742437572899725, 0.760373930366387, 0.758121684690409],
        [0.687521681282177, 0.72128288948711, 0.72728105861029, 0.675033467303512, 0.725441504066296, 0.740128502519245],
        [0.741651580334838, 0.719576673212029, 0.75167943408459, 0.739178058904972, 0.784608191153729, 0.802022612654394],
        [0.722831639992797, 0.746957995747708, 0.733631618118581, 0.713168347517917, 0.773464933047562, 0.781906139031856],
        [0.71244969162804, 0.690315137276706, 0.702331878860473, 0.689922129496504, 0.713963858223464, 0.731220330778854],
        [0.708235258888634, 0.719503717941342, 0.753456527507628, 0.705574135087116, 0.745745018975492, 0.750421964263242],
        [0.781298407575192, 0.72402942882735, 0.759841501602342, 0.717646212758491, 0.787779550835618, 0.811000515959128],
        [0.719543172627685, 0.702343305578695, 0.74139486267297, 0.700350237839106, 0.727358454183233, 0.732989873235383],
        [0.680128135356768, 0.716975666191809, 0.72609401786307, 0.670695750228591, 0.687060340396688, 0.706417167252926],
        [0.677532852626355, 0.685909333411646, 0.704777103393177, 0.675880251990045, 0.694435086071062, 0.680749299259125],
        [0.703506944039205, 0.711548622795287, 0.728609930666901, 0.701055410394743, 0.765927817679745, 0.758053056636177],
        [0.715011269063208, 0.709085949756289, 0.771312160404559, 0.720609693230771, 0.758170168934422, 0.780975938642132],
        [0.678195836353772, 0.662187560260362, 0.717575055098692, 0.653312042807961, 0.693290256930799, 0.722824271648459],
        [0.665824983952087, 0.704546907041927, 0.691246893253422, 0.667114045601751, 0.682297764045644, 0.711115509933017],
        [0.731527677958336, 0.761737679417304, 0.738209753797342, 0.701787640796024, 0.764393728401008, 0.772172855108191],
        [0.66537332210211, 0.732410917788602, 0.68978652109656, 0.656382736435416, 0.707597079303623, 0.699826048141957],
        [0.729565149609762, 0.778299737813463, 0.721758003329484, 0.708316562677284, 0.736955238134713, 0.717153186507178],
        [0.710485505910538, 0.745954875299151, 0.727701388875646, 0.676292794288597, 0.754732825343501, 0.763653853255496],
        [0.700488603126165, 0.693817808488289, 0.715695603484826, 0.680541023806334, 0.729596093052887, 0.740221765409833],
        [0.732891988146543, 0.764712745807148, 0.76189041999319, 0.697616178767865, 0.791845124328794, 0.748321102774927],
        [0.683778520623498, 0.77390010515179, 0.701497722108162, 0.689232176534956, 0.692283038039346, 0.699319463280446],
        [0.701948139318546, 0.70146088655732, 0.68834156143794, 0.705631013400578, 0.695777855768726, 0.700876059867261],
        [0.67842196490346, 0.695751028473093, 0.677338817755789, 0.644216455575831, 0.693817661490721, 0.676179987560934],
        [0.70598689580778, 0.756580348757392, 0.72928754651399, 0.679693541668322, 0.7218993680225, 0.730707505028277],
        [0.710264438031596, 0.71093562884508, 0.685739570256289, 0.691572221941633, 0.694117579384115, 0.727234216217734],
        [0.737650829624196, 0.692082312694413, 0.734726369680403, 0.670687282904882, 0.710692373094609, 0.758782658566216],
        [0.677813364965619, 0.72767969348901, 0.722985473436039, 0.759242627306081, 0.69363950743643, 0.769062771549878],
        [0.688790740157708, 0.683439381144381, 0.689279531270003, 0.699950239474169, 0.686454564721754, 0.715752182799398],
        [0.730546416477126, 0.71911086540595, 0.751802992041718, 0.709061714131869, 0.734178246419767, 0.790239180612073],
        [0.713902002010936, 0.698263395155858, 0.733353155086449, 0.710873955717662, 0.750026171245627, 0.750460292063927],
        [0.69476852142192, 0.687784550899905, 0.719212341154276, 0.662028839362699, 0.723650736257307, 0.716621849614952],
        [0.704085928687114, 0.650743856476869, 0.657651092591001, 0.68462868250052, 0.66534547016413, 0.678758542882808],
        [0.73365601116657, 0.762560810559873, 0.733517175471946, 0.704380563267615, 0.743958523113321, 0.759330985981121],
        [0.710233085989598, 0.736889114463434, 0.756273694876313, 0.715003467309213, 0.741309653584279, 0.744949710905885],
        [0.658186109519053, 0.655453235033462, 0.675965172773302, 0.655288265511888, 0.699567581250329, 0.672229872844564],
        [0.69230493609093, 0.695411045740233, 0.748506823666869, 0.708601925309764, 0.734281437286556, 0.711351880954814],
        [0.683792430351096, 0.673823191613456, 0.677270990515019, 0.704753172673526, 0.66297219596305, 0.680526195981014],
        [0.740810794168669, 0.723978693248989, 0.778431298313754, 0.689136029350635, 0.730565676589876, 0.730765982121075],
        [0.726454871778305, 0.75650520966785, 0.75273108340699, 0.720012349574543, 0.79292447858189, 0.795275173391034],
        [0.738562517375911, 0.691740284674876, 0.755130923444765, 0.71064148693419, 0.731409566846807, 0.767598131799258],
        [0.703466349802082, 0.702355195277619, 0.731136833989463, 0.708623671375379, 0.73227180074309, 0.715262074874914],
        [0.742831401322789, 0.752303807972559, 0.718198699615094, 0.689882449346349, 0.738695451048926, 0.714325003267776],
        [0.731805180828981, 0.731998283344445, 0.76237809689406, 0.702194596019348, 0.76430192729394, 0.778641506085595],
        [0.682310107843743, 0.696000406948153, 0.761678863751588, 0.675317942246275, 0.703549515383236, 0.7202623403365],
        [0.67414384778017, 0.676064272359795, 0.723060121394273, 0.681808788108025, 0.68008788111121, 0.70364220706236],
        [0.736920871386378, 0.757636108317171, 0.762376037634983, 0.702822087247755, 0.774920394438824, 0.793956640842588],
        [0.699703965962635, 0.778754144289363, 0.712936614723601, 0.663781734218834, 0.728028694353535, 0.765763753031763],
        [0.672005874616208, 0.695147284193033, 0.688065549191707, 0.69027346466967, 0.694680940147472, 0.69210514409988]]
# leaky ReLU: identity for positive inputs, scaled by alpha for negative inputs
def lrelu(x, alpha=0.1):
    return tf.nn.relu(x) - alpha * tf.nn.relu(-x)
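
# Note: lrelu is defined but never used below. As a hypothetical variation
# (not part of the original run), it could be passed to add_layer in place
# of tf.nn.relu, e.g.:
#   l1 = add_layer(xs, 6, 30, activation_function=lrelu)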

# define one fully connected layer
def add_layer(inputs, in_size, out_size, activation_function=None):
    Weights = tf.Variable(tf.random_normal([in_size, out_size]))
    biases = tf.Variable(tf.zeros([1, out_size]) + 0.1)
    Wx_plus_b = tf.matmul(inputs, Weights) + biases

    # optionally apply an activation function
    if activation_function is None:
        outputs = Wx_plus_b
    else:
        outputs = activation_function(Wx_plus_b)
    return outputs
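
# A hedged variation (not in the original paste): tf.random_normal defaults to
# stddev=1.0, which can make a nine-hidden-layer ReLU stack saturate or diverge.
# A minimal sketch of a smaller initialization inside add_layer:
#   Weights = tf.Variable(tf.random_normal([in_size, out_size], stddev=0.1))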

# give TensorFlow placeholders whose data is fed in at run time; None lets
# the batch size be inferred automatically (here it becomes 30, since we
# feed 30 training samples below)
xs = tf.placeholder(tf.float32, [None, 6])
ys = tf.placeholder(tf.float32, [None, 1])

# assemble the neural network
# add hidden layers
l1 = add_layer(xs, 6, 30, activation_function=tf.nn.relu)
l2 = add_layer(l1, 30, 30, activation_function=tf.nn.relu)
l3 = add_layer(l2, 30, 30, activation_function=tf.nn.relu)
l4 = add_layer(l3, 30, 30, activation_function=tf.nn.relu)
l5 = add_layer(l4, 30, 30, activation_function=tf.nn.relu)
l6 = add_layer(l5, 30, 30, activation_function=tf.nn.relu)
l7 = add_layer(l6, 30, 30, activation_function=tf.nn.relu)
l8 = add_layer(l7, 30, 30, activation_function=tf.nn.relu)
l9 = add_layer(l8, 30, 30, activation_function=tf.nn.relu)

# add output layer
prediction = add_layer(l9, 30, 1, activation_function=tf.sigmoid)

# select the 30 training rows of ngds, in the order listed below
# 49 13 57 19 34 22 36 21 5 1 46 25 3 44 30 9 52 33 38 35 54 17 45 8 28 2 47 4 59 56
x_data = [ngds[49], ngds[13], ngds[57], ngds[19], ngds[34], ngds[22], ngds[36], ngds[21], ngds[5], ngds[1], ngds[46], ngds[25], ngds[3], ngds[44], ngds[30], ngds[9], ngds[52], ngds[33], ngds[38], ngds[35], ngds[54], ngds[17], ngds[45], ngds[8], ngds[28], ngds[2], ngds[47], ngds[4], ngds[59], ngds[56]]
#noise = np.random.normal(0, 0.05, x_data.shape)

# build the target labels Y for the network to learn (no noise is actually added)
#y_data = [[1,0], [1,0], [1,0], [1,0], [1,0], [1,0], [1,0], [1,0], [0,1], [0,1], [0,1], [1,0], [0,1], [0,1], [0,1], [1,0], [1,0], [0,1], [1,0], [0,1], [0,1], [1,0], [0,1], [0,1], [0,1], [0,1], [0,1], [1,0], [0,1], [0,1]]
y_data = [[1], [1], [1], [1], [1], [1], [1], [1], [0], [0], [0], [1], [0], [0], [0], [1], [1], [0], [1], [0], [0], [1], [0], [0], [0], [0], [0], [1], [0], [0]]
y_a = [[1], [1], [1], [1], [1], [1], [1], [1], [0], [0], [0], [1], [0], [0], [0], [1], [1], [0], [1], [0], [0], [1], [0], [0], [0], [0], [0], [1], [0], [0]]

# define the loss function and pick an optimizer to reduce it; here we use
# GradientDescentOptimizer. Other optimizers are listed at
# https://www.tensorflow.org/versions/r0.10/api_docs/python/train.html
loss = tf.reduce_mean(tf.reduce_sum(tf.square(ys - prediction), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)
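
# A hedged alternative (not in the original paste): with a sigmoid output and
# 0/1 labels, sigmoid cross-entropy usually trains better than squared error.
# A minimal sketch, assuming the output layer is rebuilt without the sigmoid
# so it produces raw logits:
#   logits = add_layer(l9, 30, 1, activation_function=None)
#   prediction = tf.sigmoid(logits)
#   loss = tf.reduce_mean(
#       tf.nn.sigmoid_cross_entropy_with_logits(labels=ys, logits=logits))
#   train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)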

# once everything is set up, remember to initialize the variables
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)

# set up a figure so the training results can be visualized
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
x_a = [[i] for i in range(1, 31)]  # sample indices 1..30 for the x axis
ax.scatter(x_a, y_a)
plt.ion()
plt.show()

# now the network can be trained with a for loop
for i in range(10000):

    # the core training step: feed_dict supplies the inputs and targets
    # x_data: [30, 6]   y_data: [30, 1]
    sess.run(train_step, feed_dict={xs: x_data, ys: y_data})
    if i % 1000 == 0:
        # clear the previous plot before redrawing, otherwise the
        # learning progress is hard to see
        try:
            plt.cla()
        except Exception:
            pass

        # to read out the predicted values, the graph must be run again
        prediction_value = sess.run(prediction, feed_dict={xs: x_data})
        # redraw every 0.1 seconds
        plt.xlabel('QAQ')
        plt.ylabel('TAT')
        ax.scatter(x_a, y_a)
        ax.scatter(x_a, prediction_value)
        plt.pause(0.1)
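
# A hedged post-training check (not in the original paste): threshold the
# sigmoid outputs at 0.5 and compare them with y_data for a rough
# training-set accuracy, reusing the session and placeholders defined above.
final_pred = sess.run(prediction, feed_dict={xs: x_data})
correct = sum(int(p[0] > 0.5) == y[0] for p, y in zip(final_pred, y_data))
print("training accuracy: %.2f" % (correct / float(len(y_data))))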
input("END")  # keep the plot window open until Enter is pressed