# -*- coding: utf-8 -*-

import tensorflow as tf  # import TensorFlow, the neural-network library
import numpy as np  # import NumPy, the numerical-computation library
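
# Note: this script uses the TensorFlow 1.x graph API (tf.placeholder, tf.InteractiveSession,
# tf.train.AdamOptimizer) and is assumed to have been run on TF 1.x.  Under TensorFlow 2.x it
# would need the v1 compatibility layer, for example:
#   import tensorflow.compat.v1 as tf
#   tf.disable_v2_behavior()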

'''---------------------- Model definition ----------------------'''

# the input layer has 720 units (see the placeholder x below)
num_units1 = 10  # number of units in hidden layer 1

x = tf.placeholder(tf.float32, [1, 720])  # input layer (one day = 720 values)

w_enc = tf.Variable(tf.random_normal([720, num_units1], stddev=0.01))  # encoder weights
b_enc = tf.Variable(tf.zeros([num_units1]))  # encoder bias

w_dec = tf.Variable(tf.random_normal([num_units1, 720], stddev=0.01))  # decoder weights
b_dec = tf.Variable(tf.zeros([720]))  # decoder bias

encoded = tf.nn.relu(tf.matmul(x, w_enc) + b_enc)  # encoder activation (ReLU)

# drop = tf.nn.dropout(encoded, 0.5)  # dropout (disabled)

decoded = tf.matmul(encoded, w_dec) + b_dec  # decoder activation (identity map)

lambda2 = 0.1  # regularization coefficient
l2_norm = tf.nn.l2_loss(w_enc) + tf.nn.l2_loss(w_dec)  # L2 regularization
loss1 = tf.reduce_sum(tf.square(x - decoded)) + lambda2 * l2_norm  # loss function (squared error + L2 penalty)

loss2 = tf.abs(tf.reduce_sum(decoded - x) / tf.reduce_sum(x)) * 100  # error expressed as a percentage

train_step = tf.train.AdamOptimizer(learning_rate=0.0001).minimize(loss1)  # gradient descent (Adam)
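
# For reference (not part of the original script), the two losses above correspond, in NumPy
# terms, to the following; tf.nn.l2_loss(t) computes sum(t ** 2) / 2, i.e. half the squared
# Frobenius norm of each weight matrix:
#   loss1 = np.sum((x - decoded) ** 2) + lambda2 * (np.sum(w_enc ** 2) / 2 + np.sum(w_dec ** 2) / 2)
#   loss2 = abs(np.sum(decoded - x) / np.sum(x)) * 100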

'''-------------- Variable initialization and loading the VLF data --------------'''

sess = tf.InteractiveSession()  # start an interactive session
sess.run(tf.global_variables_initializer())  # initialize the variables

a = np.loadtxt("train_datasin125.txt")  # load the training data
b = np.loadtxt("sintest_data11.txt")  # load test data 1
c = np.loadtxt("sintast_data2.txt")  # load test data 2
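
# The three data files are not included with this paste.  From the way they are used below,
# the script expects whitespace-separated text files with one day per row and 720 values per
# row: 2000 rows of training data, 48 rows indexed from b, and 30 rows indexed from c.  A
# minimal sketch for generating dummy files of those shapes (an assumption, only so the
# script can be exercised without the real VLF data):
#   np.savetxt("train_datasin125.txt", np.random.rand(2000, 720))
#   np.savetxt("sintest_data11.txt", np.random.rand(48, 720))
#   np.savetxt("sintast_data2.txt", np.random.rand(30, 720))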

'''------------------------- Training -------------------------'''

for step in range(1000000):  # number of training iterations
    r = a[np.random.randint(0, 2000, [1])]  # randomly pick one day of data from the training set

    sess.run(train_step, feed_dict={x: r})  # run one training step
    if step % 49 == 0:  # every 49 steps
        loss_val = sess.run(loss2, feed_dict={x: r})  # compute the percentage error
        print('Step: %d, Loss: %f' % (step, loss_val))  # print the current step and the error

    if step % 499 == 0:  # every 499 steps, record the results

        '''------------------- Record the current training step -------------------'''

        if step == 0:
            out_g = step + 1
        else:
            out_g = np.column_stack([out_g, step + 1])

        '''------------------ Maximum error on the training data ------------------'''
        max = 0
        for i in range(2000):
            r = a[i, :]
            r = r.reshape(1, 720)
            loss_val = sess.run(loss2, feed_dict={x: r})
            if max < loss_val:
                max = loss_val
            if i == 1999:  # after the last day, store the maximum error
                if step == 0:
                    out1 = max
                else:
                    out1 = np.column_stack([out1, max])

        '''-------------- Maximum error on test data 1 --------------'''

        max = 0
        for q in range(30):
            r = c[q, :]
            r = r.reshape(1, 720)
            loss_val = sess.run(loss2, feed_dict={x: r})
            if max < loss_val:
                max = loss_val
            if q == 29:  # after the last day, store the maximum error
                if step == 0:
                    out3 = max
                else:
                    out3 = np.column_stack([out3, max])

        '''-------- Maximum error and estimated number of days for test data 2 --------'''

        # 'max' still holds the latest maximum error on test data 1 and is used as a scalar
        # threshold here (an array here would make the comparison below ambiguous, as in the
        # traceback further down)
        count = 0
        for p in range(48):
            r = b[p, :]
            r = r.reshape(1, 720)
            loss_val = sess.run(loss2, feed_dict={x: r})
            if max < loss_val:  # count the days whose error exceeds the threshold
                count = count + 1
            if p == 47:  # after the last day, store the threshold and the count
                if step == 0:
                    out2 = max
                    out4 = count
                else:
                    out2 = np.column_stack([out2, max])
                    out4 = np.column_stack([out4, count])

'''------------------ Write the text files ------------------'''

out = np.column_stack([out_g.T, out1.T, out2.T, out3.T])  # one row per checkpoint
np.savetxt("sinoutput110.txt", out)  # write the recorded steps and errors

out = np.column_stack([out4.T])
np.savetxt("sinoutput010.txt", out)  # write the exceedance counts

/Users/ishidatakuma/PycharmProjects/sotsuken/venv/bin/python /Users/ishidatakuma/PycharmProjects/sotsuken/jikkenn2.py
Step: 0, Loss: 147.256897
Step: 49, Loss: 2897.626221
Step: 98, Loss: 18581.408203
Step: 147, Loss: 33576.679688
Step: 196, Loss: 36586.777344
Step: 245, Loss: 20837.001953
Step: 294, Loss: 101844.562500
Step: 343, Loss: 16776.875000
Step: 392, Loss: 5594.307129
Step: 441, Loss: 20111.017578
Step: 490, Loss: 5526.880859
Traceback (most recent call last):
  File "/Users/ishidatakuma/PycharmProjects/sotsuken/jikkenn2.py", line 107, in <module>
    if yosino<loss_val:
ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()
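
The ValueError at the end of the log is raised whenever a NumPy array with more than one
element is used in a boolean context.  A minimal sketch reproducing it (the variable name
follows the traceback; the shape is an assumption about what the threshold becomes once
np.column_stack has been applied to it across checkpoints):

    import numpy as np

    yosino = np.column_stack([12.3, 45.6])   # shape (1, 2) after stacking two checkpoints
    loss_val = 20.0
    if yosino < loss_val:                    # element-wise comparison -> array of booleans
        pass                                 # using that array in 'if' raises the ValueError

Keeping the threshold a single scalar, or reducing explicitly with .any()/.all() as the
message suggests, avoids the error.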