Guest User

Untitled

a guest
Oct 19th, 2018
68
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 1.36 KB | None | 0 0
  1. from __future__ import print_function
  2. import tensorflow as tf
  3. from tensorflow.contrib import rnn
  4. import csv
  5. import bisect
  6. import glob
  7. import re
  8. import numpy as np
  9. import random
  10. import data
  11. import config
  12.  
# Load hyper-parameters and the pre-processed dataset wrapper.
cfg = config.Config()
graph_data = data.Graph_data(cfg)

# Input/target placeholders, one column per movie in the vocabulary.
# NOTE(review): presumably each row is a per-user movie vector — confirm in data.py.
X = tf.placeholder("float", [None, cfg.N_movies])
Y = tf.placeholder("float", [None, cfg.N_movies])
  19. def Dense(x):
  20. hidden_layer_1 = tf.layers.dense(inputs=x, units=500, activation=tf.nn.relu)
  21. hidden_layer_2 = tf.layers.dense(inputs=hidden_layer_1, units=50, activation=tf.nn.relu)
  22. output_layer = tf.layers.dense(inputs = hidden_layer_2, units= cfg.N_movies, activation=tf.nn.softmax)
  23. return output_layer
  24.  
  25. logits = Dense(X)
  26. cross_entropy = tf.reduce_sum(- Y * tf.log(logits), 1)
  27. loss_op = tf.reduce_mean(cross_entropy)
  28. optimizer = tf.train.GradientDescentOptimizer(learning_rate=cfg.learning_rate)
  29. train_op = optimizer.minimize(loss_op)
  30.  
  31. init = tf.global_variables_initializer()
  32.  
  33. with tf.Session() as sess:
  34. sess.run(init)
  35.  
  36. for step in range(1, cfg.training_steps+1):
  37. batch_x, batch_y = graph_data.train_next_batch(cfg.batch_size)
  38. sess.run(train_op, feed_dict={X: batch_x, Y: batch_y})
  39. if step % cfg.display_step == 0 or step == 1:
  40. loss = sess.run(loss_op, feed_dict={X: batch_x,
  41. Y: batch_y})
  42. print("loss = ",loss)
Add Comment
Please, Sign In to add comment