import tensorflow as tf

# TF 2.x executes eagerly by default; only TF 1.15.x needs this switch.
if not tf.executing_eagerly():
    tf.compat.v1.enable_eager_execution()

from tensorflow.keras import layers

import numpy as np
from numpy.random import rand
from numpy import hstack

from matplotlib import pyplot

class GanPointGraph(object):

    def __init__(self):
        self.latent_dim = 5
        self.generator = self.make_generator()
        self.discriminator = self.make_discriminator()

        # The discriminator ends in a sigmoid (probabilities, not logits),
        # so a cross-entropy loss on its output needs from_logits=False.
        self.cross_entropy = tf.keras.losses.BinaryCrossentropy(from_logits=False)
        self.generator_optimizer = tf.keras.optimizers.Adam(learning_rate=0.001)
        self.discriminator_optimizer = tf.keras.optimizers.Adam(learning_rate=0.001)

    def make_generator(self):
        # Maps a latent vector of size latent_dim to a 2-D point (x, y).
        model = tf.keras.Sequential()
        model.add(layers.Dense(15, activation='relu', input_dim=self.latent_dim))
        model.add(layers.Dense(2))
        return model

    def make_discriminator(self):
        # Scores a 2-D point; the sigmoid maps (-infinity, infinity) -> (0, 1),
        # read as the probability that the point came from the real data.
        model = tf.keras.Sequential()
        model.add(layers.Dense(25, activation='relu', input_dim=2))
        model.add(layers.Dense(1, activation='sigmoid'))
        return model

    def generator_loss(self, fake_output):
        # return self.cross_entropy(tf.ones_like(fake_output), fake_output)
        # Minimax generator loss: minimise log(1 - D(G(z))).
        return tf.reduce_mean(tf.math.log(1 - fake_output))

    def discriminator_loss(self, real_output, fake_output):
        # real_loss = self.cross_entropy(tf.ones_like(real_output), real_output)
        # fake_loss = self.cross_entropy(tf.zeros_like(fake_output), fake_output)
        # total_loss = real_loss + fake_loss
        # return total_loss
        # Standard discriminator loss: -log(D(x)) - log(1 - D(G(z))).
        loss_real = tf.reduce_mean(-tf.math.log(real_output))
        loss_fake = tf.reduce_mean(-tf.math.log(1 - fake_output))
        return loss_real + loss_fake

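    # The commented-out cross-entropy lines above are the tf.keras way of writing
    # these losses. Because the discriminator outputs sigmoid probabilities, they
    # need from_logits=False; a rough sketch, with `bce` assumed to be a
    # BinaryCrossentropy(from_logits=False) instance:
    #
    #     real_loss = bce(tf.ones_like(real_output), real_output)   # -mean(log D(x))
    #     fake_loss = bce(tf.zeros_like(fake_output), fake_output)  # -mean(log(1 - D(G(z))))
    #
    # The discriminator version is then identical to loss_real + loss_fake above.
    # For the generator, bce(tf.ones_like(fake_output), fake_output) is the
    # non-saturating loss -log(D(G(z))), whereas the active generator_loss minimises
    # the original minimax term log(1 - D(G(z))), which gives weaker gradients while
    # the discriminator confidently rejects the generated points.
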
    def generate_real_samples(self, n):
        # n samples from the target distribution: x in [-0.5, 0.5), y = x^2.
        X1 = rand(n) - 0.5
        X2 = X1 * X1
        X1 = X1.reshape(n, 1)
        X2 = X2.reshape(n, 1)
        x_train = hstack((X1, X2))
        return x_train

    def generate_fake_samples(self, n):
        # Draw n latent vectors and map them through the generator (inference only).
        z_sample = np.random.normal(0, 1.0, size=[n, self.latent_dim]).astype(np.float32)
        return self.generator(z_sample, training=False).numpy()

    def train(self):
        # One update: a single gradient step for both networks on a fresh
        # batch of 128 real points.
        real_points = self.generate_real_samples(128)
        noise = tf.random.normal([real_points.shape[0], self.latent_dim])

        with tf.GradientTape() as gen_tape, tf.GradientTape() as disc_tape:
            generated_points = self.generator(noise, training=True)

            real_output = self.discriminator(real_points, training=True)
            fake_output = self.discriminator(generated_points, training=True)

            gen_loss = self.generator_loss(fake_output)
            disc_loss = self.discriminator_loss(real_output, fake_output)

        gradients_of_generator = gen_tape.gradient(gen_loss, self.generator.trainable_variables)
        gradients_of_discriminator = disc_tape.gradient(disc_loss, self.discriminator.trainable_variables)

        self.generator_optimizer.apply_gradients(zip(gradients_of_generator, self.generator.trainable_variables))
        self.discriminator_optimizer.apply_gradients(zip(gradients_of_discriminator, self.discriminator.trainable_variables))

if __name__ == "__main__":
    g = GanPointGraph()

    for epoch in range(10000):
        print('Epoch', epoch)
        g.train()
        if epoch % 1000 == 0:
            # Every 1000 steps, plot 100 real points (black) against
            # 100 generated points (red).
            g_objects = g.generate_fake_samples(100)
            r_objects = g.generate_real_samples(100)

            pyplot.clf()
            pyplot.title('TensorFlow iteration ' + str(epoch))
            pyplot.scatter(r_objects[:, 0], r_objects[:, 1], c='black')
            pyplot.scatter(g_objects[:, 0], g_objects[:, 1], c='red')
            pyplot.show()
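
# Note on the plots: pyplot.show() blocks until each figure window is closed.
# For a headless run the snapshots could instead be written to disk; a minimal,
# illustrative alternative (the filename pattern is an assumption):
#
#     pyplot.savefig('gan_points_epoch_{:05d}.png'.format(epoch))
#     pyplot.close()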