import tensorflow as tf

# set basic hyperparameters
learning_rate = 0.001
n_epochs = 2000

# define loss and accuracy
# (BinaryAccuracy thresholds the sigmoid outputs at 0.5 before comparing to labels)
bce_loss = tf.keras.losses.BinaryCrossentropy()
accuracy = tf.keras.metrics.BinaryAccuracy()

# define optimizer
optimizer = tf.optimizers.Adam(learning_rate=learning_rate)

# save training progress in lists
loss_history = []
accuracy_history = []

# `model` (a Keras model), `df` (features) and `classification` (labels)
# are assumed to be defined earlier
for epoch in range(n_epochs):

    with tf.GradientTape() as tape:
        # take binary cross-entropy (bce_loss); Keras losses expect (y_true, y_pred)
        predictions = model(df)
        current_loss = bce_loss(classification, predictions)

    # update weights based on the gradient of the loss function
    gradients = tape.gradient(current_loss, model.trainable_variables)
    optimizer.apply_gradients(zip(gradients, model.trainable_variables))

    # save in history lists
    current_loss = current_loss.numpy()
    loss_history.append(current_loss)

    # reset the metric so each history entry reflects a single epoch
    accuracy.reset_states()
    accuracy.update_state(classification, predictions)
    current_accuracy = accuracy.result().numpy()
    accuracy_history.append(current_accuracy)

    # print loss and accuracy scores every 100 epochs
    if (epoch + 1) % 100 == 0:
        print(str(epoch + 1) + '.\tTrain Loss: ' + str(current_loss)
              + ',\tAccuracy: ' + str(current_accuracy))

print('\nTraining complete.')
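
The loop above only runs if `model`, `df`, and `classification` already exist. A minimal sketch of one possible setup follows; the sample count, feature count, and one-layer model here are illustrative assumptions, not part of the original paste.

import numpy as np
import tensorflow as tf

# hypothetical stand-ins for the objects the training loop expects:
# 100 samples with 4 features each, and binary 0/1 labels
df = np.random.rand(100, 4).astype('float32')
classification = np.random.randint(0, 2, size=(100, 1)).astype('float32')

# a small logistic-regression-style model with a sigmoid output,
# matching the binary cross-entropy loss used above
model = tf.keras.Sequential([
    tf.keras.layers.Dense(1, activation='sigmoid', input_shape=(4,))
])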
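
The two history lists are filled but never used in the paste. One way to inspect them afterwards is to plot the training curves; matplotlib is an assumption here, not something the original imports.

import matplotlib.pyplot as plt

# plot the recorded loss and accuracy curves side by side
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 4))
ax1.plot(loss_history)
ax1.set_xlabel('epoch')
ax1.set_ylabel('binary cross-entropy loss')
ax2.plot(accuracy_history)
ax2.set_xlabel('epoch')
ax2.set_ylabel('accuracy')
plt.tight_layout()
plt.show()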