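The script below calls Linear_regressor(), but the paste never defines it; the class definition was evidently left out of the snippet. A minimal sketch of what it would need to be, assuming a plain one-layer linear model (the Dense(1) implementation is an assumption, not part of the original paste):

import numpy as np
import tensorflow as tf
from tensorflow import keras


class Linear_regressor(keras.Model):
    # Assumed stand-in: the original class was not included in the paste.
    # A single Dense layer maps the 13 housing features to one predicted price.
    def __init__(self):
        super().__init__()
        self.fc = keras.layers.Dense(1)

    def call(self, x):
        return self.fc(x)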
def main():
    tf.random.set_seed(1234)
    assert tf.__version__.startswith('2.')  # requires TensorFlow 2.x

    # Boston housing regression data: 13 features per sample.
    (x_train, y_train), (x_val, y_val) = keras.datasets.boston_housing.load_data()

    x_train, x_val = x_train.astype(np.float32), x_val.astype(np.float32)
    print(x_train.shape, y_train.shape, x_val.shape, y_val.shape)

    # Mini-batches of 64 for training; the whole validation set as one batch.
    db_train = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(64)
    db_val = tf.data.Dataset.from_tensor_slices((x_val, y_val)).batch(102)

    model = Linear_regressor()
    criterion = keras.losses.MeanSquaredError()
    optimizer = keras.optimizers.Adam(learning_rate=1e-2)

    for epoch in range(100):

        for step, (x, y) in enumerate(db_train):

            with tf.GradientTape() as tape:
                logits = model(x)
                logits = tf.squeeze(logits, axis=1)  # [b, 1] -> [b] to match y
                loss = criterion(y, logits)

            grads = tape.gradient(loss, model.trainable_variables)
            optimizer.apply_gradients(zip(grads, model.trainable_variables))

        print('[INFO] Epoch: {}, Train loss: {}'.format(epoch, loss.numpy()))

        if epoch % 10 == 0:
            for x, y in db_val:
                logits = model(x)
                logits = tf.squeeze(logits, axis=1)

                loss = criterion(y, logits)
                print('[INFO] Epoch: {}, Test loss: {}'.format(epoch, loss.numpy()))


if __name__ == '__main__':
    main()
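With Keras's default test_split of 0.2, the shape printout is (404, 13) (404,) (102, 13) (102,): 404 training and 102 validation samples of 13 features each. That 102 is why db_val uses .batch(102), so each evaluation pass scores the entire validation set in a single batch.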