import numpy as np
import torch

self.optimizer = optimizer(self.model.parameters(), lr=self.learning_rate)  # This is in the class __init__

# Training
self.model.train()
for iteration, batch in enumerate(train_set):

    # Prepare the inputs and targets: replace NaNs, add noise, then flatten
    # each (num_points, 2) landmark array into one vector per sample
    landmark_batch = np.nan_to_num(batch[2].numpy())
    noisy_landmark_batch = add_noise_batch(landmark_batch)
    landmark_batch = np.array([np.concatenate((l[:, 0], l[:, 1])) for l in landmark_batch])
    noisy_landmark_batch = np.array([np.concatenate((l[:, 0], l[:, 1])) for l in noisy_landmark_batch])

    # Copy the batch into the pre-allocated input/target buffers
    self.input.data.resize_(landmark_batch.shape).copy_(torch.from_numpy(noisy_landmark_batch))
    self.target.data.resize_(landmark_batch.shape).copy_(torch.from_numpy(landmark_batch))

    self.optimizer.zero_grad()  # reset gradients; without this they accumulate across iterations
    outputs = self.model(self.input)  # calling the module runs forward()
    loss = self.loss_criterion(outputs, self.target)
    criterion_outputs += loss.data[0]
    loss.backward()
    self.optimizer.step()

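The add_noise_batch helper used in both loops is not included in this paste. A minimal sketch of what it might look like, assuming it just perturbs every landmark coordinate with zero-mean Gaussian noise (the sigma value is an arbitrary placeholder):

def add_noise_batch(landmark_batch, sigma=0.01):
    # Hypothetical helper, not part of the original paste: corrupt each
    # landmark coordinate with Gaussian noise so the model learns to denoise.
    return landmark_batch + np.random.normal(0.0, sigma, size=landmark_batch.shape)
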
# Testing
self.model.eval()
for iteration, batch in enumerate(test_set):

    # Prepare the inputs and targets exactly as in training
    landmark_batch = np.nan_to_num(batch[2].numpy())
    noisy_landmark_batch = add_noise_batch(landmark_batch)
    landmark_batch = np.array([np.concatenate((l[:, 0], l[:, 1])) for l in landmark_batch])
    noisy_landmark_batch = np.array([np.concatenate((l[:, 0], l[:, 1])) for l in noisy_landmark_batch])

    self.test_input.data.resize_(landmark_batch.shape).copy_(torch.from_numpy(noisy_landmark_batch))
    self.target.data.resize_(landmark_batch.shape).copy_(torch.from_numpy(landmark_batch))

    # eval() disables dropout/batch-norm updates; no backward pass or optimizer step here
    outputs = self.model(self.test_input)
    loss = self.loss_criterion(outputs, self.target)
    criterion_outputs += loss.data[0]
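
The loops also rely on self.input, self.target, self.test_input, self.loss_criterion and criterion_outputs, whose creation is not shown. A rough sketch of how the class __init__ might allocate them in the Variable-based style this code uses; only the names come from the paste, while the empty-tensor allocation and the choice of MSELoss are assumptions:

import torch.nn as nn
from torch.autograd import Variable

# Assumed setup (not in the paste): empty buffers that the loops above
# resize and fill in place with .data.resize_().copy_()
self.input = Variable(torch.FloatTensor())
self.target = Variable(torch.FloatTensor())
self.test_input = Variable(torch.FloatTensor())
self.loss_criterion = nn.MSELoss()  # assumed reconstruction criterion
criterion_outputs = 0.0             # running sum of per-batch losses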