import torch as tc  # the paste uses tc as an alias for torch


# Methods of a small two-layer classifier (hidden ReLU layer, softmax
# output). They are written against attributes such as self.weights_0_1;
# a hedged sketch of a class that supplies those attributes follows the
# code below.
def predict(self, inputs):
    # Hidden layer: linear transform followed by ReLU.
    inputs_1 = tc.mm(inputs, self.weights_0_1) + self.bias1
    outputs_1 = self.relu(inputs_1)

    # Output layer: linear transform followed by softmax over classes.
    inputs_2 = tc.mm(outputs_1, self.weights_1_2) + self.bias2
    outputs_2 = inputs_2.softmax(1)

    return outputs_2

def check_los(self, inputs, clas):
    # Same forward pass as predict, but also records the loss on this batch.
    inputs_1 = tc.mm(inputs, self.weights_0_1) + self.bias1
    outputs_1 = self.relu(inputs_1)

    inputs_2 = tc.mm(outputs_1, self.weights_1_2) + self.bias2
    outputs_2 = inputs_2.softmax(1)

    # Mean cross-entropy: negative log-probability assigned to the correct
    # class, averaged over the batch.
    L = -tc.mean(tc.log(outputs_2[tc.arange(len(outputs_2)), clas]))
    # .item() extracts a plain float; appending the tensor itself would keep
    # the whole autograd graph alive inside the history list.
    self.test_costs.append(L.item())

    return outputs_2

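# Note: taking the log of a softmax can underflow to log(0) for confidently
# wrong predictions. If one were willing to deviate from the formula above,
# tc.nn.functional.cross_entropy(inputs_2, clas) computes the same mean
# cross-entropy directly from the logits in a numerically stable way.
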
def train(self, inputs, clas):
    # Forward pass, identical to predict.
    inputs_1 = tc.mm(inputs, self.weights_0_1) + self.bias1
    outputs_1 = self.relu(inputs_1)

    inputs_2 = tc.mm(outputs_1, self.weights_1_2) + self.bias2
    outputs_2 = inputs_2.softmax(1)

    # Reset any gradients left over from the previous step so that
    # backward() does not accumulate into stale values.
    with tc.no_grad():
        if self.weights_0_1.grad is not None:
            self.weights_0_1.grad.zero_()

        if self.weights_1_2.grad is not None:
            self.weights_1_2.grad.zero_()

        if self.bias1.grad is not None:
            self.bias1.grad.zero_()

        if self.bias2.grad is not None:
            self.bias2.grad.zero_()
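    # The paste ends at the gradient-zeroing block, so the rest of this
    # training step is an assumed continuation: the same cross-entropy loss
    # as check_los, backpropagation, and a plain SGD update. The learning
    # rate lr and the return value are hypothetical.
    L = -tc.mean(tc.log(outputs_2[tc.arange(len(outputs_2)), clas]))
    L.backward()

    lr = 0.01  # assumed value
    with tc.no_grad():
        self.weights_0_1 -= lr * self.weights_0_1.grad
        self.weights_1_2 -= lr * self.weights_1_2.grad
        self.bias1 -= lr * self.bias1.grad
        self.bias2 -= lr * self.bias2.grad

    return L.item()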
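
# --- Minimal usage sketch (not part of the original paste) ---
# Everything below is an assumption for illustration: the Net class name,
# the 784 -> 64 -> 10 layer sizes, the 0.01 weight scale, and the fake
# random batch. It only shows one way to supply the attributes that the
# methods above expect.
class Net:
    def __init__(self, n_in=784, n_hidden=64, n_out=10):
        # Small random weights; requires_grad so backward() fills .grad.
        self.weights_0_1 = (0.01 * tc.randn(n_in, n_hidden)).requires_grad_()
        self.bias1 = tc.zeros(n_hidden, requires_grad=True)
        self.weights_1_2 = (0.01 * tc.randn(n_hidden, n_out)).requires_grad_()
        self.bias2 = tc.zeros(n_out, requires_grad=True)
        self.relu = tc.relu
        self.test_costs = []

    # Attach the module-level functions above as methods.
    predict = predict
    check_los = check_los
    train = train


if __name__ == "__main__":
    net = Net()
    x = tc.randn(32, 784)             # fake batch of 32 flattened images
    y = tc.randint(0, 10, (32,))      # fake class labels
    loss = net.train(x, y)            # one training step
    probs = net.check_los(x, y)       # records the loss in net.test_costs
    preds = net.predict(x).argmax(1)  # hard class predictions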