Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
def predict(self, inputs):
    """Run the forward pass and return softmax class probabilities.

    Args:
        inputs: batch of input row vectors, shape (batch, n_in).

    Returns:
        Per-class probabilities, shape (batch, n_out); rows sum to 1.
    """
    hidden = self.relu(tc.mm(inputs, self.weights_0_1) + self.bias1)
    logits = tc.mm(hidden, self.weights_1_2) + self.bias2
    return logits.softmax(1)
def check_los(self, inputs, clas):
    """Forward pass on an evaluation batch, recording mean cross-entropy.

    Args:
        inputs: batch of input row vectors, shape (batch, n_in).
        clas:   integer class index per sample, shape (batch,).

    Returns:
        Softmax class probabilities, shape (batch, n_out).
    """
    hidden = self.relu(tc.mm(inputs, self.weights_0_1) + self.bias1)
    logits = tc.mm(hidden, self.weights_1_2) + self.bias2
    # log_softmax is numerically stable; the original log(softmax(x))
    # yields -inf/NaN once a probability underflows to exactly 0.
    log_probs = logits.log_softmax(1)
    loss = -tc.mean(log_probs[tc.arange(len(log_probs)), clas])
    # .item() detaches the scalar from the autograd graph; appending the
    # raw tensor (as before) retains every batch's graph and leaks memory.
    # NOTE(review): test_costs entries are now plain floats, not tensors —
    # confirm no caller relies on tensor methods of the stored losses.
    self.test_costs.append(loss.item())
    return logits.softmax(1)
def train(self, inputs, clas):
    # Forward pass — identical to predict/check_los: hidden ReLU layer,
    # then a linear layer followed by softmax over classes.
    inputs_1 = tc.mm(inputs, self.weights_0_1) + self.bias1
    outputs_1 = self.relu(inputs_1)
    inputs_2 = tc.mm(outputs_1, self.weights_1_2) + self.bias2
    outputs_2 = inputs_2.softmax(1)
    # Zero out accumulated gradients (PyTorch accumulates .grad across
    # backward() calls). The None-guards handle the first iteration,
    # before any backward() has populated .grad.
    with tc.no_grad():
        if self.weights_0_1.grad is not None:
            self.weights_0_1.grad.zero_()
        if self.weights_1_2.grad is not None:
            self.weights_1_2.grad.zero_()
        if self.bias1.grad is not None:
            self.bias1.grad.zero_()
        if self.bias2.grad is not None:
            self.bias2.grad.zero_()
    # NOTE(review): this method looks truncated in the paste — outputs_2
    # is computed but never used, and no loss computation, backward()
    # call, or parameter update follows the gradient zeroing. Confirm
    # against the full original source before relying on this block.
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement