Advertisement
Not a member of Pastebin yet?
Sign Up —
it unlocks many cool features!
def forward(self, x):
    """Run the LSTM over a batch of sequences and map each sample to one scalar.

    Args:
        x: input tensor of shape (batch_size, seq_len); each timestep carries a
           single feature, so it is reshaped to (batch_size, seq_len, 1) before
           the LSTM. NOTE(review): assumes seq_len matches dense2's input size —
           confirm against the model constructor.

    Returns:
        Tensor of shape (batch_size,) with one prediction per sample.
    """
    # Zeroed initial hidden and cell states for every forward pass.
    # Created on x.device so the module works on CPU or GPU without
    # depending on a module-level `device` global (bug-prone coupling).
    h0 = torch.zeros(self.num_layers, self.batch_size, self.hidden_size1,
                     device=x.device)
    c0 = torch.zeros(self.num_layers, self.batch_size, self.hidden_size1,
                     device=x.device)

    # Add a trailing feature dimension: (batch, seq) -> (batch, seq, 1).
    x = x.view(self.batch_size, x.shape[1], 1)

    # out: (batch_size, seq_len, hidden_size1)
    out, _ = self.lstm(x, (h0, c0))

    # Per-timestep projection to one value, then drop the feature dim.
    out = self.tanh1(self.dense1(out))
    # Fix: was the bare global `batch_size`; use the instance attribute
    # consistently with the reshape above.
    out = out.view(self.batch_size, x.shape[1])

    # Sequence-wide projection down to a single scalar per sample.
    out = self.tanh2(self.dense2(out))
    out = self.dense3(out)
    return out.view(self.batch_size)
Advertisement
Add Comment
Please sign in to add a comment.
Advertisement