import torch
import torch.nn as nn

torch.manual_seed(1)


class LSTM(nn.Module):
    """LSTM encoder: mean-pools bidirectional outputs over time, otherwise
    returns the final hidden state of the last layer."""

    def __init__(self, config):
        super(LSTM, self).__init__()
        self.config = config
        self.lstm = nn.LSTM(input_size=config["input_size"],
                            hidden_size=config["hidden_size"],
                            num_layers=config["num_layers"],
                            dropout=config["dropout"],
                            bidirectional=config["bidirectional"],
                            batch_first=True)

    def forward(self, inputs, lengths=None):
        # inputs: (batch, seq_len, input_size); lengths: per-sequence lengths
        batch_size = inputs.size(0)
        seq_len = inputs.size(1)

        if lengths is not None:
            inputs = nn.utils.rnn.pack_padded_sequence(inputs, lengths, batch_first=True)

        # Zero initial states: (num_layers * num_directions, batch, hidden_size)
        num_directions = 2 if self.config["bidirectional"] else 1
        state_shape = (self.config["num_layers"] * num_directions,
                       batch_size,
                       self.config["hidden_size"])
        data = inputs.data if lengths is not None else inputs
        h0 = c0 = data.new_zeros(state_shape)

        outputs, (ht, ct) = self.lstm(inputs, (h0, c0))

        if lengths is not None:
            outputs, o_lengths = nn.utils.rnn.pad_packed_sequence(outputs, batch_first=True)
            o_lengths = o_lengths.float().unsqueeze(1).to(outputs.device)
        else:
            # Without explicit lengths, every sequence counts as full length.
            o_lengths = outputs.new_full((batch_size, 1), float(seq_len))

        if self.config["bidirectional"]:
            # Mean-pool the concatenated forward/backward outputs over time.
            return torch.sum(outputs, dim=1) / o_lengths

        # Unidirectional: final hidden state of the last layer.
        return ht[-1]
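
A minimal usage sketch (not part of the original paste), assuming hypothetical config values and a padded batch already sorted by descending length, as pack_padded_sequence expects by default:

# Hypothetical configuration; the keys mirror what LSTM reads above.
config = {"input_size": 8, "hidden_size": 16, "num_layers": 2,
          "dropout": 0.2, "bidirectional": True}
model = LSTM(config)

# Padded batch of 3 sequences, shape (batch, seq_len, input_size),
# sorted by descending true length.
inputs = torch.randn(3, 5, 8)
lengths = torch.tensor([5, 4, 2])

output = model(inputs, lengths)  # (batch, 2 * hidden_size) when bidirectional
print(output.shape)              # torch.Size([3, 32])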