# Imports and helpers the paste relies on. one_hot_encode is assumed to be
# defined earlier in the source notebook (a sketch is given below); the
# train_on_gpu flag is reconstructed here in the usual way.
import numpy as np
import torch
import torch.nn.functional as F

train_on_gpu = torch.cuda.is_available()

# Defining a method to generate the next character
def predict(net, char, h=None, top_k=None):
    ''' Given a character, predict the next character.
        Returns the predicted character and the hidden state.
    '''
    # tensor inputs: integer-encode the character, then one-hot encode it
    x = np.array([[net.char2int[char]]])
    x = one_hot_encode(x, len(net.chars))
    inputs = torch.from_numpy(x)

    if train_on_gpu:
        inputs = inputs.cuda()

    # detach the hidden state from history so gradients don't flow back through it
    if h is not None:
        h = tuple([each.data for each in h])

    # get the output of the model
    out, h = net(inputs, h)

    # get the character probabilities
    p = F.softmax(out, dim=1).data
    if train_on_gpu:
        p = p.cpu()  # move back to the CPU for numpy

    # get the candidate characters: the whole vocabulary, or only the top_k
    if top_k is None:
        top_ch = np.arange(len(net.chars))
    else:
        p, top_ch = p.topk(top_k)
        top_ch = top_ch.numpy().squeeze()

    # select the likely next character with some element of randomness
    p = p.numpy().squeeze()
    char = np.random.choice(top_ch, p=p/p.sum())

    # return the predicted character and the hidden state
    return net.int2char[char], h
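
The function above depends on a one_hot_encode helper and is normally driven by a sampling loop, both of which live elsewhere in the notebook this paste was taken from. The sketch below is not part of the original paste: one_hot_encode is a plausible reconstruction of that helper, and sample (with its size, prime and top_k parameters) is an illustrative way of calling predict repeatedly to generate text. It reuses the imports and the train_on_gpu flag defined above, and assumes the network's forward pass accepts hidden=None and, like torch.nn.LSTM, treats it as a zero initial state.

def one_hot_encode(arr, n_labels):
    # one row per element of arr, one column per possible character
    one_hot = np.zeros((arr.size, n_labels), dtype=np.float32)
    one_hot[np.arange(one_hot.shape[0]), arr.flatten()] = 1.0
    # restore the original (batch, seq) shape with the label axis appended
    return one_hot.reshape((*arr.shape, n_labels))

def sample(net, size, prime='The', top_k=None):
    # illustrative only: generate `size` characters after priming on `prime`
    if train_on_gpu:
        net.cuda()
    else:
        net.cpu()
    net.eval()  # evaluation mode: disables dropout

    # run the prime string through the network to build up a hidden state
    chars = [ch for ch in prime]
    h = None
    for ch in prime:
        char, h = predict(net, ch, h, top_k=top_k)
    chars.append(char)

    # feed each predicted character back in to get the next one
    for _ in range(size):
        char, h = predict(net, chars[-1], h, top_k=top_k)
        chars.append(char)

    return ''.join(chars)

# Example call, assuming `net` is a trained character-level RNN:
# print(sample(net, 500, prime='Anna', top_k=5))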