import numpy as np
import torch


# Defining a method to make mini-batches for training
def get_batches(arr, batch_size, seq_length):
    '''Create a generator that returns batches of size
       batch_size x seq_length from arr.

       Arguments
       ---------
       arr: Array you want to make batches from
       batch_size: Batch size, the number of sequences per batch
       seq_length: Number of encoded chars in a sequence
    '''

    batch_size_total = batch_size * seq_length
    # Total number of full batches we can make
    n_batches = len(arr) // batch_size_total

    # Keep only enough characters to make full batches
    arr = arr[:n_batches * batch_size_total]
    # Reshape into batch_size rows
    arr = arr.reshape((batch_size, -1))

    # Iterate through the array, one sequence at a time
    for n in range(0, arr.shape[1], seq_length):
        # The features
        x = arr[:, n:n+seq_length]
        # The targets, shifted by one character
        y = np.zeros_like(x)
        try:
            y[:, :-1], y[:, -1] = x[:, 1:], arr[:, n+seq_length]
        except IndexError:
            # Last batch: wrap the final target around to the start of the array
            y[:, :-1], y[:, -1] = x[:, 1:], arr[:, 0]
        yield x, y

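# A quick shape sanity check (a minimal sketch; `encoded` stands in for your
# integer-encoded text and is purely illustrative, not part of the original paste):
encoded = np.arange(100)
batches = get_batches(encoded, batch_size=2, seq_length=10)
x, y = next(batches)
print(x.shape, y.shape)  # (2, 10) (2, 10) -- y is x shifted left by one step
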
# Check if a GPU is available
train_on_gpu = torch.cuda.is_available()
if train_on_gpu:
    print('Training on GPU!')
else:
    print('No GPU available, training on CPU; consider making n_epochs very small.')
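
# The flag is typically used to move the model and data onto the GPU before
# training. A minimal sketch -- the `net` module below is a hypothetical
# placeholder, not part of the original paste:
net = torch.nn.Linear(10, 10)
if train_on_gpu:
    net.cuda()  # move model parameters to GPU memory
# Inside the training loop, the input and target tensors would be moved the
# same way, e.g. `inputs, targets = inputs.cuda(), targets.cuda()`.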