def forward(self, batch):
    # batch is a dict of already-vectorized tensors:
    #   'tokens': [batch_size, seq_len, embedding_dim]
    #   'head':   [batch_size, seq_len, char_pad_len, char_embedding_dim]
    #   'tail':   [batch_size, seq_len, char_pad_len, char_embedding_dim]
    tokens, head, tail = batch['tokens'], batch['head'], batch['tail']
    ...

def training_step(self, data_batch, batch_nb):
    texts, labels = data_batch
    texts['tokens'] = self.token_vectorizer.vectorize(texts['tokens'])     # [batch_size, seq_len, embedding_dim]
    texts['head'] = self.char_vectorizer.vectorize_batches(texts['head'])  # [batch_size, seq_len, char_pad_len, char_embedding_dim]
    texts['tail'] = self.char_vectorizer.vectorize_batches(texts['tail'])  # [batch_size, seq_len, char_pad_len, char_embedding_dim]
    scores = self.forward(texts)
    labels = labels.contiguous().view(-1)  # flatten to [batch_size * seq_len]
    loss = self.my_loss(scores, labels)
    return {'loss': loss}
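
For reference, here is a minimal sketch of what the elided forward body could look like, assuming the model is a per-token classifier (e.g. a sequence tagger). Everything below is hypothetical, not taken from the paste: the max-pooling over the char dimension, the layer names (encoder, classifier), the hidden sizes, and the Tagger class itself are illustrative assumptions. The flattened scores are shaped to line up with labels.contiguous().view(-1) above, so my_loss could be a plain nn.CrossEntropyLoss().

import torch
import torch.nn as nn

class Tagger(nn.Module):
    # Hypothetical module; dims and layer choices are assumptions for the sketch.
    def __init__(self, embedding_dim=100, char_embedding_dim=30,
                 hidden_dim=128, num_tags=10):
        super().__init__()
        # Token embedding concatenated with pooled head/tail char embeddings.
        input_dim = embedding_dim + 2 * char_embedding_dim
        self.encoder = nn.LSTM(input_dim, hidden_dim, batch_first=True)
        self.classifier = nn.Linear(hidden_dim, num_tags)

    def forward(self, batch):
        # batch: {'tokens': [B, S, E], 'head': [B, S, C, Ec], 'tail': [B, S, C, Ec]}
        tokens, head, tail = batch['tokens'], batch['head'], batch['tail']
        # Pool away the char_pad_len dimension so head/tail become [B, S, Ec].
        head = head.max(dim=2).values
        tail = tail.max(dim=2).values
        x = torch.cat([tokens, head, tail], dim=-1)  # [B, S, E + 2*Ec]
        out, _ = self.encoder(x)                     # [B, S, hidden_dim]
        scores = self.classifier(out)                # [B, S, num_tags]
        # Flatten so scores pair with labels.view(-1) in training_step.
        return scores.view(-1, scores.size(-1))      # [B*S, num_tags]

# Quick check with dummy tensors (B=2, S=5, C=8):
model = Tagger()
dummy = {'tokens': torch.randn(2, 5, 100),
         'head': torch.randn(2, 5, 8, 30),
         'tail': torch.randn(2, 5, 8, 30)}
print(model(dummy).shape)  # torch.Size([10, 10])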