Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
def prepare_sequence(seq, to_ix):
    """Encode a token sequence as a 1-D ``torch.long`` tensor of indices.

    Args:
        seq: iterable of tokens.
        to_ix: mapping from token to integer index.

    Raises:
        KeyError: if a token of *seq* is absent from *to_ix*.
    """
    return torch.tensor([to_ix[token] for token in seq], dtype=torch.long)
# First 900 examples form the training split.
training_data = dataset[0:900]

# Vocabulary: each word maps to a unique integer, in first-seen order.
# NOTE(review): the vocabulary is built over the FULL dataset, not just
# training_data — confirm this is intended (non-training words leak in).
word_to_ix = {}
for sent, tags in dataset:
    for word in sent:
        if word not in word_to_ix:
            word_to_ix[word] = len(word_to_ix)

# Tag set: each unique tag maps to its position in unique_tags.
tag_to_ix = {tag: ix for ix, tag in enumerate(unique_tags)}

# Model hyperparameters.
EMBEDDING_DIM = 64
HIDDEN_DIM = 64
Advertisement
Add Comment
Please sign in to add a comment.
Advertisement