import numpy as np
from keras.layers.embeddings import Embedding

def pretrained_embedding_layer(word_to_vec_map, word_to_index):
    """
    Creates a Keras Embedding() layer and loads in pre-trained GloVe 50-dimensional vectors.

    Arguments:
    word_to_vec_map -- dictionary mapping words to their GloVe vector representation
    word_to_index -- dictionary mapping words to their indices in the vocabulary (400,001 words)

    Returns:
    embedding_layer -- a Keras Embedding() layer instance loaded with the pre-trained GloVe weights
    """

    vocab_len = len(word_to_index) + 1              # add 1 to fit the Keras Embedding layer (indices start at 1)
    emb_dim = word_to_vec_map["cucumber"].shape[0]  # dimensionality of the GloVe word vectors (= 50)

    # Initialize the embedding matrix as a numpy array of zeros of shape (vocab_len, emb_dim)
    emb_matrix = np.zeros((vocab_len, emb_dim))

    # Set row "index" of the embedding matrix to the word vector of the "index"th word in the vocabulary
    for word, index in word_to_index.items():
        emb_matrix[index, :] = word_to_vec_map[word]

    # Define the Keras Embedding layer with the correct input/output sizes and freeze it
    # (trainable=False) so the pre-trained GloVe vectors are not updated during training
    embedding_layer = Embedding(vocab_len, emb_dim, trainable=False)

    # Build the embedding layer; this is required before setting its weights.
    # The (None,) input shape lets the layer accept batches of any size.
    embedding_layer.build((None,))

    # Set the weights of the embedding layer to the embedding matrix. The layer is now pre-trained.
    embedding_layer.set_weights([emb_matrix])

    return embedding_layer
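
A minimal usage sketch, not part of the original paste: it assumes the standard glove.6B.50d.txt file from the Stanford GloVe project, and the loading loop here is an illustrative stand-in for however the two dictionaries were built upstream.

import numpy as np

# Illustrative loader: build word_to_index and word_to_vec_map from a GloVe text file.
# Indices start at 1 so that row 0 of the embedding matrix stays all-zero (e.g. for padding).
word_to_vec_map = {}
word_to_index = {}
with open("glove.6B.50d.txt", encoding="utf-8") as f:
    for i, line in enumerate(f, start=1):
        values = line.strip().split()
        word_to_index[values[0]] = i
        word_to_vec_map[values[0]] = np.array(values[1:], dtype=np.float64)

embedding_layer = pretrained_embedding_layer(word_to_vec_map, word_to_index)

# Sanity check: the row for "cucumber" should match its GloVe vector.
weights = embedding_layer.get_weights()[0]
print(weights[word_to_index["cucumber"]][:5])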