Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Siamese duplicate-question classifier (Keras 1.x API: Merge, dropout_W/U,
# nb_epoch).  Expects the following names in scope:
#   word_indices        -- vocab dict, word -> integer index
#   input_layer_matrix  -- pretrained embedding matrix, shape (len(word_indices)+1, 300)
#   qn1, qn2            -- padded integer sequences, shape (samples, 40) -- TODO confirm
#   responses           -- binary labels (duplicate / not duplicate) -- TODO confirm
EMBEDDING_DIM = 300  # width of the pretrained embedding vectors
SEQ_LEN = 40         # questions padded/truncated to 40 tokens


def _build_question_encoder():
    """Return one question-encoding branch: frozen embedding -> LSTM(300).

    The two question inputs use identically configured (but independently
    weighted) branches, so the construction is factored out instead of
    being written twice.
    """
    branch = Sequential()
    branch.add(Embedding(len(word_indices) + 1,
                         EMBEDDING_DIM,
                         weights=[input_layer_matrix],
                         input_length=SEQ_LEN,
                         trainable=False))  # keep pretrained vectors fixed
    branch.add(LSTM(EMBEDDING_DIM, dropout_W=0.2, dropout_U=0.2))
    return branch


model_qn1 = _build_question_encoder()
model_qn2 = _build_question_encoder()

# Concatenate the two 300-d encodings into one 600-d vector, batch-normalize,
# then feed a second LSTM before the sigmoid duplicate-probability output.
mixed_model = Sequential()
mixed_model.add(Merge([model_qn1, model_qn2], mode='concat'))
print(mixed_model.layers[-1].output_shape)
mixed_model.add(BatchNormalization())
print(mixed_model.layers[-1].output_shape)
# LSTM requires a 3-D (timesteps, features) input: reshape 600 -> (1, 600).
mixed_model.add(Reshape((1, 600)))
print(mixed_model.layers[-1].output_shape)
mixed_model.add(LSTM(600, dropout_W=0.2, dropout_U=0.2))
mixed_model.add(Dense(1))
mixed_model.add(Activation('sigmoid'))
mixed_model.compile(loss='binary_crossentropy', optimizer='adam',
                    metrics=['accuracy'])
mixed_model.fit([qn1, qn2], y=responses, batch_size=300, nb_epoch=1,
                verbose=1, validation_split=0.1, shuffle=True)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement