from tensorflow.keras.layers import Input, Embedding, LSTM, Dense, concatenate
from tensorflow.keras.models import Model
from tensorflow.keras import regularizers
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import (EarlyStopping, ModelCheckpoint,
                                        ReduceLROnPlateau)

# Claim branch: embed the padded claim token sequences, then encode with an LSTM.
claim_input = Input(shape=(train_max_length,), name='claim_input')
claim_emb = Embedding(output_dim=50, input_dim=num_words,
                      input_length=train_max_length)(claim_input)
claim_out = LSTM(128, dropout=0.3, recurrent_dropout=0.3,
                 kernel_regularizer=regularizers.l2(0.01))(claim_emb)

# Evidence branch: same architecture over the evidence sequences.
evidence_input = Input(shape=(evid_max_length,), name='evidence_input')
evidence_emb = Embedding(output_dim=50, input_dim=num_words,
                         input_length=evid_max_length)(evidence_input)
evidence_out = LSTM(128, dropout=0.3, recurrent_dropout=0.3,
                    kernel_regularizer=regularizers.l2(0.01))(evidence_emb)

# Concatenate the two encodings and classify with a single sigmoid unit.
x = concatenate([claim_out, evidence_out])
x = Dense(1, activation='sigmoid')(x)
sum_model = Model(inputs=[claim_input, evidence_input], outputs=[x])
# summarize layers (summary() prints itself and returns None, so no print() wrapper)
sum_model.summary()

optimizer = Adam(learning_rate=1e-3)
sum_model.compile(loss='binary_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])

earlystopping = EarlyStopping(monitor='val_loss', patience=3, verbose=1)
# `checkpoint` was referenced in the callbacks list but never defined;
# the file path here is an assumption.
checkpoint = ModelCheckpoint('sum_model.h5', monitor='val_loss',
                             save_best_only=True, verbose=1)
lr_reduction = ReduceLROnPlateau(monitor='val_loss',
                                 factor=0.1, min_lr=1e-5, patience=0,
                                 verbose=1)
callbacks = [
    earlystopping,
    checkpoint,
    lr_reduction,
]

# Fit on the padded data arrays (claim_data, evidence_data), not on the
# Input tensors themselves. A validation split is needed so the callbacks
# can monitor val_loss; steps_per_epoch is dropped because it cannot be
# combined with batch_size when fitting NumPy arrays, and the callbacks
# list is now actually passed to fit().
sum_model.fit([claim_data, evidence_data],
              new_label,
              epochs=5,
              batch_size=128,
              validation_split=0.1,
              callbacks=callbacks)
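
The snippet leaves num_words, train_max_length, evid_max_length, claim_data, evidence_data, and new_label undefined. Below is a minimal sketch of one way to produce them with the Keras text utilities; claim_texts, evidence_texts, and labels are hypothetical inputs, and the vocabulary cap is an illustrative assumption, not part of the original paste.

# Minimal preprocessing sketch for the model above.
# ASSUMPTIONS: claim_texts / evidence_texts are parallel lists of raw
# strings, labels is a list of 0/1 targets, and num_words=20000 is an
# arbitrary vocabulary cap.
import numpy as np
from tensorflow.keras.preprocessing.text import Tokenizer
from tensorflow.keras.preprocessing.sequence import pad_sequences

num_words = 20000

# Fit one tokenizer over both text fields so they share a vocabulary,
# matching the shared input_dim=num_words in both Embedding layers.
tokenizer = Tokenizer(num_words=num_words)
tokenizer.fit_on_texts(claim_texts + evidence_texts)

claim_seqs = tokenizer.texts_to_sequences(claim_texts)
evidence_seqs = tokenizer.texts_to_sequences(evidence_texts)

train_max_length = max(len(s) for s in claim_seqs)
evid_max_length = max(len(s) for s in evidence_seqs)

# Pad to the fixed lengths the Input layers expect.
claim_data = pad_sequences(claim_seqs, maxlen=train_max_length)
evidence_data = pad_sequences(evidence_seqs, maxlen=evid_max_length)
new_label = np.asarray(labels)  # binary targets for the sigmoid output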