Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Stack the per-feature embeddings into a single (1, doc_size, total_dim)
# "image" so 2-D convolutions can slide over token positions
# (channels-first layout: the leading 1 is the channel axis).
seq = concat(embeddings)
cshape = (config.doc_size, sum(f.output_dim for f in features))
seq = Reshape((1,) + cshape)(seq)

# One conv + max-pool branch per (filter_size, filter_num) pair; each
# kernel spans the full feature dimension, and pooling over all valid
# positions collapses each branch to a fixed-size vector.
convLayers = []
for filter_size, filter_num in zip(config.filter_sizes, config.filter_nums):
    # Keras 2 API: kernel dims are passed as one tuple, and the old
    # border_mode/dim_ordering kwargs were renamed padding/data_format
    # (the Keras 1 names raise TypeError in Keras 2, which this file
    # uses elsewhere, e.g. kernel_initializer/recurrent_dropout).
    branch = Convolution2D(
        filter_num,
        (filter_size, cshape[1]),
        padding='valid',
        activation='relu',
        data_format='channels_first',
    )(seq)
    branch = MaxPooling2D(
        pool_size=(config.doc_size - filter_size + 1, 1),
        data_format='channels_first',
    )(branch)
    convLayers.append(branch)
seq = concat(convLayers)
# Optional dropout on the concatenated conv features, then a stack of
# fully-connected ReLU layers.
if config.drop_prob:
    seq = Dropout(config.drop_prob)(seq)
for hidden_size in config.hidden_sizes:
    seq = Dense(hidden_size, activation='relu')(seq)
# Reshape the flat feature vector into a (timesteps, features) sequence
# for the recurrent encoder below.
# NOTE(review): (200, 3) is hard-coded — presumably the last hidden size
# must equal 600 for this to work; confirm against config.hidden_sizes.
seq = Reshape((200, 3))(seq)
# Sentence-level encoder: a bidirectional GRU over the reshaped feature
# sequence, followed by an attention layer that pools the timestep
# outputs into a single sentence vector.
sent_lstm = Bidirectional(
    GRU(
        100,
        name='blstm_1',
        activation='tanh',
        recurrent_activation='hard_sigmoid',
        recurrent_dropout=0.0,
        dropout=0.4,
        kernel_initializer='glorot_uniform',
        return_sequences=True,
    ),
    merge_mode='concat',
)(seq)
sent_att_layer = AttentionWithContext()(sent_lstm)
# Document input: MAX_SEQ_LEN sentences, each a row of doc_size token ids.
input_layer = Input(shape=(MAX_SEQ_LEN, config.doc_size),
                    dtype='int32', name="input_2")
# Wrap the sentence pipeline as a sub-model and apply it to every
# sentence of the document via TimeDistributed, producing one encoded
# vector per sentence.
sentEncoder = Model(inputs[0], sent_att_layer)
textEncoder = TimeDistributed(sentEncoder)(input_layer)
drop2 = Dropout(0.4)(textEncoder)
# Document-level encoder: a bidirectional GRU over the per-sentence
# vectors, batch-normalised, then attention-pooled into one document
# representation with a final dropout before classification.
lstm_1 = Bidirectional(
    GRU(
        100,
        name='blstm_2',
        activation='tanh',
        recurrent_activation='hard_sigmoid',
        recurrent_dropout=0.0,
        dropout=0.4,
        kernel_initializer='glorot_uniform',
        return_sequences=True,
    ),
    merge_mode='concat',
)(drop2)
lstm_1 = BatchNormalization()(lstm_1)
att_layer = AttentionWithContext()(lstm_1)
drop3 = Dropout(0.5)(att_layer)
# Multi-label output head: one sigmoid unit per target label.
# Keras 2 API fixes: init=/W_regularizer= were renamed to
# kernel_initializer=/kernel_regularizer=, and Model() now takes
# inputs=/outputs= — the Keras 1 kwarg names raise TypeError under the
# Keras 2 API this file already uses. Also dropped a stray debug print.
out = Dense(
    data.documents.target_dim,
    kernel_initializer=my_init,
    kernel_regularizer=W_regularizer(config),
    activation='sigmoid',
)(drop3)
model = Model(inputs=input_layer, outputs=out)
if config.verbosity != 0:
    # model.summary() prints to stdout and returns None, so the original
    # logging.info(model.summary()) printed to stdout and then logged
    # "None". Route each summary line through the logger instead.
    model.summary(print_fn=logging.info)
optimizer = get_optimizer(config)
# Binary cross-entropy: independent per-label sigmoid targets
# (multi-label classification).
model.compile(
    loss='binary_crossentropy',
    optimizer=optimizer,
    metrics=['accuracy'],
)
# Stop training once validation loss has failed to improve for 6
# consecutive epochs.
early_stopping = EarlyStopping(monitor='val_loss', patience=6,
                               verbose=0, mode='auto')
weights, results = [], {}
# Per-epoch timing, early stopping, and devel-set evaluation (results
# are accumulated into the `results` dict by the evaluator callback).
callbacks = [
    EpochTimer(),
    early_stopping,
    evaluator(data.devel, model, label='devel', results=results),
]
# Train on document-level inputs/targets, validating on the devel set.
# Keras 2 API: nb_epoch= was renamed to epochs= (the old name is removed
# in Keras 2, which this file's other layer kwargs require).
# NOTE(review): the original paste is cut off mid-call; the closing
# paren is reconstructed here — confirm no further fit() kwargs followed.
hist = model.fit(
    data.train.documents.inputs,
    data.train.documents.targets,
    validation_data=(
        data.devel.documents.inputs,
        data.devel.documents.targets,
    ),
    batch_size=config.batch_size,
    epochs=config.epochs,
    callbacks=callbacks,
    verbose=config.verbosity,
)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement