Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
#### MLP tower (Neural Collaborative Filtering style)
# Builds `MLP`: a Keras model mapping (user_id, item_id) index pairs to a
# sigmoid interaction score in [0, 1].
# Fix: the original mixed Keras 1 API (merge(mode=...), init=, Model(input=/output=))
# with Keras 2 API (kernel_regularizer, embeddings_regularizer) and so could not
# run on either version; everything is now consistent Keras 2.
from keras.layers import multiply  # same package the other layers come from

# User/item ID inputs: one integer index per example.
mlp_user_input = Input(shape=(1,), dtype='int32', name='user_input')
mlp_item_input = Input(shape=(1,), dtype='int32', name='item_input')

# L2-regularized embedding tables mapping each ID to a latent_dim-dim vector.
# n_users / n_prods / latent_dim are defined elsewhere in the file.
MLP_Embedding_User = Embedding(input_dim=n_users, output_dim=latent_dim,
                               name='user_embedding', input_length=1,
                               embeddings_regularizer=regularizers.l2(1e-5))
MLP_Embedding_Item = Embedding(input_dim=n_prods, output_dim=latent_dim,
                               name='item_embedding', input_length=1,
                               embeddings_regularizer=regularizers.l2(1e-5))

# Crucial to flatten: Embedding emits (batch, 1, latent_dim); the Dense
# stack below expects (batch, latent_dim).
mlp_user_latent = Flatten()(MLP_Embedding_User(mlp_user_input))
mlp_item_latent = Flatten()(MLP_Embedding_Item(mlp_item_input))

# 0-th "layer": element-wise product of the two embedding vectors.
# NOTE(review): the original comment said "concatenation", but the code used
# merge(mode='mul'); the product behavior is preserved here. The canonical NCF
# MLP tower concatenates instead — confirm which was intended.
vector = multiply([mlp_user_latent, mlp_item_latent])

num_layer = 3
# NOTE(review): layers[0] (200) is never used because the loop starts at
# index 1 — possibly a leftover from a concat-sized first layer; confirm.
layers = [200, 100, 50]

# Hidden MLP layers: ReLU with L2 weight and L1 activity regularization.
for idx in range(1, num_layer):
    layer = Dense(layers[idx],
                  kernel_regularizer=regularizers.l2(0.01),
                  activity_regularizer=regularizers.l1(0.01),
                  activation='relu',
                  name='layer%d' % idx)
    vector = layer(vector)

# Final prediction layer: sigmoid score for the (user, item) pair.
# kernel_initializer is the Keras 2 spelling of the legacy init= argument.
mlp_prediction = Dense(1, activation='sigmoid',
                       kernel_initializer='lecun_uniform',
                       name='prediction')(vector)

# inputs=/outputs= are the Keras 2 spellings of the legacy input=/output=.
MLP = Model(inputs=[mlp_user_input, mlp_item_input], outputs=mlp_prediction)
Add Comment
Please sign in to add a comment.