# Imports added for a standalone Keras 2.x environment (tf.keras exposes the same API).
from keras import backend as K
from keras.layers import (Input, Embedding, Conv2D, MaxPooling2D,
                          Flatten, Dense, Lambda)
from keras.models import Model, Sequential
from keras.regularizers import l2

# Assumed to be defined elsewhere, outside this paste:
#   max_token          - vocabulary size for the Embedding layer
#   initialize_weights - kernel initializer (e.g. a RandomNormal instance)
#   initialize_bias    - bias initializer (e.g. a RandomNormal instance)

def get_siamese_model(input_shape):
    """Build a Siamese network: one shared convolutional encoder applied to two
    inputs, followed by an element-wise L1 distance and a sigmoid similarity score."""
    left_input = Input(shape=input_shape)
    right_input = Input(shape=input_shape)

    # Shared encoder: embedding -> three conv/pool stages -> dense encoding
    model = Sequential()
    model.add(Embedding(input_dim=max_token, output_dim=512, input_shape=input_shape))
    model.add(Conv2D(64, (10, 10), name="conv1", activation='relu',
                     kernel_initializer=initialize_weights, kernel_regularizer=l2(2e-4)))
    model.add(MaxPooling2D())
    model.add(Conv2D(128, (7, 7), name="conv2", activation='relu',
                     kernel_initializer=initialize_weights,
                     bias_initializer=initialize_bias, kernel_regularizer=l2(2e-4)))
    model.add(MaxPooling2D())
    model.add(Conv2D(128, (4, 4), name="conv3", activation='relu',
                     kernel_initializer=initialize_weights,
                     bias_initializer=initialize_bias, kernel_regularizer=l2(2e-4)))
    model.add(Flatten())
    model.add(Dense(4096, activation='sigmoid',
                    kernel_regularizer=l2(1e-3),
                    kernel_initializer=initialize_weights,
                    bias_initializer=initialize_bias))

    # Apply the same encoder (shared weights) to both inputs
    encoded_l = model(left_input)
    encoded_r = model(right_input)

    # Element-wise L1 distance between the two encodings
    L1_layer = Lambda(lambda tensors: K.abs(tensors[0] - tensors[1]))
    L1_distance = L1_layer([encoded_l, encoded_r])

    # Single sigmoid unit: probability that the two inputs match
    prediction = Dense(1, activation='sigmoid',
                       bias_initializer=initialize_bias)(L1_distance)

    siamese_net = Model(inputs=[left_input, right_input], outputs=prediction)
    return siamese_net
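
# A minimal usage sketch. The values below are assumptions for illustration only:
# the paste does not define max_token, initialize_weights, initialize_bias, the
# input shape, or the training setup, so the vocabulary size, initializer
# parameters, 105x105 input, and learning rate here are hypothetical.
from keras.initializers import RandomNormal
from keras.optimizers import Adam

max_token = 256                                          # assumed vocabulary size
initialize_weights = RandomNormal(mean=0.0, stddev=1e-2)  # assumed initializer
initialize_bias = RandomNormal(mean=0.5, stddev=1e-2)     # assumed initializer

siamese = get_siamese_model((105, 105))                  # assumed 2-D integer input
siamese.compile(loss='binary_crossentropy', optimizer=Adam(lr=6e-5))
siamese.summary()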