from keras import backend as K
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout, Activation, LeakyReLU
from keras.initializers import Constant

# Assumed to be defined elsewhere in the script:
#   d, h, w  - input depth, height, width (the original hints at 33x33 patches)
#   alp      - LeakyReLU negative slope
#   dropout  - dropout rate for the fully connected layers
#   classes  - number of output classes
K.set_image_data_format('channels_first')  # replaces the deprecated K.set_image_dim_ordering('th')

model = Sequential()
# First block: (CONV => LReLU) x 3 => MAXPOOL
model.add(Conv2D(64, kernel_size=(3, 3), padding="same", input_shape=(d, h, w), kernel_initializer='glorot_normal', bias_initializer=Constant(0.1)))
model.add(LeakyReLU(alpha=alp))
model.add(Conv2D(64, kernel_size=(3, 3), padding="same", kernel_initializer='glorot_normal', bias_initializer=Constant(0.1)))
model.add(LeakyReLU(alpha=alp))
model.add(Conv2D(64, kernel_size=(3, 3), padding="same", kernel_initializer='glorot_normal', bias_initializer=Constant(0.1)))
model.add(LeakyReLU(alpha=alp))
model.add(MaxPooling2D(pool_size=(3, 3), strides=(2, 2)))
# Second block: (CONV => LReLU) x 3 => MAXPOOL
model.add(Conv2D(128, kernel_size=(3, 3), padding="same", kernel_initializer='glorot_normal', bias_initializer=Constant(0.1)))
model.add(LeakyReLU(alpha=alp))
model.add(Conv2D(128, kernel_size=(3, 3), padding="same", kernel_initializer='glorot_normal', bias_initializer=Constant(0.1)))
model.add(LeakyReLU(alpha=alp))
model.add(Conv2D(128, kernel_size=(3, 3), padding="same", kernel_initializer='glorot_normal', bias_initializer=Constant(0.1)))
model.add(LeakyReLU(alpha=alp))
model.add(MaxPooling2D(pool_size=(3, 3), strides=(2, 2)))
# Fully connected head: (FC => LReLU => Dropout) x 2
model.add(Flatten())
model.add(Dense(256, kernel_initializer='glorot_normal', bias_initializer=Constant(0.1)))
model.add(LeakyReLU(alpha=alp))
model.add(Dropout(dropout))
model.add(Dense(256, kernel_initializer='glorot_normal', bias_initializer=Constant(0.1)))
model.add(LeakyReLU(alpha=alp))
model.add(Dropout(dropout))
# Classifier: FC => SOFTMAX
model.add(Dense(classes, kernel_initializer='glorot_normal', bias_initializer=Constant(0.1)))
model.add(Activation("softmax"))