from keras import backend as K
from keras.models import Model
from keras.layers import (Layer, Input, Conv2D, BatchNormalization, Activation,
                          LeakyReLU, PReLU, ELU, GlobalAveragePooling2D, Dense)

class G_MAX_AVP_P(Layer):
    """Second-order pooling: flatten each sample's outer product x x^T."""

    def __init__(self, **kwargs):
        super(G_MAX_AVP_P, self).__init__(**kwargs)

    def build(self, input_shape):
        super(G_MAX_AVP_P, self).build(input_shape)  # Be sure to call this at the end

    def call(self, x):
        # x: (batch, channels). The original K.dot(K.transpose(x), x) computed one
        # Gram matrix over the whole batch, mixing samples and dropping the batch
        # dimension; batch_dot keeps one outer product per sample.
        outer = K.batch_dot(K.expand_dims(x, 2), K.expand_dims(x, 1))
        return K.reshape(outer, (-1, K.int_shape(x)[1] ** 2))

    def compute_output_shape(self, input_shape):
        # Fixed: the original referenced self.input_shape instead of the argument.
        return (input_shape[0], input_shape[1] * input_shape[1])
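# Quick shape check for the layer (illustrative values, not from the original
# paste): for x = K.ones((2, 4)), G_MAX_AVP_P()(x) has shape (2, 16) -- each
# row is the flattened 4x4 outer product of that sample's feature vector.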
class MULTI_ACTIVATION:
    @staticmethod
    def build(input_shape, classes):
        # instantiate the model input
        input_tensor = Input(shape=input_shape)

        # Conv Block 1
        x = Conv2D(64, (8, 8), padding='same')(input_tensor)
        x = BatchNormalization()(x)
        x = Activation('relu')(x)
        x = Conv2D(64, (8, 8), padding='same')(x)
        x = BatchNormalization()(x)
        x = Activation('relu')(x)
        x = Conv2D(64, (8, 8), padding='same')(x)
        x = BatchNormalization()(x)
        x = Activation('relu')(x)

        # Conv Block 2
        x = Conv2D(128, (5, 5), padding='same')(x)
        x = BatchNormalization()(x)
        x = LeakyReLU(alpha=0.2)(x)

        # Conv Block 3
        x = Conv2D(256, (3, 3), padding='same')(x)
        x = BatchNormalization()(x)
        x = PReLU()(x)

        # Conv Block 4
        x = Conv2D(128, (5, 5), padding='same')(x)
        x = BatchNormalization()(x)
        x = ELU(alpha=0.3)(x)

        # Second-order pooling head: average-pool to a channel vector, then
        # expand it with the outer-product layer before the classifier.
        x = GlobalAveragePooling2D()(x)
        x = G_MAX_AVP_P()(x)
        output_tensor = Dense(units=classes, activation='softmax')(x)

        model = Model(input_tensor, output_tensor)
        model.summary()
        return model
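
# Minimal usage sketch; the 32x32x3 input shape, 10 classes, and compile
# settings below are hypothetical values for illustration, not taken from
# the original paste.
if __name__ == '__main__':
    model = MULTI_ACTIVATION.build(input_shape=(32, 32, 3), classes=10)
    model.compile(optimizer='adam', loss='categorical_crossentropy',
                  metrics=['accuracy'])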