Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Shared hyper-parameters for generator() / discriminator() below.
cell_size = 8               # filter count used by the discriminator conv layers
im_size = (8, 8)            # spatial size (height, width) of the images
input_size = (*im_size, 3)  # full image shape: (height, width, RGB channels)
gen_params = 50             # length of the generator's latent (noise) vector
def discriminator():
    """Build the GAN discriminator: image of shape ``input_size`` -> real/fake score.

    Returns:
        An uncompiled ``keras.models.Sequential`` ending in a single sigmoid
        unit, so the output lies in (0, 1).
    """
    m = keras.models.Sequential()
    # Upsample first so the following 5x5 conv sees a larger spatial grid.
    m.add(keras.layers.UpSampling2D(input_shape=input_size))
    m.add(keras.layers.Conv2D(cell_size, 5, padding='same'))
    m.add(keras.layers.Dropout(0.4))
    m.add(keras.layers.BatchNormalization(momentum=0.9))
    m.add(keras.layers.LeakyReLU(alpha=0.2))
    # Two identical 1x1-conv stacks: mix channels without changing the
    # spatial size.  (The original passed input_shape= to these layers too;
    # Keras ignores it on any layer but the first, so it was dropped here.)
    for _ in range(2):
        m.add(keras.layers.Conv2D(cell_size, 1, padding='same'))
        m.add(keras.layers.Dropout(0.4))
        m.add(keras.layers.BatchNormalization(momentum=0.9))
        m.add(keras.layers.LeakyReLU(alpha=0.2))
    m.add(keras.layers.Flatten())
    # Single sigmoid unit: probability that the input image is real.
    m.add(keras.layers.Dense(1, activation=keras.activations.sigmoid))
    return m
def generator():
    """Build the GAN generator: latent vector of length ``gen_params`` -> image.

    Returns:
        An uncompiled ``keras.models.Sequential`` whose output has shape
        ``input_size``, with pixel values in (0, 1) (sigmoid).

    Raises:
        ValueError: if the built model's output shape does not match
            ``input_size``.
    """
    m = keras.models.Sequential()
    # Project the latent vector to one value per output pixel/channel.
    # Derived from input_size rather than hard-coding 8 * 8 * 3, so the
    # constants stay the single source of truth.
    flat_size = int(np.prod(input_size))
    m.add(keras.layers.Dense(flat_size, input_dim=gen_params))
    m.add(keras.layers.BatchNormalization(momentum=0.9))
    m.add(keras.layers.LeakyReLU(alpha=0.2))
    m.add(keras.layers.Dropout(0.4))
    m.add(keras.layers.Reshape(input_size))  # was hard-coded (8, 8, 3)
    m.add(keras.layers.Conv2DTranspose(4, 1,
                                       padding='same',
                                       kernel_initializer='lecun_normal'))
    m.add(keras.layers.BatchNormalization(momentum=0.9))
    m.add(keras.layers.LeakyReLU(alpha=0.2))
    m.add(keras.layers.Dropout(0.4))
    # Map back to the image channel count; sigmoid keeps pixels in (0, 1).
    m.add(keras.layers.Conv2DTranspose(input_size[-1], 5,
                                       padding='same',
                                       activation=keras.activations.sigmoid))
    # The original used `assert ..., m.summary()`: asserts vanish under
    # `python -O`, and m.summary() returns None (it only prints), so the
    # message was useless.  Raise explicitly instead.
    out_shape = m.layers[-1].output_shape[1:]
    if not np.array_equal(out_shape, input_size):
        raise ValueError(
            f"generator output shape {out_shape} != expected {input_size}")
    return m
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement