Advertisement
Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
#========================================================================================
# Network
# Bump the run counter so this training run gets its own graph/log ID.
runID = runID + 1

# NOTE: `import keras as keras` was a redundant self-alias — plain import is equivalent.
import keras
from keras import losses
from keras.models import Model
from keras.optimizers import SGD
from keras.layers import Input, Conv2D, Activation, Concatenate, Dense, LeakyReLU, BatchNormalization, AlphaDropout, Dropout, MaxPooling2D, Conv2DTranspose, Reshape, Permute, UpSampling2D
from keras.utils.vis_utils import plot_model
import tensorflow as tf
from keras.utils import multi_gpu_model

# He-uniform initialization pairs well with the LeakyReLU activations used below.
initializer = 'he_uniform'
# `in_shape` is defined earlier in the file (not visible here) — presumably (H, W, C).
input_img = Input(shape=in_shape, name='input_img')


def _enc_block(tensor, layer_name):
    """3x3 conv (48 filters, stride 1, 'same' padding) followed by LeakyReLU(0.1)."""
    tensor = Conv2D(48, kernel_size=3, strides=1, padding='same',
                    kernel_initializer=initializer, name=layer_name)(tensor)
    return LeakyReLU(alpha=0.1)(tensor)


# Encoder: five 2x-downsampling stages, 48 filters throughout.
# The POOLn tensors are kept as skip connections for the decoder.
encoder = _enc_block(input_img, 'ENC_CONV0')
encoder = _enc_block(encoder, 'ENC_CONV1')
POOL1 = MaxPooling2D((2, 2), name='POOL1')(encoder)
encoder = _enc_block(POOL1, 'ENC_CONV2')
POOL2 = MaxPooling2D((2, 2), name='POOL2')(encoder)
encoder = _enc_block(POOL2, 'ENC_CONV3')
POOL3 = MaxPooling2D((2, 2), name='POOL3')(encoder)
encoder = _enc_block(POOL3, 'ENC_CONV4')
POOL4 = MaxPooling2D((2, 2), name='POOL4')(encoder)
encoder = _enc_block(POOL4, 'ENC_CONV5')
encoder = MaxPooling2D((2, 2), name='POOL5')(encoder)
encoder = _enc_block(encoder, 'ENC_CONV6')
# Decoder: mirror of the encoder. Each stage upsamples 2x, concatenates the
# matching encoder skip tensor, then applies two conv+LeakyReLU layers.


def _conv_lrelu(tensor, filters, layer_name):
    """3x3 conv (stride 1, 'same' padding) followed by LeakyReLU(0.1)."""
    tensor = Conv2D(filters, kernel_size=3, strides=1, padding='same',
                    kernel_initializer=initializer, name=layer_name)(tensor)
    return LeakyReLU(alpha=0.1)(tensor)


def _dec_stage(tensor, skip, level):
    """One decoder stage: upsample, concat skip connection, two 96-filter convs."""
    tensor = UpSampling2D((2, 2), name='UPSAMPLE%d' % level)(tensor)
    tensor = keras.layers.concatenate([tensor, skip], name='CONCAT%d' % level)
    tensor = _conv_lrelu(tensor, 96, 'DEC_CONV%dA' % level)
    return _conv_lrelu(tensor, 96, 'DEC_CONV%dB' % level)


decoder = _dec_stage(encoder, POOL4, 5)
decoder = _dec_stage(decoder, POOL3, 4)
decoder = _dec_stage(decoder, POOL2, 3)
decoder = _dec_stage(decoder, POOL1, 2)

# Final stage concatenates the raw input and funnels channels 64 -> 32 -> 1.
decoder = UpSampling2D((2, 2), name='UPSAMPLE1')(decoder)
decoder = keras.layers.concatenate([decoder, input_img], name='CONCAT1')
decoder = _conv_lrelu(decoder, 64, 'DEC_CONV1A')
decoder = _conv_lrelu(decoder, 32, 'DEC_CONV1B')
# NOTE(review): the LeakyReLU after the 1-channel output conv squashes negative
# predictions to a 0.1 slope — presumably intended for non-negative image data;
# confirm this is deliberate rather than a linear output being intended.
decoder = _conv_lrelu(decoder, 1, 'DEC_CONV1C')
# Build the template model on the CPU so the per-GPU replicas created by
# multi_gpu_model share its weights (multi_gpu_model sets its own explicit
# device scopes internally when placing the replicas).
with tf.device('/cpu:0'):
    model = Model(input_img, decoder)
parallel_model = multi_gpu_model(model, gpus=2)

# Optimizer: Adam, lr 1e-3, betas (0.9, 0.99), epsilon 1e-8.
optimizer = keras.optimizers.Adam(lr=0.001, beta_1=0.9, beta_2=0.99, epsilon=1e-8)

# Compile the 2-GPU wrapper with mean squared logarithmic error and show the
# layer summary for this run.
parallel_model.compile(loss=losses.mean_squared_logarithmic_error, optimizer=optimizer)
parallel_model.summary()
#plot_model(model, to_file='./Graphs/'+str(runID)+'.png', show_shapes=True, show_layer_names=True)
# Hyperparameters
batch_size = 128
epochs = 100
validation_split = 0.2

# NOTE(review): runID was already incremented once at the top of the network
# section; this second bump makes the TensorBoard log dir differ from the
# (commented-out) plot_model filename — confirm the double increment is intended.
runID = runID + 1

# TensorBoard logging for this run.
tbCallBack = keras.callbacks.TensorBoard(
    log_dir='./Graphs/' + str(runID),
    histogram_freq=0,
    write_graph=True,
    write_images=True,
)

# Training data: `x` and `y` are prepared earlier in the file (not visible here).
x_train = x
y_train = y

history = parallel_model.fit(
    x_train,
    y_train,
    callbacks=[tbCallBack],
    epochs=epochs,
    batch_size=batch_size,
    validation_split=validation_split,
)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement