Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- # VAE model = encoder(+sampling) + decoder
- # build encoder model
def encoder_model(inputs):
    """Build the encoder half of the VAE.

    Passes `inputs` through four fully-connected ReLU layers, then
    projects the last hidden layer into two parallel latent heads:
    the mean and the log-variance of q(z|x).

    # Arguments:
        inputs (tensor): Keras input tensor of shape (original_dim,).
    # Returns:
        (encoder, z_mean_encoded, z_log_var_encoded): the instantiated
        Keras Model and the two latent tensors it produces.
    """
    # Same four-layer stack as before, expressed as a loop over widths.
    hidden = inputs
    for width in (intermediate_dim_1, intermediate_dim_2,
                  intermediate_dim_3, intermediate_dim_4):
        hidden = Dense(width, activation='relu')(hidden)
    z_mean_encoded = Dense(latent_dim, name='z_mean')(hidden)
    z_log_var_encoded = Dense(latent_dim, name='z_log_var')(hidden)
    # instantiate encoder model
    encoder = Model(inputs, [z_mean_encoded, z_log_var_encoded], name='encoder')
    return encoder, z_mean_encoded, z_log_var_encoded
- # build decoder model
def decoder_model():
    """Build the decoder half of the VAE.

    Mirrors the encoder: a fresh latent input is expanded back through
    the same four hidden widths in reverse order, ending in a linear
    layer of size `original_dim` (reconstruction in input space).

    # Returns:
        decoder: instantiated Keras Model mapping z -> reconstruction.
    """
    latent_inputs = Input(shape=(latent_dim,), name='z_sampling')
    hidden = latent_inputs
    # Reverse of the encoder's hidden-layer widths.
    for width in (intermediate_dim_4, intermediate_dim_3,
                  intermediate_dim_2, intermediate_dim_1):
        hidden = Dense(width, activation='relu')(hidden)
    outputs = Dense(original_dim)(hidden)  # linear activation on purpose
    # instantiate decoder model
    decoder = Model(latent_inputs, outputs, name='decoder')
    return decoder
def sampling(args):
    """Reparameterization trick: sample from an isotropic unit Gaussian.

    # Arguments:
        args (tensor): mean and log of variance of Q(z|X)
    # Returns:
        z (tensor): sampled latent vector
    """
    z_mean, z_log_var = args
    # Noise shape: dynamic batch dimension x static latent dimension.
    noise_shape = (K.shape(z_mean)[0], K.int_shape(z_mean)[1])
    # K.random_normal defaults to mean=0, std=1.
    epsilon = K.random_normal(shape=noise_shape)
    # z = mu + sigma * eps, where sigma = exp(log_var / 2).
    return z_mean + K.exp(0.5 * z_log_var) * epsilon
def save_model(model, fName):
    """Dump a Keras model's weight arrays to an HDF5 file.

    Each weight array is written as its own dataset named 'ww<i>', where
    <i> is the array's position in model.get_weights() — matching what a
    corresponding loader would read back in order.

    # Arguments:
        model: Keras model whose weights are saved.
        fName (str): destination HDF5 path (overwritten).
    """
    ww = model.get_weights()
    # Context manager guarantees the HDF5 handle is closed even if a
    # create_dataset call raises (the original leaked the open file on error).
    with h5py.File(fName, 'w') as ff:
        for i, w in enumerate(ww):
            ff.create_dataset('ww' + str(i), data=w)
    print(fName, 'saved')
def save_sclr(sclr,fName):
    """Persist a fitted scaler object to `fName` via joblib and log the path.

    # NOTE(review): assumes `joblib` is imported earlier in the file — confirm.
    """
    joblib.dump(sclr,fName)
    print(fName,'saved')
def save_x_sclr(sclr,fName):
    """Write the string representation of `sclr` to a text file at `fName`.

    The file is truncated/created, receives exactly str(sclr), and the
    saved path is echoed to stdout.
    """
    text = str(sclr)
    with open(fName,'w') as out_file:
        out_file.write(text)
    print(fName,'saved')
if __name__ == '__main__':
    # tensorboard = TensorBoard(log_dir = "logs/{}".format(time()))
    # Hold out 20% of the data for validation.
    # NOTE(review): Cp_inputs / X_all must be defined earlier in the file — confirm.
    x_trn,x_val,y_trn,y_val = train_test_split(Cp_inputs, X_all, test_size=0.2,shuffle=True,random_state=0)
    # Feature count; read as a module-level global by decoder_model().
    original_dim = x_trn.shape[1]
    x_trn = np.reshape(x_trn, [-1, original_dim])
    x_val = np.reshape(x_val, [-1, original_dim])
    # x_trn = x_trn.astype('float32')
    # x_val = x_val.astype('float32')
    input_shape = (original_dim, )
    inputs = Input(shape=input_shape, name='encoder_input')
    # Define Intermediate Layer Dimension and Latent layer Dimension.
    # These globals are read by encoder_model(), decoder_model() and sampling().
    intermediate_dim_1 = 128
    intermediate_dim_2 = 256
    intermediate_dim_3 = 128
    intermediate_dim_4 = 64
    latent_dim = 3
    # Define batch_size / epochs
    epochs = 2000
    batch_size = 128
    encoder, z_mean_encoded, z_log_var_encoded = encoder_model(inputs)
    # use reparameterization trick to push the sampling out as input
    # note that "output_shape" isn't necessary with the TensorFlow backend
    z_sampled = Lambda(sampling, output_shape=(latent_dim,), name='z')([z_mean_encoded, z_log_var_encoded]) # Reparameterization Trick
    decoder = decoder_model()
    # instantiate VAE model
    outputs = decoder(z_sampled) # z_sampled = sampled z from [z_mean_encoded and z_log_var_encoded]
    vae = Model(inputs, outputs, name='vae_mlp')
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement