Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
from keras.layers import Dense, Dropout, Activation, Lambda, Input, LSTM
from keras.layers import Conv1D, MaxPooling1D, Flatten, TimeDistributed, Reshape
from keras.models import Model
import keras

# =============================================================================
# Spatial part: two parallel Conv1D feature extractors whose flattened
# outputs are concatenated into a single two-input model.
#   conv1d for temperature ---+
#                             +--> concatenate
#   conv1d for pressure   ----+
# =============================================================================

# --- Conv1D branch 1: temperature signal (200 time steps, 1 channel) ---
pnnl_temp = Input(shape=(200, 1))
connv_temp1 = Conv1D(filters=2, kernel_size=10, strides=2,
                     padding="valid", activation="relu")(pnnl_temp)
conv_maxpooling1 = MaxPooling1D(pool_size=3, strides=1)(connv_temp1)
connv_temp2 = Conv1D(filters=1, kernel_size=10, strides=2,
                     padding="valid", activation="relu")(conv_maxpooling1)
conv_maxpooling2 = MaxPooling1D(pool_size=2, strides=None)(connv_temp2)
# Total element count (steps * channels) of the pooled tensor.  Reshape is
# used instead of Flatten because Flatten returns a (?, ?) static shape here.
# FIX: the original split this product across two physical lines with no
# continuation, which is a SyntaxError; parenthesize the expression instead.
conv_maxpooling2_size = (conv_maxpooling2.get_shape().as_list()[-1]
                         * conv_maxpooling2.get_shape().as_list()[-2])
conv_flatter_temp = Reshape((conv_maxpooling2_size, 1))(conv_maxpooling2)

# --- Conv1D branch 2: pressure signal (same architecture as branch 1) ---
pnnl_pressure = Input(shape=(200, 1))
connv_pressure1 = Conv1D(filters=2, kernel_size=10, strides=2,
                         padding="valid", activation="relu")(pnnl_pressure)
conv_maxpooling_pressure1 = MaxPooling1D(pool_size=3, strides=1)(connv_pressure1)
connv_pressure2 = Conv1D(filters=1, kernel_size=10, strides=2,
                         padding="valid", activation="relu")(conv_maxpooling_pressure1)
conv_maxpooling_pressure2 = MaxPooling1D(pool_size=2, strides=None)(connv_pressure2)
conv_maxpooling2_size_pressure = (conv_maxpooling_pressure2.get_shape().as_list()[-1]
                                  * conv_maxpooling_pressure2.get_shape().as_list()[-2])
# FIX: the original reshaped with conv_maxpooling2_size (the TEMPERATURE
# branch's size) — a copy-paste slip.  The two branches produce equal sizes
# because their architectures are identical, but using the pressure branch's
# own size keeps the graph self-consistent if either branch ever changes.
conv_flatter_pressure = Reshape((conv_maxpooling2_size_pressure, 1))(conv_maxpooling_pressure2)

# Merge the two branches along the feature axis (axis=1 is the steps axis;
# the result keeps the trailing singleton channel).
output = keras.layers.concatenate([conv_flatter_pressure, conv_flatter_temp], axis=1)
# Two-input spatial feature extractor: [temperature, pressure] -> features.
spatial_model = Model([pnnl_temp, pnnl_pressure], output)
# =============================================================================
# Temporal part: apply the spatial model to every frame of a 224-step
# sequence, then summarize the per-frame features with an LSTM.
#   x1 (temperature frames) --+
#                             +--> TimeDistributed(spatial_model) --> LSTM
#   x2 (pressure frames)   ---+
# =============================================================================
x1 = Input(shape=(224, 200, 1))
x2 = Input(shape=(224, 200, 1))
# Stack the two signals on the channel axis so a single tensor can be passed
# through TimeDistributed, which only accepts one input tensor.
new_input = keras.layers.concatenate([x1, x2], axis=3)
# The Lambda splits the channels back apart per frame so the two-input
# spatial_model can be applied at every time step.
encoded_frame_sequence = TimeDistributed(
    Lambda(lambda x: spatial_model([x[:, :, 0:1], x[:, :, 1:]]))
)(new_input)
# Drop the trailing singleton channel: (224, 42, 1) -> (224, 42).
# NOTE(review): 42 is the concatenated feature length of the two spatial
# branches (21 + 21 per the pasted summary) — confirm if the spatial
# architecture changes.
new_encoded_frame_sequence = Reshape((224, 42))(encoded_frame_sequence)
lastm_1 = LSTM(52)(new_encoded_frame_sequence)
# Final model: [temperature sequence, pressure sequence] -> 52-dim embedding.
Temporal_model = Model([x1, x2], lastm_1)
- Layer (type) Output Shape Param # Connected to
- ==================================================================================================
- input_11 (InputLayer) (None, 224, 200, 1) 0
- __________________________________________________________________________________________________
- input_12 (InputLayer) (None, 224, 200, 1) 0
- __________________________________________________________________________________________________
- concatenate_6 (Concatenate) (None, 224, 200, 2) 0 input_11[0][0]
- input_12[0][0]
- __________________________________________________________________________________________________
- time_distributed_4 (TimeDistrib (None, 224, 42, 1) 0 concatenate_6[0][0]
- __________________________________________________________________________________________________
- reshape_9 (Reshape) (None, 224, 42) 0 time_distributed_4[0][0]
- __________________________________________________________________________________________________
- lstm_4 (LSTM) (None, 52) 19760 reshape_9[0][0]
- ==================================================================================================
- Total params: 19,760
- Trainable params: 19,760
- Non-trainable params: 0
- __________________________________________________________________________________________________
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement