import numpy as np
import keras
from keras.models import Model
from keras.layers import Input, Dense


def get_model1():
    inputs = Input(shape=(20,))
    x = inputs
    x = Dense(15)(x)
    x = Dense(10)(x)
    x = Dense(30, activation='softmax')(x)
    model = Model(inputs=inputs, outputs=x)
    return model


def main():
    model1 = get_model1()
    model1.compile(loss='categorical_crossentropy', optimizer='sgd')
    print(model1.summary())
    # Assume a scenario where the user sets the weights of model1 via some
    # pretraining strategy, such as stacked DAEs; the size of the last
    # layer is inconsequential, since it is typically ignored in pretraining.
    # Now, after the model is trained, we save the weights
    # (just using the initial weights here).
    model1.save_weights('pretrained_weights.h5')


if __name__ == '__main__':
    main()
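
The comments describe a scenario where the pretrained weights are later reused in a model whose final layer differs, but the paste stops at saving. Below is a minimal, hypothetical sketch of one way to pick the weights back up: a made-up get_model2 rebuilds the first two Dense layers and swaps in a new output, then calls Keras' load_weights with by_name=True so that only layers whose names match entries in the HDF5 file receive weights. The layer names 'dense_1' and 'dense_2' are assumptions about what Keras auto-assigned when model1 was saved (this is version- and session-dependent); naming the shared layers explicitly in both models is more robust in practice.

# Hypothetical follow-up (not part of the original paste): reuse the saved
# pretrained weights in a model with a different last layer.
from keras.models import Model
from keras.layers import Input, Dense


def get_model2():
    inputs = Input(shape=(20,))
    # Same sizes as the pretrained stack; the names assume Keras auto-named
    # model1's layers 'dense_1' and 'dense_2' when the weights were saved.
    x = Dense(15, name='dense_1')(inputs)
    x = Dense(10, name='dense_2')(x)
    # New output layer with a different size; its name matches nothing in the
    # weight file, so it keeps its fresh initialization.
    x = Dense(5, activation='softmax', name='new_output')(x)
    return Model(inputs=inputs, outputs=x)


model2 = get_model2()
# by_name=True copies weights only into layers whose names (and shapes) match
# entries in pretrained_weights.h5; unmatched layers are left untouched.
model2.load_weights('pretrained_weights.h5', by_name=True)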