Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Transfer learning: MobileNetV2 ImageNet feature extractor with a small
# regression head (single ReLU output unit, MSE loss).
# Based on: https://towardsdatascience.com/transfer-learning-using-mobilenet-and-keras-c75daf7ff299
import numpy as np

import keras
from keras import backend as K
from keras.applications import MobileNetV2, imagenet_utils
from keras.applications.mobilenet import preprocess_input
from keras.layers import Dense, GlobalAveragePooling2D
from keras.models import Model
from keras.optimizers import Adam
from keras.preprocessing import image
from keras.preprocessing.image import ImageDataGenerator

# Load MobileNetV2 pretrained on ImageNet, discarding the final
# 1000-class classifier (include_top=False) so we can attach our own head.
base_model = MobileNetV2(weights='imagenet', include_top=False)

# Build the new head on top of the convolutional feature maps.
# NOTE(review): base_model layers are NOT frozen here — confirm whether the
# backbone should be trainable for this use case.
x = base_model.output
x = GlobalAveragePooling2D()(x)        # collapse spatial dims -> (batch, channels)
x = Dense(512, activation='relu')(x)   # hidden dense layer
preds = Dense(1, activation='relu')(x)  # single-unit ReLU output (regression, not softmax)

# BUG FIX: the original compiled and summarized `base_model`, which does not
# contain the new Dense head — the head was dead code. Wrap the full
# input -> preds graph in a new Model and compile/summarize that instead.
model = Model(inputs=base_model.input, outputs=preds)
model.compile(optimizer=Adam(), loss='mean_squared_error')
model.summary()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement