Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# LeNet-style CNN for 28x28 single-channel images with 10 output classes.
dim = 28        # input image height/width in pixels
nclasses = 10   # number of target classes

model = Sequential()
# First conv stage: two 5x5 conv layers (32 filters each),
# followed by 2x2 max-pooling and dropout.
model.add(Conv2D(filters=32, kernel_size=(5, 5), padding='same',
                 activation='relu', input_shape=(dim, dim, 1)))
model.add(Conv2D(filters=32, kernel_size=(5, 5), padding='same',
                 activation='relu'))
model.add(MaxPool2D(pool_size=(2, 2), strides=(2, 2)))
model.add(Dropout(0.2))
# Second conv stage doubles the filter count to 64.
model.add(Conv2D(filters=64, kernel_size=(5, 5), padding='same',
                 activation='relu'))
model.add(Conv2D(filters=64, kernel_size=(5, 5), padding='same',
                 activation='relu'))
model.add(MaxPool2D(pool_size=(2, 2), strides=(2, 2)))
model.add(Dropout(0.2))
# Classifier head: flatten, two dense ReLU layers, softmax output.
model.add(Flatten())
model.add(Dense(120, activation='relu'))
model.add(Dense(84, activation='relu'))
model.add(Dense(nclasses, activation='softmax'))
# Plain SGD optimizer; `learning_rate` replaces the deprecated `lr`
# keyword, which was removed in recent TF2/Keras releases.
opt = SGD(learning_rate=0.001)

# Shrink the learning rate by 10% whenever validation accuracy plateaus
# for 25 epochs, down to a floor of 1e-6.  With metrics=["accuracy"],
# TF2 Keras logs the quantity as 'val_accuracy' (the old 'val_acc' key
# no longer exists, so monitoring it would make the callback a no-op).
reduce_lr = ReduceLROnPlateau(monitor='val_accuracy', factor=0.9,
                              patience=25, min_lr=1e-6, verbose=1)

model.compile(optimizer=opt, loss="categorical_crossentropy",
              metrics=["accuracy"])
# Train the compiled model; `history` records per-epoch losses/metrics.
# Validation is scored on (x_val, y_val) at the end of each epoch and the
# LR scheduler runs as a callback.
history = model.fit(
    x=x_train,
    y=y_train,
    batch_size=10,            # small mini-batches
    epochs=1,
    verbose=1,
    callbacks=[reduce_lr],
    validation_data=(x_val, y_val),
    shuffle=True,             # reshuffle training data every epoch
)
# Plot validation accuracy and the (scheduled) learning rate per epoch.
# TF2 Keras logs these under 'val_accuracy' and 'lr' respectively —
# the old 'val_acc' key would raise a KeyError.
plt.plot(history.history['val_accuracy'])
plt.plot(history.history['lr'])
plt.title('Validation accuracy and learning rate per epoch (SGD optimizer)')
plt.ylabel('value')
# The x-axis of a History plot is the epoch index, not the learning rate.
plt.xlabel('epoch')
# Label the two plotted series (the old labels 'x_train'/'x_test'
# described neither curve).
plt.legend(['val_accuracy', 'learning rate'], loc='upper right')
plt.show()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement