Advertisement
Not a member of Pastebin yet?
Sign up — it unlocks many cool features!
"""Train a small CNN on MNIST and report per-class classification metrics."""
import numpy as np  # FIX: was missing — np.argmax below raised NameError
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.metrics import classification_report, confusion_matrix
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.datasets import mnist
from tensorflow.keras.layers import Conv2D, Dense, Flatten, MaxPool2D
from tensorflow.keras.models import Sequential
from tensorflow.keras.utils import to_categorical

# MNIST: 60k train / 10k test grayscale 28x28 digit images, labels 0-9.
(x_train, y_train), (x_test, y_test) = mnist.load_data()

# Quick visual sanity check of one sample.
plt.imshow(x_train[0])

# One-hot encode labels to match the categorical cross-entropy loss.
y_cat_train = to_categorical(y_train)
y_cat_test = to_categorical(y_test)

# Scale pixels to [0, 1] and append the trailing channel axis Conv2D
# expects; -1 infers the sample count instead of hard-coding 60000/10000.
x_train = (x_train / 255).reshape(-1, 28, 28, 1)
x_test = (x_test / 255).reshape(-1, 28, 28, 1)

# Small CNN: one conv + pool stage, then a dense classifier head with a
# 10-way softmax output (one unit per digit class).
model = Sequential()
model.add(Conv2D(filters=32, kernel_size=(4, 4),
                 input_shape=(28, 28, 1), activation='relu'))
model.add(MaxPool2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dense(10, activation='softmax'))
model.compile(loss='categorical_crossentropy',
              optimizer='adam', metrics=['accuracy'])

# Stop training as soon as validation loss fails to improve for 1 epoch.
early_stop = EarlyStopping(monitor='val_loss', patience=1)
model.fit(x_train, y_cat_train, epochs=10,
          validation_data=(x_test, y_cat_test), callbacks=[early_stop])

# Plot training vs. validation loss curves to eyeball over/under-fitting.
metrics = pd.DataFrame(model.history.history)
metrics[['loss', 'val_loss']].plot()

# argmax over the softmax outputs -> integer class predictions, compared
# against the original (non-one-hot) integer test labels.
predictions = np.argmax(model.predict(x_test), axis=1)
print(classification_report(y_test, predictions))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement