Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import random
- import numpy as np # linear algebra
- import pathlib
- import tensorflow as tf
- from glob import glob
- import matplotlib.pyplot as plt
- import os
- import PIL
- from tensorflow import keras
- from tensorflow.keras import layers
- from tensorflow.keras.models import Sequential
- import pandas as pd
- import matplotlib.pyplot as plt
- from shutil import copyfile
- from tensorflow.keras import activations
- from keras import optimizers
# Root directory of the image dataset; each subdirectory is one class
# (labels are inferred from folder names below).
trainpath = "C:/Users/makci/Desktop/rentgen/dataset_python/"
batch_size = 32
img_height = 150
img_width = 150
# Derive the (height, width) tuple from the two constants above so the
# three size values cannot silently drift apart.
image_size = (img_height, img_width)
def ds_gen(image_size, batch_size, type_ds, directory=None):
    """Build a batched image dataset from a class-per-subdirectory tree.

    Args:
        image_size: (height, width) every image is resized to.
        batch_size: number of images per batch.
        type_ds: which side of the 80/20 split to return —
            'training' or 'validation'.
        directory: dataset root; defaults to the module-level ``trainpath``
            (new optional parameter, backward-compatible with old callers).

    Returns:
        A ``tf.data.Dataset`` of (image, integer-label) batches.
    """
    return tf.keras.preprocessing.image_dataset_from_directory(
        trainpath if directory is None else directory,
        labels='inferred',
        color_mode='rgb',
        # label_mode='categorical',  # left at the default 'int', which is
        # what the sparse categorical crossentropy loss expects
        validation_split=0.2,
        subset=type_ds,
        # Same fixed seed for both subsets keeps the two splits disjoint.
        seed=1488,
        interpolation='bilinear',
        image_size=image_size,
        batch_size=batch_size,
    )
# Materialize the 80/20 training/validation splits from the same directory.
train_ds = ds_gen(image_size ,batch_size , "training")
val_ds = ds_gen(image_size, batch_size, "validation")
# Class names are inferred from the subdirectory names.
class_names = train_ds.class_names
print(class_names)
AUTOTUNE = tf.data.experimental.AUTOTUNE
# Cache decoded images, reshuffle every epoch, and overlap input
# preprocessing with training.  Shuffle buffer of 30000 — presumably at
# least the dataset size so shuffling is uniform; TODO confirm.
train_ds = train_ds.cache().shuffle(30000).prefetch(buffer_size=AUTOTUNE)
# Validation order does not matter, so no shuffle here.
val_ds = val_ds.cache().prefetch(buffer_size=AUTOTUNE)
# On-the-fly augmentation; these preprocessing layers are active only
# during training and pass inputs through unchanged at inference time.
data_augmentation = keras.Sequential(
    [
        # layers.experimental.preprocessing.RandomFlip("horizontal_and_vertical",
        #                                              input_shape=(img_height,
        #                                                           img_width,
        #                                                           3)),
        # First layer of the whole model, so it carries the input shape.
        layers.experimental.preprocessing.RandomRotation(0.1, input_shape=(img_height,
                                                                           img_width,
                                                                           3)),
        layers.experimental.preprocessing.RandomZoom(0.1),
    ]
)
def model_gen(num_classes, dense, drop):
    """Assemble the CNN classifier.

    Args:
        num_classes: size of the final logits layer.
        dense: width of the hidden fully-connected layer.
        drop: dropout rate applied before flattening.

    Returns:
        An uncompiled ``Sequential`` model that emits raw logits.
    """
    # Four conv/pool stages with doubling filter counts: 8 -> 16 -> 32 -> 64.
    feature_stages = []
    for n_filters in (8, 16, 32, 64):
        feature_stages.append(
            layers.Conv2D(n_filters, 3, padding='same', activation='relu'))
        feature_stages.append(layers.MaxPooling2D())
    head = [
        layers.Dropout(drop),
        layers.Flatten(),
        layers.Dense(dense, activation=activations.relu),
        # No softmax here: the loss is configured with from_logits=True.
        layers.Dense(num_classes),
    ]
    return Sequential(
        [data_augmentation,
         layers.experimental.preprocessing.Rescaling(1. / 255)]
        + feature_stages
        + head)
# Hyperparameters for the fully-connected classifier head.
neurons = 2048
dropout = 0.2
model = model_gen(len(class_names), neurons, dropout)
# Labels are plain integers and the model outputs raw logits, hence
# SparseCategoricalCrossentropy with from_logits=True.
model.compile(optimizer='adam',
              loss=tf.keras.losses.SparseCategoricalCrossentropy(
                  from_logits=True, reduction="auto", name="sparse_categorical_crossentropy"),
              metrics=['accuracy'])
model.summary()
# fit() returns a History whose .history dict holds per-epoch metrics.
history = model.fit(
    train_ds,
    validation_data=val_ds,
    epochs=20
)
# Plot the per-epoch loss/accuracy curves using the History object that
# fit() returned (equivalent to model.history, but direct).
pd.DataFrame(history.history).plot()
plt.show()
# NOTE(review): this re-evaluates the VALIDATION split — the script has no
# held-out test set — so the variables and the printed label say so.
val_loss, val_acc = model.evaluate(val_ds)
print('\nValidation accuracy:', val_acc)
- # model.save("C:/Users/makci/Desktop/bestmodel")
- # model = keras.models.load_model("C:/Users/makci/Desktop/bestmodel")
- # loss, acc = model.evaluate(train_ds, verbose=2)
- # print("Restored model, accuracy: {:5.2f}%".format(100 * acc))
- #
- #
- # def predicting(image_val):
- # img = keras.preprocessing.image.load_img(
- # image_val, target_size=(20, 20)
- # )
- # img_array = keras.preprocessing.image.img_to_array(img)
- # img_array = tf.expand_dims(img_array, 0) # Create a batch
- #
- # predictions = model.predict(img_array)
- # score = tf.nn.softmax(predictions[0])
- #
- # return class_names[np.argmax(score)]
- # # return (
- # # " : {} {:.2f} % confidence."
- # # .format(class_names[np.argmax(score)], 100 * np.max(score))
- # # )
- #
- # directory = "C:/Users/makci/Desktop/dataset/test_set/avia-test/"
- # for filename in os.listdir(directory):
- # if filename.endswith(".jpg") or filename.endswith(".png"):
- # path = os.path.join(directory, filename)
- # score = predicting(path)
- # copyfile(path, "C:/Users/makci/Desktop/dataset/test_set/" + score + "/" + filename)
- # else:
- # continue
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement