Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn import datasets
from tensorflow.keras.utils import to_categorical

# Load the dataset into a DataFrame.
df = pd.read_csv('vehicle.dat')
print(df.columns)

# Features: every column except the class label.
# NOTE: the column name really has a leading space (' Class') in this file.
X = df.drop([' Class'], axis=1)
X.head()

# One-hot encode the class labels for a softmax classifier.
# (The original passed columns=[' Class'] to get_dummies, but that kwarg is
# ignored for Series input — dropped here as a misleading no-op.)
Y = pd.get_dummies(df[' Class'])
Y.head()

from scipy.spatial.distance import pdist, squareform
import seaborn as sns
from sklearn.preprocessing import StandardScaler, MinMaxScaler

# Scale every feature to [0, 1] before feeding it to the network.
data_scaled = MinMaxScaler().fit_transform(X.to_numpy())

import tensorflow as tf
def plot_the_loss_curve(epochs, mse):
    """Plot the loss value against the training epoch.

    Args:
        epochs: sequence of epoch numbers (x axis).
        mse: sequence of loss values (y axis).
    """
    plt.figure()
    plt.xlabel("Epoch")
    plt.ylabel("Mean Squared Error")
    plt.plot(epochs, mse, label="Loss")
    plt.legend()
    # Use built-in min()/max() instead of the .min()/.max() methods so the
    # function also accepts plain Python lists, not only ndarray/Series.
    lowest, highest = min(mse), max(mse)
    plt.ylim([lowest * 0.97, highest])
    plt.show()

print("Defined the plot_the_model and plot_the_loss_curve functions.")
def build_model(my_learning_rate, X, Y):
    """Build and compile a feed-forward softmax classifier.

    Args:
        my_learning_rate: learning rate for the Adam optimizer.
        X: feature matrix; only X.shape[1] (number of features) is used.
        Y: one-hot label matrix; only Y.shape[1] (number of classes) is used.

    Returns:
        A compiled tf.keras Sequential model.
    """
    model = tf.keras.models.Sequential()
    model.add(tf.keras.layers.Dense(units=10,
                                    input_shape=(X.shape[1],),
                                    activation='tanh'))
    model.add(tf.keras.layers.Dense(10, activation='tanh'))
    # Output layer: one neuron per class.
    model.add(tf.keras.layers.Dense(Y.shape[1], activation='softmax'))
    # BUG FIX: the labels are one-hot encoded (pd.get_dummies), so the loss
    # must be CategoricalCrossentropy — SparseCategoricalCrossentropy expects
    # integer class ids. Also, the output layer already applies softmax, so
    # from_logits must be False (the default); passing from_logits=True would
    # apply softmax twice. `lr=` is the deprecated alias of `learning_rate=`.
    model.compile(optimizer=tf.optimizers.Adam(learning_rate=my_learning_rate),
                  loss=tf.keras.losses.CategoricalCrossentropy(),
                  metrics=['accuracy'])
    return model
def train_model(model, feature, label, epochs, batch_size):
    """Fit the model and return first-layer parameters plus accuracy history.

    Args:
        model: a compiled tf.keras model.
        feature: training features.
        label: training labels.
        epochs: number of training epochs.
        batch_size: mini-batch size.

    Returns:
        Tuple of (trained_weight, trained_bias, epochs, acc): the first
        layer's weight matrix and bias vector, the list of epoch indices,
        and the per-epoch accuracy as a pandas Series.
    """
    history = model.fit(x=feature, y=label,
                        batch_size=batch_size, epochs=epochs)
    # The first two entries of get_weights() are the first Dense layer's
    # kernel and bias.
    all_weights = model.get_weights()
    first_kernel, first_bias = all_weights[0], all_weights[1]
    history_frame = pd.DataFrame(history.history)
    return first_kernel, first_bias, history.epoch, history_frame["accuracy"]

print("Defined create_model and train_model")
# Training configuration: min-max scaled features, one-hot labels.
my_features = data_scaled
my_labels = Y
learning_rate = 0.05
epochs = 100
my_batch_size = 10
# X and Y are only used inside build_model for their shapes
# (number of features / number of classes).
my_model = build_model(learning_rate, X, Y)
# NOTE(review): `epochs` is rebound here — it goes in as the epoch count
# (100) and comes back as the list of epoch indices from the fit History.
trained_weight, trained_bias, epochs, acc = train_model(my_model, my_features,
                                                        my_labels, epochs,
                                                        my_batch_size)
from sklearn.datasets import fetch_openml
from sklearn.preprocessing import StandardScaler

# Download the 'har' (Human Activity Recognition) dataset from OpenML.
dataset = fetch_openml('har')
X = StandardScaler().fit_transform(dataset.data)
Y = dataset.target
# Convert the class labels to ints with a comprehension instead of the
# original index-based append loop (which relied on positional Series
# indexing via Y[i] and is O(n) appends with per-item lookups).
Y_new = [int(label) for label in Y]
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement