# Single-hidden-layer neural network demo on the Iris dataset.
# (Pastebin site chrome removed from the top of the paste.)
import matplotlib.pyplot as plt
import numpy as np
from sklearn.datasets import load_iris
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import OneHotEncoder
class SingleLayerNeuralNetwork:
    """Feed-forward network with one hidden layer, sigmoid activations
    on both layers, trained by full-batch gradient descent.

    Weights and biases are drawn uniformly from [0, 1) at construction;
    seed NumPy's RNG beforehand for reproducible runs.
    """

    def __init__(self, input_size, hidden_size, output_size):
        # Layer 1 (input -> hidden) and layer 2 (hidden -> output)
        # parameters, initialised uniformly in [0, 1).
        self.weights1 = np.random.rand(input_size, hidden_size)
        self.weights2 = np.random.rand(hidden_size, output_size)
        self.bias1 = np.random.rand(hidden_size)
        self.bias2 = np.random.rand(output_size)

    def sigmoid(self, x):
        """Element-wise logistic activation."""
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, x):
        """Sigmoid derivative expressed in terms of its *output* x."""
        return x * (1 - x)

    def forward(self, x):
        """Propagate ``x`` through both layers and return the output.

        The hidden-layer activations are cached on ``self.hidden`` for
        use by the subsequent ``backward`` pass.
        """
        hidden_pre = np.dot(x, self.weights1) + self.bias1
        self.hidden = self.sigmoid(hidden_pre)
        output_pre = np.dot(self.hidden, self.weights2) + self.bias2
        return self.sigmoid(output_pre)

    def backward(self, x, y, output, learning_rate):
        """Apply one gradient-descent step using the cached forward pass."""
        # Output-layer delta, then back-propagated hidden-layer delta.
        delta_out = (y - output) * self.sigmoid_derivative(output)
        delta_hidden = delta_out.dot(self.weights2.T) * self.sigmoid_derivative(self.hidden)
        # Both deltas are computed before any parameter is touched, so the
        # update order below does not affect the result.
        self.weights1 += x.T.dot(delta_hidden) * learning_rate
        self.weights2 += self.hidden.T.dot(delta_out) * learning_rate
        self.bias1 += delta_hidden.sum(axis=0) * learning_rate
        self.bias2 += delta_out.sum(axis=0) * learning_rate

    def train(self, x, y, epochs, learning_rate):
        """Run full-batch gradient descent for ``epochs`` iterations."""
        for _ in range(epochs):
            predictions = self.forward(x)
            self.backward(x, y, predictions, learning_rate)
# ---------------------------------------------------------------------------
# Prepare the data.  The original paste referenced an undefined ``iris_data``
# variable (a guaranteed NameError); load the Iris dataset explicitly instead.
# ---------------------------------------------------------------------------
iris = load_iris()
X = iris.data[:, 0:2]   # first two features, matching input_size = 2 below
y = iris.target         # integer class labels: 0, 1, 2

# One-hot encode the labels.  ``sparse`` was renamed ``sparse_output`` in
# scikit-learn 1.2 and removed in 1.4, so try the modern spelling first and
# fall back for old installations.
try:
    encoder = OneHotEncoder(sparse_output=False)
except TypeError:  # scikit-learn < 1.2
    encoder = OneHotEncoder(sparse=False)
y_encoded = encoder.fit_transform(y.reshape(-1, 1))

# Split into training and test sets (fixed seed for reproducibility).
X_train, X_test, y_train, y_test = train_test_split(
    X, y_encoded, test_size=0.2, random_state=42
)

# Network hyper-parameters.
input_size = 2    # two input features
hidden_size = 5   # arbitrary hidden-layer width
output_size = 3   # three Iris classes

# Build and train the network.
nn = SingleLayerNeuralNetwork(input_size, hidden_size, output_size)
nn.train(X_train, y_train, epochs=1000, learning_rate=0.01)

# Predict the highest-activation class for each test sample.
y_pred = np.array([np.argmax(nn.forward(x)) for x in X_test])

# Evaluate against the true (argmax-decoded) labels.
print(classification_report(y_test.argmax(axis=1), y_pred))
# Plot the decision regions learned by the network over the 2-D feature space.
def plot_classification_map(nn, X, y):
    """Draw the network's decision regions with the data points overlaid.

    Parameters
    ----------
    nn : SingleLayerNeuralNetwork
        A trained network whose ``forward`` accepts ``(n, 2)`` arrays.
    X : ndarray of shape (n_samples, 2)
        The two features the network was trained on.
    y : array-like of shape (n_samples,)
        Class labels, used only to colour the scatter points.
    """
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    h = 0.01  # grid resolution
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    # Classify every grid point in a single vectorised forward pass instead
    # of the original one-point-at-a-time Python loop (which made tens of
    # thousands of separate forward() calls for a typical grid).
    grid_points = np.c_[xx.ravel(), yy.ravel()]
    Z = np.argmax(nn.forward(grid_points), axis=1).reshape(xx.shape)
    plt.figure(figsize=(10, 6))
    plt.contourf(xx, yy, Z, alpha=0.8)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='k', marker='o')
    plt.xlabel('Feature 1')
    plt.ylabel('Feature 2')
    plt.title('Classification Map of Iris Dataset')
    plt.show()


plot_classification_map(nn, X, y)
# (Pastebin site chrome removed from the end of the paste.)