Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
- # https://scikit-learn.org/0.18/auto_examples/svm/plot_iris.html
- # Copyright 2010-2016, scikit-learn developers
- # Copyright 2022 Roland Richter
- # used and released under BSD 3-Clause License
- import numpy as np
- import matplotlib.pyplot as plt
- from matplotlib.colors import ListedColormap
- from sklearn import datasets
- from sklearn.neighbors import KNeighborsClassifier
- from sklearn.linear_model import Perceptron
- from sklearn.tree import DecisionTreeClassifier
- from sklearn import svm
- from sklearn.neural_network import MLPClassifier
# Load the iris data set, keeping only two of its four feature columns
# (indices 0 and 2) so the decision boundaries can be drawn in a 2-D plane.
features = [0, 2]
iris = datasets.load_iris()
# X: (150, 2) feature matrix restricted to the chosen columns;
# labels: the three iris species encoded as integers 0/1/2.
X = iris.data[:, features]
labels = iris.target
# Create one instance of each classification method and fit it to the
# 2-D training data.
#
# All estimators with a stochastic component are seeded with
# random_state=42 so repeated runs draw identical decision boundaries.
# Previously only the MLP was seeded: the Perceptron (shuffle=True),
# LinearSVC, and DecisionTreeClassifier were left unseeded, so their
# panels could change from run to run.
nbrs = KNeighborsClassifier(n_neighbors=5).fit(X, labels)
dtree = DecisionTreeClassifier(random_state=42).fit(X, labels)
ptron = Perceptron(shuffle=True, random_state=42).fit(X, labels)
svm_lin = svm.LinearSVC(C=1.0, random_state=42).fit(X, labels)
svm_rbf = svm.SVC(kernel="rbf", gamma=0.7, C=1.0).fit(X, labels)
nn_mlp = MLPClassifier(
    shuffle=True, hidden_layer_sizes=(20,), activation="tanh", random_state=42
).fit(X, labels)
# Build a rectangular grid of points covering the training data, padded by
# a small margin on every side; each grid node will later be classified to
# paint the decision regions.
h = 0.02  # mesh step size (grid resolution)
pad = 0.3  # margin added around the data extent
x_min, x_max = X[:, 0].min() - pad, X[:, 0].max() + pad
y_min, y_max = X[:, 1].min() - pad, X[:, 1].max() + pad
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
# Panel titles, one per classifier, in the same order as the tuple of
# fitted estimators iterated below.
titles = [
    "k nearest neighbours",
    "decision tree",
    "perceptron",
    "SVM with linear kernel",
    "SVM with RBF kernel",
    "multi-layer perceptron",
]
# Three-colour map: one colour per iris class, used for both the shaded
# decision regions and the scatter of training points.
colormap = ListedColormap(["red", "green", "blue"])
# Draw one panel per fitted classifier: colour every mesh node by its
# predicted class to show the decision regions, then overlay the training
# samples coloured by their true class.
#
# The flattened mesh coordinates are loop-invariant, so they are built
# once here instead of being recomputed inside every iteration as before.
grid_points = np.c_[xx.ravel(), yy.ravel()]
for i, clf in enumerate((nbrs, dtree, ptron, svm_lin, svm_rbf, nn_mlp)):
    plt.subplot(2, 3, i + 1)
    plt.subplots_adjust(wspace=0.3, hspace=0.4)

    # Classify every grid node, then reshape the flat predictions back to
    # the 2-D mesh layout for contour plotting.
    Z = clf.predict(grid_points).reshape(xx.shape)
    plt.contourf(xx, yy, Z, cmap=colormap, alpha=0.3)

    # Training points on top of the shaded regions.
    plt.scatter(X[:, 0], X[:, 1], s=4.0, c=labels, cmap=colormap)
    plt.xlabel(iris.feature_names[features[0]])
    plt.ylabel(iris.feature_names[features[1]])
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.xticks(())  # hide tick marks: axes ranges are not the point here
    plt.yticks(())
    plt.title(titles[i])
plt.show()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement