ml_plot_iris.py
# https://scikit-learn.org/0.18/auto_examples/svm/plot_iris.html
# Copyright 2010-2016, scikit-learn developers
# Copyright 2022 Roland Richter
# used and released under BSD 3-Clause License

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap

from sklearn import datasets
from sklearn.neighbors import KNeighborsClassifier
from sklearn.linear_model import Perceptron
from sklearn.tree import DecisionTreeClassifier
from sklearn import svm
from sklearn.neural_network import MLPClassifier

# load the iris dataset, but use only two of its four features
features = [0, 2]
iris = datasets.load_iris()
X = iris.data[:, features]
labels = iris.target

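# Note: per iris.feature_names, columns 0 and 2 are sepal length (cm) and
# petal length (cm), so all plots below live in that 2-D feature plane.
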
# create an instance of each classifier, and fit it to the data
nbrs = KNeighborsClassifier(n_neighbors=5).fit(X, labels)
dtree = DecisionTreeClassifier().fit(X, labels)
ptron = Perceptron(shuffle=True).fit(X, labels)
svm_lin = svm.LinearSVC(C=1.0).fit(X, labels)
svm_rbf = svm.SVC(kernel="rbf", gamma=0.7, C=1.0).fit(X, labels)
nn_mlp = MLPClassifier(
    shuffle=True, hidden_layer_sizes=(20,), activation="tanh", random_state=42
).fit(X, labels)

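# Note: MLPClassifier stops after max_iter=200 iterations by default and may
# emit a ConvergenceWarning on this data; raising the limit (e.g. max_iter=1000,
# an assumption to tune) lets the optimizer run until it actually converges.
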
# create a mesh to plot in
h = 0.02  # step size in the mesh
x_min, x_max = X[:, 0].min() - 0.3, X[:, 0].max() + 0.3
y_min, y_max = X[:, 1].min() - 0.3, X[:, 1].max() + 0.3
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))

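# Note: xx and yy are 2-D coordinate arrays of identical shape covering the
# padded data range; a smaller step h gives a smoother-looking decision
# boundary at the cost of more points for each classifier to predict.
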
# titles for the plots
titles = [
    "k nearest neighbours",
    "decision tree",
    "perceptron",
    "SVM with linear kernel",
    "SVM with RBF kernel",
    "multi-layer perceptron",
]

colormap = ListedColormap(["red", "green", "blue"])

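# Note: with three colors for three classes, labels 0/1/2 (setosa, versicolor,
# virginica) should render as red/green/blue in both contourf and scatter.
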
for i, clf in enumerate((nbrs, dtree, ptron, svm_lin, svm_rbf, nn_mlp)):
    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max]x[y_min, y_max].
    plt.subplot(2, 3, i + 1)
    plt.subplots_adjust(wspace=0.3, hspace=0.4)

    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
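    # Note: np.c_ column-stacks the flattened grids into an (n_points, 2)
    # array, matching the two-feature layout the classifiers were fitted on.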

    # put the result into a color plot
    Z = Z.reshape(xx.shape)
    plt.contourf(xx, yy, Z, cmap=colormap, alpha=0.3)

    # plot the training points
    plt.scatter(X[:, 0], X[:, 1], s=4.0, c=labels, cmap=colormap)
    plt.xlabel(iris.feature_names[features[0]])
    plt.ylabel(iris.feature_names[features[1]])
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.xticks(())
    plt.yticks(())
    plt.title(titles[i])

plt.show()

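# Optional sanity check (an addition, not in the upstream example): score()
# on a fitted scikit-learn classifier returns mean accuracy; evaluated on the
# training data itself it flatters every model, so treat it only as a rough
# comparison between the six classifiers.
for title, clf in zip(titles, (nbrs, dtree, ptron, svm_lin, svm_rbf, nn_mlp)):
    print(f"{title:25s} training accuracy: {clf.score(X, labels):.3f}")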