Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
"""Plot the decision surface of a depth-2 decision tree on the iris dataset.

Keeps only the last two iris features (petal length and petal width, cm) so
the classifier's decision regions can be drawn in 2-D, fits a
DecisionTreeClassifier, and renders the class regions together with the
training points.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.tree import DecisionTreeClassifier


def main() -> None:
    """Train the tree and display the decision-boundary plot."""
    # Load iris; columns 2: are petal length and petal width — two features
    # keep the decision surface plottable in a plane.
    iris = datasets.load_iris()
    X = iris.data[:, 2:]
    y = iris.target

    # A shallow tree (max_depth=2) yields a simple, readable boundary;
    # the fixed seed makes the figure reproducible.
    tree_clf = DecisionTreeClassifier(max_depth=2, random_state=42)
    tree_clf.fit(X, y)

    # Mesh grid covering the data range with a 1-unit margin on each side,
    # sampled at 0.1 resolution.
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.1),
                         np.arange(y_min, y_max, 0.1))

    # Classify every grid point, then reshape the flat predictions back
    # into the grid's 2-D shape for contour plotting.
    Z = tree_clf.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)

    plt.figure(figsize=(8, 6))
    plt.contourf(xx, yy, Z, alpha=0.3)                        # class regions
    plt.scatter(X[:, 0], X[:, 1], c=y, s=50, edgecolor='k')   # training points
    plt.title("Decision surface of a decision tree using paired features")
    # Use the dataset's real feature names rather than the misleading
    # generic 'Feature 1'/'Feature 2' labels.
    plt.xlabel(iris.feature_names[2])
    plt.ylabel(iris.feature_names[3])
    plt.show()


if __name__ == "__main__":
    main()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement