# Untitled

Dec 26th, 2023
17
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
# Since I cannot directly view the image, I will assume it is a decision boundary plot for a decision tree.
# The following code will generate a decision boundary plot similar to what might be in the user's image.

import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.tree import DecisionTreeClassifier

# Load the iris dataset.
# BUG FIX: this line was missing from the paste (its numbering jumped 9 -> 11),
# leaving `iris` undefined and raising NameError on the very next statement.
iris = datasets.load_iris()
X = iris.data[:, 2:]  # only the last two features (petal length/width) for easy 2-D visualization
y = iris.target

# Train a shallow DecisionTreeClassifier: max_depth=2 keeps the decision
# surface simple; random_state=42 makes the fitted tree reproducible.
tree_clf = DecisionTreeClassifier(max_depth=2, random_state=42)
tree_clf.fit(X, y)

# Build a mesh grid spanning the data (with a 1-unit margin on each side)
# at 0.1 resolution, for evaluating the classifier over the whole plane.
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.1),
                     np.arange(y_min, y_max, 0.1))

# Predict the class of every grid point, then reshape the flat predictions
# back to the grid shape so contourf can draw filled decision regions.
Z = tree_clf.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)

# Plot the decision regions...
plt.figure(figsize=(8, 6))
plt.contourf(xx, yy, Z, alpha=0.3)

# ...and overlay the training points, colored by class label.
plt.scatter(X[:, 0], X[:, 1], c=y, s=50, edgecolor='k')
plt.title("Decision surface of a decision tree using paired features")
plt.xlabel('Feature 1')
plt.ylabel('Feature 2')
plt.show()