Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
"""Visualize a DecisionTreeClassifier: tree structure and decision boundary.

Trains a depth-2 decision tree on the last two iris features (petal length
and petal width) and renders the fitted tree next to its 2-D decision
boundary in a single matplotlib figure.
"""
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier, plot_tree
import matplotlib.pyplot as plt
import numpy as np


def _load_petal_data():
    """Return (X, y, feature_names, class_names) using only the petal features."""
    iris = load_iris()
    # Keep only petal length/width (columns 2:) so the boundary is drawable in 2-D.
    return iris.data[:, 2:], iris.target, iris.feature_names[2:], iris.target_names


def _fit_tree(X, y, max_depth=2, random_state=42):
    """Fit a shallow decision tree on an 80/20 train split and return it."""
    # The held-out split is intentionally unused here (visualization only).
    X_train, _X_test, y_train, _y_test = train_test_split(
        X, y, test_size=0.2, random_state=random_state
    )
    clf = DecisionTreeClassifier(max_depth=max_depth, random_state=random_state)
    clf.fit(X_train, y_train)
    return clf


def _mesh_predictions(clf, X, step=0.01):
    """Predict classes over a grid spanning the data, padded by 1 unit per side.

    Returns (xx, yy, Z) where Z has the same shape as the mesh grids.
    """
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step),
                         np.arange(y_min, y_max, step))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    return xx, yy, Z


def main():
    """Render the fitted tree and its decision boundary side by side."""
    X, y, feature_names, class_names = _load_petal_data()
    clf = _fit_tree(X, y)
    xx, yy, Z = _mesh_predictions(clf, X)

    plt.figure(figsize=(12, 6))

    # Left panel: the fitted tree structure.
    plt.subplot(1, 2, 1)
    plot_tree(clf, filled=True, feature_names=feature_names,
              class_names=class_names)
    plt.title('Decision Tree')

    # Right panel: the regions the tree carves out of feature space,
    # with the full dataset scattered on top.
    plt.subplot(1, 2, 2)
    plt.contourf(xx, yy, Z, alpha=0.3)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=50, edgecolor='k')
    plt.title("Decision Boundary")
    plt.xlabel('Petal length')
    plt.ylabel('Petal width')

    plt.tight_layout()
    plt.show()


if __name__ == "__main__":
    main()
Advertisement
Add Comment
Please sign in to add a comment.
Advertisement