"""
=======================================================================
Plot the decision surface of decision trees trained on the iris dataset
=======================================================================

Plot the decision surface of a decision tree trained on pairs
of features of the iris dataset.

See :ref:`decision tree <tree>` for more information on the estimator.

For each pair of iris features, the decision tree learns decision
boundaries made of combinations of simple thresholding rules inferred from
the training samples.

We also show the tree structure of a model built on all of the features.
"""
# %%
# Start by loading the copy of the Iris dataset that ships with scikit-learn.
from sklearn.datasets import load_iris

iris = load_iris()
22
23
# %%
# Display the decision functions of trees trained on all pairs of features.
import numpy as np
import matplotlib.pyplot as plt
from sklearn.tree import DecisionTreeClassifier

# Parameters shared by all six subplots below.
n_classes = 3  # the iris dataset has three species
plot_colors = "ryb"  # one matplotlib single-letter color per class
plot_step = 0.02  # resolution of the grid used to draw the decision surface
35
# For every unordered pair of the four iris features, fit a tree on just
# those two features and draw its decision surface in one of six subplots.
for pairidx, pair in enumerate([[0, 1], [0, 2], [0, 3], [1, 2], [1, 3], [2, 3]]):
    # Restrict the dataset to the two features of this pair.
    X = iris.data[:, pair]
    y = iris.target

    # Fit an unpruned decision tree on the selected feature pair.
    clf = DecisionTreeClassifier().fit(X, y)

    # Plot the decision boundary on a dense grid that covers the data
    # with a 1-unit margin on every side.
    plt.subplot(2, 3, pairidx + 1)

    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(
        np.arange(x_min, x_max, plot_step), np.arange(y_min, y_max, plot_step)
    )
    plt.tight_layout(h_pad=0.5, w_pad=0.5, pad=2.5)

    # Color each grid point by the class the tree predicts for it.
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    plt.contourf(xx, yy, Z, cmap=plt.cm.RdYlBu)

    plt.xlabel(iris.feature_names[pair[0]])
    plt.ylabel(iris.feature_names[pair[1]])

    # Overlay the training points, one scatter call per class so that each
    # class gets its own color and legend entry.
    # NOTE: the previous `cmap=plt.cm.RdYlBu` argument was removed here —
    # matplotlib ignores `cmap` (with a warning) when `c` is a single
    # named color, so it had no effect.
    for i, color in zip(range(n_classes), plot_colors):
        idx = np.where(y == i)
        plt.scatter(
            X[idx, 0],
            X[idx, 1],
            c=color,
            label=iris.target_names[i],
            edgecolor="black",
            s=15,
        )

plt.suptitle("Decision surface of decision trees trained on pairs of features")
plt.legend(loc="lower right", borderpad=0, handletextpad=0)
_ = plt.axis("tight")
77
# %%
# Finally, show the structure of one tree fitted on all four features at once.
from sklearn.tree import plot_tree

plt.figure()
full_tree = DecisionTreeClassifier().fit(iris.data, iris.target)
plot_tree(full_tree, filled=True)
plt.title("Decision tree trained on all the iris features")
plt.show()
88