Дерево решений является одним из наиболее широко используемых алгоритмов машинного обучения, поскольку оно просто для понимания и интерпретации, удобно в использовании, универсально и эффективно.

В этой статье мы рассмотрим различные способы визуализации дерева решений в Python.

Сначала мы создадим простое дерево решений, используя набор данных IRIS.

from sklearn.datasets import load_iris
from sklearn import tree
import matplotlib 
% matplotlib inline
iris = load_iris()
clf = tree.DecisionTreeClassifier()
clf = clf.fit(iris.data, iris.target)

sklearn.tree предоставляет функцию plot_tree для визуализации дерева в блокноте Jupyter.

# `clf` was already fitted above — calling fit() again here just retrains
# the same model for no reason; pass the fitted estimator directly.
data = tree.plot_tree(clf, filled=True)

Также вы можете сохранить визуализацию в формате graphviz как:

# Curly quotes (“ ” ‘ ’) are a SyntaxError in Python — use plain ASCII quotes.
# A `with` block guarantees the file is closed even if export_graphviz raises.
with open("DT_metrics.dot", "w") as dotfile:
    tree.export_graphviz(clf, out_file=dotfile,
                         feature_names=iris.feature_names,
                         class_names=iris.target_names)

Чтобы визуализировать файл .dot, скопируйте его содержимое на http://webgraphviz.com/ и сгенерируйте дерево.

sklearn.tree предоставляет функцию export_text для получения правил дерева в текстовом виде.

# `sklearn.tree.export` is a private module that was removed in modern
# scikit-learn; export_text is public in sklearn.tree. Also replace the
# curly quotes (SyntaxError) with plain ASCII quotes.
from sklearn.tree import export_text
r = export_text(clf, feature_names=iris["feature_names"])
print(r)
|--- petal length (cm) <= 2.45
|   |--- class: 0
|--- petal length (cm) >  2.45
|   |--- petal width (cm) <= 1.75
|   |   |--- petal length (cm) <= 4.95
|   |   |   |--- petal width (cm) <= 1.65
|   |   |   |   |--- class: 1
|   |   |   |--- petal width (cm) >  1.65
|   |   |   |   |--- class: 2
|   |   |--- petal length (cm) >  4.95
|   |   |   |--- petal width (cm) <= 1.55
|   |   |   |   |--- class: 2
|   |   |   |--- petal width (cm) >  1.55
|   |   |   |   |--- petal length (cm) <= 5.45
|   |   |   |   |   |--- class: 1
|   |   |   |   |--- petal length (cm) >  5.45
|   |   |   |   |   |--- class: 2
|   |--- petal width (cm) >  1.75
|   |   |--- petal length (cm) <= 4.85
|   |   |   |--- sepal width (cm) <= 3.10
|   |   |   |   |--- class: 2
|   |   |   |--- sepal width (cm) >  3.10
|   |   |   |   |--- class: 1
|   |   |--- petal length (cm) >  4.85
|   |   |   |--- class: 2

Вы также можете написать пользовательскую функцию, как показано ниже, чтобы получить правила в текстовом формате.

def tree_rules(dt_model, target_names, feature_names):
    """
    Return the tree if-then-else rules as a text string.

    Parameters:
    -----------
    dt_model  : DecisionTreeClassifier object (already fitted)
    target_names : Target class names (Y)
    feature_names : Features / Predictors (X)

    Returns:
    --------
    str : Decision tree rules rendered as nested if/else blocks.

    Example:
    --------
    >>> from sklearn.datasets import load_iris
    >>> from sklearn import tree
    >>> iris = load_iris()
    >>> dt_model = tree.DecisionTreeClassifier().fit(iris.data, iris.target)
    >>> print(tree_rules(dt_model, iris.target_names, iris.feature_names))
    """
    # Local import: numpy is never imported at the top of this article.
    import numpy as np

    if (dt_model is None) or (feature_names is None) or (target_names is None):
        raise ValueError("Please pass DecisionTreeClassifier object, list of features and target variable classes")

    tree_ = dt_model.tree_
    left = tree_.children_left
    right = tree_.children_right
    threshold = tree_.threshold
    # Leaf nodes store feature = -2 (TREE_UNDEFINED); the negative index
    # harmlessly picks a name from the end of the list and is never printed.
    features = [feature_names[i] for i in tree_.feature]

    def add_node(node, depth=0):
        # Recursively render one node and its subtree with 4-space indents.
        indent = "    " * depth
        rules = ""
        if threshold[node] != -2:  # -2 == TREE_UNDEFINED marks a leaf
            rules += "\n" + indent + "if ( " + features[node] + " <= " + str(threshold[node]) + " ) {"
            if left[node] != -1:
                rules += add_node(left[node], depth + 1)
                rules += "\n" + indent + "} else {"
                if right[node] != -1:
                    rules += add_node(right[node], depth + 1)
                rules += "\n" + indent + "}"
        else:
            # Leaf: predict the class with the highest sample count.
            rules += "\n" + indent + "Prediction: " + target_names[np.argmax(tree_.value[node][0])]
        return rules

    rules = "\n" + "Structure of the tree using Predictors:" + ", ".join(feature_names)
    return rules + add_node(0)

Чтобы получить дерево решений в виде JSON, которое можно использовать для рендеринга в веб-приложении:

def tree_to_json(dt_model, target_names, feature_names=None):
    """
    Returns the Decision Tree output rules in JSON format
    (nested "left"/"right" objects), suitable for rendering in a web app.

    Parameters:
    -----------
    dt_model  : DecisionTreeClassifier object (already fitted)
    target_names : Target class names (Y)
    feature_names : Features / Predictors (X)

    Returns:
    --------
    tree_json: Decision tree output as a JSON string

    Example
    --------
    >>> from sklearn.datasets import load_iris
    >>> from sklearn import tree
    >>> iris = load_iris()
    >>> dt_model = tree.DecisionTreeClassifier().fit(iris.data, iris.target)
    >>> print(tree_to_json(dt_model, iris.target_names, iris.feature_names))
    """
    # Local import: numpy is never imported at the top of this article.
    import numpy as np

    TREE_LEAF = -1  # value of sklearn.tree._tree.TREE_LEAF

    if (dt_model is None) or (feature_names is None) or (target_names is None):
        raise ValueError("Please pass DecisionTreeClassifier object, list of features and target variable classes")

    def node_to_str(tree, node_id, criterion):
        # Serialize one node's attributes as a JSON key/value fragment.
        # `six.string_types` was a Python-2 shim; `str` is the Python 3 check.
        if not isinstance(criterion, str):
            criterion = "impurity"
        value = tree.value[node_id]
        if tree.n_outputs == 1:
            value = value[0, :]
        json_value = ', '.join([str(x) for x in value])
        if tree.children_left[node_id] == TREE_LEAF:
            is_leaf = 'TRUE'
            return '"id": "%s", "isLeaf": "%s", "criterion": "%s", "impurity": "%s", "samples": "%s", "prediction": "%s", "samplesDistribution": [%s]' \
                   % (node_id,
                      is_leaf,
                      criterion,
                      tree.impurity[node_id],
                      tree.n_node_samples[node_id],
                      target_names[np.argmax(tree.value[node_id][0])],
                      json_value)
        is_leaf = 'FALSE'
        if feature_names is not None:
            feature = feature_names[tree.feature[node_id]]
        else:
            feature = tree.feature[node_id]
        # One-hot / dummy features encode the split as "feature= false".
        if "=" in feature:
            rule_type = "="
            rule_value = "false"
        else:
            rule_type = "<="
            rule_value = "%.3f" % tree.threshold[node_id]
        return '"id": "%s", "isLeaf": "%s", "rule": "%s %s %s", "impurity": "%s", "samples": "%s"' \
               % (node_id,
                  is_leaf,
                  feature,
                  rule_type,
                  rule_value,
                  tree.impurity[node_id],
                  tree.n_node_samples[node_id])

    def add_node(tree, node_id, criterion, parent=None, depth=0):
        # Emit "{ <fields>, "left": ..., "right": ... }" for the subtree.
        tabs = "  " * depth
        tree_json = "\n" + tabs + "{\n" + tabs + "  " + node_to_str(tree, node_id, criterion)
        left_child = tree.children_left[node_id]
        right_child = tree.children_right[node_id]
        if left_child != TREE_LEAF:
            tree_json = tree_json + ",\n" + tabs + '  "left": ' + \
                add_node(tree, left_child, criterion=criterion, parent=node_id, depth=depth + 1) + \
                ",\n" + tabs + '  "right": ' + \
                add_node(tree, right_child, criterion=criterion, parent=node_id, depth=depth + 1)
        return tree_json + tabs + "\n" + tabs + "}"

    # A fitted estimator exposes its low-level tree via .tree_; a raw
    # sklearn Tree object can also be passed directly (the old
    # `sklearn.tree.tree.Tree` path no longer exists in modern sklearn).
    if hasattr(dt_model, "tree_"):
        return add_node(dt_model.tree_, 0, criterion=dt_model.criterion)
    return add_node(dt_model, 0, criterion="gini")

Чтобы получить дерево решений в виде упорядоченного словаря:

def tree_to_dict(dt_model, target_names, feature_names=None):
    """
    Returns the Decision Tree output rules as an ordered dictionary
    (nested "left"/"right" sub-dictionaries).

    Parameters:
    -----------
    dt_model  : DecisionTreeClassifier object (already fitted)
    target_names : Target class names (Y)
    feature_names : Features / Predictors (X)

    Returns:
    --------
    tree_dict: Decision tree output as an OrderedDict

    Example
    --------
    >>> from sklearn.datasets import load_iris
    >>> from sklearn import tree
    >>> iris = load_iris()
    >>> dt_model = tree.DecisionTreeClassifier().fit(iris.data, iris.target)
    >>> print(tree_to_dict(dt_model, iris.target_names, iris.feature_names))
    """
    # Local imports: neither numpy nor OrderedDict is imported anywhere
    # at the top of this article.
    import numpy as np
    from collections import OrderedDict

    TREE_LEAF = -1  # value of sklearn.tree._tree.TREE_LEAF

    def node_to_str(tree, node_id, criterion):
        # Build one node's attributes as a flat dict of JSON-safe values.
        # `six.string_types` was a Python-2 shim; `str` is the Python 3 check.
        if not isinstance(criterion, str):
            criterion = "impurity"
        value = tree.value[node_id]
        if tree.n_outputs == 1:
            value = value[0, :]
        json_value = ', '.join([str(x) for x in value])
        if tree.children_left[node_id] == TREE_LEAF:
            is_leaf = 'TRUE'
            return {"id": int(node_id), "isLeaf": is_leaf, "criterion": criterion,
                    "impurity": float(tree.impurity[node_id]),
                    "samples": float(tree.n_node_samples[node_id]),
                    "prediction": str(target_names[np.argmax(tree.value[node_id][0])]),
                    "samplesDistribution": json_value}
        is_leaf = 'FALSE'
        if feature_names is not None:
            feature = str(feature_names[tree.feature[node_id]])
        else:
            feature = tree.feature[node_id]
        # One-hot / dummy features encode the split as "feature=false".
        if "=" in feature:
            rule_type = "="
            rule_value = "false"
        else:
            rule_type = "<="
            rule_value = "%.3f" % tree.threshold[node_id]
        return {"id": int(node_id), "isLeaf": is_leaf,
                "rule": feature + rule_type + rule_value,
                "impurity": float(tree.impurity[node_id]),
                "samples": float(tree.n_node_samples[node_id])}

    def add_node(tree, node_id, criterion, parent=None, depth=0):
        # Build the dict for this node and recurse into both children.
        tree_dict = OrderedDict()
        tree_dict.update(node_to_str(tree, node_id, criterion))
        left_child = tree.children_left[node_id]
        right_child = tree.children_right[node_id]
        if left_child != TREE_LEAF:
            tree_dict.update({"left": add_node(tree, left_child, criterion=criterion, parent=node_id, depth=depth + 1)})
            tree_dict.update({"right": add_node(tree, right_child, criterion=criterion, parent=node_id, depth=depth + 1)})
        return tree_dict

    # A fitted estimator exposes its low-level tree via .tree_; a raw
    # sklearn Tree object can also be passed directly (the old
    # `sklearn.tree.tree.Tree` path no longer exists in modern sklearn).
    tree_dict = OrderedDict()
    if hasattr(dt_model, "tree_"):
        tree_dict.update(add_node(dt_model.tree_, 0, criterion=dt_model.criterion))
    else:
        tree_dict.update(add_node(dt_model, 0, criterion="gini"))
    return tree_dict