Kaidul

decision tree

Apr 23rd, 2014
126
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
MATLAB 1.05 KB | None | 0 0
  1. function [Nodes, node] = decision_tree(X, Y, depth, Nodes, indx)
  2.     if depth == 0
  3.         fprintf('Root\n')
  4.     end
  5.     if length(Y) < 1 % if example is empty
  6.         fprintf('Null\n');
  7.     elseif length(unique(Y)) == 1 % if all example has the same classification
  8.         fprintf('Leaf: Class %d\n', Y(1));
  9.         Nodes(indx) = struct('value', 0, 'infogain', 0, 'left', 0, 'right', 0);
  10.         [node] = 0;
  11.         [Nodes] = Nodes;
  12.     else
  13.         [col, igain] = infogain(X, Y);
  14.         fprintf('split on feature %d (info gain: %f)\n', col, igain);
  15.         features = unique(X(:, col));
  16.         children(1) = 0;
  17.         children(2) = 0;
  18.         for f = features'
  19.            fprintf('Depth: %d - Feature Branch: %d\n', depth, f);
  20.            pos = find(X(:, col) == f);
  21.            [Nodes, children(f)] = decision_tree(X(pos, :), Y(pos, :), depth + 1, Nodes, indx + 1);
  22.        end
  23.        Nodes(indx) = struct('value', col, 'infogain', igain, 'left', children(1), 'right', children(2));
  24.        [node] = col;
  25.        [Nodes] = Nodes;
  26.    end
  27. end
Advertisement
Add Comment
Please, Sign In to add comment