Not a member of Pastebin yet? Sign up — it unlocks many cool features!
function [Nodes, node] = decision_tree(X, Y, depth, Nodes, indx)
% DECISION_TREE  Recursively build a decision tree and print its structure.
%
%   [Nodes, node] = decision_tree(X, Y, depth, Nodes, indx)
%
% Inputs:
%   X     - example matrix, one row per example, one column per feature
%   Y     - class labels, one per row of X
%   depth - current recursion depth (0 at the root; used only for printing)
%   Nodes - struct array accumulating tree nodes
%           (fields: value, infogain, left, right)
%   indx  - index in Nodes at which this call records its node
%
% Outputs:
%   Nodes - updated struct array of tree nodes
%   node  - feature column this node split on (0 for leaf/empty branches)
%
% Relies on the external helper infogain(X, Y), which must return the best
% feature column and its information gain.

if depth == 0
    fprintf('Root\n')
end

if isempty(Y)                        % no examples reach this branch
    fprintf('Null\n');
    node = 0;                        % BUG FIX: original never assigned `node`
                                     % here, so returning from this branch
                                     % raised "Output argument not assigned"
elseif length(unique(Y)) == 1        % all examples share one class -> leaf
    fprintf('Leaf: Class %d\n', Y(1));
    Nodes(indx) = struct('value', 0, 'infogain', 0, 'left', 0, 'right', 0);
    node = 0;
else
    % Internal node: split on the feature with the highest information gain.
    [col, igain] = infogain(X, Y);
    fprintf('split on feature %d (info gain: %f)\n', col, igain);
    features = unique(X(:, col));
    children = [0, 0];
    for f = features'                % iterate over distinct values of the feature
        fprintf('Depth: %d - Feature Branch: %d\n', depth, f);
        pos = find(X(:, col) == f);
        % NOTE(review): children(f) assumes the feature's values are exactly
        % 1 and 2; any other value mis-indexes or errors - confirm inputs.
        % NOTE(review): both recursive calls write their node at indx + 1,
        % so sibling subtrees overwrite each other in Nodes - verify the
        % intended indexing scheme with callers.
        [Nodes, children(f)] = decision_tree(X(pos, :), Y(pos, :), depth + 1, Nodes, indx + 1);
    end
    Nodes(indx) = struct('value', col, 'infogain', igain, 'left', children(1), 'right', children(2));
    node = col;
end
end
Advertisement
Add Comment
Please sign in to add a comment.