function tree = DecisionTreeLearning(features, labels)
% Recursively builds a binary decision tree from a matrix of features
% (one row per example) and a matching matrix of labels.

% Base case: no examples left, so return an empty leaf.
if isempty(labels)
    tree.op = [];
    tree.kids = [];
    tree.class = [];
    tree.attribute = [];
    tree.threshold = [];
    return
end

% Check to see if all of the labels are the same; if so, create a leaf
% node whose class is that shared label.
if size(unique(labels, 'rows'), 1) == 1
    tree.op = [];
    tree.kids = [];
    tree.class = labels(1, :);
    tree.attribute = [];
    tree.threshold = [];
    return
end

% Choose the attribute and threshold to test at this node.
[attribute, threshold] = ChooseAttribute(features, labels);

% Split the data set according to the chosen attribute and threshold.
[lessThanFeatures, lessThanLabels, greaterThanFeatures, greaterThanLabels] = ...
    Split(features, labels, attribute, threshold);

% Create the branches as new trees grown from the split data.
tree.op = num2str(attribute);
tree.attribute = attribute;
tree.threshold = threshold;
tree.class = [];
tree.kids = {DecisionTreeLearning(lessThanFeatures, lessThanLabels), ...
             DecisionTreeLearning(greaterThanFeatures, greaterThanLabels)};
end
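
The paste calls two helper functions, ChooseAttribute and Split, that are not included. The sketches below are one possible implementation, inferred only from how they are called above: Split partitions the rows on a single attribute at the threshold, and ChooseAttribute is assumed here to pick the attribute/threshold pair with the highest information gain over binary labels. Both are assumptions, not the original author's code; in MATLAB they would normally live in their own Split.m and ChooseAttribute.m files.

% Assumed helper: partition the examples on one attribute, sending rows
% whose value is <= threshold to the "less than" branch.
function [lessThanFeatures, lessThanLabels, greaterThanFeatures, greaterThanLabels] = ...
    Split(features, labels, attribute, threshold)
lessMask = features(:, attribute) <= threshold;
lessThanFeatures    = features(lessMask, :);
lessThanLabels      = labels(lessMask, :);
greaterThanFeatures = features(~lessMask, :);
greaterThanLabels   = labels(~lessMask, :);
end

% Assumed helper: pick the attribute and threshold with the highest
% information gain, assuming binary (0/1) labels in the first column.
function [bestAttribute, bestThreshold] = ChooseAttribute(features, labels)
bestGain = -Inf;
bestAttribute = 1;
bestThreshold = features(1, 1);
baseEntropy = BinaryEntropy(labels(:, 1));
for a = 1:size(features, 2)
    for t = unique(features(:, a))'
        lessMask = features(:, a) <= t;
        if all(lessMask) || ~any(lessMask)
            continue % skip splits that leave one side empty
        end
        pLess = mean(lessMask);
        remainder = pLess * BinaryEntropy(labels(lessMask, 1)) + ...
                    (1 - pLess) * BinaryEntropy(labels(~lessMask, 1));
        gain = baseEntropy - remainder;
        if gain > bestGain
            bestGain = gain;
            bestAttribute = a;
            bestThreshold = t;
        end
    end
end
end

% Shannon entropy of a binary label vector.
function h = BinaryEntropy(y)
p = mean(y == 1);
if p == 0 || p == 1
    h = 0;
else
    h = -p * log2(p) - (1 - p) * log2(1 - p);
end
end

A minimal call on synthetic data, purely for illustration:

features = rand(100, 4);                 % 100 examples, 4 numeric features
labels   = double(features(:, 2) > 0.5); % binary label derived from feature 2
tree = DecisionTreeLearning(features, labels);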