Advertisement
Guest User

DrvoKolokvium

a guest
Jan 20th, 2018
629
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 13.72 KB | None | 0 0
# Training dataset: one row per specimen, 7 columns each.
# Columns 0-5 are numeric measurements; column 6 (the last) is the class
# label, an integer 1-7, which is what uniquecounts()/buildtree() read
# via row[-1] and the __main__ grouping reads via row[6].
# NOTE(review): the measurement columns look like the classic fish-market
# dataset (weight, three lengths, height %, width %) — TODO confirm.
data = [[242.0, 23.2, 25.4, 30.0, 38.4, 13.4, 1],
        [290.0, 24.0, 26.3, 31.2, 40.0, 13.8, 1],
        [340.0, 23.9, 26.5, 31.1, 39.8, 15.1, 1],
        [363.0, 26.3, 29.0, 33.5, 38.0, 13.3, 1],
        [430.0, 26.5, 29.0, 34.0, 36.6, 15.1, 1],
        [450.0, 26.8, 29.7, 34.7, 39.2, 14.2, 1],
        [500.0, 26.8, 29.7, 34.5, 41.1, 15.3, 1],
        [390.0, 27.6, 30.0, 35.0, 36.2, 13.4, 1],
        [450.0, 27.6, 30.0, 35.1, 39.9, 13.8, 1],
        [500.0, 28.5, 30.7, 36.2, 39.3, 13.7, 1],
        [475.0, 28.4, 31.0, 36.2, 39.4, 14.1, 1],
        [500.0, 28.7, 31.0, 36.2, 39.7, 13.3, 1],
        [500.0, 29.1, 31.5, 36.4, 37.8, 12.0, 1],
        [500.0, 29.5, 32.0, 37.3, 37.3, 13.6, 1],
        [600.0, 29.4, 32.0, 37.2, 40.2, 13.9, 1],
        [600.0, 29.4, 32.0, 37.2, 41.5, 15.0, 1],
        [700.0, 30.4, 33.0, 38.3, 38.8, 13.8, 1],
        [700.0, 30.4, 33.0, 38.5, 38.8, 13.5, 1],
        [610.0, 30.9, 33.5, 38.6, 40.5, 13.3, 1],
        [650.0, 31.0, 33.5, 38.7, 37.4, 14.8, 1],
        [575.0, 31.3, 34.0, 39.5, 38.3, 14.1, 1],
        [685.0, 31.4, 34.0, 39.2, 40.8, 13.7, 1],
        [620.0, 31.5, 34.5, 39.7, 39.1, 13.3, 1],
        [680.0, 31.8, 35.0, 40.6, 38.1, 15.1, 1],
        [700.0, 31.9, 35.0, 40.5, 40.1, 13.8, 1],
        [725.0, 31.8, 35.0, 40.9, 40.0, 14.8, 1],
        [720.0, 32.0, 35.0, 40.6, 40.3, 15.0, 1],
        [714.0, 32.7, 36.0, 41.5, 39.8, 14.1, 1],
        [850.0, 32.8, 36.0, 41.6, 40.6, 14.9, 1],
        [1000.0, 33.5, 37.0, 42.6, 44.5, 15.5, 1],
        [920.0, 35.0, 38.5, 44.1, 40.9, 14.3, 1],
        [955.0, 35.0, 38.5, 44.0, 41.1, 14.3, 1],
        [925.0, 36.2, 39.5, 45.3, 41.4, 14.9, 1],
        [975.0, 37.4, 41.0, 45.9, 40.6, 14.7, 1],
        [950.0, 38.0, 41.0, 46.5, 37.9, 13.7, 1],
        [270.0, 23.6, 26.0, 28.7, 29.2, 14.8, 2],
        [270.0, 24.1, 26.5, 29.3, 27.8, 14.5, 2],
        [306.0, 25.6, 28.0, 30.8, 28.5, 15.2, 2],
        [540.0, 28.5, 31.0, 34.0, 31.6, 19.3, 2],
        [800.0, 33.7, 36.4, 39.6, 29.7, 16.6, 2],
        [1000.0, 37.3, 40.0, 43.5, 28.4, 15.0, 2],
        [40.0, 12.9, 14.1, 16.2, 25.6, 14.0, 3],
        [69.0, 16.5, 18.2, 20.3, 26.1, 13.9, 3],
        [78.0, 17.5, 18.8, 21.2, 26.3, 13.7, 3],
        [87.0, 18.2, 19.8, 22.2, 25.3, 14.3, 3],
        [120.0, 18.6, 20.0, 22.2, 28.0, 16.1, 3],
        # NOTE(review): first column 0.0 below looks like a data-entry
        # anomaly (every other row has a positive value) — TODO confirm.
        [0.0, 19.0, 20.5, 22.8, 28.4, 14.7, 3],
        [110.0, 19.1, 20.8, 23.1, 26.7, 14.7, 3],
        [120.0, 19.4, 21.0, 23.7, 25.8, 13.9, 3],
        [150.0, 20.4, 22.0, 24.7, 23.5, 15.2, 3],
        [145.0, 20.5, 22.0, 24.3, 27.3, 14.6, 3],
        [160.0, 20.5, 22.5, 25.3, 27.8, 15.1, 3],
        [140.0, 21.0, 22.5, 25.0, 26.2, 13.3, 3],
        [160.0, 21.1, 22.5, 25.0, 25.6, 15.2, 3],
        [169.0, 22.0, 24.0, 27.2, 27.7, 14.1, 3],
        [161.0, 22.0, 23.4, 26.7, 25.9, 13.6, 3],
        [200.0, 22.1, 23.5, 26.8, 27.6, 15.4, 3],
        [180.0, 23.6, 25.2, 27.9, 25.4, 14.0, 3],
        [290.0, 24.0, 26.0, 29.2, 30.4, 15.4, 3],
        [272.0, 25.0, 27.0, 30.6, 28.0, 15.6, 3],
        [390.0, 29.5, 31.7, 35.0, 27.1, 15.3, 3],
        [55.0, 13.5, 14.7, 16.5, 41.5, 14.1, 4],
        [60.0, 14.3, 15.5, 17.4, 37.8, 13.3, 4],
        [90.0, 16.3, 17.7, 19.8, 37.4, 13.5, 4],
        [120.0, 17.5, 19.0, 21.3, 39.4, 13.7, 4],
        [150.0, 18.4, 20.0, 22.4, 39.7, 14.7, 4],
        [140.0, 19.0, 20.7, 23.2, 36.8, 14.2, 4],
        [170.0, 19.0, 20.7, 23.2, 40.5, 14.7, 4],
        [145.0, 19.8, 21.5, 24.1, 40.4, 13.1, 4],
        [200.0, 21.2, 23.0, 25.8, 40.1, 14.2, 4],
        [273.0, 23.0, 25.0, 28.0, 39.6, 14.8, 4],
        [300.0, 24.0, 26.0, 29.0, 39.2, 14.6, 4],
        [6.7, 9.3, 9.8, 10.8, 16.1, 9.7, 5],
        [7.5, 10.0, 10.5, 11.6, 17.0, 10.0, 5],
        [7.0, 10.1, 10.6, 11.6, 14.9, 9.9, 5],
        [9.7, 10.4, 11.0, 12.0, 18.3, 11.5, 5],
        [9.8, 10.7, 11.2, 12.4, 16.8, 10.3, 5],
        [8.7, 10.8, 11.3, 12.6, 15.7, 10.2, 5],
        [10.0, 11.3, 11.8, 13.1, 16.9, 9.8, 5],
        [9.9, 11.3, 11.8, 13.1, 16.9, 8.9, 5],
        [9.8, 11.4, 12.0, 13.2, 16.7, 8.7, 5],
        [12.2, 11.5, 12.2, 13.4, 15.6, 10.4, 5],
        [13.4, 11.7, 12.4, 13.5, 18.0, 9.4, 5],
        [12.2, 12.1, 13.0, 13.8, 16.5, 9.1, 5],
        [19.7, 13.2, 14.3, 15.2, 18.9, 13.6, 5],
        [19.9, 13.8, 15.0, 16.2, 18.1, 11.6, 5],
        [200.0, 30.0, 32.3, 34.8, 16.0, 9.7, 6],
        [300.0, 31.7, 34.0, 37.8, 15.1, 11.0, 6],
        [300.0, 32.7, 35.0, 38.8, 15.3, 11.3, 6],
        [300.0, 34.8, 37.3, 39.8, 15.8, 10.1, 6],
        [430.0, 35.5, 38.0, 40.5, 18.0, 11.3, 6],
        [345.0, 36.0, 38.5, 41.0, 15.6, 9.7, 6],
        [456.0, 40.0, 42.5, 45.5, 16.0, 9.5, 6],
        [510.0, 40.0, 42.5, 45.5, 15.0, 9.8, 6],
        [540.0, 40.1, 43.0, 45.8, 17.0, 11.2, 6],
        [500.0, 42.0, 45.0, 48.0, 14.5, 10.2, 6],
        [567.0, 43.2, 46.0, 48.7, 16.0, 10.0, 6],
        [770.0, 44.8, 48.0, 51.2, 15.0, 10.5, 6],
        [950.0, 48.3, 51.7, 55.1, 16.2, 11.2, 6],
        [1250.0, 52.0, 56.0, 59.7, 17.9, 11.7, 6],
        [1600.0, 56.0, 60.0, 64.0, 15.0, 9.6, 6],
        [1550.0, 56.0, 60.0, 64.0, 15.0, 9.6, 6],
        [1650.0, 59.0, 63.4, 68.0, 15.9, 11.0, 6],
        [5.9, 7.5, 8.4, 8.8, 24.0, 16.0, 7],
        [32.0, 12.5, 13.7, 14.7, 24.0, 13.6, 7],
        [40.0, 13.8, 15.0, 16.0, 23.9, 15.2, 7],
        [51.5, 15.0, 16.2, 17.2, 26.7, 15.3, 7],
        [70.0, 15.7, 17.4, 18.5, 24.8, 15.9, 7],
        [100.0, 16.2, 18.0, 19.2, 27.2, 17.3, 7],
        [78.0, 16.8, 18.7, 19.4, 26.8, 16.1, 7],
        [80.0, 17.2, 19.0, 20.2, 27.9, 15.1, 7],
        [85.0, 17.8, 19.6, 20.8, 24.7, 14.6, 7],
        [85.0, 18.2, 20.0, 21.0, 24.2, 13.2, 7],
        [110.0, 19.0, 21.0, 22.5, 25.3, 15.8, 7],
        [115.0, 19.0, 21.0, 22.5, 26.3, 14.7, 7],
        [125.0, 19.0, 21.0, 22.5, 25.3, 16.3, 7],
        [130.0, 19.3, 21.3, 22.8, 28.0, 15.5, 7],
        [120.0, 20.0, 22.0, 23.5, 26.0, 14.5, 7],
        [120.0, 20.0, 22.0, 23.5, 24.0, 15.0, 7],
        [130.0, 20.0, 22.0, 23.5, 26.0, 15.0, 7],
        [135.0, 20.0, 22.0, 23.5, 25.0, 15.0, 7],
        [110.0, 20.0, 22.0, 23.5, 23.5, 17.0, 7],
        [130.0, 20.5, 22.5, 24.0, 24.4, 15.1, 7],
        [150.0, 20.5, 22.5, 24.0, 28.3, 15.1, 7],
        [145.0, 20.7, 22.7, 24.2, 24.6, 15.0, 7],
        [150.0, 21.0, 23.0, 24.5, 21.3, 14.8, 7],
        [170.0, 21.5, 23.5, 25.0, 25.1, 14.9, 7],
        [225.0, 22.0, 24.0, 25.5, 28.6, 14.6, 7],
        [145.0, 22.0, 24.0, 25.5, 25.0, 15.0, 7],
        [188.0, 22.6, 24.6, 26.2, 25.7, 15.9, 7],
        [180.0, 23.0, 25.0, 26.5, 24.3, 13.9, 7],
        [197.0, 23.5, 25.6, 27.0, 24.3, 15.7, 7],
        [218.0, 25.0, 26.5, 28.0, 25.6, 14.8, 7],
        [300.0, 25.2, 27.3, 28.7, 29.0, 17.9, 7],
        [260.0, 25.4, 27.5, 28.9, 24.8, 15.0, 7],
        [265.0, 25.4, 27.5, 28.9, 24.4, 15.0, 7],
        [250.0, 25.4, 27.5, 28.9, 25.2, 15.8, 7],
        [250.0, 25.9, 28.0, 29.4, 26.6, 14.3, 7],
        [300.0, 26.9, 28.7, 30.1, 25.2, 15.4, 7],
        [320.0, 27.8, 30.0, 31.6, 24.1, 15.1, 7],
        [514.0, 30.5, 32.8, 34.0, 29.5, 17.7, 7],
        [556.0, 32.0, 34.5, 36.5, 28.1, 17.5, 7],
        [840.0, 32.5, 35.0, 37.3, 30.8, 20.9, 7],
        [685.0, 34.0, 36.5, 39.0, 27.9, 17.6, 7],
        [700.0, 34.0, 36.0, 38.3, 27.7, 17.6, 7],
        [700.0, 34.5, 37.0, 39.4, 27.5, 15.9, 7],
        [690.0, 34.6, 37.0, 39.3, 26.9, 16.2, 7],
        [900.0, 36.5, 39.0, 41.4, 26.9, 18.1, 7],
        [650.0, 36.5, 39.0, 41.4, 26.9, 14.5, 7],
        [820.0, 36.6, 39.0, 41.3, 30.1, 17.8, 7],
        [850.0, 36.9, 40.0, 42.3, 28.2, 16.8, 7],
        [900.0, 37.0, 40.0, 42.5, 27.6, 17.0, 7],
        [1015.0, 37.0, 40.0, 42.4, 29.2, 17.6, 7],
        [820.0, 37.1, 40.0, 42.5, 26.2, 15.6, 7],
        [1100.0, 39.0, 42.0, 44.6, 28.7, 15.4, 7],
        [1000.0, 39.8, 43.0, 45.2, 26.4, 16.1, 7],
        [1100.0, 40.1, 43.0, 45.5, 27.5, 16.3, 7],
        [1000.0, 40.2, 43.5, 46.0, 27.4, 17.7, 7],
        [1000.0, 41.1, 44.0, 46.6, 26.8, 16.3, 7]]
  160.  
  161.  
  162. class decisionnode:
  163.     def __init__(self, col=-1, value=None, results=None, tb=None, fb=None):
  164.         self.col = col
  165.         self.value = value
  166.         self.results = results
  167.         self.tb = tb
  168.         self.fb = fb
  169.  
  170.  
  171. def sporedi_broj(row, column, value):
  172.     return row[column] >= value
  173.  
  174.  
  175. def sporedi_string(row, column, value):
  176.     return row[column] == value
  177.  
  178.  
# divideset (below): splits a set of rows on a specific column; handles
# both numeric and nominal (string) comparison values.
  181.  
  182.  
  183. def divideset(rows, column, value):
  184.     # Make a function that tells us if a row is in
  185.     # the first group (true) or the second group (false)
  186.     split_function = None
  187.     if isinstance(value, int) or isinstance(value, float):  # ako vrednosta so koja sporeduvame e od tip int ili float
  188.         # split_function=lambda row:row[column]>=value # togas vrati funkcija cij argument e row i vrakja vrednost true ili false
  189.         split_function = sporedi_broj
  190.     else:
  191.         # split_function=lambda row:row[column]==value # ako vrednosta so koja sporeduvame e od drug tip (string)
  192.         split_function = sporedi_string
  193.  
  194.     # Divide the rows into two sets and return them
  195.     set_false = []
  196.     set_true = []
  197.     for row in rows:
  198.         if split_function(row, column, value):
  199.             set_true.append(row)
  200.         else:
  201.             set_false.append(row)
  202.     set1 = [row for row in rows if
  203.             split_function(row, column, value)]  # za sekoj row od rows za koj split_function vrakja true
  204.     set2 = [row for row in rows if
  205.             not split_function(row, column, value)]  # za sekoj row od rows za koj split_function vrakja false
  206.     # return (set1, set2)
  207.     return (set_true, set_false)
  208.  
  209.  
  210. def uniquecounts(rows):
  211.     results = {}
  212.     for row in rows:
  213.         # The result is the last column
  214.         r = row[-1]
  215.         results.setdefault(r, 0)
  216.         results[r] += 1
  217.  
  218.     return results
  219.  
  220.  
  221. def log2(x):
  222.     from math import log
  223.     l2 = log(x) / log(2)
  224.     return l2
  225.  
  226.  
  227. def entropy(rows):
  228.     results = uniquecounts(rows)
  229.     # Now calculate the entropy
  230.     ent = 0.0
  231.     for r in results.keys():
  232.         p = float(results[r]) / len(rows)
  233.         ent = ent - p * log2(p)
  234.     return ent
  235.  
  236.  
def buildtree(rows, scoref=entropy):
    """Recursively build a decision tree over ``rows``.

    rows: list of rows whose last column is the class label.
    scoref: impurity function over a row list (defaults to entropy).

    Returns a decisionnode: interior nodes store the best (col, value)
    split plus true/false subtrees; leaves store the label->count dict
    from uniquecounts().  NOTE: candidate split values come from a set,
    so ties between equal-gain splits are broken by set iteration order.
    """
    if len(rows) == 0: return decisionnode()
    current_score = scoref(rows)

    # Set up some variables to track the best criteria
    best_gain = 0.0
    best_column = -1
    best_value = None
    best_subsetf = None
    best_subsett = None

    # Every column except the last (the class label) is a candidate feature.
    column_count = len(rows[0]) - 1
    for col in range(column_count):
        # Generate the list of different values in
        # this column
        column_values = set()
        for row in rows:
            column_values.add(row[col])
        # Now try dividing the rows up for each value
        # in this column
        for value in column_values:
            (set1, set2) = divideset(rows, col, value)

            # Information gain: parent impurity minus the size-weighted
            # impurity of the two halves.
            p = float(len(set1)) / len(rows)
            gain = current_score - p * scoref(set1) - (1 - p) * scoref(set2)
            # Only accept splits that actually separate the rows.
            if gain > best_gain and len(set1) > 0 and len(set2) > 0:
                best_gain = gain
                best_column = col
                best_value = value
                best_subsett = set1
                best_subsetf = set2
                # best_criteria = (col, value)
                # best_sets = (set1, set2)

    # Create the subbranches
    if best_gain > 0:
        trueBranch = buildtree(best_subsett, scoref)
        falseBranch = buildtree(best_subsetf, scoref)
        return decisionnode(col=best_column, value=best_value,
                            tb=trueBranch, fb=falseBranch)
    else:
        # No split improves purity: emit a leaf with the label counts.
        return decisionnode(results=uniquecounts(rows))
  280.  
  281.  
  282. def classify(observation, tree):
  283.     if tree.results != None:
  284.         pom=[]
  285.         for k,v in tree.results.items():
  286.             pom=k
  287.         return pom
  288.  
  289.     else:
  290.         vrednost = observation[tree.col]
  291.         branch = None
  292.  
  293.         if isinstance(vrednost, int) or isinstance(vrednost, float):
  294.             if vrednost >= tree.value:
  295.                 branch = tree.tb
  296.             else:
  297.                 branch = tree.fb
  298.         else:
  299.             if vrednost == tree.value:
  300.                 branch = tree.tb
  301.             else:
  302.                 branch = tree.fb
  303.  
  304.         return classify(observation, branch)
  305.  
  306. def printtree(tree, indent=''):
  307.     # Is this a leaf node?
  308.     if tree.results != None:
  309.         print(indent + str(sorted(tree.results.items())))
  310.     else:
  311.         # Print the criteria
  312.         print(indent + str(tree.col) + ':' + str(tree.value) + '? ')
  313.         # Print the branches
  314.         print(indent + 'T->')
  315.         printtree(tree.tb, indent + '  ')
  316.         print(indent + 'F->')
  317.         printtree(tree.fb, indent + '  ')
  318.  
  319. if __name__ == "__main__":
  320.     index = input()
  321.     data2 = data[index]
  322.  
  323.     lista15=[]
  324.     lista25=[]
  325.     lista35=[]
  326.     lista45=[]
  327.     lista55=[]
  328.     lista65=[]
  329.     lista75=[]
  330.  
  331.     eden=1
  332.     dva=2
  333.     tri=3
  334.     cetiri=4
  335.     pet=5
  336.     sest=6
  337.     sedum=7
  338.  
  339.     for i in range(0,len(data)):
  340.         if data[i][6]==eden:
  341.             lista15.append(data[i])
  342.         if data[i][6]==dva:
  343.                     lista25.append(data[i])
  344.         if data[i][6]==tri:
  345.                     lista35.append(data[i])
  346.         if data[i][6]==cetiri:
  347.                     lista45.append(data[i])
  348.         if data[i][6]==pet:
  349.                     lista55.append(data[i])
  350.         if data[i][6]==sest:
  351.                     lista65.append(data[i])
  352.         if data[i][6]==sedum:
  353.                     lista75.append(data[i])
  354.  
  355.     listaSo5=[]
  356.     listaSo5=lista15[:5]+lista25[:5]+lista35[:5]+lista45[:5]+lista55[:5]+lista65[:5]+lista75[:5]
  357.  
  358.     drvoSoPo5 = buildtree(listaSo5)
  359.     printtree
  360.     drvo1c= classify(data2,drvoSoPo5)
  361.  
  362.     print drvo1c
  363.  
  364.  
  365.     #solution = None
  366.     #print solution
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement