Modify the decision-tree class so that it also stores the level of the tree at which the node sits. Then modify the functions for building and printing the tree so that the level is printed for every node as well. The root is at level zero. On output, the given training set should be printed with the printTree function. An instance read from standard input should be appended to the training set, after which the tree is trained and printed again. (A sketch that stores the level directly on each node is given after the listing below.)

trainingData=[['slashdot','USA','yes',18,'None'],
    ['google','France','yes',23,'Premium'],
    ['google','France','yes',23,'Basic'],
    ['google','France','yes',23,'Basic'],
    ['digg','USA','yes',24,'Basic'],
    ['kiwitobes','France','yes',23,'Basic'],
    ['google','UK','no',21,'Premium'],
    ['(direct)','New Zealand','no',12,'None'],
    ['(direct)','UK','no',21,'Basic'],
    ['google','USA','no',24,'Premium'],
    ['slashdot','France','yes',19,'None'],
    ['digg','USA','no',18,'None'],
    ['google','UK','no',18,'None'],
    ['kiwitobes','UK','no',19,'None'],
    ['digg','New Zealand','yes',12,'Basic'],
    ['slashdot','UK','no',21,'None'],
    ['google','UK','yes',18,'Basic'],
    ['kiwitobes','France','yes',19,'Basic']]

# Decision-tree node: col/value hold the split criterion, results holds the
# class counts in a leaf, tb/fb are the true/false branches.
class decisionnode:
    def __init__(self,col=-1,value=None,results=None,tb=None,fb=None):
        self.col=col
        self.value=value
        self.results=results
        self.tb=tb
        self.fb=fb

# Comparison for numeric values: the row goes to the "true" branch if row[column]>=value
def sporedi_broj(row,column,value):
    return row[column]>=value

# Comparison for nominal values: the row goes to the "true" branch if row[column]==value
def sporedi_string(row,column,value):
    return row[column]==value

# Divides a set on a specific column. Can handle numeric
# or nominal values
def divideset(rows,column,value):
    # Make a function that tells us if a row is in
    # the first group (true) or the second group (false)
    split_function=None
    if isinstance(value,int) or isinstance(value,float): # if the value we compare against is an int or a float
        #split_function=lambda row:row[column]>=value # then use a function that takes a row and returns True or False
        split_function=sporedi_broj
    else:
        # split_function=lambda row:row[column]==value # if the value we compare against is of another type (string)
        split_function=sporedi_string

    # Divide the rows into two sets and return them
    # set1=[row for row in rows if split_function(row)] # every row in rows for which split_function returns True
    # set2=[row for row in rows if not split_function(row)] # every row in rows for which split_function returns False
    set1=[row for row in rows if split_function(row,column,value)] # every row in rows for which split_function returns True
    set2=[row for row in rows if not split_function(row,column,value)] # every row in rows for which split_function returns False
    return (set1,set2)
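
# Illustrative split (not part of the assignment): dividing on the 'read FAQ'
# column (index 2) with the value 'yes',
#   set1,set2=divideset(trainingData,2,'yes')
# puts the 10 rows whose third field is 'yes' into set1 and the other 8 rows into set2.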

# Create counts of possible results (the last column of
# each row is the result)
def uniquecounts(rows):
    results={}
    for row in rows:
        # The result is the last column
        r=row[len(row)-1]
        if r not in results: results[r]=0
        results[r]+=1
    return results
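
# Illustrative check (not part of the assignment): on the 18-row training set above,
#   uniquecounts(trainingData)  ->  7 x 'None', 3 x 'Premium', 8 x 'Basic'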

# Probability that a randomly placed item will
# be in the wrong category
def giniimpurity(rows):
    total=len(rows)
    counts=uniquecounts(rows)
    imp=0
    for k1 in counts:
        p1=float(counts[k1])/total
        for k2 in counts:
            if k1==k2: continue
            p2=float(counts[k2])/total
            imp+=p1*p2
    return imp
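
# Illustrative: for a set split evenly between two results, p1=p2=0.5, so
# giniimpurity returns 0.5*0.5 + 0.5*0.5 = 0.5; a pure set returns 0.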

# Entropy is the sum of -p(x)*log2(p(x)) across all
# the different possible results
def entropy(rows):
    from math import log
    log2=lambda x:log(x)/log(2)
    results=uniquecounts(rows)
    # Now calculate the entropy
    ent=0.0
    for r in results.keys():
        p=float(results[r])/len(rows)
        ent=ent-p*log2(p)
    return ent
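
# Illustrative: for a set split evenly between two results, p=0.5 for each, so
# entropy = -(0.5*log2(0.5) + 0.5*log2(0.5)) = 1.0; a pure set has entropy 0.0.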

def buildtree(rows,scoref=entropy):
    if len(rows)==0: return decisionnode()
    current_score=scoref(rows)

    # Set up some variables to track the best criteria
    best_gain=0.0
    best_criteria=None
    best_sets=None

    column_count=len(rows[0])-1
    for col in range(0,column_count):
        # Generate the list of different values in
        # this column
        column_values={}
        for row in rows:
            column_values[row[col]]=1
        # Now try dividing the rows up for each value
        # in this column
        for value in column_values.keys():
            (set1,set2)=divideset(rows,col,value)

            # Information gain
            p=float(len(set1))/len(rows)
            gain=current_score-p*scoref(set1)-(1-p)*scoref(set2)
            if gain>best_gain and len(set1)>0 and len(set2)>0:
                best_gain=gain
                best_criteria=(col,value)
                best_sets=(set1,set2)

    # Create the subbranches
    if best_gain>0:
        trueBranch=buildtree(best_sets[0])
        falseBranch=buildtree(best_sets[1])
        return decisionnode(col=best_criteria[0],value=best_criteria[1],
                            tb=trueBranch, fb=falseBranch)
    else:
        return decisionnode(results=uniquecounts(rows))

def printtree(tree,indent='',br=0):
    # br is the level of the current node; the root is printed with Level=0
    if tree.results!=None:
        print str(tree.results)
    else:
        print str(tree.col)+':'+str(tree.value)+'? '+'Level='+str(br)
        print indent+'T->',
        printtree(tree.tb,indent+' ',br+1)
        print indent+'F->',
        printtree(tree.fb,indent+' ',br+1)
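
# Each internal node is printed as '<column index>:<split value>? Level=<n>';
# each leaf prints its dictionary of result counts, indented under T-> / F->.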

# Walk the tree with a single observation and return the results dict
# of the leaf it ends up in.
def classify(observation,tree):
    if tree.results!=None:
        return tree.results
    else:
        vrednost=observation[tree.col]   # the observation's value in the tested column
        branch=None

        if isinstance(vrednost,int) or isinstance(vrednost,float):
            if vrednost>=tree.value: branch=tree.tb
            else: branch=tree.fb
        else:
            if vrednost==tree.value: branch=tree.tb
            else: branch=tree.fb

        return classify(observation,branch)
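
# Illustrative (hypothetical observation; classify is not called in the main block below):
#   classify(['(direct)','USA','yes',5],t)
# follows tb/fb according to each node's test and returns the leaf's result counts.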

if __name__ == "__main__":
    # referrer='slashdot'
    # location='US'
    # readFAQ='no'
    # pagesVisited=19
    # serviceChosen='None'

    # Read one instance from standard input (Python 2 input() evaluates the
    # typed literals, so string fields are expected to be quoted).
    referrer=input()
    location=input()
    readFAQ=input()
    pagesVisited=input()
    serviceChosen=input()

    # Append the new instance to the training set, then retrain and print the tree
    testCase=[referrer, location, readFAQ, pagesVisited, serviceChosen]
    trainingData.append(testCase)
    t=buildtree(trainingData)
    printtree(t)
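
The listing above prints the level by passing a counter (br) down through printtree. A minimal sketch of the variant the assignment text literally asks for, where the level is stored on the node itself, follows; the names decisionnode2, buildtree2 and printtree2 are illustrative and not part of the original, and the sketch reuses divideset, uniquecounts and entropy from the listing.

class decisionnode2:
    def __init__(self,col=-1,value=None,results=None,tb=None,fb=None,level=0):
        self.col=col
        self.value=value
        self.results=results
        self.tb=tb
        self.fb=fb
        self.level=level                     # depth of this node; the root is at level 0

def buildtree2(rows,scoref=entropy,level=0):
    if len(rows)==0: return decisionnode2(level=level)
    current_score=scoref(rows)
    best_gain=0.0
    best_criteria=None
    best_sets=None
    for col in range(0,len(rows[0])-1):
        column_values={}
        for row in rows:
            column_values[row[col]]=1
        for value in column_values.keys():
            (set1,set2)=divideset(rows,col,value)
            p=float(len(set1))/len(rows)
            gain=current_score-p*scoref(set1)-(1-p)*scoref(set2)
            if gain>best_gain and len(set1)>0 and len(set2)>0:
                best_gain=gain
                best_criteria=(col,value)
                best_sets=(set1,set2)
    if best_gain>0:
        # children sit one level below the current node
        trueBranch=buildtree2(best_sets[0],scoref,level+1)
        falseBranch=buildtree2(best_sets[1],scoref,level+1)
        return decisionnode2(col=best_criteria[0],value=best_criteria[1],
                             tb=trueBranch,fb=falseBranch,level=level)
    else:
        return decisionnode2(results=uniquecounts(rows),level=level)

def printtree2(tree,indent=''):
    if tree.results!=None:
        print str(tree.results)
    else:
        # the level now comes from the node itself instead of a counter argument
        print str(tree.col)+':'+str(tree.value)+'? '+'Level='+str(tree.level)
        print indent+'T->',
        printtree2(tree.tb,indent+' ')
        print indent+'F->',
        printtree2(tree.fb,indent+' ')

# Usage would mirror the main block above, e.g.:
#   t2=buildtree2(trainingData)
#   printtree2(t2)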