def zad1():
    from sklearn import tree
    # Decision tree trained on the logical AND of two binary inputs.
    X = [[0, 0], [0, 1], [1, 0], [1, 1]]
    Y = [0, 0, 0, 1]
    clf = tree.DecisionTreeClassifier()
    clf = clf.fit(X, Y)
    print(clf.predict([[1, 1]]))
def zad2_1():
    from sklearn import tree
    # Features: mileage [km], brand, damaged (0/1).
    brands = {"VW": 0, "Ford": 1, "Opel": 2}
    X = [
        [1000000, brands["VW"], 0],
        [50000, brands["VW"], 1],
        [150000, brands["Ford"], 0],
        [300000, brands["Opel"], 0],
        [3000000, brands["Opel"], 0],
        [145000, brands["VW"], 1],
        [230000, brands["Opel"], 0],
        [45000, brands["Ford"], 0],
        [340000, brands["Ford"], 0],
        [34622, brands["Opel"], 1],
        [45603, brands["Opel"], 0],
        [64000, brands["Ford"], 0],
    ]
    Y = [0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1]
    clf = tree.DecisionTreeClassifier()
    clf = clf.fit(X, Y)
    print(clf.predict([[50000, brands["Ford"], 0]]))
def zad2_2():
    from sklearn import tree
    # Features: temperature [deg C], time of day [hour 0-23], precipitation [mm].
    # Label: whether the temperature is pleasant.
    X = [
        [12, 13, 0],
        [24, 14, 0],
        [18, 6, 22],
        [35, 12, 0],
        [1, 13, 23],
        [4, -13, 0],
        [23, 15, 12],
        [3, 13, 15],
    ]
    Y = [1, 1, 0, 0, 0, 0, 1, 0]
    clf = tree.DecisionTreeClassifier()
    clf = clf.fit(X, Y)
    print(clf.predict([[12, 13, 0]]))
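    # A minimal sketch of printing the fitted tree as text rules; it assumes
    # scikit-learn >= 0.21 (which provides tree.export_text) and the feature
    # names below are illustrative guesses, not from the original.
    print(tree.export_text(clf, feature_names=["temperature", "hour", "rain_mm"]))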
def zad3_przyklad():
    from sklearn import cluster
    from sklearn import datasets
    from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection
    import matplotlib.pyplot as plt

    iris = datasets.load_iris()
    print(iris.data)
    print(iris.target)

    # Cluster the iris measurements into 3 groups with k-means.
    X = iris.data
    k_means = cluster.KMeans(n_clusters=3)
    k_means.fit(X)

    # 3D scatter of three of the four features, coloured by cluster label.
    labels = k_means.labels_
    fig = plt.figure()
    ax = fig.add_subplot(projection='3d')
    ax.view_init(elev=48, azim=134)
    ax.scatter(X[:, 3], X[:, 0], X[:, 2], c=labels.astype(float))
    ax.set_xticklabels([])
    ax.set_yticklabels([])
    ax.set_zticklabels([])
    ax.set_xlabel('Petal width')
    ax.set_ylabel('Sepal length')
    ax.set_zlabel('Petal length')
    plt.show()
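    # A minimal sketch comparing the k-means clusters with the true iris
    # species via the adjusted Rand index (assumes the standard
    # sklearn.metrics.adjusted_rand_score API).
    from sklearn import metrics
    print(metrics.adjusted_rand_score(iris.target, labels))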
def zad3():
    import cv2
    img = cv2.imread('beach1.jpeg')
    # Histogram of the green channel (index 1): 8 bins over the range [0, 256).
    hist = cv2.calcHist([img], [1], None, [8], [0, 256])

    cv2.imshow('obrazek', img)
    cv2.waitKey()
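    # A minimal sketch of plotting the histogram computed above; it assumes
    # matplotlib is available (it is already used elsewhere in this file).
    import matplotlib.pyplot as plt
    plt.bar(range(8), hist.ravel())
    plt.xlabel('Green-channel bin (8 bins over 0-255)')
    plt.ylabel('Pixel count')
    plt.show()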
def zad4():
    import matplotlib.pyplot as plt
    from sklearn import datasets, svm, metrics
    digits = datasets.load_digits()

    # Show the first four training images with their labels.
    images_and_labels = list(zip(digits.images, digits.target))
    for index, (image, label) in enumerate(images_and_labels[:4]):
        plt.subplot(2, 4, index + 1)
        plt.axis('off')
        plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')
        plt.title('Training: %i' % label)

    # Flatten each 8x8 image into a 64-element feature vector.
    n_samples = len(digits.images)
    data = digits.images.reshape((n_samples, -1))

    # Train a linear SVM on the first half and evaluate on the second half.
    classifier = svm.LinearSVC()
    classifier.fit(data[:n_samples // 2], digits.target[:n_samples // 2])
    expected = digits.target[n_samples // 2:]
    predicted = classifier.predict(data[n_samples // 2:])

    print("Classification report for classifier %s:\n%s\n"
          % (classifier, metrics.classification_report(expected, predicted)))
    print("Confusion matrix:\n%s" % metrics.confusion_matrix(expected, predicted))
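    # A minimal sketch of reporting overall accuracy on the held-out half,
    # assuming the standard metrics.accuracy_score API.
    print("Accuracy: %s" % metrics.accuracy_score(expected, predicted))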

    # Show the first four test images with their predicted labels.
    images_and_predictions = list(zip(digits.images[n_samples // 2:], predicted))
    for index, (image, prediction) in enumerate(images_and_predictions[:4]):
        plt.subplot(2, 4, index + 5)
        plt.axis('off')
        plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')
        plt.title('Prediction: %i' % prediction)

    plt.show()

if __name__ == "__main__":
    zad4()