Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
def zad1():
    """Fit a decision tree to the logical-AND truth table and predict one point."""
    from sklearn import tree

    # Truth table of AND: the label is 1 only when both inputs are 1.
    samples = [[0, 0], [0, 1], [1, 0], [1, 1]]
    targets = [0, 0, 0, 1]

    model = tree.DecisionTreeClassifier()
    model = model.fit(samples, targets)
    print(model.predict([[1, 1]]))
def zad2_1():
    """Train a decision tree on car records and predict the label of one car.

    Each feature row is [mileage, brand code, damaged flag]
    (original comment: "przebieg, marka, uszkodzony, pojemnosc").
    Prints the predicted class for a 50 000-mileage, undamaged Ford.
    """
    # BUG FIX: `tree` was referenced without being imported in this function.
    # The `from sklearn import tree` inside zad1() is function-local and does
    # not leak here, so calling zad2_1() raised NameError.
    from sklearn import tree

    # Renamed from `dict`, which shadowed the builtin of the same name.
    brand = {"VW": 0, "Ford": 1, "Opel": 2}
    X = [
        [1000000, brand["VW"], 0],
        [50000, brand["VW"], 1],
        [150000, brand["Ford"], 0],
        [300000, brand["Opel"], 0],
        [3000000, brand["Opel"], 0],
        [145000, brand["VW"], 1],
        [230000, brand["Opel"], 0],
        [45000, brand["Ford"], 0],
        [340000, brand["Ford"], 0],
        [34622, brand["Opel"], 1],
        [45603, brand["Opel"], 0],
        [64000, brand["Ford"], 0],
    ]
    Y = [0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 1, 1]

    clf = tree.DecisionTreeClassifier()
    clf = clf.fit(X, Y)
    print(clf.predict([[50000, brand["Ford"], 0]]))
def zad2_2():
    """Classify weather samples with a decision tree and predict one sample.

    Feature row: [temperature, hour of day 0-23, rainfall in mm];
    target: whether the temperature is pleasant ("czy temperatura jest spoko").
    """
    from sklearn import tree

    weather = [
        [12, 13, 0],
        [24, 14, 0],
        [18, 6, 22],
        [35, 12, 0],
        [1, 13, 23],
        [4, -13, 0],
        [23, 15, 12],
        [3, 13, 15],
    ]
    is_nice = [1, 1, 0, 0, 0, 0, 1, 0]

    model = tree.DecisionTreeClassifier().fit(weather, is_nice)
    print(model.predict([[12, 13, 0]]))
def zad3_przyklad():
    """Cluster the iris dataset with k-means and show a 3-D scatter of the labels.

    Axes: petal width (x), sepal length (y), petal length (z); point colour is
    the cluster label assigned by k-means.
    """
    from sklearn import cluster
    from sklearn import datasets
    import numpy as np
    import matplotlib.pyplot as plt

    iris = datasets.load_iris()
    print(iris.data)
    print(iris.target)
    X = iris.data

    k_means = cluster.KMeans(n_clusters=3)
    k_means.fit(X)
    labels = k_means.labels_

    fig = plt.figure()
    # BUG FIX: `Axes3D(fig, ...)` stopped auto-attaching the axes to the figure
    # (removed in matplotlib 3.7); create the 3-D axes through the figure and
    # apply the old rect/elev/azim settings explicitly.
    ax = fig.add_axes([0, 0, .95, 1], projection='3d')
    ax.view_init(elev=48, azim=134)
    # BUG FIX: `labels.astype(np.float)` — the `np.float` alias was removed in
    # NumPy 1.24; the builtin `float` is the documented replacement.
    ax.scatter(X[:, 3], X[:, 0], X[:, 2], c=labels.astype(float))
    # BUG FIX: the `w_xaxis`/`w_yaxis`/`w_zaxis` aliases were removed in
    # matplotlib 3.8; `xaxis` etc. are the supported names.
    ax.xaxis.set_ticklabels([])
    ax.yaxis.set_ticklabels([])
    ax.zaxis.set_ticklabels([])
    ax.set_xlabel('Petal width')
    ax.set_ylabel('Sepal length')
    ax.set_zlabel('Petal length')
    plt.show()
def zad3():
    """Load 'beach1.jpeg', compute its green-channel histogram, and display it.

    Blocks until a key is pressed in the image window.
    """
    # FIX: removed the unused function-local `from sklearn import tree`.
    import cv2

    img = cv2.imread('beach1.jpeg')
    # 8-bin histogram of channel 1 (green) over the full 0-255 range.
    # NOTE(review): `hist` is computed but never used — presumably left over
    # from an unfinished exercise step; kept to preserve behaviour.
    hist = cv2.calcHist([img], [1], None, [8], [0, 256])
    cv2.imshow('obrazek', img)
    cv2.waitKey()
def zad4():
    """Train a linear SVM on the digits dataset and report its performance.

    Trains on the first half of the samples, evaluates on the second half,
    prints a classification report and confusion matrix, and shows the first
    four training images alongside four test predictions.
    """
    import matplotlib.pyplot as plt
    from sklearn import datasets, svm, metrics

    digits = datasets.load_digits()

    # Top row of the figure: the first four training images with their labels.
    images_and_labels = list(zip(digits.images, digits.target))
    for index, (image, label) in enumerate(images_and_labels[:4]):
        plt.subplot(2, 4, index + 1)
        plt.axis('off')
        plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')
        plt.title('Training: %i' % label)

    # Flatten each 8x8 image into a 64-element feature vector.
    n_samples = len(digits.images)
    data = digits.images.reshape((n_samples, -1))

    # BUG FIX: `np.int` was removed in NumPy 1.24 (AttributeError at runtime);
    # the builtin `int` is the replacement — and is already what the
    # images_and_predictions line below used. The numpy import became unused.
    half = int(n_samples / 2)

    classifier = svm.LinearSVC()
    classifier.fit(data[:half], digits.target[:half])

    expected = digits.target[half:]
    predicted = classifier.predict(data[half:])

    print("Classification report for classifier %s:\n%s\n"
          % (classifier, metrics.classification_report(expected, predicted)))
    print("Confusion matrix:\n%s" % metrics.confusion_matrix(expected, predicted))

    # Bottom row of the figure: four test images with their predicted labels.
    images_and_predictions = list(zip(digits.images[half:], predicted))
    for index, (image, prediction) in enumerate(images_and_predictions[:4]):
        plt.subplot(2, 4, index + 5)
        plt.axis('off')
        plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')
        plt.title('Prediction: %i' % prediction)
    plt.show()
# Script entry point: only the last exercise is run by default.
if __name__ == "__main__":
    zad4()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement