Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- #!/usr/bin/env python3
- # -*- coding: utf-8 -*-
- """
- Created on Tue May 14 14:55:14 2019
- @author: student
- """
- #
# NOTE: `sklearn.datasets.samples_generator` was deprecated and removed in
# scikit-learn 0.24 — the generators live directly in `sklearn.datasets`.
from sklearn.datasets import make_blobs
from sklearn.cluster import KMeans
import matplotlib.pyplot as plt
from pandas import DataFrame

# Generate 1000 points from 4 Gaussian blobs (fixed seed for reproducibility)
# and cluster them with K-Means, k matching the true number of centers.
X, y = make_blobs(n_samples=1000, centers=4, random_state=0)
kmeans = KMeans(n_clusters=4)
kmeans.fit(X)
y_kmeans = kmeans.predict(X)

# Scatter plot coloured by the predicted cluster label.
plt.figure()
plt.scatter(X[:, 0], X[:, 1], c=y_kmeans)
plt.show()  # bug fix: `plt.show` without () only referenced the function
from sklearn.datasets import make_moons

# Two interleaving half-moons: K-Means assumes convex, isotropic clusters,
# so forcing k=2 here deliberately illustrates where the algorithm fails.
X, y = make_moons(200, noise=.05)
kmeans = KMeans(n_clusters=2)
kmeans.fit(X)
y_kmeans = kmeans.predict(X)
plt.figure()
plt.scatter(X[:, 0], X[:, 1], c=y_kmeans)
plt.show()  # bug fix: `plt.show` without () never displayed the figure
# Generate a 2-D two-class dataset and scatter-plot it, one colour per class.
X, y = make_moons(n_samples=100, noise=0.1)
df = DataFrame({'x': X[:, 0], 'y': X[:, 1], 'label': y})
label_colors = {0: 'red', 1: 'blue'}
fig, ax = plt.subplots()
# Plot each class on the shared axes so both appear in one figure.
for label_value, subset in df.groupby('label'):
    subset.plot(ax=ax, kind='scatter', x='x', y='y',
                label=label_value, color=label_colors[label_value])
plt.show()
# Agglomerative (Ward-linkage) hierarchical clustering of the current X,
# visualised as a dendrogram truncated to the last 12 merges.
from scipy.cluster import hierarchy
import matplotlib.pyplot as plt

linkage_matrix = hierarchy.linkage(X, 'ward')
plt.figure()
dendro = hierarchy.dendrogram(linkage_matrix, p=12, truncate_mode='lastp')
- #
- # Zadanie 4
- #
- #import matplotlib.pyplot as plt
- #from mpl_toolkits.mplot3d import Axes3D
- #from sklearn import datasets
- #from sklearn.decomposition import PCA
- #
- ## import some data to play with
- #iris = datasets.load_iris()
- #X = iris.data[:, :2] # we only take the first two features.
- #y = iris.target
- #
- #x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
- #y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
- #
- #plt.figure(2, figsize=(8, 6))
- #plt.clf()
- #
- ## Plot the training points
- #plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Set1,
- # edgecolor='k')
- #plt.xlabel('Sepal length')
- #plt.ylabel('Sepal width')
- #
- #plt.xlim(x_min, x_max)
- #plt.ylim(y_min, y_max)
- #plt.xticks(())
- #plt.yticks(())
- #
- ## To getter a better understanding of interaction of the dimensions
- ## plot the first three PCA dimensions
- #fig = plt.figure(1, figsize=(8, 6))
- #ax = Axes3D(fig, elev=-150, azim=110)
- #X_reduced = PCA(n_components=3).fit_transform(iris.data)
- #ax.scatter(X_reduced[:, 0], X_reduced[:, 1], X_reduced[:, 2], c=y,
- # cmap=plt.cm.Set1, edgecolor='k', s=40)
- #ax.set_title("First three PCA directions")
- #ax.set_xlabel("1st eigenvector")
- #ax.w_xaxis.set_ticklabels([])
- #ax.set_ylabel("2nd eigenvector")
- #ax.w_yaxis.set_ticklabels([])
- #ax.set_zlabel("3rd eigenvector")
- #ax.w_zaxis.set_ticklabels([])
- # Zadanie 5
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement