Advertisement
Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
from sklearn.cluster import KMeans
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import silhouette_score
# BUG FIX: plt was used below before matplotlib was imported (the import
# appeared only after this plotting code) — import it up front.
import matplotlib.pyplot as plt

# Use every column except the target label as a clustering feature.
# (heart_data is assumed to be a DataFrame loaded earlier in the file.)
Features = heart_data.drop(["DEATH_EVENT"], axis=1).columns
X = heart_data[Features]

# Silhouette score for each candidate cluster count k = 2..24.
sc = []
for i in range(2, 25):
    # Scale before clustering: KMeans is distance-based, so features with
    # large numeric ranges would otherwise dominate.
    kmeans = Pipeline([
        ("scaling", StandardScaler()),
        ("clustering", KMeans(n_clusters=i, init='k-means++', max_iter=300,
                              n_init=10, random_state=0)),
    ]).fit(X)
    # BUG FIX: the silhouette must be evaluated in the same (scaled) feature
    # space the clustering was performed in; the original scored raw X
    # against labels obtained from scaled X, which distorts the score.
    X_scaled = kmeans["scaling"].transform(X)
    score = silhouette_score(X_scaled, kmeans["clustering"].labels_)
    sc.append(score)

plt.plot(range(2, 25), sc, marker="o")
plt.title('Silhouette')
plt.xlabel('Number of clusters')
plt.ylabel('Score')
plt.show()
from sklearn.decomposition import PCA
import matplotlib.pyplot as plt

# BUG FIX: the "seaborn-whitegrid" style name was deprecated in
# matplotlib 3.6 and removed later; fall back to the renamed style so the
# script still runs on modern matplotlib.
try:
    plt.style.use("seaborn-whitegrid")
except (OSError, ValueError):
    plt.style.use("seaborn-v0_8-whitegrid")

# Project the scaled features onto their first two principal components,
# purely for 2-D visualisation of the clusters.
pca = Pipeline([
    ("scaling", StandardScaler()),
    ("decompositioning", PCA(n_components=2)),
]).fit(X)
X2D = pca.transform(X)

# NOTE(review): `kmeans` here is whatever pipeline the preceding loop left
# behind (its last iteration, k=24), so the points are coloured by the
# 24-cluster labelling — confirm this is intended rather than k=2.
plt.scatter(X2D[:, 0], X2D[:, 1], c=kmeans["clustering"].labels_, cmap="RdYlBu")
plt.colorbar();
# Final model: k = 2, matching the binary DEATH_EVENT target.
# CONSISTENCY FIX: the original fit KMeans on unscaled data here, unlike the
# scale-then-cluster pipeline used for the silhouette analysis above; use the
# same pipeline so the final labels come from the same feature space.
kmeans = Pipeline([
    ("scaling", StandardScaler()),
    ("clustering", KMeans(n_clusters=2, random_state=0)),
]).fit(X)
cluster_result = kmeans.predict(X)

# Append the cluster assignment as an extra column next to the data.
new_array = np.concatenate((heart_data, cluster_result[:, None]), axis=1)
comparison = pd.DataFrame(new_array)

# Show all rows and columns when the frame is displayed.
# BUG FIX: the multi-pair pd.set_option(...) call form was deprecated and
# removed in pandas 2.0 — set each option separately.
pd.set_option("display.max_rows", None)
pd.set_option("display.max_columns", None)

# Columns 12 and 13 are DEATH_EVENT and the cluster id respectively
# (presumably heart_data has 12 feature columns followed by the target —
# TODO confirm against the loading code).
final_comp = comparison.iloc[:, 12:14]
final_comp
- 12 13
- 0 1.0 0.0
- 1 1.0 0.0
- 2 1.0 0.0
- 3 1.0 0.0
- 4 1.0 1.0
- 5 1.0 0.0
- 6 1.0 0.0
- 7 1.0 1.0
- 8 1.0 0.0
- 9 1.0 1.0
- 10 1.0 1.0
- 11 1.0 0.0
- 12 1.0 0.0
- 13 1.0 0.0
- 14 0.0 1.0
- 15 1.0 0.0
- 16 1.0 0.0
- 17 1.0 0.0
- 18 1.0 0.0
- 19 1.0 0.0
- 20 0.0 0.0
- 21 1.0 0.0
- 22 1.0 0.0
- 23 0.0 1.0
- 24 1.0 0.0
- 25 1.0 0.0
- 26 1.0 0.0
- 27 1.0 0.0
- 28 1.0 0.0
- 29 1.0 0.0
- 30 1.0 0.0
- 31 1.0 1.0
- 32 1.0 1.0
- 33 0.0 0.0
- 34 1.0 0.0
- 35 1.0 0.0
- 36 1.0 0.0
- 37 1.0 1.0
- 38 0.0 0.0
- 39 1.0 1.0
- 40 1.0 0.0
- 41 1.0 0.0
- 42 1.0 0.0
- 43 0.0 0.0
- 44 1.0 0.0
- 45 1.0 0.0
- 46 1.0 0.0
- 47 1.0 1.0
- 48 1.0 0.0
- 49 1.0 1.0
- 50 1.0 0.0
- 51 1.0 1.0
- 52 1.0 0.0
- 53 1.0 1.0
- 54 1.0 0.0
- 55 1.0 1.0
- 56 0.0 0.0
- 57 0.0 0.0
- 58 1.0 1.0
- 59 1.0 0.0
- 60 1.0 1.0
- 61 1.0 0.0
- 62 0.0 0.0
- 63 1.0 1.0
- 64 0.0 0.0
- 65 1.0 0.0
- 66 1.0 0.0
- 67 1.0 0.0
- 68 1.0 0.0
- 69 1.0 1.0
- 70 0.0 1.0
- 71 0.0 0.0
- 72 1.0 0.0
- 73 0.0 0.0
- 74 1.0 0.0
- 75 1.0 0.0
- 76 0.0 1.0
- 77 0.0 0.0
- 78 0.0 0.0
- 79 0.0 1.0
- 80 0.0 0.0
- 81 0.0 0.0
- 82 1.0 0.0
- 83 0.0 0.0
- 84 1.0 0.0
- 85 0.0 1.0
- 86 0.0 0.0
- 87 0.0 0.0
- 88 0.0 0.0
- 89 0.0 0.0
- 90 0.0 0.0
- 91 0.0 0.0
- 92 0.0 0.0
- 93 1.0 0.0
- 94 0.0 1.0
- 95 0.0 0.0
- 96 0.0 0.0
- 97 0.0 0.0
- 98 0.0 1.0
- 99 0.0 0.0
- 100 0.0 0.0
- 101 0.0 0.0
- 102 0.0 0.0
- 103 0.0 0.0
- 104 0.0 0.0
- 105 1.0 1.0
- 106 0.0 0.0
- 107 0.0 0.0
- 108 0.0 0.0
- 109 0.0 1.0
- 110 1.0 0.0
- 111 0.0 0.0
- 112 0.0 0.0
- 113 1.0 1.0
- 114 0.0 1.0
- 115 0.0 0.0
- 116 0.0 0.0
- 117 0.0 1.0
- 118 0.0 0.0
- 119 1.0 0.0
- 120 0.0 0.0
- 121 0.0 0.0
- 122 0.0 0.0
- 123 0.0 0.0
- 124 1.0 0.0
- 125 0.0 0.0
- 126 1.0 0.0
- 127 0.0 0.0
- 128 0.0 0.0
- 129 0.0 0.0
- 130 0.0 0.0
- 131 0.0 0.0
- 132 0.0 0.0
- 133 0.0 0.0
- 134 0.0 0.0
- 135 0.0 0.0
- 136 0.0 0.0
- 137 0.0 0.0
- 138 0.0 0.0
- 139 0.0 0.0
- 140 1.0 0.0
- 141 0.0 1.0
- 142 0.0 1.0
- 143 0.0 0.0
- 144 1.0 1.0
- 145 0.0 0.0
- 146 0.0 0.0
- 147 0.0 0.0
- 148 1.0 0.0
- 149 0.0 0.0
- 150 1.0 0.0
- 151 0.0 0.0
- 152 0.0 0.0
- 153 0.0 0.0
- 154 0.0 0.0
- 155 0.0 0.0
- 156 0.0 0.0
- 157 0.0 0.0
- 158 0.0 0.0
- 159 0.0 1.0
- 160 0.0 0.0
- 161 0.0 0.0
- 162 0.0 1.0
- 163 1.0 0.0
- 164 1.0 1.0
- 165 1.0 0.0
- 166 0.0 0.0
- 167 1.0 0.0
- 168 0.0 0.0
- 169 0.0 0.0
- 170 0.0 0.0
- 171 0.0 1.0
- 172 0.0 0.0
- 173 0.0 0.0
- 174 0.0 0.0
- 175 0.0 1.0
- 176 0.0 0.0
- 177 0.0 0.0
- 178 0.0 0.0
- 179 0.0 0.0
- 180 0.0 0.0
- 181 1.0 0.0
- 182 1.0 0.0
- 183 1.0 0.0
- 184 1.0 0.0
- 185 1.0 1.0
- 186 1.0 0.0
- 187 1.0 1.0
- 188 0.0 0.0
- 189 0.0 0.0
- 190 0.0 1.0
- 191 0.0 0.0
- 192 0.0 0.0
- 193 0.0 0.0
- 194 1.0 0.0
- 195 1.0 0.0
- 196 0.0 0.0
- 197 0.0 0.0
- 198 0.0 0.0
- 199 0.0 0.0
- 200 0.0 0.0
- 201 0.0 1.0
- 202 0.0 0.0
- 203 0.0 0.0
- 204 0.0 0.0
- 205 0.0 1.0
- 206 0.0 0.0
- 207 0.0 0.0
- 208 0.0 0.0
- 209 0.0 0.0
- 210 0.0 1.0
- 211 0.0 0.0
- 212 0.0 1.0
- 213 1.0 0.0
- 214 0.0 0.0
- 215 0.0 0.0
- 216 0.0 1.0
- 217 1.0 0.0
- 218 0.0 0.0
- 219 0.0 1.0
- 220 1.0 0.0
- 221 0.0 0.0
- 222 0.0 1.0
- 223 0.0 0.0
- 224 0.0 1.0
- 225 0.0 0.0
- 226 0.0 0.0
- 227 0.0 0.0
- 228 0.0 0.0
- 229 0.0 0.0
- 230 1.0 0.0
- 231 0.0 0.0
- 232 0.0 0.0
- 233 0.0 1.0
- 234 0.0 0.0
- 235 0.0 1.0
- 236 0.0 0.0
- 237 0.0 0.0
- 238 0.0 0.0
- 239 0.0 0.0
- 240 0.0 1.0
- 241 0.0 0.0
- 242 0.0 0.0
- 243 0.0 0.0
- 244 0.0 0.0
- 245 0.0 0.0
- 246 1.0 1.0
- 247 0.0 0.0
- 248 0.0 0.0
- 249 0.0 0.0
- 250 0.0 1.0
- 251 0.0 0.0
- 252 0.0 0.0
- 253 0.0 0.0
- 254 0.0 0.0
- 255 0.0 1.0
- 256 0.0 0.0
- 257 0.0 0.0
- 258 0.0 0.0
- 259 0.0 0.0
- 260 0.0 0.0
- 261 0.0 0.0
- 262 1.0 0.0
- 263 0.0 0.0
- 264 0.0 0.0
- 265 0.0 1.0
- 266 1.0 0.0
- 267 0.0 0.0
- 268 0.0 0.0
- 269 0.0 0.0
- 270 0.0 0.0
- 271 0.0 0.0
- 272 0.0 0.0
- 273 0.0 0.0
- 274 0.0 0.0
- 275 0.0 1.0
- 276 0.0 1.0
- 277 0.0 0.0
- 278 0.0 0.0
- 279 0.0 1.0
- 280 0.0 0.0
- 281 0.0 0.0
- 282 0.0 0.0
- 283 0.0 0.0
- 284 0.0 0.0
- 285 0.0 1.0
- 286 0.0 0.0
- 287 0.0 1.0
- 288 0.0 0.0
- 289 0.0 1.0
- 290 0.0 0.0
- 291 0.0 0.0
- 292 0.0 1.0
- 293 0.0 0.0
- 294 0.0 0.0
- 295 0.0 0.0
- 296 0.0 1.0
- 297 0.0 0.0
- 298 0.0 1.0
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement