Not a member of Pastebin yet? Sign up — it unlocks many cool features!
import sklearn
from sklearn.utils import shuffle
from sklearn.neighbors import KNeighborsClassifier
# BUG FIX: `import sklearn` alone does not guarantee the model_selection
# submodule is loaded; import train_test_split explicitly.
from sklearn.model_selection import train_test_split
import pandas as pd
import numpy as np
from sklearn import linear_model, preprocessing

# Load the car-evaluation dataset; every column read below is categorical.
cars_data = pd.read_csv("car.data")

# Encode each categorical column to integer codes so KNN can compute
# distances. Each fit_transform call re-fits the encoder, which is fine
# here because every column is encoded independently.
le = preprocessing.LabelEncoder()
buying = le.fit_transform(list(cars_data["buying"]))
maint = le.fit_transform(list(cars_data["maint"]))
door = le.fit_transform(list(cars_data["door"]))
persons = le.fit_transform(list(cars_data["persons"]))
lug_boot = le.fit_transform(list(cars_data["lug_boot"]))
safety = le.fit_transform(list(cars_data["safety"]))
cls = le.fit_transform(list(cars_data["class"]))

X = list(zip(buying, maint, door, persons, lug_boot, safety))  # features
y = list(cls)  # labels (encoded "class" column)

# Search k = 1..9 neighbors over 10 fresh random 90/10 splits each,
# keeping the best test accuracy seen and the k that produced it.
# NOTE(review): selecting k on the test split inflates the reported
# accuracy; a separate validation split would be more rigorous.
best_accuracy = 0.0
best_k = None
for n in range(1, 10):
    for _ in range(10):
        x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.1)
        model = KNeighborsClassifier(n_neighbors=n)
        model.fit(x_train, y_train)
        accuracy = model.score(x_test, y_test)
        if accuracy > best_accuracy:
            best_accuracy = accuracy
            best_k = n

# Report the best configuration found ("Accuracy" spelling fixed).
print(f"- Max Accuracy: {round(best_accuracy * 100, 2)}%\n- Neighbors: {best_k}")
Add Comment
Please sign in to add a comment.