from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.linear_model import Ridge
from sklearn.metrics import mean_squared_error
import numpy as np
import matplotlib.pyplot as plt

# Synthetic regression problem with as many features as samples and a low
# effective rank, so regularisation has a visible effect.
X, y, w = make_regression(n_samples=200, n_features=200, coef=True,
                          random_state=1, bias=0, noise=8,
                          tail_strength=0.9, effective_rank=10)

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.4, random_state=2)

# Sweep the regularisation strength over many orders of magnitude.
alphas = 10.**np.arange(-4, 10, 0.1)

# Record train and test MSE for a ridge fit at each alpha.
scores_test = []
scores_train = []
for a in alphas:
    print(a)
    clf = Ridge(alpha=a, solver='auto')
    clf.fit(X_train, y_train)
    scores_train.append(mean_squared_error(y_train, clf.predict(X_train)))
    scores_test.append(mean_squared_error(y_test, clf.predict(X_test)))

# Plot train and test MSE against alpha on a log scale.
fig, ax = plt.subplots(figsize=(8, 6))
ax.set_xscale('log')
ax.plot(alphas, scores_test, 'r', label='test')
ax.plot(alphas, scores_train, 'b', label='train')
ax.set_ylabel('MSE', fontsize=16)
ax.set_xlabel('alpha (regularisation strength)', fontsize=16)
ax.legend(fontsize=18)
plt.savefig('bias_variance_tradeoff.png')
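
# Possible follow-up (not part of the original paste): one way to use the sweep
# above is to report the alpha with the lowest held-out MSE. This sketch assumes
# the `alphas` and `scores_test` arrays computed in the loop above.
best_idx = int(np.argmin(scores_test))
print(f"best alpha: {alphas[best_idx]:.3g}, test MSE: {scores_test[best_idx]:.2f}")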