Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# Initialize libraries ----
library(xlsx)    # NOTE(review): loaded but unused in this script — confirm it is needed
library(glmnet)
library(plotmo)  # NOTE(review): loaded but unused in this script — confirm it is needed

# Data input ----
# 3 observations x 2 predictors, with a length-3 response.
x <- matrix(c(-1, -4, 0, 0, 1, 16), nrow = 3, ncol = 2, byrow = TRUE)
y <- matrix(c(-2.2, 0, 3.8), nrow = 3, ncol = 1, byrow = TRUE)

# Lasso fit and plots ----
# 100 lambda values, log-spaced between 1e-4 and 100 (shared by both fits).
lambda_grid <- exp(seq(log(0.0001), log(100), length.out = 100))

# No intercept; wide box constraints so the bounds are effectively inactive.
yfit <- glmnet(
  x, y,
  family = "gaussian",
  standardize.response = FALSE,
  intercept = FALSE,
  lambda = lambda_grid,
  upper.limits = 1000,
  lower.limits = -1000
)

# grouped = FALSE is required here: with only 3 observations and nfolds = 3,
# each fold holds a single observation.
ycvfit <- cv.glmnet(
  x, y,
  family = "gaussian",
  nfolds = 3,
  standardize.response = FALSE,
  intercept = FALSE,
  grouped = FALSE,
  lambda = lambda_grid,
  upper.limits = 1000,
  lower.limits = -1000
)

# Use the plot() generic: plot.cv.glmnet()/plot.glmnet() are unexported S3
# methods and calling them directly errors in current glmnet versions.
plot(ycvfit)
plot(yfit, xvar = "lambda")

# Writing results ----
# Coefficients at lambda = 0.1, for comparison with the Python fit below.
lassocoeff <- as.matrix(coef(yfit, s = 0.1))
print(lassocoeff)
- import numpy as np
- import matplotlib.pyplot as plt
- from sklearn.linear_model import Lasso, LassoCV
- from __future__ import division
- import time
- x=np.array([[-1,-4],[0,0],[1,16]])
- y=np.array([[-2.2],[0],[3.8]]).reshape(3,)
- #set alphas to be tested, note this is in logspace
- alphas = np.logspace(-7,5,num=1000,base=np.e)
# Plot coefficient size vs log(alpha).
# NOTE(review): the `normalize` argument was deprecated in sklearn 1.0 and
# removed in 1.2; since normalize=False was the default, dropping it keeps
# behavior identical while remaining compatible with current sklearn.
lasso = Lasso(max_iter=10000, fit_intercept=False)
coefs = []
for a in alphas:
    # Refit the same estimator at each alpha and record the coefficient pair.
    lasso.set_params(alpha=a)
    lasso.fit(x, y)
    coefs.append(lasso.coef_)

ax = plt.gca()
ax.plot(np.log(alphas), coefs)
ax.set_xscale('linear')
plt.axis('tight')
plt.xlabel('log(alpha)')
plt.ylabel('weights')
print("Computing regularization path using the coordinate descent lasso...")
t1 = time.time()
# alphas=None lets LassoCV pick its own grid; cv=3 over 3 samples is
# effectively leave-one-out here.
# NOTE(review): deprecated-and-removed `normalize=False` dropped — it was
# the default, so the fit is unchanged on any sklearn version.
model = LassoCV(alphas=None, cv=3, fit_intercept=False).fit(x, y)
t_lasso_cv = time.time() - t1

# Display results
m_log_alphas = np.log(model.alphas_)
plt.figure()
# To display properly, ymin and ymax may need adjusting.
ymin, ymax = 0, 25
plt.plot(m_log_alphas, model.mse_path_.mean(axis=-1), 'k',
         label='Average across the folds', linewidth=2)
plt.axvline(np.log(model.alpha_), linestyle='--', color='k',
            label='alpha: CV estimate')
plt.legend()
plt.xlabel('log(alpha)')
plt.ylabel('Mean square error')
plt.title('Mean square error on each fold: coordinate descent '
          '(train time: %.2fs)' % t_lasso_cv)
plt.axis('tight')
plt.ylim(ymin, ymax)
plt.xlim(-10, 10)
# Calculate lasso coefficients at a fixed alpha. Skipping CV-based alpha
# selection for now, as we are just comparing one alpha value (0.1, matching
# the s = 0.1 used for the R/glmnet coefficients).
# lassocv = LassoCV(eps=.000000001, max_n_alphas=1000, cv=N, max_iter=100000, fit_intercept=False)
# lassocv.fit(x, y)
# minimumalpha = lassocv.alpha_
# NOTE(review): removed `normalize=False` (deprecated in sklearn 1.0,
# removed in 1.2); it was the default, so the result is unchanged.
lasso = Lasso(alpha=0.1, fit_intercept=False, max_iter=100000)
lasso.fit(x, y)
print(lasso.coef_)
Add Comment
Please, Sign In to add comment