############################################################
############################################################
## file:  Hitters_shrink.R
## Do Ridge and Lasso on Hitters.
## This tries to follow the lab for chapter 6 of ISLR.
############################################################
############################################################
library(glmnet) #R package for regularized regression (ridge, lasso, elastic net)
dpl=FALSE #set to TRUE to write the plots to pdf files

##------------------------------------------------------------
## Get the data
library(ISLR)
print(names(Hitters))
print(dim(Hitters))
## Salary has some missing values, so let's get rid of those observations
cat('number missing for Salary: ',sum(is.na(Hitters$Salary)),'\n')
## Drop observations with missing
Hitters=na.omit(Hitters) 
cat('number missing for Salary after drop: ',sum(is.na(Hitters$Salary)),'\n')

##------------------------------------------------------------
## get data in (x,y) form (useful for glmnet)
x=model.matrix(Salary~.,Hitters)[,-1]
y=Hitters$Salary
print(dim(x))
print(colnames(x))
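
## Hedged aside (not in the original script): model.matrix expands the factor
## columns of Hitters (League, Division, NewLeague) into 0/1 dummies and the
## [,-1] above drops the intercept column, so x is all numeric.  The dummy
## column names below assume the usual factor levels (A/N and E/W).
str(Hitters[,c("League","Division","NewLeague")])
print(head(x[,c("LeagueN","DivisionW","NewLeagueN")]))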

##------------------------------------------------------------
##------------------------------------------------------------
## do Hitters data following glmnet vignette
fit = glmnet(x,y) #default is lasso, data is standardized, compute path over grid of lambda
plot(fit,label=TRUE) #see the coefficient paths as lambda varies, label=TRUE labels each path with its var #
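
## Hedged aside: plot() on a glmnet fit defaults to xvar="norm" (the l1 norm of
## the coefficients on the x axis); plotting against log(lambda) instead can be
## easier to line up with the cv.glmnet plot later on.
plot(fit,xvar="lambda",label=TRUE)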

#to get coefficients at a lambda (they use the label s for lambda) use coef
#  note: coefs are returned as sparse matrices, I'll just convert to matrices
#  note: coefs are returned on the original x scales
cvec = coef(fit,s=.1)[,1] #coefficients at lambda=.1, [,1] converts the sparse column to a plain vector
cmat = coef(fit) #full coefficient matrix over the whole lambda path
print(class(cmat))
cmat = as.matrix(cmat)
print(class(cmat))
print(dim(cmat)) #each row is the coefficient path over the fitted grid of lambda values
print(row.names(cmat))
plot(fit$lambda,cmat[3,]) #plot the second x coefficient vs lambda (row 1 of cmat is the intercept)
title(main=paste("coefficient path for ",row.names(cmat)[3]))

# To get predictions use predict; we'll just get the in-sample fits at lambda=50.
yhat = predict(fit,newx=x,s=50)
plot(y,yhat)
abline(0,1,col="red",lwd=3,lty=3)
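
## Hedged aside: in-sample MSE at a few lambda values (predict interpolates
## between the fitted grid points); as lambda grows the fit is shrunk harder,
## so the training error should go up.
for(lam in c(1,10,50,100)) {
   yh = predict(fit,newx=x,s=lam)
   cat('lambda: ',lam,' in-sample MSE: ',round(mean((y-yh)^2),1),'\n')
}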

# we can use cv.glmnet to pick lambda using cross-validation
cvfit = cv.glmnet( x, y) #does 10-fold cv by default

#we can plot the cv results:
#  the lambda with the minimum cv error is marked, as is the 1se lambda (the largest
#              lambda whose cv error is within one standard error of the minimum),
#              where the se is computed across the 10 folds
# note that they plot mse against log(lambda).
plot(cvfit) 

# pick off best lambda:
cat("best lambda is: ", cvfit$lambda.min,"\n")
cat("best log(lambda) is: ", log(cvfit$lambda.min),"\n")
cat("best lambda 1se is: ", cvfit$lambda.1se,"\n")
cat("best log(lambda) 1se is: ", log(cvfit$lambda.1se),"\n")


# make predictions (in sample fits) at a good lambda
yhat = predict(cvfit,newx=x,s=cvfit$lambda.min)
yhat1 = predict(cvfit,newx=x,s="lambda.min")
yhat2 = predict(cvfit,newx=x,s=cvfit$lambda.1se)
yhat3 = predict(fit,newx=x,s=cvfit$lambda.min)

pairs(cbind(y,yhat,yhat1,yhat2,yhat3))
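
## Hedged sanity check: yhat and yhat1 use the same lambda (one passed as a
## number, one as the string "lambda.min"), so they should agree to numerical
## precision.
cat('max |yhat - yhat1|: ',max(abs(yhat-yhat1)),'\n')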



##------------------------------------------------------------
##------------------------------------------------------------
## do Hitters as done in the class notes (was based on ISLR lab)

##------------------------------------------------------------
## Note: glmnet uses x and y instead of y~x.
## Get the model matrix (x) and Salary (y)
x=model.matrix(Salary~.,Hitters)[,-1]
x=scale(x) #let's scale the data to start with so we can interpret things.
y=Hitters$Salary
print(dim(x))
print(colnames(x))

##------------------------------------------------------------
## Fit ridge on grid of lambda values.
gsize=100
grid=5^seq(10,-2,length=gsize) #lambda grid from 5^10 down to 5^-2 (decreasing)
print(grid)
#note: the default automatically standardizes the x's.
#note: alpha=0 is ridge, alpha=1 is lasso.
ridge.mod=glmnet(x,y,alpha=0,lambda=grid,standardize=FALSE)  #I already standardized
cat('lambda 50: ',ridge.mod$lambda[50],'\n') #print 50th lambda value
cat('coefficients:\n')
print(coef(ridge.mod)[,50]) #coefficients for 50th lambda value
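
## Hedged aside: compare the l2 norm of the slope coefficients at a large and a
## small lambda on the grid; ridge shrinks the norm as lambda grows (the grid is
## decreasing, so lambda[50] > lambda[90]).
cat('l2 norm of slopes at lambda[50]: ',sqrt(sum(coef(ridge.mod)[-1,50]^2)),'\n')
cat('l2 norm of slopes at lambda[90]: ',sqrt(sum(coef(ridge.mod)[-1,90]^2)),'\n')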

##------------------------------------------------------------
## plot coefficients
cmat = coef(ridge.mod) 
cmat = cmat[2:nrow(cmat),] #drop intercept
rgy = range(cmat)
cpar = log(1/grid)
if(dpl) pdf(file='ridge-coefs-alldata.pdf',height=7,width=10)
plot(range(cpar),rgy,type='n',xlab='log(1/lambda)',ylab='coefficients',cex.lab=1.5)
for(i in 1:nrow(cmat)) lines(cpar,cmat[i,],col=i+1,type='l')
if(dpl) dev.off()

##------------------------------------------------------------
##  Try Cross validation using cv.glmnet
set.seed(14)
cv.out = cv.glmnet(x,y,alpha=0,lambda=grid)
cmp = log(1/cv.out$lambda)

if(dpl) pdf(file='Hitters_cv-glmnet.pdf',height=8,width=10)
plot(cmp,cv.out$cvm,type='b',xlab='cmp = log(1/lambda)',cex.lab=1.5)
bestlam = cv.out$lambda.min
bestcmp = log(1/bestlam)
text(bestcmp,160000,paste('best lambda is: ',round(bestlam,2)),col='red',cex=1.5)
abline(v=bestcmp,col='red')
if(dpl) dev.off()

##get coefficients for best lambda
## (exact=TRUE is fine here since bestlam is one of the grid values; for a lambda
##  off the fitted path, newer glmnet versions would also need the original x and y)
bestridgecoef = predict(ridge.mod,s=bestlam,type='coefficients',exact=TRUE)[,1]
ddf = data.frame(x,y)
lm.mod = lm(y~.,ddf)
rgxy = range(c(coef(lm.mod),bestridgecoef))
if(dpl) pdf(file='comp_coefs_lm-ridge.pdf',height=10,width=12)
plot(lm.mod$coef,bestridgecoef,xlim=rgxy,ylim=rgxy,xlab='linear coefficients',ylab='ridge coefficients')
abline(0,1,col='red',lty=2)
abline(v=0,lty=3)
abline(h=0,lty=3)
if(dpl) dev.off()
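
## Hedged numeric check: since the x's are standardized, the ridge slopes at the
## CV-chosen lambda should on average be shrunk relative to the OLS slopes.
cat('mean |lm slope|   : ',mean(abs(coef(lm.mod)[-1])),'\n')
cat('mean |ridge slope|: ',mean(abs(bestridgecoef[-1])),'\n')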

##get fits
ridge.fit = predict(ridge.mod,s=bestlam,newx=x)
lm.fit = lm.mod$fitted
fmat = cbind(y,lm.fit,ridge.fit)
colnames(fmat) = c('y','linear','ridge')
if(dpl) pdf(file='pairs_in-sample-fits_lm-ridge.pdf',height=10,width=12)
pairs(fmat)
if(dpl) dev.off()
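
## Hedged aside: in-sample MSE for lm vs ridge at the CV-chosen lambda; ridge
## typically fits the training data a bit worse, the hoped-for payoff being
## better out-of-sample prediction.
cat('in-sample MSE, lm   : ',mean((y-lm.fit)^2),'\n')
cat('in-sample MSE, ridge: ',mean((y-ridge.fit)^2),'\n')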

##------------------------------------------------------------
##------------------------------------------------------------
## repeat for lasso (alpha=1)
lasso.mod=glmnet(x,y,alpha=1,lambda=grid,standardize=FALSE)  #I already standardized

##------------------------------------------------------------
## plot coefficients
cmat = coef(lasso.mod) 
cmat = cmat[2:nrow(cmat),] #drop intercept
rgy = range(cmat)
cpar = log(1/grid)
if(dpl) pdf(file='lasso-coefs-alldata.pdf',height=7,width=10)
plot(range(cpar),rgy,type='n',xlab='log(1/lambda)',ylab='coefficients',cex.lab=1.5)
for(i in 1:nrow(cmat)) lines(cpar,cmat[i,],col=i+1,type='l')
if(dpl) dev.off()
##------------------------------------------------------------
##  Try Cross validation using cv.glmnet
set.seed(14)
cv.out = cv.glmnet(x,y,alpha=1,lambda=grid)
cmp = log(1/cv.out$lambda)

if(dpl) pdf(file='Hitters_cv-glmnet-lasso.pdf',height=8,width=10)
plot(cmp,cv.out$cvm,type='b',xlab='cmp = log(1/lambda)',cex.lab=1.5)
bestlam = cv.out$lambda.min
bestcmp = log(1/bestlam)
text(bestcmp,160000,paste('best lambda is: ',round(bestlam,2)),col='red',cex=1.5)
abline(v=bestcmp,col='red')
if(dpl) dev.off()

##get coefficients for best lambda
bestlassocoef = predict(lasso.mod,s=bestlam,type='coefficients',exact=TRUE)[,1]
rgxy = 1.1*range(c(coef(lm.mod),bestridgecoef,bestlassocoef))
if(dpl) pdf(file='comp_coefs_lm-ridge-lasso.pdf',height=10,width=12)
plot(lm.mod$coef,bestridgecoef,xlim=rgxy,ylim=rgxy,xlab='linear coefficients',ylab='ridge-lasso coefficients',col='green',pch=2,cex.lab=1.5)
points(lm.mod$coef,bestlassocoef,col='blue',pch=19)
abline(0,1,col='red',lty=2)
abline(v=0,lty=3)
abline(h=0,lty=3)
legend('topleft',legend=c('ridge','lasso'),pch=c(2,19),col=c('green','blue'))
if(dpl) dev.off()

print(bestlassocoef)
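
## Hedged aside: a key feature of the lasso is variable selection; count how
## many slope coefficients are exactly zero at the CV-chosen lambda.
cat('zero lasso slopes: ',sum(bestlassocoef[-1]==0),' out of ',length(bestlassocoef)-1,'\n')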

##get fits
lasso.fit = predict(lasso.mod,s=bestlam,newx=x)
fmat = cbind(y,lm.fit,ridge.fit,lasso.fit)
colnames(fmat) = c('y','linear','ridge','lasso')
if(dpl) pdf(file='pairs_in-sample-fits_lm-ridge-lasso.pdf',height=10,width=12)
pairs(fmat)
if(dpl) dev.off()
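
##------------------------------------------------------------
## Hedged extra sketch (not part of the original lab script): one simple
## train/test split comparing out-of-sample MSE for OLS, ridge, and lasso,
## with lambda chosen by cv on the training data only.  Note that x was
## scaled on the full data above; a stricter version would standardize
## within the training set.
set.seed(99)
ntrain = floor(0.75*nrow(x))
tr = sample(1:nrow(x),ntrain)
lm.tr = lm(y~.,ddf[tr,])                #OLS on training data (ddf built earlier)
cv.r = cv.glmnet(x[tr,],y[tr],alpha=0)  #ridge, default lambda sequence
cv.l = cv.glmnet(x[tr,],y[tr],alpha=1)  #lasso
pred.lm = predict(lm.tr,newdata=ddf[-tr,])
pred.r = predict(cv.r,newx=x[-tr,],s='lambda.min')
pred.l = predict(cv.l,newx=x[-tr,],s='lambda.min')
cat('test MSE, lm   : ',mean((y[-tr]-pred.lm)^2),'\n')
cat('test MSE, ridge: ',mean((y[-tr]-pred.r)^2),'\n')
cat('test MSE, lasso: ',mean((y[-tr]-pred.l)^2),'\n')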

#--------------------------------------------------
if(dpl) rm(list=ls())
