knitr::opts_chunk$set(echo = TRUE)
library(tidyverse)
library(tidyr)
library(glmnet)
#Read in and cleanse the data
income_democracy <- read_csv('./data/income_democracy.csv')
income_cleansed = subset(income_democracy,select = -c(country,year,code))
income_cleansed = income_cleansed %>% drop_na()
## 80% of the sample size
smp_size <- floor(0.8 * nrow(income_cleansed))
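# Fixing a seed makes the random 80/20 split below reproducible (an optional
# sketch; the seed value 2023 is arbitrary and not from the original analysis).
set.seed(2023)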
train_ind <- sample(seq_len(nrow(income_cleansed)), size = smp_size)
train <- income_cleansed[train_ind, ]
test <- income_cleansed[-train_ind, ]
y_train <- train[c("dem_ind")]
X_train <- subset(train,select = -c(dem_ind))
y_test <- test[c("dem_ind")]
X_test <- subset(test,select = -c(dem_ind))
p = 9  # number of predictors remaining in X_train
# Ridge
ridge = cv.glmnet(as.matrix(X_train), as.matrix(y_train), family = "gaussian", alpha = 0, intercept = FALSE, nfolds=20)
lambda_ridge = ridge$lambda.min
lambda_ridge
coef.ridge = matrix(coef(ridge, s = lambda_ridge))[2:(p+1)]
ridge = glmnet(as.matrix(X_train), as.matrix(y_train), family = "gaussian", alpha = 0, intercept = FALSE)
plot(ridge, xvar = "lambda", label = TRUE)
abline(v = log(lambda_ridge))
# Ridge test MSE
y_ridge = predict(ridge, as.matrix(X_test), s = lambda_ridge)
mse_ridge = sum((y_test - y_ridge)^2) / nrow(X_test)
mse_ridge
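# For reference, an unpenalized least-squares baseline can be computed the same
# way (a sketch: b_ols_sketch / mse_ols_sketch are illustrative names, and the
# fit uses the same no-intercept convention as the penalized models above).
b_ols_sketch = solve(t(as.matrix(X_train)) %*% as.matrix(X_train)) %*% t(as.matrix(X_train)) %*% as.matrix(y_train)
mse_ols_sketch = sum((as.matrix(y_test) - as.matrix(X_test) %*% b_ols_sketch)^2) / nrow(X_test)
mse_ols_sketch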
# Lasso
lasso = cv.glmnet(as.matrix(X_train), as.matrix(y_train), family = "gaussian", alpha = 1, intercept = FALSE, nfolds=20)
lambda_lasso = lasso$lambda.min
lambda_lasso
coef.lasso = matrix(coef(lasso, s = lambda_lasso))[2:(p+1)]
lasso = glmnet(as.matrix(X_train), as.matrix(y_train), family = "gaussian", alpha = 1, intercept = FALSE)
plot(lasso, xvar = "lambda", label = TRUE)
abline(v = log(lambda_lasso))
# Lasso test MSE
y_lasso = predict(lasso, as.matrix(X_test), s = lambda_lasso)
mse_lasso = sum((y_test - y_lasso)^2) / nrow(X_test)
mse_lasso
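# Quick check of which predictors the lasso shrinks exactly to zero at
# lambda_lasso (a sketch; coef.lasso is aligned with the columns of X_train).
colnames(X_train)[coef.lasso == 0]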
# Adaptive lasso: weights from the OLS and ridge coefficient estimates
gamma = 2
b.ols = solve(t(as.matrix(X_train))%*%as.matrix(X_train))%*%t(as.matrix(X_train))%*%as.matrix(y_train)
ridge = cv.glmnet(as.matrix(X_train), as.matrix(y_train), family = "gaussian", alpha = 0, intercept = FALSE, nfolds=20)
l.ridge = ridge$lambda.min
b.ridge = matrix(coef(ridge, s = l.ridge))[2:(p+1)]
w1 = 1/abs(b.ols)^gamma
w2 = 1/abs(b.ridge)^gamma
alasso1 = cv.glmnet(as.matrix(X_train), as.matrix(y_train), family = "gaussian", alpha = 1, intercept = FALSE, penalty.factor = w1, nfolds=20)
alasso2 = cv.glmnet(as.matrix(X_train), as.matrix(y_train), family = "gaussian", alpha = 1, intercept = FALSE, penalty.factor = w2, nfolds=20)
lambda1 = alasso1$lambda.min
lambda2 = alasso2$lambda.min
coef.alasso1 = matrix(coef(alasso1, s = lambda1))[2:(p+1)]
coef.alasso2 = matrix(coef(alasso2, s = lambda2))[2:(p+1)]
alasso1 = glmnet(as.matrix(X_train), as.matrix(y_train), family = "gaussian", alpha = 1, intercept = FALSE, penalty.factor = w1)
alasso2 = glmnet(as.matrix(X_train), as.matrix(y_train), family = "gaussian", alpha = 1, intercept = FALSE, penalty.factor = w2)
plot(alasso1, xvar = "lambda", label = TRUE)
abline(v = log(lambda1))
plot(alasso2, xvar = "lambda", label = TRUE)
abline(v = log(lambda2))
# Adaptive lasso test MSEs
y_alasso1 = predict(alasso1, as.matrix(X_test), s = lambda1)
mse_alasso1 = sum((y_test - y_alasso1)^2) / nrow(X_test)
y_alasso2 = predict(alasso2, as.matrix(X_test), s = lambda2)
mse_alasso2 = sum((y_test - y_alasso2)^2) / nrow(X_test)
mse_alasso1
mse_alasso2
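# Side-by-side view of the two adaptive-lasso coefficient vectors (a sketch;
# the data frame and its column names are illustrative only).
data.frame(predictor = colnames(X_train),
           alasso_ols_weights = round(coef.alasso1, 4),
           alasso_ridge_weights = round(coef.alasso2, 4))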
# Elastic net: search over a grid of alpha values, recording lambda.min for each
alpha_seq = seq(0.1, 0.99, 0.01)
lambda_min_list <- rep(0, length(alpha_seq))
for (i in 1:length(alpha_seq)) {
  elastic = cv.glmnet(as.matrix(X_train), as.matrix(y_train), alpha = alpha_seq[i], family = "gaussian", intercept = FALSE, nfolds = 20)
  lambda_min_list[i] = elastic$lambda.min
}
plot(1:length(alpha_seq), lambda_min_list, type = "l", ylab = 'Lambda Values', xlab = 'Alpha index (1 to 90)', main = 'Optimal Lambda for Each Alpha (Elastic Net)')
lambda_elastic = lambda_min_list[which.min(lambda_min_list)]
alpha_elastic = alpha_seq[which.min(lambda_min_list)]
points(which.min(lambda_min_list), lambda_elastic, col = "red", lwd = 5)
text(which.min(lambda_min_list), lambda_elastic, labels = sprintf("Optimal lambda %s at alpha = %s", round(lambda_elastic, 5), alpha_elastic), cex = 0.9, font = 2, pos = 2)
# Refit at the selected alpha and evaluate at the selected lambda
elastic = cv.glmnet(as.matrix(X_train), as.matrix(y_train), alpha = alpha_elastic, family = "gaussian", intercept = FALSE, nfolds = 20)
y_elastic = predict(elastic, as.matrix(X_test), s = lambda_elastic)
coef.elastic = matrix(coef(elastic, s = lambda_elastic))[2:(p+1)]
plot(elastic$glmnet.fit, xvar = "lambda", label = TRUE)
abline(v = log(lambda_elastic))
mse_elastic = sum((y_test-y_elastic)^2)/nrow(X_test)
mse_elastic
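# Collect the test MSEs computed above in one table for comparison (a sketch;
# it assumes the ridge, lasso, adaptive lasso and elastic net chunks were run).
data.frame(model = c("ridge", "lasso", "alasso (OLS weights)", "alasso (ridge weights)", "elastic net"),
           test_mse = c(mse_ridge, mse_lasso, mse_alasso1, mse_alasso2, mse_elastic))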
# Inspect the fitted models
View(income_cleansed)
View(X_train)
colnames(X_train)
coef.ridge
mse_ridge
coef.lasso
lambda_lasso
mse_lasso
coef.alasso1
lambda1
lambda2
coef.alasso2
# Exploratory: cross-validated ridge on a subset of the age-structure predictors
# (cv.glmnet needs at least two predictor columns, and the held-out test set should not be overwritten)
age_fit = cv.glmnet(as.matrix(X_train[, c("age_2", "age_3")]), as.matrix(y_train), family = "gaussian", alpha = 0, intercept = FALSE, nfolds = 20)
knitr::opts_chunk$set(echo = TRUE)
library(R.matlab)
library(rmatio)
library(fda)
library(pracma)
library(gglasso)
library(rTensor)
library(grpreg)
# Read in the training data
nsc_train <- read.mat('./data/NSC.mat')
nsc_train_x <- nsc_train$x[[1]]
nsc_train_y <- nsc_train$y
# Sensor names and data dimensions
sensor_names <- c("air aspirated per cylinder", "engine rotational speed", "total quantity of fuel injected", "low pressure EGR valve",
                  "inner torque", "accelerator pedal position", "aperture ratio of inlet valve", "downstream intercooler pressure",
                  "fuel in the 2nd pre-injection", "vehicle velocity")
p = 10   # number of functional predictors (sensors)
m = 150  # number of training curves
n = 203  # number of time points per curve
# Plot the training curves for each sensor
par(mfrow = c(2, 5))
X_train = array(0, dim = c(m, n, p))
for (i in 1:p) {
  X_train[,,i] = nsc_train_x[[i]]
  matplot(t(nsc_train_x[[i]]), type = "l", xlab = sensor_names[i], ylab = "")
}
# Project each sensor's curves onto a 10-function B-spline basis
Z_train = array(0, dim = c(m, 10, p))
Y_train = array(0, dim = c(m, n))
x = seq(0, 1, length = n)
splinebasis_B = create.bspline.basis(c(0, 1), 10)
base_B = eval.basis(as.vector(x), splinebasis_B)
for (i in 1:p) {
  Z_train[,,i] = X_train[,,i] %*% base_B
}
YB_train = nsc_train_y %*% base_B
# Stack the basis-coefficient responses and build a block-diagonal design so
# that each sensor's 100 coefficients form one group for the group lasso
Z_matrix = matrix(Z_train, 150, 100)
Z_stack = matrix(kronecker(diag(10), Z_train), 1500, 1000)
Y2B_stack = as.vector(YB_train)
group = rep(1:10, each = 100)
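# Dimension sanity check on the stacked design (a sketch): 10 response basis
# coefficients x 150 curves stacked into 1500 rows, and 10 sensor groups of
# 100 columns each.
dim(Z_stack)       # expected: 1500 x 1000
length(Y2B_stack)  # expected: 1500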
glasso = cv.grpreg(Z_stack, Y2B_stack, group)
glasso_lambda <- glasso$lambda.min  # CV-selected lambda (not simply the smallest value on the path)
which(glasso$fit$beta[, glasso$min] == 0)
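# Map the zeroed coefficient groups back to sensor names (a sketch; it assumes
# grpreg stores the unpenalized intercept in the first row of $fit$beta, so that
# row is dropped before matching against the 100-coefficient sensor groups).
beta_min <- glasso$fit$beta[, glasso$min]
dropped_groups <- unique(group[beta_min[-1] == 0])
sensor_names[dropped_groups]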
# Test data dimensions and read-in
p = 10  # number of functional predictors (sensors)
m = 50  # number of test curves
n = 203
nsc_test <- read.mat('./data/NSC.test.mat')
nsc_test_x <- nsc_test$x[[1]]
nsc_test_y <- nsc_test$y
X_test = array(0, dim = c(m, n, p))
for (i in 1:p) {
  X_test[,,i] = nsc_test_x[[i]]
}
Z_test = array(0, dim = c(m, 10, p))
Y_test = array(0, dim = c(m, n))
for (i in 1:p) {
  Z_test[,,i] = X_test[,,i] %*% base_B
}
YB_test = nsc_test_y %*% base_B
Z_matrix = matrix(Z_test ,50,100)
Z_stack=matrix(kronecker(diag(10),Z_test),500,1000)
Y2B_stack = as.vector(YB_test)
y_pred_glasso <- predict(glasso, Z_stack, lambda = glasso_lambda)
mse_glasso = sum((Y2B_stack - y_pred_glasso)^2) / length(Y2B_stack)
mse_glasso
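# Rough additional check of test-set fit (a sketch): correlation between the
# observed and predicted stacked basis coefficients.
cor(Y2B_stack, as.vector(y_pred_glasso))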
glasso$fit$beta[,glasso$min]