Unpenalized regression coefficients; simulated data using npdro
parent 21ac5a3ad0, commit ca21002f65
@@ -5,10 +5,55 @@
# 1. Penalized Regression and Classification
## a. Modified Ridge classification for LASSO penalties

# gradient descent to optimize betas
ridge_betas <- function(X, y, beta_init=NULL, lam, alpha=0, method="BFGS"){
  if (is.null(beta_init)){beta_init <- rep(.1, ncol(X)+1)}
  # method: BFGS, CG, Nelder-Mead
  fit <- optim(beta_init,  # initial guess
               fn = function(beta){penalized_loss(X, y, beta, lam, alpha=0)},  # objective
               gr = function(beta){ridge_grad(X, y, beta, lam)},               # gradient
               method = method)  #, control = list(trace = 2))
  return(list(loss=fit$value, betas=fit$par))
}
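
# penalized_loss() and ridge_grad() are defined elsewhere in this script; as a
# reference point, here is a minimal sketch of what they might look like for a
# squared-error objective with an elastic-net style penalty (assumed forms only,
# not the script's actual definitions):
penalized_loss_sketch <- function(X, y, beta, lam, alpha=0){
  Xb <- cbind(1, X) %*% beta                  # beta[1] is the intercept
  rss <- sum((y - Xb)^2) / (2*length(y))      # mean squared-error term
  pen <- lam * ((1-alpha)/2 * sum(beta[-1]^2) + alpha * sum(abs(beta[-1])))
  rss + pen
}
ridge_grad_sketch <- function(X, y, beta, lam){
  X1 <- cbind(1, X)
  g <- as.vector(-t(X1) %*% (y - X1 %*% beta) / length(y))  # gradient of the error term
  g[-1] <- g[-1] + lam * beta[-1]             # L2 penalty gradient; intercept unpenalized
  g
}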

### Add cross-validation to tune penalty param
lasso_betas <- function(X, y){
  # placeholder: lam=0, alpha=0 returns unpenalized coefficients for now
  ridge_betas(X, y, beta_init=NULL, lam=0, alpha=0, method="BFGS")
}
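
# A possible k-fold CV loop for choosing lam (a sketch, not part of this commit;
# cv_lambda_sketch is a hypothetical helper that scores each lambda by held-out MSE):
cv_lambda_sketch <- function(X, y, lams=10^seq(-3, 1, length.out=20), k=5){
  folds <- sample(rep(1:k, length.out=nrow(X)))   # random fold assignment
  cv.err <- sapply(lams, function(lam){
    mean(sapply(1:k, function(f){
      tr <- folds != f
      fit <- ridge_betas(X[tr, , drop=FALSE], y[tr], lam=lam)
      pred <- cbind(1, X[!tr, , drop=FALSE]) %*% fit$betas
      mean((y[!tr] - pred)^2)                     # held-out MSE for this fold
    }))
  })
  lams[which.min(cv.err)]                         # lambda with the lowest CV error
}
# e.g. best.lam <- cv_lambda_sketch(X, y) for a given design matrix X and response y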

### Use npdro simulated data to test
if (!require("devtools")) install.packages("devtools")
library(devtools)
# npdro is distributed on GitHub (insilico/npdro), not CRAN
if (!require("npdro")) install_github("insilico/npdro")
library(npdro)
if (!require("dplyr")) install.packages("dplyr")
library(dplyr)

num.samples <- 300
num.variables <- 100
dataset <- npdro::createSimulation2(num.samples=num.samples,
                                    num.variables=num.variables,
                                    pct.imbalance=0.5,
                                    pct.signals=0.2,
                                    main.bias=0.5,
                                    interaction.bias=1,
                                    hi.cor=0.95,
                                    lo.cor=0.2,
                                    mix.type="main-interactionScalefree",
                                    label="class",
                                    sim.type="mixed",
                                    pct.mixed=0.5,
                                    pct.train=0.5,
                                    pct.holdout=0.5,
                                    pct.validation=0,
                                    plot.graph=FALSE,
                                    graph.structure=NULL,
                                    verbose=TRUE)
train <- dataset$train   # 150 x 101
test <- dataset$holdout
validation <- dataset$validation
dataset$signal.names
colnames(train)
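
# Quick check of ridge_betas on the simulated training split (a sketch; assumes
# the outcome column is named "class", per label="class" above, and treats the
# 0/1 class as numeric for the squared-error objective):
X.train <- as.matrix(train[, colnames(train) != "class"])
y.train <- as.numeric(as.character(train$class))
fit0 <- lasso_betas(X.train, y.train)             # lam=0: unpenalized baseline
fit1 <- ridge_betas(X.train, y.train, lam=0.1)    # small ridge penalty for comparison
fit0$loss; fit1$loss
head(fit0$betas)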
### Compare with Ridge
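
# The comparison code itself sits outside this diff hunk; one way such a comparison
# might look, using glmnet's ridge fit (alpha=0) as the reference (a sketch only;
# glmnet standardizes and scales lambda differently, so coefficients will not match exactly):
if (!require("glmnet")) install.packages("glmnet")
library(glmnet)
ridge.glmnet <- glmnet(X.train, y.train, alpha=0, lambda=0.1, standardize=FALSE)
cbind(glmnet=as.vector(coef(ridge.glmnet)),
      optim=ridge_betas(X.train, y.train, lam=0.1)$betas)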
@@ -134,7 +179,6 @@ plot.igraph(knn.graph,layout=layout_with_fr(knn.graph),
# 2. Gradient Descent
## Write fn with learning param
grad.rosen <- function(xvec, a=2, b=100){
  #a <- 2; b <- 1000;
  x <- xvec[1]
  y <- xvec[2]
  f.x <- -2*(a-x) - 4*b*x*(y-x^2)   # d/dx of (a-x)^2 + b*(y-x^2)^2
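  # the rest of grad.rosen lies outside this diff hunk; presumably it continues
  # with the y-component and returns the gradient vector, something like:
  #   f.y <- 2*b*(y - x^2)
  #   return(c(f.x, f.y))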
@@ -168,4 +212,4 @@ f.rosen <- function(xvec, a=2, b=100){
sol.BFGS <- optim(par=c(-1.8, 3.0), fn=function(x){f.rosen(x, a=2, b=100)},
                  gr=function(x){grad.rosen(x, a=2, b=100)}, method="BFGS")
sol.BFGS$par   # Rosenbrock minimum is at (a, a^2) = (2, 4)
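
# A hand-rolled gradient-descent loop with an explicit learning rate (the
# "learning param" the section heading mentions) could use grad.rosen like this;
# a sketch only, not part of the commit, with arbitrary step size and iteration count:
xk <- c(-1.8, 3.0)                  # same starting point as the BFGS run
lr <- 1e-4                          # learning rate (step size)
for (i in 1:50000){
  xk <- xk - lr * grad.rosen(xk, a=2, b=100)   # step down the gradient
}
xk   # plain gradient descent moves slowly along the Rosenbrock valley toward (2, 4)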