# Project 6 for the University of Tulsa's CS-7863 Sci-Stat Course
# Penalized Machine Learning
# Professor: Dr. McKinney, Spring 2023
# Noah L. Schrick - 1492657
# 1. Penalized Regression and Classification
## a. Modified Ridge classification for LASSO penalties
source("Schrick-Noah_Ridge-LASSO-Regression.R")
### Use npdro simulated data to test
source("Schrick-Noah_Simulated-Data.R")
bundled_data <- create_data()
# bundled_data$train.X = train.X
lasso.df <- data.frame(att = c("intercept", colnames(train.X)),
                       scores = unpen_beta$betas,
                       abs_scores = abs(unpen_beta$betas))
dplyr::slice_max(lasso.df, order_by = abs_scores, n = 20)
### Compare with Ridge
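# Hedged sketch: `ridge_beta` is a hypothetical object assumed to come from
# Schrick-Noah_Ridge-LASSO-Regression.R with the same $betas layout as
# unpen_beta above; the exists() guard keeps this from erroring if it is absent.
if (exists("ridge_beta")) {
  ridge.df <- data.frame(att = c("intercept", colnames(train.X)),
                         scores = ridge_beta$betas,
                         abs_scores = abs(ridge_beta$betas))
  print(dplyr::slice_max(ridge.df, order_by = abs_scores, n = 20))
}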
### Compare with Random Forest
source("Schrick-Noah_Random-Forest.R")
rf_comp(train)
### Compare with glmnet
source("Schrick-Noah_glmnet.R")
#### Alpha = 0 (ridge penalty)
glm_fcn(train.X, train.y, 0)
#### Alpha = 1 (LASSO penalty)
glm_fcn(train.X, train.y, 1)
## b. Repeat comparison using a graph with clusters
if (!require("igraph")) install.packages("igraph")
library(igraph)
if (!require("Matrix")) install.packages("Matrix")
library(Matrix) # bdiag
npc <- 25     # nodes per cluster
n_clust <- 4  # 4 clusters with 25 nodes each
# baseline random graph with no cluster structure
g0 <- erdos.renyi.game(npc * n_clust, 0.2)
plot(g0)
matlist <- list()
for (i in 1:n_clust) {
  matlist[[i]] <- get.adjacency(erdos.renyi.game(npc, 0.2))
}
# merge clusters into one matrix
mat_clust <- bdiag(matlist) # create block-diagonal matrix
## The following check may be unnecessary:
# find isolated nodes (connected to nothing) and join each one to a random node
k <- rowSums(mat_clust)
node_vector <- seq(1, npc * n_clust)
for (i in node_vector) {
  if (k[i] == 0) { # degree 0: connect to a random other node
    j <- sample(node_vector[-i], 1)
    mat_clust[i, j] <- 1
    mat_clust[j, i] <- 1
  }
}
node_colors <- c(rep("red", npc), rep("green", npc), rep("blue", npc), rep("orange", npc))
g1 <- graph_from_adjacency_matrix(mat_clust, mode = "undirected", diag = FALSE)
plot(g1, vertex.color = node_colors)
### Dataset with g1
# num.samples and num.variables are assumed to be set by the sourced
# Schrick-Noah_Simulated-Data.R (train below is 150 x 101, i.e. 300 samples
# split in half, with 100 predictors plus the class column)
dataset.graph <- npdro::createSimulation2(num.samples = num.samples,
                                          num.variables = num.variables,
                                          pct.imbalance = 0.5,
                                          pct.signals = 0.2,
                                          main.bias = 0.5,
                                          interaction.bias = 1,
                                          hi.cor = 0.95,
                                          lo.cor = 0.2,
                                          mix.type = "main-interactionScalefree",
                                          label = "class",
                                          sim.type = "mixed",
                                          pct.mixed = 0.5,
                                          pct.train = 0.5,
                                          pct.holdout = 0.5,
                                          pct.validation = 0,
                                          plot.graph = FALSE,
                                          graph.structure = g1,
                                          verbose = TRUE)
train.graph <- dataset.graph$train # 150 x 101
test.graph <- dataset.graph$holdout
validation.graph <- dataset.graph$validation
dataset.graph$signal.names
colnames(train.graph)
# separate the class vector from the predictor data matrix
train.graph.X <- train.graph[, -which(colnames(train.graph) == "class")]
train.graph.y <- train.graph[, "class"]
train.graph.y.01 <- as.numeric(train.graph.y)-1
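# Repeat the part (a) comparisons on the clustered-graph simulation.
# Sketch: assumes glm_fcn and rf_comp accept these arguments exactly as
# they are called for the first dataset above.
glm_fcn(train.graph.X, train.graph.y, 0) # ridge (alpha = 0)
glm_fcn(train.graph.X, train.graph.y, 1) # LASSO (alpha = 1)
rf_comp(train.graph)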
## c. Use npdro and igraph to create a knn graph
my.k <- 3 # larger k, fewer clusters
# transposing train.X gives distances between predictors;
# without the transpose, distances are between samples
npdr.nbpairs.idx <- npdro::nearestNeighbors(t(train.X),
                                            # nbd.method = "multisurf", k = 0,
                                            nbd.method = "relieff",
                                            nbd.metric = "manhattan",
                                            k = my.k)
knn.graph <- graph_from_edgelist(as.matrix(npdr.nbpairs.idx),
                                 directed = FALSE)
knn.graph <- simplify(knn.graph) # drop loops and duplicate edges
### Plot network
plot.igraph(knn.graph, layout = layout_with_fr(knn.graph),
            vertex.color = "red",
            vertex.size = 3, vertex.label = NA,
            main = "Manhattan, knn-graph")
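# Quick check of the "larger k, fewer clusters" note above: count the
# connected components of the knn graph (sketch, using igraph::components)
components(knn.graph)$no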
## d. Add Laplace graph penalty (sketch below)
### Find resulting beta coeffs
### Optimize or choose a value for lambda2
### Compare to a) and b)
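# Minimal sketch of a graph-Laplacian (network-constrained) ridge penalty,
# assuming knn.graph from part (c) has one node per predictor column of
# train.X and that train.y is a factor, mirroring the as.numeric(...) - 1
# conversion used above. Minimizing
#   ||y - Xb||^2 + lambda1*||b||^2 + lambda2 * b'Lb
# has the closed form b = (X'X + lambda1*I + lambda2*L)^{-1} X'y, with L = D - A.
L.knn <- as.matrix(laplacian_matrix(knn.graph)) # graph Laplacian L = D - A
graph_ridge <- function(X, y, L, lambda1 = 0.1, lambda2 = 0.1) {
  X <- as.matrix(X)
  solve(t(X) %*% X + lambda1 * diag(ncol(X)) + lambda2 * L, t(X) %*% y)
}
# lambda1 and lambda2 are placeholder values; lambda2 could be tuned by a
# small grid search on holdout error. Guard against a node/predictor mismatch:
if (vcount(knn.graph) == ncol(train.X)) {
  beta.graph <- graph_ridge(train.X, as.numeric(train.y) - 1, L.knn)
}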
# 2. Gradient Descent
## Write the gradient function with a learning parameter
grad.rosen <- function(xvec, a = 2, b = 100) {
  x <- xvec[1]
  y <- xvec[2]
  f.x <- -2 * (a - x) - 4 * b * x * (y - x^2) # d/dx of (a-x)^2 + b*(y-x^2)^2
  f.y <- 2 * b * (y - x^2)                    # d/dy
  return(c(f.x, f.y))
}
a <- 2
b <- 100
alpha <- 0.0001 # learning rate
p <- c(0, 0)    # update direction (initialized for momentum, but overwritten below)
xy <- c(-1.8, 3.0) # initial guess for the solution
# plain gradient descent
epochs <- 1000000
for (epoch in 1:epochs) {
  p <- -grad.rosen(xy, a, b)
  xy <- xy + alpha * p
}
print(xy) # minimum of the Rosenbrock function is at (a, a^2) = (2, 4)
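# Heavy-ball momentum variant of the loop above, using the p vector as a
# decaying velocity (sketch; mu = 0.9 is an assumed coefficient, not part of
# the original run):
mu <- 0.9
p <- c(0, 0)
xy <- c(-1.8, 3.0)
for (epoch in 1:epochs) {
  p <- mu * p - grad.rosen(xy, a, b) # accumulate velocity from past gradients
  xy <- xy + alpha * p
}
print(xy) # should also approach (2, 4)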
# Using optim:
f.rosen <- function(xvec, a = 2, b = 100) {
  x <- xvec[1]
  y <- xvec[2]
  return((a - x)^2 + b * (y - x^2)^2)
}
sol.BFGS <- optim(par = c(-1.8, 3.0),
                  fn = function(x) f.rosen(x, a = 2, b = 100),
                  gr = function(x) grad.rosen(x, a = 2, b = 100),
                  method = "BFGS")
sol.BFGS$par