From 21ac5a3ad02e54267b39132f5ca5b33730c9575d Mon Sep 17 00:00:00 2001
From: noah
Date: Wed, 12 Apr 2023 17:04:09 -0500
Subject: [PATCH] Gradient Descent for Rosenbrock function using learning rate
 and momentum

---
 Schrick-Noah_Homework-6.R | 36 ++++++++++++++++++++++++++++++++++--
 1 file changed, 34 insertions(+), 2 deletions(-)

diff --git a/Schrick-Noah_Homework-6.R b/Schrick-Noah_Homework-6.R
index 21270e8..b5c3878 100644
--- a/Schrick-Noah_Homework-6.R
+++ b/Schrick-Noah_Homework-6.R
@@ -133,7 +133,39 @@ plot.igraph(knn.graph,layout=layout_with_fr(knn.graph),
 
 # 2. Gradient Descent
 ## Write fn with learning param
+# Analytic gradient of the Rosenbrock function f(x,y) = (a-x)^2 + b*(y-x^2)^2
+grad.rosen <- function(xvec, a = 2, b = 100) {
+  x <- xvec[1]
+  y <- xvec[2]
+  f.x <- -2 * (a - x) - 4 * b * x * (y - x^2)  # df/dx
+  f.y <- 2 * b * (y - x^2)                     # df/dy
+  c(f.x, f.y)
+}
 
-## Solve Rosenbrock function minimum
+a <- 2
+b <- 100
+alpha <- 1e-4  # learning rate
+mu <- 0.9      # momentum coefficient (mu = 0 recovers plain gradient descent)
+p <- c(0, 0)   # velocity (momentum) term, accumulated across epochs
 
-## Add momentum term
\ No newline at end of file
+xy <- c(-1.8, 3.0)  # initial guess for the solution
+
+# gradient descent with momentum
+epochs <- 1000000
+for (epoch in 1:epochs) {
+  # BUG FIX: p was previously overwritten each iteration (p = -grad),
+  # so the "momentum" term never accumulated. Heavy-ball update:
+  p <- mu * p - alpha * grad.rosen(xy, a, b)
+  xy <- xy + p
+}
+
+print(xy)  # analytic minimum of the Rosenbrock function is (a, a^2) = (2, 4)
+
+# Cross-check with optim's BFGS solver (defaults a = 2, b = 100 apply):
+f.rosen <- function(xvec, a = 2, b = 100) {
+  x <- xvec[1]
+  y <- xvec[2]
+  (a - x)^2 + b * (y - x^2)^2
+}
+
+sol.BFGS <- optim(par = c(-1.8, 3.0), fn = f.rosen,
+                  gr = grad.rosen, method = "BFGS")
+sol.BFGS$par
\ No newline at end of file