Gradient Descent for Rosenbrock function using learning rate and momentum
commit 21ac5a3ad0 (parent f738fd038a)
@@ -133,7 +133,39 @@ plot.igraph(knn.graph,layout=layout_with_fr(knn.graph),
# 2. Gradient Descent

## Write the gradient fn (parameterized by a, b)
# Rosenbrock function: f(x, y) = (a - x)^2 + b*(y - x^2)^2

grad.rosen <- function(xvec, a = 2, b = 100) {
  x <- xvec[1]
  y <- xvec[2]
  f.x <- -2*(a - x) - 4*b*x*(y - x^2)  # df/dx
  f.y <- 2*b*(y - x^2)                 # df/dy
  return(c(f.x, f.y))
}
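
# A quick sanity check (a sketch, not in the original script): compare the
# analytic gradient against a central finite difference. The helper num.grad,
# the step size h, and the test point are assumptions made here for
# illustration.
num.grad <- function(xvec, h = 1e-6, a = 2, b = 100) {
  f <- function(v) (a - v[1])^2 + b*(v[2] - v[1]^2)^2
  c((f(xvec + c(h, 0)) - f(xvec - c(h, 0))) / (2*h),
    (f(xvec + c(0, h)) - f(xvec - c(0, h))) / (2*h))
}
grad.rosen(c(-1.8, 3.0)) - num.grad(c(-1.8, 3.0))  # both entries should be ~0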

## Solve Rosenbrock function minimum
a <- 2
b <- 100
alpha <- 1e-4  # learning rate

## Add momentum term
beta <- 0.9          # momentum coefficient (an assumed, typical value)
p <- c(0, 0)         # momentum (velocity) accumulator
xy <- c(-1.8, 3.0)   # initial guess for the solution

# gradient descent with momentum:
# p <- beta*p - grad, then xy <- xy + alpha*p
epochs <- 1000000
for (epoch in 1:epochs) {
  p <- beta*p - grad.rosen(xy, a, b)  # decaying sum of past gradients
  xy <- xy + alpha*p                  # step along the smoothed direction
}

print(xy)  # should be close to the true minimum at (a, a^2) = (2, 4)
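
# As an extra convergence check (a sketch, not in the original script),
# evaluate the objective at the returned point; it should be near 0,
# its value at the true minimum.
(a - xy[1])^2 + b*(xy[2] - xy[1]^2)^2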

# Using optim() with the analytic gradient for comparison:
f.rosen <- function(xvec, a = 2, b = 100) {
  x <- xvec[1]
  y <- xvec[2]
  return((a - x)^2 + b*(y - x^2)^2)
}
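
# Sanity check (a sketch, not in the original script): the objective is
# exactly 0 at the analytic minimum (a, a^2) = (2, 4).
f.rosen(c(2, 4))  # 0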

sol.BFGS <- optim(par = c(-1.8, 3.0),
                  fn = function(x) f.rosen(x, a = 2, b = 100),
                  gr = function(x) grad.rosen(x, a = 2, b = 100),
                  method = "BFGS")
sol.BFGS$par  # should match the true minimum (2, 4)
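
# Optional diagnostics (a sketch, not in the original script): optim()
# also returns the final objective value and a convergence code.
sol.BFGS$value        # should be ~0
sol.BFGS$convergence  # 0 indicates successful convergence
sol.BFGS$par - xy     # gap between BFGS and the hand-rolled momentum run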