Optimization Techniques for the Sincos Function in R
Analyzing the Sincos Function
getwd()        # check the current working directory
rm(list=ls())  # clear the workspace
sincos <- function(x){
  sol<-sin(x[1]) + cos(x[2]) # sine of the first coordinate plus cosine of the second
  return(sol)
}
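Since sin and cos are each bounded by [-1, 1], sincos ranges over [-2, 2]. As a quick sanity check (illustrative, not part of the original script), we can evaluate it at points where the extrema are known analytically:
sincos(c(pi/2, 0))   # sin(pi/2) + cos(0) = 2, the global maximum
sincos(c(-pi/2, pi)) # sin(-pi/2) + cos(pi) = -2, the global minimum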
Creating a Vector and Matrix
We create a vector x with values from -3π to 3π in steps of 3π/100 using the seq command.
x<-seq(-3*pi,3*pi, by = 3*pi/100)
Next, we create a matrix m with 2 columns and a number of rows equal to the length of the vector x.
m<-matrix( nrow = length(x), ncol = 2)
m[,1]<-x
m[,2]<-x
Each row of matrix m represents a point in 2D space. Note that, because both columns hold the same vector x, these points all lie on the diagonal x1 = x2.
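If we wanted to sample the whole square instead of only the diagonal, a full grid could be built with expand.grid (an illustrative sketch; m_full is not used in the rest of the analysis):
g<-expand.grid(x1=x, x2=x) # all pairs of grid values
m_full<-as.matrix(g)       # length(x)^2 rows, 2 columns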
Calculating Sincos Function Values
Using the apply function, we calculate the values of the sincos function for each point (row) in matrix m.
soluciones<-apply(m,1, sincos)
soluciones
max(soluciones)
min(soluciones)
coordinates_max<-m[which.max(soluciones),]
coordinates_max
coordinates_min<-m[which.min(soluciones),]
coordinates_min
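As a quick consistency check (illustrative), re-evaluating the function at these coordinates reproduces the extreme values:
sincos(coordinates_max) # equals max(soluciones)
sincos(coordinates_min) # equals min(soluciones)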
Comparing Optimization Methods
We adapt the compare.R code to compare Hill Climbing, Monte Carlo, and Simulated Annealing when minimizing the sincos function.
Parameters:
- D=2 (vectors of two components)
- 100 runs
- MAXIT=1000
- Values for the variables: [-3π, 3π]
We use set.seed(2023) to fix the seed before running the simulation for the three methods.
getwd()
setwd("/Users/alvaroregidornestares/Documents/UNI/3o Carrera/1er cuatri/Optimization II")
list.files()
Modified Sincos Function
We wrap sincos in a modified version, csincos, that counts every evaluation in the global variable EV and records the best value found so far in F, so that convergence can be traced.
csincos <- function(x){
  f<-sin(x[1]) + cos(x[2])
  EV<<-EV+1 # increase evaluations
  if(f<BEST) BEST<<-f # update best-so-far value
  if(EV<=MAXIT) F[EV]<<-BEST # record the best-so-far at this evaluation
  return(f)
}
Loading Functions and Setting Parameters
getwd()
setwd("/Users/alvaroregidornestares/Documents/UNI/3o Carrera/1er cuatri/Optimization II/Tema 4")
list.files()
source("montecarlo.R")
source("hill.R")
source("blind.R") # -> para montecarlo
# SANN is within optim() which is within the R package
Runs<-100; D<-2; MAXIT<-1000;lower<-rep(-3*pi,D);upper<-rep(3*pi,D)
rchange1<-function(par,lower,upper) # change for hclimbing
{ hchange(par,lower=lower,upper=upper,rnorm,
mean=0,sd=0.5,round=FALSE) }
rchange2<-function(par) # change for optim
{ hchange(par,lower=lower,upper=upper,rnorm,
mean=0,sd=0.5,round=FALSE) }
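For intuition, a single perturbation can be inspected directly; hchange (from the sourced hill.R) adds Gaussian noise to the point and keeps the result within the bounds. An illustrative call:
set.seed(1) # illustrative seed, not part of the experiment
rchange1(c(0,0),lower,upper) # origin plus N(0,0.5^2) noise, kept inside [-3*pi,3*pi]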
CHILL<-list(maxit=MAXIT,REPORT=0)
CSANN<-list(maxit=MAXIT,temp=10,trace=FALSE)
Methods<-c("monte carlo","hill climbing","simulated annealing")
Running the Simulations
RES<-vector("list",length(Methods)) # all results
for(m in 1:length(Methods))
RES[[m]]<-matrix(nrow=MAXIT,ncol=Runs)
set.seed(2023) # fix the seed, as stated above
for(R in 1:Runs) # cycle all runs
{ s<-runif(D,-3*pi,3*pi) # initial search point
EV<-0; BEST<-Inf; F<-rep(NA,MAXIT) # reset these vars.
# Monte Carlo:
mcsearch(MAXIT,lower=lower,upper=upper,FUN=csincos)
RES[[1]][,R]<-F
# Hill Climbing:
EV<-0; BEST<-Inf; F<-rep(NA,MAXIT)
hclimbing(s,csincos,change=rchange1,lower=lower,
upper=upper,control=CHILL,type="min")
RES[[2]][,R]<-F
# Simulated Annealing:
EV<-0; BEST<-Inf; F<-rep(NA,MAXIT)
optim(s,csincos,method="SANN",gr=rchange2,control=CSANN)
RES[[3]][,R]<-F
}
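Each RES[[m]] now stores one best-so-far curve per run; a quick structural check (illustrative):
dim(RES[[1]]) # MAXIT rows x Runs columns, i.e. 1000 x 100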
Aggregating and Plotting Results
AV<-matrix(nrow=MAXIT,ncol=length(Methods)) # average best per evaluation
CI<-AV # same shape; will hold the 95% CI half-widths
for(m in 1:length(Methods))
{
  for(i in 1:MAXIT)
  {
    mi<-mean(RES[[m]][i,])
    sample.n <- length(RES[[m]][i,]) # number of runs
    sample.sd <- sd(RES[[m]][i,])
    sample.se <- sample.sd/sqrt(sample.n) # standard error of the mean
    AV[i,m]<-mi
    CI[i,m]<-qt(0.975,df=sample.n-1)*sample.se # 95% CI half-width
  }
}
MIN<-min(AV)
MAX<-max(AV)
g1<-seq(1,MAXIT,length.out=100) # grid for lines
plot(g1,AV[g1,3],ylim=c(MIN,MAX),type="l",lwd=2,
ylab="average best",xlab="number of evaluations")
lines(g1,AV[g1,2],lwd=2,lty=2)
lines(g1,AV[g1,1],lwd=2,lty=3)
legend("topright",legend=rev(Methods),lwd=2,lty=1:3)
Analyzing Minimum Values and Iterations
Minimum values reached by each method:
min_mc<-min(RES[[1]])
min_mc
min_hc<-min(RES[[2]])
min_hc
min_SANN<-min(RES[[3]])
min_SANN
Evaluations at which the minimum value was reached (row = evaluation index, column = run):
head(which(RES[[1]] == min_mc, arr.ind = TRUE))
head(which(RES[[2]] == min_hc, arr.ind = TRUE))
head(which(RES[[3]] == min_SANN, arr.ind = TRUE))
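Since each row index is an evaluation number, the earliest evaluation at which a method hits its minimum can be extracted directly (illustrative):
min(which(RES[[1]] == min_mc, arr.ind = TRUE)[,"row"]) # earliest evaluation for Monte Carlo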
Changing the Initial Search Point
RES<-vector("list",length(Methods)) # all results
for(m in 1:length(Methods))
RES[[m]]<-matrix(nrow=MAXIT,ncol=Runs)
for(R in 1:Runs) # cycle all runs
{ s<-c(6,6) # initial search point
EV<-0; BEST<-Inf; F<-rep(NA,MAXIT) # reset these vars.
# Monte Carlo:
mcsearch(MAXIT,lower=lower,upper=upper,FUN=csincos)
RES[[1]][,R]<-F
# Hill Climbing:
EV<-0; BEST<-Inf; F<-rep(NA,MAXIT)
hclimbing(s,csincos,change=rchange1,lower=lower,
upper=upper,control=CHILL,type="min")
RES[[2]][,R]<-F
# Simulated Annealing:
EV<-0; BEST<-Inf; F<-rep(NA,MAXIT)
optim(s,csincos,method="SANN",gr=rchange2,control=CSANN)
RES[[3]][,R]<-F
}
Comparing Evolutionary Algorithms
We adapt the compare2.R code to compare the Evolutionary Algorithm (EA), Differential Evolution (DE), and Particle Swarm Optimization (PSO) when minimizing the sincos function.
list.files()
getwd()
setwd("/Users/alvaroregidornestares/Documents/UNI/3o Carrera/1er cuatri/Optimization II")
list.files()
library(genalg)
library(DEoptim)
library(pso)
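If any of these packages are missing, they can be installed once from CRAN:
# one-time install, if needed:
# install.packages(c("genalg","DEoptim","pso"))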
Parameters:
- D=2 (vectors of two components)
- 100 runs
- MAXFN=1000
- 100 individuals for the population
- maxit=100
- Values for the variables: [-3π, 3π]
D<-2; Runs<-100; MAXFN=1000; LP=100; maxit=100; lower<-rep(-3*pi,D);upper<-rep(3*pi,D)
Methods=c("EA","DE","PSO")
We use set.seed(2023) to fix the seed before running the simulation.
csincos <- function(x){
  f<-sin(x[1]) + cos(x[2])
  EV<<-EV+1 # increase evaluations
  if(f<BEST) BEST<<-f # update best-so-far value
  if(EV<=MAXFN) F[EV]<<-BEST # record the best-so-far at this evaluation
  return(f)
}
crun<-function(method,f,lower,upper,LP,maxit) # run a method
{ if(method=="EA")
rbga(evalFunc=f,stringMin=lower,stringMax=upper,popSize=LP,
iters=maxit*1.5)
else if(method=="DE")
{ C=DEoptim.control(itermax=maxit,trace=FALSE,NP=LP)
DEoptim(f,lower=lower,upper=upper,control=C)
}
else if(method=="PSO")
{ C=list(maxit=maxit,s=LP)
psoptim(rep(NA,length(lower)),fn=f,
lower=lower,upper=upper,control=C)
}
}
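A single run of one method can also be launched directly; this sketch assumes the evaluation counters are reset first (illustrative):
EV<-0; BEST<-Inf; F<-rep(NA,MAXFN) # reset the counters
crun("PSO",csincos,lower,upper,LP,maxit) # one PSO run
F[MAXFN] # best value recorded at evaluation MAXFN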
ctest<-function(Methods,f,lower,upper,type="min",Runs, # test
D,MAXFN,maxit,LP,pdf,main) # all methods:
{ RES<<-vector("list",length(Methods)) # all results
VAL<<-matrix(nrow=Runs,ncol=length(Methods)) # best values
for(m in 1:length(Methods)) # initialize RES object
RES[[m]]<<-matrix(nrow=MAXFN,ncol=Runs)
for(R in 1:Runs) # cycle all runs
for(m in 1:length(Methods))
{ EV<<-0; F<<-rep(NA,MAXFN) # reset EV and F
if(type=="min") BEST<<-Inf
else BEST<<- -Inf # reset BEST
suppressWarnings(crun(Methods[m],f,lower,upper,LP,maxit))
RES[[m]][,R]<<-F
# store all best values
VAL[R,m]<<-F[MAXFN] # store best value at MAXFN
}
# compute average F result per method:
AV<<-matrix(nrow=MAXFN,ncol=length(Methods))
for(m in 1:length(Methods))
for(i in 1:MAXFN)
AV[i,m]<<-mean(RES[[m]][i,])
# show results:
cat(main,"\n",Methods,"\n")
cat(round(apply(VAL,2,mean),digits=2)," (average best)\n")
par(mar=c(4.0,4.0,1.8,0.6)) # reduce default plot margin
MIN=min(AV);MAX=max(AV)
# use a grid to improve clarity:
g1=seq(1,MAXFN,length.out=500) # grid for lines
plot(g1,AV[g1,1],ylim=c(MIN,MAX),type="l",lwd=2,main=main,
ylab="average best",xlab="number of evaluations")
for(i in 2:length(Methods)) lines(g1,AV[g1,i],lwd=2,lty=i)
if(type=="min") position="topright" else position=
"bottomright"
legend(position,legend=Methods,lwd=2,lty=1:length(Methods))
}
EV<-0;BEST<-Inf;F<-rep(NA,MAXFN)
Running the Evolutionary Algorithms
AV=NULL
RES=NULL
set.seed(2023) # fix the seed, as stated above
ctest(Methods,csincos,lower,upper,"min",Runs,D,MAXFN,maxit,
LP,"comp-sphere2","sincos (D=2)")
Analyzing Minimum Values and Iterations
Minimum values reached by each method:
min_EA<-min(RES[[1]])
min_EA
min_DE<-min(RES[[2]])
min_DE
min_PSO<-min(RES[[3]])
min_PSO
Evaluations at which the minimum value was reached (row = evaluation index, column = run):
head(which(RES[[1]] == min_EA, arr.ind = TRUE))
head(which(RES[[2]] == min_DE, arr.ind = TRUE))
head(which(RES[[3]] == min_PSO, arr.ind = TRUE))
Determining the Fastest Converging Method
Based on the results, we can determine which method converges faster by comparing the average best values against the number of evaluations: the method whose convergence curve drops most quickly reaches good solutions with the fewest evaluations.
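One simple way to quantify this (an illustrative sketch; the 0.05 tolerance is an arbitrary choice) is the number of evaluations each average curve needs to come within that tolerance of its final value:
tol<-0.05 # arbitrary tolerance (assumption)
sapply(1:length(Methods),function(m)
  which(AV[,m]<=AV[MAXFN,m]+tol)[1]) # evaluations needed per method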