## ——————————————
##
## ST4060 / ST6015 / ST6040
## R script – Fri 29 Oct 2021 lecture
## Eric Wolsztynski
##
## Going back over nonlinear optimisation
## and grid search…
##
## ——————————————
rm(list=ls())
attach(pressure)
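# (Not in the lecture) quick peek at the built-in dataset used here;
# datasets::pressure records the vapour pressure of mercury (mm Hg)
# against temperature (Celsius):
head(datasets::pressure)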
x = temperature
y = pressure
plot(x, y, pch=20, cex=2)
# Recall how we can fit a polynomial of high order almost
# perfectly to the data, using lm():
lmo = lm(y ~ x + I(x^2) + I(x^3) + I(x^4) + I(x^5))
lines(x, fitted(lmo), lwd=4, col=4)
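# (Aside, not from the lecture) the same degree-5 polynomial can be
# written more compactly with poly(); the fitted values are identical:
lmo.poly = lm(y ~ poly(x, degree=5, raw=TRUE))
sum( (fitted(lmo) - fitted(lmo.poly))^2 )  # essentially zero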
# How to fit a nonlinear model using nls():
nlreg <- nls(y ~ exp(a+b*x), start=list(a=0,b=.02))
summary(nlreg)
cc = coef(nlreg) # store the fitted coefficients (a,b) for later use
# add this new fit to the plot:
lines(x, fitted(nlreg), col='blue', lwd=3)
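# (Optional, not in the lecture) evaluating the nls fit on a finer
# temperature grid gives a smoother curve; 'x' is the predictor name
# used in the nls formula, so predict() accepts it via newdata:
x.fine = seq(min(x), max(x), length=200)
lines(x.fine, predict(nlreg, newdata=data.frame(x=x.fine)), col='cyan', lwd=1, lty=2)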
# How to perform a grid search over the (a,b) parameter space:
L = 10
a.grid = seq(-.9, 0, length=L)
b.grid = seq(0.0201, 0.0208, length=L)
crit = matrix(NA, nrow=L, ncol=L)
for(i in 1:L){
for(j in 1:L){
crit[i,j] = sum( (y - exp( a.grid[i] + b.grid[j] * x))^2 )
}
}
persp(a.grid, b.grid, crit, col="lightblue",theta=0)
persp(a.grid, b.grid, crit, col="lightblue",theta=45)
contour(a.grid, b.grid, crit)
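# (Not in the lecture) locate the minimiser over this coarse grid and
# compare it with the nls estimates stored in cc:
ij = which(crit == min(crit), arr.ind=TRUE)[1,]
c(a=a.grid[ij[1]], b=b.grid[ij[2]])
cc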
# The polynomial fits the data increasingly well as we increase
# its degree... which eventually leads to "overfitting"
# (although that is not always a problem!).
# Same grid search again, now on a much finer grid (L = 100):
L = 100
a.grid = seq(-.9, 0, length=L)
b.grid = seq(0.0201, 0.0208, length=L)
crit = matrix(NA, nrow=L, ncol=L)
for(i in 1:L){
for(j in 1:L){
crit[i,j] = sum( (y - exp( a.grid[i] + b.grid[j] * x))^2 )
}
}
persp(a.grid, b.grid, crit, col="lightblue",theta=45)
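# (Not in the lecture) a contour plot of the finer criterion surface,
# with the nls estimates overlaid (assuming they fall inside the
# plotted grid ranges):
contour(a.grid, b.grid, crit)
points(cc[1], cc[2], pch=4, col=2, cex=2)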
# How to fit any model using optim():
crit <- function(th,x,y){
# Must have theta as the 1st argument and return a single value...
# Here we implement the residual sum of squares for the
# exponential model y = exp(a+b*x), for the sake of the example:
a = th[1]
b = th[2]
return( sum( (y-exp(a+b*x))^2 ) )
}
(optim.out.alt = optim(par=c(0,.02), fn=crit, x=x, y=y)) # default: Nelder-Mead
(optim.out = optim(par=c(0,.02), fn=crit, x=x, y=y,
method='L-BFGS-B', lower=c(-1,.01), upper=c(0.01,.03)))
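# (Not in the lecture) compare the criterion values achieved by each run;
# $value is the RSS at the returned parameters:
optim.out.alt$value   # Nelder-Mead
optim.out$value       # L-BFGS-B with box constraints
sum(resid(nlreg)^2)   # nls, for reference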
yhat = exp(optim.out$par[1] + optim.out$par[2]*x)
yhat.alt = exp(optim.out.alt$par[1] + optim.out.alt$par[2]*x)
plot(x, y, pch=20, cex=2,
xlab="Temperature (Celsius)",
ylab="Vapor pressure (mm of mercury)",
main="Example: nonlinear regression")
lines(x, yhat.alt, col='navy', lwd=3)
lines(x, fitted(nlreg), col='blue', lwd=3)
lines(x, yhat, col=2, lwd=3)
legend("topleft", col=c('blue','navy','red'), bty='n',
legend=c('nls','optim (Nelder-Mead)','optim (L-BFGS-B)'), lwd=3, lty=1)
# Should we add a third parameter to scale the exponential?
crit <- function(th,x,y){
# Must have theta as the 1st argument and return a single value...
# Same exponential model as before, now with an additional
# multiplicative scaling parameter c:
a = th[1]
b = th[2]
c = th[3] ## additional scaling parameter
return( sum( (y-c*exp(a+b*x))^2 ) )
}
(optim.out.alt = optim(par=c(0,.02,1), fn=crit, x=x, y=y)) # init c=1
# NB: don't forget to include this additional parameter when evaluating
# the fitted values!!! (which is exactly what I forgot to do during the lecture ;))
yhat.alt = optim.out.alt$par[3]*exp(optim.out.alt$par[1] + optim.out.alt$par[2]*x)
# The result is not great:
plot(x, y, pch=20, cex=2,
xlab="Temperature (Celsius)",
ylab="Vapor pressure (mm of mercury)",
main="Example: nonlinear regression")
lines(x, yhat.alt, col='navy', lwd=3)
lines(x, fitted(nlreg), col='blue', lwd=3)
lines(x, yhat, col=2, lwd=3)
legend("topleft", col=c('blue','navy','red'), bty='n',
legend=c('nls','optim (3 parameters, Nelder-Mead)','optim (2 parameters, L-BFGS-B)'), lwd=3, lty=1)
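# A likely explanation (not spelled out in the lecture): the scaled model
# is over-parameterised, since c*exp(a+b*x) = exp((a+log(c)) + b*x), so
# a and c are not jointly identifiable and the optimiser can wander along
# a ridge of equivalent solutions. Quick numerical check (assuming the
# fitted c is positive):
a.hat = optim.out.alt$par[1]
b.hat = optim.out.alt$par[2]
c.hat = optim.out.alt$par[3]
range( c.hat*exp(a.hat+b.hat*x) - exp((a.hat+log(c.hat))+b.hat*x) )  # ~ 0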
# Take-home messages:
# 1. The choice of optimisation algorithm is important
#    and may yield very different fits.
# 2. No matter which algorithm we used, the exponential model
#    we chose was not able to capture the finer detail in the
#    pattern of interest in these data...
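# (Not in the lecture) one quick diagnostic worth checking: the convergence
# codes returned by optim() (0 indicates successful completion):
optim.out.alt$convergence
optim.out$convergence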
# BONUS: a nicer example of a criterion surface plot...
set.seed(4060)
n = 100
x = runif(n,-1,1)
y = .2*x+.3*x^2
plot(x,y)
L = 100
ag = seq(-1,1,length=L)
bg = seq(-1,1,length=L)
crit = matrix(NA,nrow=L,ncol=L)
for(i in 1:L){
for(j in 1:L){
crit[i,j] = sum((y-ag[i]*x-bg[j]*x^2)^2)
}
}
persp(ag,bg,crit,col='lightblue',theta=45)
contour(ag,bg,crit)
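# (Not in the lecture) the grid minimiser should sit close to the true
# coefficients (a,b) = (0.2, 0.3) used to generate y:
ij = which(crit == min(crit), arr.ind=TRUE)[1,]
c(a=ag[ij[1]], b=bg[ij[2]])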