1 Simulating data following a stationary AR(2) model

set.seed(12345)
n    = 200
sig  = 1
phi0 = 0
phi1 = 1.75
phi2 = -0.8
#phi1 = 0.9
#phi2 = 0.05
y    = rep(0,2*n)
for (t in 3:(2*n))
 y[t] = phi0+phi1*y[t-1]+phi2*y[t-2]+rnorm(1,0,sig)
y = y[(n+1):(2*n)]   # discard the first n draws as burn-in so the retained series is effectively stationary
ts.plot(y)
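With \(\phi_1=1.75\) and \(\phi_2=-0.8\), both roots of the AR(2) characteristic polynomial \(1-\phi_1 z-\phi_2 z^2\) lie outside the unit circle, so the process is indeed stationary. A quick check in base R (a small illustrative sketch):

# stationarity holds when all roots of 1 - phi1*z - phi2*z^2 have modulus > 1
Mod(polyroot(c(1,-phi1,-phi2)))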

2 Maximum likelihood estimation of AR(3) models

The (conditional) ML estimates of \(\phi\) and \(\sigma\), obtained below by OLS on the lagged regressors, are \[ {\hat \phi}=(0.1076,1.6913,-0.7016,-0.0437)' \ \ \mbox{and} \ \ {\hat \sigma} = 0.9593, \] with \({\hat \phi}_3\) clearly statistically insignificant (p-value of 0.540), as expected since the data were simulated from an AR(2) model.

p  = 3
yy = y[(p+1):n]                  # response: y_t for t = p+1,...,n
X  = cbind(1,y[p:(n-1)])         # intercept and first lag
for (k in 2:p)
  X = cbind(X,y[(p-k+1):(n-k)])  # append lags 2,...,p

ols = lm(yy~X-1)
summary(ols)
## 
## Call:
## lm(formula = yy ~ X - 1)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -2.65547 -0.58459  0.02411  0.53600  2.67453 
## 
## Coefficients:
##    Estimate Std. Error t value Pr(>|t|)    
## X1  0.10754    0.07149   1.504    0.134    
## X2  1.69131    0.07145  23.671  < 2e-16 ***
## X3 -0.70164    0.13118  -5.349 2.49e-07 ***
## X4 -0.04371    0.07123  -0.614    0.540    
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.9593 on 193 degrees of freedom
## Multiple R-squared:  0.9792, Adjusted R-squared:  0.9787 
## F-statistic:  2266 on 4 and 193 DF,  p-value: < 2.2e-16
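As a cross-check, base R's arima() fits the same model by exact maximum likelihood. A sketch (note that arima() reports the process mean as its intercept, not \(\phi_0\), so only the AR coefficients are directly comparable):

fit = arima(y,order=c(3,0,0),method="ML")
fit
# arima()'s intercept is the mean mu; phi0 can be recovered as mu*(1-phi1-phi2-phi3)
coef(fit)["intercept"]*(1-sum(coef(fit)[1:3]))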

3 MCMC-based Bayesian inference for AR(3) models

# Prior hyperparameters: phi ~ N(a0,B0) and sigma2 ~ IG(c0,d0)
a0 = rep(0,p+1)
B0 = 100*diag(1,p+1)
c0 = 2.5
d0 = 2.5

# initial values
phi = rep(0,p+1)
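Under this conjugate prior both full conditionals are available in closed form, and the Gibbs sampler below simply alternates between them (here \(y\) denotes the response vector yy and \(X\) the lag matrix from Section 2): \[ \sigma^2|\phi,y \sim IG\left(c_0+\frac{n-p}{2},\ d_0+\frac{(y-X\phi)'(y-X\phi)}{2}\right) \] and \[ \phi|\sigma^2,y \sim N(m,V), \ \ V=\left(\frac{X'X}{\sigma^2}+B_0^{-1}\right)^{-1}, \ \ m=V\left(\frac{X'y}{\sigma^2}+B_0^{-1}a_0\right). \]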

# Running our MCMC algorithm for posterior inference
M0 = 1000
M  = 10000
draws = matrix(0,M0+M,p+2)
for (i in 1:(M0+M)){
  # full conditional of sigma2: IG(par1,par2), drawn as 1/Gamma(shape=par1, rate=par2)
  par1 = c0+(n-p)/2
  par2 = d0+t(yy-X%*%phi)%*%(yy-X%*%phi)/2
  sig2 = 1/rgamma(1,par1,par2)

  # full conditional of phi: N(m,V), sampled via the (lower) Cholesky factor of V
  V = solve(t(X)%*%X/sig2+solve(B0))
  m = V%*%(t(X)%*%yy/sig2+solve(B0)%*%a0)
  phi = m + t(chol(V))%*%rnorm(p+1)
  
  # storing draws
  draws[i,] = c(phi,sig2)
}

# Keeping only the final M draws (discarding the first M0 ones)
draws = draws[(M0+1):(M0+M),]

3.1 Checking the MCMC draws

# Trace plots of the Markov chains 
par(mfrow=c(3,5))
ts.plot(draws[,1],xlab="MCMC iterations",ylab="",main=expression(phi[0]))
ts.plot(draws[,2],xlab="MCMC iterations",ylab="",main=expression(phi[1]))
ts.plot(draws[,3],xlab="MCMC iterations",ylab="",main=expression(phi[2]))
ts.plot(draws[,4],xlab="MCMC iterations",ylab="",main=expression(phi[3]))
ts.plot(sqrt(draws[,5]),xlab="MCMC iterations",ylab="",main=expression(sigma))

# Investigating the autocorrelations of the draws
acf(draws[,1],main="")
acf(draws[,2],main="")
acf(draws[,3],main="")
acf(draws[,4],main="")
acf(sqrt(draws[,5]),main="")
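The autocorrelations above can also be summarized numerically through the effective sample size; a minimal sketch, assuming the coda package is installed:

library(coda)
chain = mcmc(draws)
effectiveSize(chain)   # effective number of independent draws per parameter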

# MCMC-based marginal posterior densities
hist(draws[,1],xlab="",prob=TRUE,main=expression(phi[0]))
hist(draws[,2],xlab="",prob=TRUE,main=expression(phi[1]))
hist(draws[,3],xlab="",prob=TRUE,main=expression(phi[2]))
hist(draws[,4],xlab="",prob=TRUE,main=expression(phi[3]))
hist(sqrt(draws[,5]),xlab="",prob=TRUE,main=expression(sigma))

3.2 MCMC-based \(E(\phi_1+\phi_2+\phi_3|y_1,\ldots,y_n)\)

mean(draws[,2]+draws[,3]+draws[,4])
## [1] 0.9463005
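The posterior mean sits close to the true value \(0.95\) (\(\phi_1+\phi_2\) from the simulation, with \(\phi_3=0\)). An equal-tail 95% credible interval can be read directly off the draws:

quantile(draws[,2]+draws[,3]+draws[,4],c(0.025,0.975))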

3.3 MCMC-based \(E(\sigma|y_1,\ldots,y_n)\)

mean(sqrt(draws[,5]))
## [1] 0.9633491

3.4 MCMC-based \(p(\phi_1+\phi_2+\phi_3|y_1,\ldots,y_n)\)

par(mfrow=c(1,1))
hist(draws[,2]+draws[,3]+draws[,4],xlab="",prob=TRUE,main=expression(phi[1]+phi[2]+phi[3]))

3.5 MCMC-based \(p(y_{n+1},y_{n+2}|y_1,\ldots,y_n)\)

par(mfrow=c(1,1))
ynplus1 = draws[,1]+draws[,2]*y[n]+draws[,3]*y[n-1]+draws[,4]*y[n-2]+rnorm(M,0,sqrt(draws[,5]))
ynplus2 = draws[,1]+draws[,2]*ynplus1+draws[,3]*y[n]+draws[,4]*y[n-1]+rnorm(M,0,sqrt(draws[,5]))
plot(ynplus1,ynplus2,xlab=expression(y[n+1]),ylab=expression(y[n+2]))
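The joint draws also quantify how strongly the two future values move together; a one-line summary is their posterior predictive correlation:

cor(ynplus1,ynplus2)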

3.6 MCMC-based \(p(y_{n+1}+y_{n+2}|y_1,\ldots,y_n)\)

par(mfrow=c(1,1))
hist(ynplus1+ynplus2,xlab=expression(y[n+1]+y[n+2]),main="",prob=TRUE)
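Any functional of the predictive draws is equally direct to evaluate; for instance, the posterior predictive probability that the two-step sum is positive (an illustrative computation):

mean(ynplus1+ynplus2>0)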

3.7 MCMC-based moments of the unconditional distribution of the stationary AR(3) model

sumphi = draws[,2]+draws[,3]+draws[,4]
ind = abs(sumphi)<1   # crude stationarity screen (sum(phi) < 1 is necessary for stationarity)
mu = draws[ind,1]/(1-sumphi[ind])
# NB: the standard-deviation formula below is the AR(1) expression and is only a
# rough approximation for an AR(3); the exact value is computed after the histograms
stdev = sqrt(draws[ind,5]/(1-sumphi[ind]^2))
hist(mu,main="",prob=TRUE,xlab=expression(phi[0]/(1-(phi[1]+phi[2]+phi[3]))))

hist(stdev,main="",prob=TRUE,xlab=expression(sqrt(sigma^2/(1-(phi[1]+phi[2]+phi[3])^2))))
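The mean formula above is exact for a stationary AR(3), whereas the standard deviation borrows the AR(1) formula. The exact unconditional variance solves \(\Gamma_0=A\Gamma_0 A'+\Sigma_e\) in the companion (VAR(1)) form; a sketch, evaluated at the posterior means for brevity:

phihat = colMeans(draws[ind,2:4])
s2hat  = mean(draws[ind,5])
A  = rbind(phihat,c(1,0,0),c(0,1,0))   # companion matrix of the AR(3)
Se = matrix(0,3,3); Se[1,1] = s2hat    # innovation covariance in companion form
Gamma0 = matrix(solve(diag(9)-kronecker(A,A))%*%as.vector(Se),3,3)
sqrt(Gamma0[1,1])                      # exact unconditional standard deviation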

3.8 MCMC-based h-step-ahead forecasting densities

yn1 = y[n]
yn2 = y[n-1]
yn3 = y[n-2]
hmax = 100
yfs = matrix(0,M,hmax)
for (h in 1:hmax){
  yf = draws[,1]+draws[,2]*yn1+draws[,3]*yn2+draws[,4]*yn3+rnorm(M,0,sqrt(draws[,5]))
  yn3 = yn2   # shift the lags oldest-first so earlier values are not overwritten
  yn2 = yn1
  yn1 = yf
  yfs[,h] = yf
}
qyf = t(apply(yfs,2,quantile,c(0.05,0.5,0.95)))

par(mfrow=c(1,1))
plot((n-20):n,y[(n-20):n],xlim=c(n-20,n+hmax),ylim=range(y,qyf),xlab="Time",type="b",ylab="")
abline(v=n,lty=2)
points((n+1):(n+hmax),qyf[,2],pch=16,col=2,cex=0.25)
points((n+1):(n+hmax),qyf[,1],pch=16,col=3,cex=0.25)
points((n+1):(n+hmax),qyf[,3],pch=16,col=3,cex=0.25)
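As a long-horizon sanity check, the forecast median should settle near the unconditional mean of the stationary process; the posterior mean of the level from Section 3.7 can be overlaid (reusing mu from above):

abline(h=mean(mu),lty=3)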