--- title: "Bayesian inference for the AR(3) model" author: "Hedibert Freitas Lopes" date: "1/25/2020" output: html_document: number_sections: true --- ```{r setup, include=FALSE} knitr::opts_chunk$set(echo = TRUE) ``` # Simulating data following a stationary AR(2) model ```{r fig.width=12, fig.height=7} set.seed(12345) n = 200 sig = 1 phi0 = 0 phi1 = 1.75 phi2 = -0.8 #phi1 = 0.9 #phi2 = 0.05 y = rep(0,2*n) for (t in 3:(2*n)) y[t] = phi0+phi1*y[t-1]+phi2*y[t-2]+rnorm(1,0,sig) y = y[(n+1):(2*n)] ts.plot(y) ``` # Maximum likelihood estimaton of AR(3) models The ML estimates of $\phi$ and $\sigma$ are $$ \phi=(0.1076,1.6913,-0.7016,-0.0437)' \ \ \mbox{and} \ \ {\hat \sigma}^2 = 0.9593, $$ with ${\hat \phi}_3$ being marginally statistically insignificant. ```{r fig.width=12, fig.height=7} p = 3 yy = y[(p+1):n] X = cbind(1,y[p:(n-1)]) for (k in 2:p) X = cbind(X,y[(p-k+1):(n-k)]) ols = lm(yy~X-1) summary(ols) ``` # MCMC-based Bayesian inference for AR(3) models ```{r} # Prior hyperparameters a0 = rep(0,p+1) B0 = 100*diag(1,p+1) c0 = 2.5 d0 = 2.5 # initial values phi = rep(0,p+1) # Running our MCMC algorithm for posterior inference M0 = 1000 M = 10000 draws = matrix(0,M0+M,p+2) for (i in 1:(M0+M)){ # full conditional of sigma2 par1 = c0+(n-p)/2 par2 = d0+t(yy-X%*%phi)%*%(yy-X%*%phi)/2 sig2 = 1/rgamma(1,par1,par2) # full conditional of phi V = solve(t(X)%*%X/sig2+solve(B0)) m = V%*%(t(X)%*%yy/sig2+solve(B0)%*%a0) phi = m + t(chol(V))%*%rnorm(p+1) # storing draws draws[i,] = c(phi,sig2) } # Keeping only the final M draws (discarding the first M0 ones) draws = draws[(M0+1):(M0+M),] ``` ## Checking the MCMC draws ```{r fig.width=12, fig.height=7} # Trace plots of the Markov chains par(mfrow=c(3,5)) ts.plot(draws[,1],xlab="MCMC iterations",ylab="",main=expression(phi[0])) ts.plot(draws[,2],xlab="MCMC iterations",ylab="",main=expression(phi[1])) ts.plot(draws[,3],xlab="MCMC iterations",ylab="",main=expression(phi[2])) ts.plot(draws[,4],xlab="MCMC iterations",ylab="",main=expression(phi[3])) ts.plot(sqrt(draws[,5]),xlab="MCMC iterations",ylab="",main=expression(sigma)) # Investigating the autocorrelations of the draws acf(draws[,1],main="") acf(draws[,2],main="") acf(draws[,3],main="") acf(draws[,4],main="") acf(sqrt(draws[,5]),main="") # MCMC-based marginal posterior densities hist(draws[,1],xlab="",prob=TRUE,main=expression(phi[0])) hist(draws[,2],xlab="",prob=TRUE,main=expression(phi[1])) hist(draws[,3],xlab="",prob=TRUE,main=expression(phi[2])) hist(draws[,4],xlab="",prob=TRUE,main=expression(phi[3])) hist(sqrt(draws[,5]),xlab="",prob=TRUE,main=expression(sigma)) ``` ## MCMC-based $E(\phi_1+\phi_2|y_1,\ldots,y_n)$ ```{r} mean(draws[,2]+draws[,3]+draws[,4]) ``` ## MCMC-based $E(\sigma|y_1,\ldots,y_n)$ ```{r} mean(sqrt(draws[,5])) ``` ## MCMC-based $p(\phi_1+\phi_2+\phi_3|y_1,\ldots,y_n)$ ```{r fig.width=12, fig.height=7} par(mfrow=c(1,1)) hist(draws[,2]+draws[,3]+draws[,4],xlab="",prob=TRUE,main=expression(phi[1]+phi[2]+phi[3])) ``` ## MCMC-based $p(y_{n+1},y_{n+2}|y_1,\ldots,y_n)$ ```{r fig.width=12, fig.height=7} par(mfrow=c(1,1)) ynplus1 = draws[,1]+draws[,2]*y[n]+draws[,3]*y[n-1]+draws[,4]*y[n-2]+rnorm(M,0,sqrt(draws[,5])) ynplus2 = draws[,1]+draws[,2]*ynplus1+draws[,3]*y[n]+draws[,4]*y[n-1]+rnorm(M,0,sqrt(draws[,5])) plot(ynplus1,ynplus2,xlab=expression(y[n+1]),ylab=expression(y[n+2])) ``` ## MCMC-based $p(y_{n+1}+y_{n+2}|y_1,\ldots,y_n)$ ```{r fig.width=12, fig.height=7} par(mfrow=c(1,1)) hist(ynplus1+ynplus2,xlab=expression(y[n+1]+y[n+2]),main="",prob=TRUE) ``` ## MCMC-based moments of the 
## MCMC-based moments of the unconditional distribution of the stationary AR(3) model

```{r fig.width=12, fig.height=7}
# Keep only draws with |phi1+phi2+phi3| < 1 (a quick stationarity screen)
sumphi = draws[,2]+draws[,3]+draws[,4]
ind    = abs(sumphi)<1
# Unconditional mean: phi0/(1-phi1-phi2-phi3)
umean = draws[ind,1]/(1-(draws[ind,2]+draws[ind,3]+draws[ind,4]))
# Unconditional sd computed with the AR(1)-style formula based on phi1+phi2+phi3
# (an approximation in the AR(3) case)
usd   = sqrt(draws[ind,5]/(1-(draws[ind,2]+draws[ind,3]+draws[ind,4])^2))
hist(umean,main="",prob=TRUE,xlab=expression(phi[0]/(1-(phi[1]+phi[2]+phi[3]))))
hist(usd,main="",prob=TRUE,xlab=expression(sqrt(sigma^2/(1-(phi[1]+phi[2]+phi[3])^2))))
```

## MCMC-based h-step-ahead forecasting densities

```{r fig.width=12, fig.height=7}
yn1  = y[n]
yn2  = y[n-1]
yn3  = y[n-2]
hmax = 100
yfs  = matrix(0,M,hmax)
for (h in 1:hmax){
  yf  = draws[,1]+draws[,2]*yn1+draws[,3]*yn2+draws[,4]*yn3+rnorm(M,0,sqrt(draws[,5]))
  # shift the lags, oldest first, so each lag keeps the previous value of the one above it
  yn3 = yn2
  yn2 = yn1
  yn1 = yf
  yfs[,h] = yf
}
# 5%, 50% and 95% quantiles of the h-step-ahead forecasting densities
qyf = t(apply(yfs,2,quantile,c(0.05,0.5,0.95)))
par(mfrow=c(1,1))
plot((n-20):n,y[(n-20):n],xlim=c(n-20,n+hmax),ylim=range(y,qyf),xlab="Time",type="b",ylab="")
abline(v=n,lty=2)
points((n+1):(n+hmax),qyf[,2],pch=16,col=2,cex=0.25)
points((n+1):(n+hmax),qyf[,1],pch=16,col=3,cex=0.25)
points((n+1):(n+hmax),qyf[,3],pch=16,col=3,cex=0.25)
```
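## Checking stationarity of the posterior draws

The filter based on $|\phi_1+\phi_2+\phi_3|<1$ used above is only a quick screen; stationarity of an AR(3) process requires all roots of $1-\phi_1 z-\phi_2 z^2-\phi_3 z^3$ to lie outside the unit circle. The sketch below is an added illustration that checks this root condition for each posterior draw with base R's `polyroot`; the reported proportion shows how much posterior mass lies in the stationary region, which is also why the long-horizon forecast quantiles above settle down.

```{r}
# For each posterior draw, check whether all roots of the AR(3) characteristic
# polynomial 1 - phi1*z - phi2*z^2 - phi3*z^3 lie outside the unit circle
stationary = apply(draws[,2:4], 1, function(phi) all(Mod(polyroot(c(1,-phi))) > 1))
mean(stationary)  # proportion of posterior draws in the stationary region
```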