######################################################
#
#  Stationary Gaussian AR(1) process
#  MLE and conjugate Bayesian inference
#
#  z(t) = alpha + beta*z(t-1) + e(t)
#  e(t) ~ N(0,sig^2)
#
#  sig assumed known
#
######################################################

######################################################
# Simulating stationary Gaussian AR(1) data
######################################################
set.seed(1234)
n     = 100
alpha = 0
beta  = 0.9
sig   = 1
z     = rep(0,n)
z[1]  = alpha/(1-beta)          # start at the unconditional mean
e     = rnorm(n,0,sig)
for (t in 2:n)
  z[t] = alpha + beta*z[t-1] + e[t]

par(mfrow=c(2,2))
ts.plot(z,main="Simulated AR(1) data")

######################################################
# Classical inference approach
######################################################
# Conditional MLE = OLS regression of z(t) on (1, z(t-1))
y = z[2:n]
X = cbind(1,z[1:(n-1)])
th.mle   = solve(t(X)%*%X)%*%t(X)%*%y
V.th.mle = sig^2*solve(t(X)%*%X)
corr.mle.alpha.beta = V.th.mle[1,2]/sqrt(V.th.mle[1,1]*V.th.mle[2,2])

# Likelihood contour plot over a grid centered at the MLE
M = 200
alphas = seq(th.mle[1]-3*sqrt(V.th.mle[1,1]),
             th.mle[1]+3*sqrt(V.th.mle[1,1]),length=M)
betas  = seq(th.mle[2]-3*sqrt(V.th.mle[2,2]),
             th.mle[2]+3*sqrt(V.th.mle[2,2]),length=M)
like = matrix(0,M,M)
for (i in 1:M)
  for (j in 1:M)
    like[i,j] = prod(dnorm(y,alphas[i]+betas[j]*X[,2],sig))
contour(alphas,betas,like,xlab=expression(alpha),ylab=expression(beta))
title("Likelihood function")
points(th.mle[1],th.mle[2],pch=16)        # MLE (black)
points(alpha,beta,pch=16,col=2)           # true values (red)

###########################
# Bayesian approach
###########################
# Conjugate normal prior: (alpha,beta) ~ N(b0,B0), sig known
b0 = c(0,0.8)
B0 = diag(0.01,2)
# A priori, alpha is in (0-2*0.1,0+2*0.1)=(-0.2,0.2) with 95% probability
# A priori, beta  is in (0.8-2*0.1,0.8+2*0.1)=(0.6,1.0) with 95% probability
alphas1 = seq(b0[1]-3*sqrt(B0[1,1]),
              b0[1]+3*sqrt(B0[1,1]),length=M)
betas1  = seq(b0[2]-3*sqrt(B0[2,2]),
              b0[2]+3*sqrt(B0[2,2]),length=M)
prior = matrix(0,M,M)
for (i in 1:M)
  for (j in 1:M)
    prior[i,j] = prod(dnorm(c(alphas1[i],betas1[j]),b0,sqrt(diag(B0))))
image(alphas1,betas1,prior,xlab=expression(alpha),ylab=expression(beta))
contour(alphas,betas,like,add=TRUE,drawlabels=FALSE)
title("Prior vs likelihood")

# Closed-form normal posterior: (alpha,beta)|data ~ N(th.bayes,V.th.bayes)
V.th.bayes = solve(t(X)%*%X/sig^2+solve(B0))
th.bayes   = V.th.bayes%*%(t(X)%*%y/sig^2+solve(B0)%*%b0)
corr.bayes.alpha.beta = V.th.bayes[1,2]/sqrt(V.th.bayes[1,1]*V.th.bayes[2,2])
corr.bayes.alpha.beta

# Unnormalized posterior on the likelihood grid: likelihood x prior
post = matrix(0,M,M)
for (i in 1:M)
  for (j in 1:M)
    post[i,j] = like[i,j]*prod(dnorm(c(alphas[i],betas[j]),b0,sqrt(diag(B0))))
image(alphas1,betas1,prior,xlab=expression(alpha),ylab=expression(beta))
contour(alphas,betas,post,add=TRUE,drawlabels=FALSE)
title("Prior vs posterior")
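
######################################################
# Added sketch (illustration only, not part of the
# original analysis): two hedged sanity checks.
# 1. arima() with method="CSS" solves essentially the
#    same conditional least-squares problem, so its
#    estimates should be close to th.mle; note arima
#    reports the process mean, not the intercept alpha.
# 2. The posterior here is exactly Gaussian, so 95%
#    credible intervals follow from th.bayes and
#    V.th.bayes; Monte Carlo draws are used only to
#    summarize the nonlinear stationary mean
#    alpha/(1-beta).
######################################################
fit.css = arima(z,order=c(1,0,0),method="CSS",include.mean=TRUE)
co = coef(fit.css)
# alpha = mu*(1-beta) under arima's mean parameterization
cbind(th.mle,c(co["intercept"]*(1-co["ar1"]),co["ar1"]))

# Exact marginal 95% credible intervals (Gaussian posterior, known sig)
cbind(th.bayes-1.96*sqrt(diag(V.th.bayes)),
      th.bayes+1.96*sqrt(diag(V.th.bayes)))

# Draws from N(th.bayes,V.th.bayes) via Cholesky, then the
# implied posterior of the stationary mean alpha/(1-beta)
ndraw = 10000
L = t(chol(V.th.bayes))
draws = matrix(th.bayes,2,ndraw)+L%*%matrix(rnorm(2*ndraw),2,ndraw)
mu.draws = draws[1,]/(1-draws[2,])
quantile(mu.draws,c(0.025,0.5,0.975))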