#####################################################################################
#
# DYNAMIC LINEAR MODELING - AR(1) plus noise model
#
#   y(t) = x(t) + e(t)                  e(t) ~ N(0,sig2)
#   x(t) = alpha + beta*x(t-1) + w(t)   w(t) ~ N(0,tau2)
#
# for t=1,...,n and x0 ~ N(m0,C0)
#
#####################################################################################
# Hedibert Freitas Lopes
# Associate Professor of Econometrics and Statistics
# The University of Chicago Booth School of Business
# Email: hlopes@chicagobooth.edu
# URL: http://faculty.chicagobooth.edu/hedibert.lopes
#####################################################################################
rm(list=ls())
set.seed(15497)
pdf(file="AR1plusnoise.pdf",width=10,height=10)

# Simulating the data ----
n     <- 200
alpha <- 0.0
beta  <- 0.98
sig2  <- 1.0
tau2  <- 0.5
x0    <- alpha/(1-beta)
# NOTE: e and w are never used below (states/observations are drawn directly),
# but the calls are kept because they advance the seeded RNG stream.
e <- rnorm(n,0,sqrt(sig2))
w <- rnorm(n,0,sqrt(tau2))
y <- rep(0,n)
x <- rep(0,n)
x[1] <- rnorm(1,alpha+beta*x0,sqrt(tau2))
y[1] <- rnorm(1,x[1],sqrt(sig2))
for (t in 2:n){
  x[t] <- rnorm(1,alpha+beta*x[t-1],sqrt(tau2))
  y[t] <- rnorm(1,x[t],sqrt(sig2))
}
# Keep the simulation truth for later reference lines in the plots
x.true    <- x
sig2.true <- sig2
tau2.true <- tau2
x0.true   <- x0
alpha.true <- alpha
beta.true  <- beta

par(mfrow=c(1,1))
plot(y,xlab="Time",ylab="",pch=16)
lines(x,col=2,type="b",pch=16)

##############################################################
# MCMC scheme for learning x(1),...,x(n),alpha,beta,sig2,tau2
##############################################################
# Prior hyperparameters:
#   x0 ~ N(m0,C0), sig2 ~ IG(nu0/2,nu0*sig20/2), tau2 ~ IG(eta0/2,eta0*tau20/2)
m0    <- 0
C0    <- 100
nu0   <- 10
sig20 <- sig2.true
eta0  <- 10
tau20 <- tau2.true
nu0sig20  <- nu0*sig20
eta0tau20 <- eta0*tau20
nu0n  <- (nu0+n)/2      # posterior IG shape for sig2
eta0n <- (eta0+n)/2     # posterior IG shape for tau2

# Scheme 1: sampling the xt's one at a time (single-move Gibbs)
# Initial values are set at the simulation truth.
x     <- x.true
x0    <- x0.true
alpha <- alpha.true
beta  <- beta.true

# MCMC set up
thin   <- 1
burnin <- 5000
M      <- 5000
niter  <- burnin+thin*M
draws  <- matrix(0,niter,n+5)   # each row: (x(1:n), sig2, tau2, alpha, beta, x0)

for (iter in 1:niter){
  # sig2 | x, y  ~  IG
  sig2 <- 1/rgamma(1,nu0n,(nu0sig20+sum((y-x)^2))/2)
  # tau2 | x, x0, alpha, beta  ~  IG  (xx holds the lagged states x(0),...,x(n-1))
  xx   <- c(x0,x[1:(n-1)])
  tau2 <- 1/rgamma(1,eta0n,(eta0tau20+sum((x-alpha-beta*xx)^2))/2)
  # (alpha,beta) | x, tau2: conjugate regression of x on (1, lagged x),
  # posterior N(m.ab, tau2*V.ab); t(chol(V.ab)) is a lower-triangular factor
  X    <- cbind(1,xx)
  V.ab <- solve(t(X)%*%X)
  m.ab <- V.ab%*%t(X)%*%x
  ab   <- m.ab+t(chol(V.ab))%*%rnorm(2,0,sqrt(tau2))
  alpha <- ab[1]
  beta  <- ab[2]
  # x0 | x(1), alpha, beta, tau2 (combines N(m0,C0) prior with the first transition)
  v.x <- 1/(1/C0+beta^2/tau2)
  m.x <- v.x*(m0/C0+(x[1]-alpha)*beta/tau2)
  x0  <- rnorm(1,m.x,sqrt(v.x))
  # x(1) | x0, x(2), y(1): two transitions plus one observation
  v.x  <- 1/((1+beta^2)/tau2+1/sig2)
  m.x  <- v.x*(beta*(x[2]+x0)/tau2+alpha*(1-beta)/tau2+y[1]/sig2)
  x[1] <- rnorm(1,m.x,sqrt(v.x))
  # x(t) | x(t-1), x(t+1), y(t) for t=2,...,n-1 (variance is t-invariant)
  v.x <- 1/((1+beta^2)/tau2+1/sig2)
  for (t in 2:(n-1)){
    m.x  <- v.x*(beta*(x[t+1]+x[t-1])/tau2+alpha*(1-beta)/tau2+y[t]/sig2)
    x[t] <- rnorm(1,m.x,sqrt(v.x))
  }
  # x(n) | x(n-1), y(n): only one transition plus the last observation
  v.x  <- 1/(1/tau2+1/sig2)
  m.x  <- v.x*((alpha+beta*x[n-1])/tau2+y[n]/sig2)
  x[n] <- rnorm(1,m.x,sqrt(v.x))
  # Store the current state of the chain
  draws[iter,] <- c(x,sig2,tau2,alpha,beta,x0)
}

# Discard burn-in and thin
ind   <- seq(burnin+1,niter,by=thin)
draws <- draws[ind,]

# Trace plots, ACFs and histograms for the static parameters (truth in red)
par(mfrow=c(4,3))
ts.plot(draws[,n+1],xlab="iterations",ylab="",main=expression(sigma^2))
abline(h=sig2.true,col=2)
acf(draws[,n+1],main="")
hist(draws[,n+1],prob=TRUE,main="",xlab="")
abline(v=sig2.true,col=2)
ts.plot(draws[,n+2],xlab="iterations",ylab="",main=expression(tau^2))
abline(h=tau2.true,col=2)
acf(draws[,n+2],main="")
hist(draws[,n+2],prob=TRUE,main="",xlab="")
abline(v=tau2.true,col=2)
ts.plot(draws[,n+3],xlab="iterations",ylab="",main=expression(alpha))
abline(h=alpha.true,col=2)
acf(draws[,n+3],main="")
hist(draws[,n+3],prob=TRUE,main="",xlab="")
abline(v=alpha.true,col=2)
ts.plot(draws[,n+4],xlab="iterations",ylab="",main=expression(beta))
abline(h=beta.true,col=2)
acf(draws[,n+4],main="")
hist(draws[,n+4],prob=TRUE,main="",xlab="")
abline(v=beta.true,col=2)

# Overlaid ACFs for every state x(t), plus posterior quantile bands
par(mfrow=c(2,1))
acf1 <- acf(draws[,1],plot=FALSE)
plot(acf1$acf,type="l",xlab="Lag",ylab="ACF",col=grey(0.75),ylim=c(-0.2,1))
for (i in 2:n){
  acf1 <- acf(draws[,i],plot=FALSE)
  lines(acf1$acf,col=grey(0.75))
}
title("ACF for each x(t)")
abline(h=0,lty=3)
qx <- t(apply(draws[,1:n],2,quantile,c(0.025,0.5,0.975)))
ts.plot(qx)
# Scheme 2: sampling x(1),...,x(n) jointly (the FFBS scheme)
# Initial values are reset to the simulation truth.
x     <- x.true
x0    <- x0.true
alpha <- alpha.true
beta  <- beta.true

# MCMC set up
thin   <- 1
burnin <- 5000
M      <- 5000
niter  <- burnin+thin*M
draws1 <- matrix(0,niter,n+5)   # each row: (x(1:n), sig2, tau2, alpha, beta, x0)
mts    <- rep(0,n)              # filtered means m(t)
Cts    <- rep(0,n)              # filtered variances C(t)

for (iter in 1:niter){
  # sig2 | x, y  ~  IG
  sig2 <- 1/rgamma(1,nu0n,(nu0sig20+sum((y-x)^2))/2)
  # tau2 | x, x0, alpha, beta  ~  IG
  xx   <- c(x0,x[1:(n-1)])
  tau2 <- 1/rgamma(1,eta0n,(eta0tau20+sum((x-alpha-beta*xx)^2))/2)
  # (alpha,beta) | x, tau2: conjugate regression draw, N(m.ab, tau2*V.ab)
  X    <- cbind(1,xx)
  V.ab <- solve(t(X)%*%X)
  m.ab <- V.ab%*%t(X)%*%x
  ab   <- m.ab+t(chol(V.ab))%*%rnorm(2,0,sqrt(tau2))
  alpha <- ab[1]
  beta  <- ab[2]
  # x0 | x(1), alpha, beta, tau2
  v.x0 <- 1/(1/C0+beta^2/tau2)
  m.x0 <- v.x0*(m0/C0+(x[1]-alpha)*beta/tau2)
  x0   <- rnorm(1,m.x0,sqrt(v.x0))
  # Forward filtering: Kalman recursions storing m(t), C(t)
  mt <- m0
  Ct <- C0
  for (t in 1:n){
    at <- alpha+beta*mt          # prior mean of x(t)
    Rt <- beta^2*Ct+tau2         # prior variance of x(t)
    Qt <- Rt+sig2                # predictive variance of y(t)
    At <- Rt/Qt                  # Kalman gain
    mt <- (1-At)*at + At*y[t]
    Ct <- Rt - At^2*Qt
    mts[t] <- mt
    Cts[t] <- Ct
  }
  # Backward sampling: draw x(n), then x(t) | x(t+1), y(1:t) for t=n-1,...,1
  x[n] <- rnorm(1,mts[n],sqrt(Cts[n]))
  for (t in (n-1):1){
    v.b  <- 1/(beta^2/tau2+1/Cts[t])
    m.b  <- v.b*(beta*(x[t+1]-alpha)/tau2+mts[t]/Cts[t])
    x[t] <- rnorm(1,m.b,sqrt(v.b))
  }
  # Store the current state of the chain
  draws1[iter,] <- c(x,sig2,tau2,alpha,beta,x0)
}

# Discard burn-in and thin
ind    <- seq(burnin+1,niter,by=thin)
draws1 <- draws1[ind,]

# Trace plots, ACFs and histograms for the static parameters (truth in red)
par(mfrow=c(4,3))
ts.plot(draws1[,n+1],xlab="iterations",ylab="",main=expression(sigma^2))
abline(h=sig2.true,col=2)
acf(draws1[,n+1],main="")
hist(draws1[,n+1],prob=TRUE,main="",xlab="")
abline(v=sig2.true,col=2)
ts.plot(draws1[,n+2],xlab="iterations",ylab="",main=expression(tau^2))
abline(h=tau2.true,col=2)
acf(draws1[,n+2],main="")
hist(draws1[,n+2],prob=TRUE,main="",xlab="")
abline(v=tau2.true,col=2)
ts.plot(draws1[,n+3],xlab="iterations",ylab="",main=expression(alpha))
abline(h=alpha.true,col=2)
acf(draws1[,n+3],main="")
hist(draws1[,n+3],prob=TRUE,main="",xlab="")
abline(v=alpha.true,col=2)
ts.plot(draws1[,n+4],xlab="iterations",ylab="",main=expression(beta))
abline(h=beta.true,col=2)
acf(draws1[,n+4],main="")
hist(draws1[,n+4],prob=TRUE,main="",xlab="")
abline(v=beta.true,col=2)

# Overlaid ACFs for every state x(t), plus posterior quantile bands
par(mfrow=c(2,1))
acf1 <- acf(draws1[,1],plot=FALSE)
plot(acf1$acf,type="l",xlab="Lag",ylab="ACF",col=grey(0.75),ylim=c(-0.2,1))
for (i in 2:n){
  acf1 <- acf(draws1[,i],plot=FALSE)
  lines(acf1$acf,col=grey(0.75))
}
title("ACF for each x(t)")
abline(h=0,lty=3)
qx <- t(apply(draws1[,1:n],2,quantile,c(0.025,0.5,0.975)))
ts.plot(qx)

# Comparing both MCMC schemes: state ACFs and quantile bands side by side
par(mfrow=c(2,2))
acf1 <- acf(draws[,1],plot=FALSE)
plot(acf1$acf,type="l",xlab="Lag",ylab="ACF",col=grey(0.75),ylim=c(-0.2,1))
for (i in 2:n){
  acf1 <- acf(draws[,i],plot=FALSE)
  lines(acf1$acf,col=grey(0.75))
}
title("ACF for each x(t)")
abline(h=0,lty=3)
qx <- t(apply(draws[,1:n],2,quantile,c(0.025,0.5,0.975)))
ts.plot(qx)
acf1 <- acf(draws1[,1],plot=FALSE)
plot(acf1$acf,type="l",xlab="Lag",ylab="ACF",col=grey(0.75),ylim=c(-0.2,1))
for (i in 2:n){
  acf1 <- acf(draws1[,i],plot=FALSE)
  lines(acf1$acf,col=grey(0.75))
}
title("ACF for each x(t)")
abline(h=0,lty=3)
qx <- t(apply(draws1[,1:n],2,quantile,c(0.025,0.5,0.975)))
ts.plot(qx)

# Marginal posterior densities from both schemes (scheme 1 black, FFBS red, truth blue)
par(mfrow=c(2,2))
plot(density(draws[,n+1]),xlab="",main=expression(sigma^2))
lines(density(draws1[,n+1]),col=2)
abline(v=sig2.true,col=4)
plot(density(draws[,n+2]),xlab="",main=expression(tau^2))
lines(density(draws1[,n+2]),col=2)
abline(v=tau2.true,col=4)
plot(density(draws[,n+3]),xlab="",main=expression(alpha))
lines(density(draws1[,n+3]),col=2)
abline(v=alpha.true,col=4)
plot(density(draws[,n+4]),xlab="",main=expression(beta))
lines(density(draws1[,n+4]),col=2)
abline(v=beta.true,col=4)
dev.off()