#####################################################################################
# DYNAMIC LINEAR MODELING - AR(1) plus noise model
#
#   y(t) = x(t) + e(t)                    e(t) ~ N(0, sig2)
#   x(t) = alpha + beta*x(t-1) + w(t)     w(t) ~ N(0, tau2)
#
# for t = 1, ..., n and x0 ~ N(m0, C0)
#####################################################################################
# Hedibert Freitas Lopes
# Associate Professor of Econometrics and Statistics
# The University of Chicago Booth School of Business
# Email: hlopes@chicagobooth.edu
# URL: http://faculty.chicagobooth.edu/hedibert.lopes
#####################################################################################

# Gibbs sampler for the AR(1)-plus-noise model above.
#
# Full conditionals sampled per iteration, in this order:
#   sig2        ~ inverse-gamma (conjugate, prior IG(nu0/2, nu0*sig20/2))
#   tau2        ~ inverse-gamma (conjugate, prior IG(eta0/2, eta0*tau20/2))
#   (alpha,beta)~ bivariate normal regression of x on (1, x_lagged), cov tau2*(X'X)^{-1}
#   x0          ~ normal, combining N(m0, C0) prior with the x(1) transition
#   x(1..n)     ~ single-site normal updates (each state given its neighbors and y(t))
#
# Arguments:
#   y                  observed series (length n)
#   x, x0, alpha, beta initial values for the latent states and AR parameters
#   m0, C0             prior mean/variance of x0
#   nu0, sig20         prior d.f. and scale for sig2
#   eta0, tau20        prior d.f. and scale for tau2
#   burnin, thin, M    MCMC schedule: burnin + thin*M iterations are run,
#                      and M thinned post-burn-in draws are returned
#
# Returns: an M x (n+5) matrix; columns 1..n are x(1..n), then sig2, tau2,
#          alpha, beta, x0.
gibbs_ar1_noise <- function(y, x, x0, alpha, beta,
                            m0, C0, nu0, sig20, eta0, tau20,
                            burnin, thin, M) {
  n <- length(y)
  nu0sig20  <- nu0 * sig20
  eta0tau20 <- eta0 * tau20
  nu0n  <- (nu0 + n) / 2
  eta0n <- (eta0 + n) / 2
  niter <- burnin + thin * M
  draws <- matrix(0, niter, n + 5)
  for (iter in seq_len(niter)) {
    # Learning sig2 (observation variance)
    sig2 <- 1 / rgamma(1, nu0n, (nu0sig20 + sum((y - x)^2)) / 2)
    # Learning tau2 (evolution variance); xx holds the lagged states x(t-1)
    xx <- c(x0, x[1:(n - 1)])
    tau2 <- 1 / rgamma(1, eta0n, (eta0tau20 + sum((x - alpha - beta * xx)^2)) / 2)
    # Learning (alpha, beta): conditional posterior N(m, tau2*v);
    # t(chol(v)) is the lower Cholesky factor, so m + t(chol(v)) %*% z with
    # z ~ N(0, tau2*I) has the required covariance tau2*v.
    X <- cbind(1, xx)
    v <- solve(t(X) %*% X)
    m <- v %*% t(X) %*% x
    ab <- m + t(chol(v)) %*% rnorm(2, 0, sqrt(tau2))
    alpha <- ab[1]
    beta  <- ab[2]
    # Learning x0: N(m0, C0) prior combined with the transition to x(1)
    cvar  <- 1 / (1 / C0 + beta^2 / tau2)
    cmean <- cvar * (m0 / C0 + (x[1] - alpha) * beta / tau2)
    x0 <- rnorm(1, cmean, sqrt(cvar))
    # Learning x(1): transitions from x0 and to x(2), plus the y(1) likelihood
    cvar  <- 1 / ((1 + beta^2) / tau2 + 1 / sig2)
    cmean <- cvar * (beta * (x[2] + x0) / tau2 +
                       alpha * (1 - beta) / tau2 + y[1] / sig2)
    x[1] <- rnorm(1, cmean, sqrt(cvar))
    # Learning x(2)..x(n-1): interior states share the same conditional variance
    cvar <- 1 / ((1 + beta^2) / tau2 + 1 / sig2)
    for (t in 2:(n - 1)) {
      cmean <- cvar * (beta * (x[t + 1] + x[t - 1]) / tau2 +
                         alpha * (1 - beta) / tau2 + y[t] / sig2)
      x[t] <- rnorm(1, cmean, sqrt(cvar))
    }
    # Learning x(n): only the transition from x(n-1) and the y(n) likelihood
    cvar  <- 1 / (1 / tau2 + 1 / sig2)
    cmean <- cvar * ((alpha + beta * x[n - 1]) / tau2 + y[n] / sig2)
    x[n] <- rnorm(1, cmean, sqrt(cvar))
    # Storing the draws
    draws[iter, ] <- c(x, sig2, tau2, alpha, beta, x0)
  }
  # Discard burn-in and thin
  draws[seq(burnin + 1, niter, by = thin), ]
}

# Trace / ACF / histogram panels for the four static parameters.
# `truth`, when supplied, is c(sig2, tau2, alpha, beta) and is drawn as red
# reference lines; for real data leave it NULL (no known truth).
# NOTE: the original script drew the *simulation* truths on the real-data
# plots, which was a copy-paste defect; the optional `truth` fixes that.
plot_param_diagnostics <- function(draws, n, truth = NULL) {
  labels <- c(expression(sigma^2), expression(tau^2),
              expression(alpha), expression(beta))
  par(mfrow = c(4, 3))
  for (j in 1:4) {
    ts.plot(draws[, n + j], xlab = "iterations", ylab = "", main = labels[j])
    if (!is.null(truth)) abline(h = truth[j], col = 2)
    acf(draws[, n + j], main = "")
    hist(draws[, n + j], prob = TRUE, main = "", xlab = "")
    if (!is.null(truth)) abline(v = truth[j], col = 2)
  }
}

# Overlay the MCMC autocorrelation function of every latent state x(t).
plot_state_acf <- function(draws, n) {
  acf1 <- acf(draws[, 1], plot = FALSE)
  plot(acf1$acf, type = "l", xlab = "Lag", ylab = "ACF",
       col = grey(0.75), ylim = c(-0.2, 1))
  for (i in 2:n) {
    lines(acf(draws[, i], plot = FALSE)$acf, col = grey(0.75))
  }
  title("ACF for each x(t)")
  abline(h = 0, lty = 3)
}

pdf(file = "hw3-solution.pdf", width = 13, height = 10)

set.seed(15497)

# Simulating the data ----
n     <- 200
alpha <- 0
beta  <- 0.95
sig2  <- 1
tau2  <- 0.25
x0    <- alpha / (1 - beta)  # stationary mean of the AR(1) state
e <- rnorm(n, 0, sqrt(sig2))
w <- rnorm(n, 0, sqrt(tau2))
y <- rep(0, n)
x <- rep(0, n)
x[1] <- rnorm(1, alpha + beta * x0, sqrt(tau2))
y[1] <- rnorm(1, x[1], sqrt(sig2))
for (t in 2:n) {
  x[t] <- rnorm(1, alpha + beta * x[t - 1], sqrt(tau2))
  y[t] <- rnorm(1, x[t], sqrt(sig2))
}
x.true     <- x
sig2.true  <- sig2
tau2.true  <- tau2
x0.true    <- x0
alpha.true <- alpha
beta.true  <- beta

par(mfrow = c(1, 1))
plot(y, xlab = "Time", ylab = "", pch = 16)
lines(x, col = 2, type = "b", pch = 16)

##############################################################
# MCMC scheme for learning x(1),...,x(n),alpha,beta,sig2,tau2
##############################################################
# Priors are centered at the true values (this is a simulation check);
# the chain is initialized at the truth as well.
M <- 1000
draws <- gibbs_ar1_noise(y, x = x.true, x0 = x0.true,
                         alpha = alpha.true, beta = beta.true,
                         m0 = 0, C0 = 100,
                         nu0 = 5, sig20 = sig2.true,
                         eta0 = 5, tau20 = tau2.true,
                         burnin = 10000, thin = 10, M = M)

plot_param_diagnostics(draws, n,
                       truth = c(sig2.true, tau2.true, alpha.true, beta.true))

par(mfrow = c(2, 1))
plot_state_acf(draws, n)
# Pointwise 95% credible band and median for the latent states
qx <- t(apply(draws[, 1:n], 2, quantile, c(0.025, 0.5, 0.975)))
ts.plot(qx)

# REAL DATA ----
# Columns are realized volatilities at 5/10/20-minute sampling; work on the
# log scale so the AR(1)-plus-noise model is a log-volatility model.
rv <- matrix(scan("rv-alcoa.txt"), byrow = TRUE, ncol = 3)
rv <- log(rv)
n  <- nrow(rv)
par(mfrow = c(1, 1))
ts.plot(rv, col = 1:3)

draws.all <- array(0, c(3, M, n + 5))
for (column in 1:3) {
  y <- rv[, column]
  # Initial values: states at the data, random-walk-ish AR parameters;
  # priors centered at the initial variance guesses.
  draws <- gibbs_ar1_noise(y, x = y, x0 = y[1],
                           alpha = 0, beta = 1,
                           m0 = 0, C0 = 100,
                           nu0 = 5, sig20 = 1,
                           eta0 = 5, tau20 = 0.5,
                           burnin = 10000, thin = 10, M = M)
  # No truth reference lines here: these are real data.
  plot_param_diagnostics(draws, n)
  par(mfrow = c(2, 1))
  plot_state_acf(draws, n)
  qx <- t(apply(draws[, 1:n], 2, quantile, c(0.025, 0.5, 0.975)))
  ts.plot(qx)
  draws.all[column, , ] <- draws
}

# Smoothed log-volatility bands for the three sampling frequencies
# (0.975 upper quantile; the original had a 0.97 typo).
par(mfrow = c(3, 1))
series_names <- c("5 min", "10 min", "20 min")
for (i in 1:3) {
  qx <- t(apply(draws.all[i, , 1:n], 2, quantile, c(0.025, 0.5, 0.975)))
  ts.plot(qx, col = c(3, 2, 3), xlab = "Day", main = series_names[i],
          ylab = "Log volatility")
}

# Marginal posteriors of the static parameters, common x-range per parameter
pars <- c("sig2", "tau2", "alpha", "beta")
par(mfrow = c(3, 4))
for (i in 1:3) {
  for (j in 1:4) {
    hist(draws.all[i, , n + j], xlim = range(draws.all[, , n + j]),
         prob = TRUE, xlab = pars[j], main = series_names[i])
    box()
  }
}

dev.off()