############################################################
#
# AR(1) plus noise model
#
# y(t) = x(t) + e(t)          e(t) ~ N(0,1)
# x(t) = phi*x(t-1) + w(t)    w(t) ~ N(0,1)
#
# with E(e(t)w(l)) = 0 for all t,l
#
# It is not difficult to see that
#
# y|x     ~ N(x,In),            where In is the identity of order n
# x|phi   ~ N(0,B(phi)),        where B(phi) depends only on phi
# y|phi   ~ N(0,Omega(phi)),    where Omega(phi) = In+B(phi)
# x|y,phi ~ N(A(phi)*y,A(phi)), where A(phi) = inv(In+inv(B(phi)))
#
############################################################
# install.packages("mvtnorm")   # run once if the package is not installed
library("mvtnorm")

############################################################
# Likelihood of phi given y1,...,yn
############################################################
pdf(file="AR1plusnoise-figure1.pdf",width=12,height=12)
set.seed(2345)
par(mfrow=c(4,4))
for (j in 1:16){
  # Simulate one dataset and evaluate the likelihood of phi on a grid
  n = 300
  phi = 0.9
  x = rep(0,n)
  x[1] = rnorm(1)
  for (t in 2:n) x[t] = phi*x[t-1]+rnorm(1)
  y = rnorm(n,x,1)
  N = 100
  phis = seq(0.7,1.1,length=N)
  loglike = rep(0,N)
  A = diag(1,n)
  for (i in 1:N){
    for (t in 2:n) A[t,t-1] = -phis[i]
    iA = solve(A)
    Omega = diag(1,n)+iA%*%t(iA)
    loglike[i] = dmvnorm(y,rep(0,n),Omega,log=TRUE)
  }
  like = exp(loglike-max(loglike))
  plot(phis,like,xlab=expression(phi),ylab="Likelihood",type="l")
  legend("topright",legend=c(paste("true=",phi,sep=""),
                             paste("MLE=",round(phis[like==max(like)],2),sep="")))
}
dev.off()

############################################################
# Sampling from the posterior distribution of (x1,...,xn,phi) given (y1,...,yn)
############################################################
# Log marginal likelihood of phi given y1,...,yn:
# y|phi ~ N(0,Omega(phi)) with Omega(phi) = In + B(phi)
loglike = function(phi){
  A = diag(1,n)
  for (t in 2:n) A[t,t-1] = -phi
  iA = solve(A)
  Omega = one + iA%*%t(iA)
  return(dmvnorm(y,zero,Omega,log=TRUE))
}

# Posterior covariance of x given (y,phi): A(phi) = inv(In+inv(B(phi)))
# The corresponding posterior mean is A(phi)%*%y.
Vx = function(phi){
  A = diag(1,n)
  for (t in 2:n) A[t,t-1] = -phi
  iA = solve(A)
  B = iA%*%t(iA)
  return(solve(one+solve(B)))
}

# Simulate the data
set.seed(2345)
n = 100
phi = 0.9
x = rep(0,n)
x[1] = rnorm(1)
for (t in 2:n) x[t] = phi*x[t-1]+rnorm(1)
y = rnorm(n,x,1)

# Step 1: Sampling phi from the posterior distribution of phi given y1,...,yn
# (uniform prior on (0,1) combined with sampling importance resampling)
###########################################################
one = diag(1,n)
zero = rep(0,n)
M = 10000
phidraws = runif(M,0,1)
loglikes = rep(0,M)
for (i in 1:M) loglikes[i] = loglike(phidraws[i])
w = exp(loglikes-max(loglikes))
w = w/sum(w)
phis = sample(phidraws,size=M,replace=TRUE,prob=w)

# Step 2: Sampling x1,...,xn from the posterior distribution of x1,...,xn given y1,...,yn,phi
#####################################################################
xs = matrix(0,M,n)
for (i in 1:M){
  var = Vx(phis[i])
  mean = var%*%y
  xs[i,] = rmvnorm(1,mean,var)
}
qx = t(apply(xs,2,quantile,c(0.025,0.5,0.975)))

pdf(file="AR1plusnoise-figure2.pdf",width=9,height=6)
par(mfrow=c(1,2))
plot(y,type="b",xlab="Time",ylab="",main="y(t) vs x(t)")
lines(x,col=2)
hist(phis,prob=TRUE,main=expression(phi),xlab="")
abline(v=phi,col=4,lwd=2)

par(mfrow=c(1,1))
ts.plot(qx,lty=c(2,1,2),col=4,main="Latent process x(t)")
lines(x,col=2)
legend("topleft",legend=c("x(t)","xhat(t)"),col=c(2,4),lty=1)

par(mfrow=c(1,1))
plot(y,xlab="Time",ylab="",main="",pch=16)
lines(x,col=2)
lines(qx[,2],col=4)
legend("topleft",legend=c("y(t)","x(t)","xhat(t)"),col=c(1,2,4),lty=1)

par(mfrow=c(2,2))
hist(xs[,25],prob=TRUE,main="x(25)",xlab="")
abline(v=x[25],col=2,lwd=2)
hist(xs[,50],prob=TRUE,main="x(50)",xlab="")
abline(v=x[50],col=2,lwd=2)
hist(xs[,75],prob=TRUE,main="x(75)",xlab="")
abline(v=x[75],col=2,lwd=2)
hist(xs[,100],prob=TRUE,main="x(100)",xlab="")
abline(v=x[100],col=2,lwd=2)
dev.off()
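
############################################################
# Optional sanity check (a minimal sketch; the objects n0, phi0,
# A0, B0, etc. are local illustration names, not used above).
# It verifies by Monte Carlo that x ~ N(0,B(phi)) with
# B(phi) = inv(A)%*%t(inv(A)), and checks the algebraic identity
# A(phi) = inv(In+inv(B)) = B%*%inv(In+B), which gives the
# posterior moments of x given (y,phi) used in Step 2.
############################################################
set.seed(1234)
n0   = 5
phi0 = 0.9
A0   = diag(1,n0)
for (t in 2:n0) A0[t,t-1] = -phi0
iA0  = solve(A0)
B0   = iA0%*%t(iA0)

# Monte Carlo covariance of simulated AR(1) paths vs the analytic B(phi)
R   = 50000
xs0 = matrix(0,R,n0)
for (r in 1:R){
  x0    = rep(0,n0)
  x0[1] = rnorm(1)
  for (t in 2:n0) x0[t] = phi0*x0[t-1]+rnorm(1)
  xs0[r,] = x0
}
round(B0,2)          # analytic covariance of x
round(cov(xs0),2)    # Monte Carlo covariance (should be close)

# Two equivalent expressions for the posterior covariance A(phi)
A1 = solve(diag(1,n0)+solve(B0))
A2 = B0%*%solve(diag(1,n0)+B0)
round(max(abs(A1-A2)),10)   # should be essentially zero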