###############################################################
# Smooth kernel (shrinkage) approximation of p(theta|y^n),
# of the kind used by the Liu-West filter below
###############################################################
set.seed(2425)
par(mfrow=c(2,2))
for (n in c(10,20,50,100)){
  x = rnorm(n)
  a = 0.9
  xbar = mean(x)
  V = (1-a^2)*var(x)
  m = a*x+(1-a)*xbar
  xx = seq(-5,5,length=1000)
  fx = rep(0,1000)
  for (i in 1:1000) fx[i] = mean(dnorm(xx[i],m,sqrt(V)))
  plot(xx,fx,col=2,type="l",xlab="",ylab="Density")
  title(paste("n=",n))
  lines(xx,dnorm(xx),col=4)
  for (i in 1:n) points(x[i],0,pch=16,cex=0.5)
}

#################################################################
# Parameter learning: resampling, but not propagating parameters
#################################################################
APF1 = function(y,x,phi,V,W){
  M = length(x)
  n = length(y)
  xs = matrix(0,M,n)
  phis = matrix(0,M,n)
  nunique = rep(0,n)
  for (t in 1:n){
    # compute first-stage weights and resample
    w = dnorm(y[t],phi*x,sqrt(V))
    k = sample(1:M,size=M,replace=TRUE,prob=w)
    xx = x[k]
    phi1 = phi[k]
    # propagate the states (parameters are only copied)
    x1 = rnorm(M,phi1*xx,sqrt(W))
    # compute second-stage weights and resample
    w = dnorm(y[t],phi1*x1,sqrt(V))/dnorm(y[t],phi1*xx,sqrt(V))
    k = sample(1:M,size=M,replace=TRUE,prob=w)
    x = x1[k]
    phi = phi1[k]
    xs[,t] = x
    phis[,t] = phi
    nunique[t] = length(unique(phi))  # degeneracy diagnostic
  }
  return(list(xs=xs,phis=phis,unique=nunique))
}

##############################################################################
# Parameter learning: adding artificial dynamics for time-invariant parameters
##############################################################################
APF2 = function(y,x,phi,V,W,sd){
  M = length(x)
  n = length(y)
  xs = matrix(0,M,n)
  phis = matrix(0,M,n)
  nunique = rep(0,n)
  for (t in 1:n){
    # compute first-stage weights and resample
    w = dnorm(y[t],phi*x,sqrt(V))
    k = sample(1:M,size=M,replace=TRUE,prob=w)
    xx = x[k]
    phi1 = phi[k]
    # propagate: jitter the parameter, then draw the state
    phi1 = phi1 + rnorm(M,0,sd)
    x1 = rnorm(M,phi1*xx,sqrt(W))
    # compute second-stage weights and resample
    w = dnorm(y[t],phi1*x1,sqrt(V))/dnorm(y[t],phi1*xx,sqrt(V))
    k = sample(1:M,size=M,replace=TRUE,prob=w)
    x = x1[k]
    phi = phi1[k]
    xs[,t] = x
    phis[,t] = phi
    nunique[t] = length(unique(phi))  # degeneracy diagnostic
  }
  return(list(xs=xs,phis=phis,unique=nunique))
}

##############################################################################
# Parameter learning: Liu and West filter
##############################################################################
APF3 = function(y,x,phi,V,W,a){
  M = length(x)
  n = length(y)
  xs = matrix(0,M,n)
  phis = matrix(0,M,n)
  nunique = rep(0,n)
  for (t in 1:n){
    # shrinkage kernel locations and common variance
    phibar = mean(phi)
    Vphi = (1-a^2)*var(phi)
    mphi = a*phi+(1-a)*phibar
    # compute first-stage weights and resample
    w = dnorm(y[t],mphi*x,sqrt(V))
    k = sample(1:M,size=M,replace=TRUE,prob=w)
    xx = x[k]
    mphi1 = mphi[k]
    # propagate: draw the parameter from its kernel, then the state
    phi1 = rnorm(M,mphi1,sqrt(Vphi))
    x1 = rnorm(M,phi1*xx,sqrt(W))
    # compute second-stage weights and resample
    w = dnorm(y[t],phi1*x1,sqrt(V))/dnorm(y[t],mphi1*xx,sqrt(V))
    k = sample(1:M,size=M,replace=TRUE,prob=w)
    x = x1[k]
    phi = phi1[k]
    xs[,t] = x
    phis[,t] = phi
    nunique[t] = length(unique(phi))  # degeneracy diagnostic
  }
  return(list(xs=xs,phis=phis,unique=nunique))
}
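##############################################################################
# Added sketch (not in the original code): a quick sanity check that the
# Liu-West shrinkage kernel preserves the first two moments of the particle
# cloud. The mixture of N(a*phi+(1-a)*phibar,(1-a^2)*var(phi)) kernels has
# mean phibar exactly and variance var(phi) up to the M vs M-1 divisor in
# var(). The function name and test values below are illustrative only.
##############################################################################
check.lw.moments = function(phi,a=0.9){
  m = a*phi + (1-a)*mean(phi)           # kernel locations
  V = (1-a^2)*var(phi)                  # common kernel variance
  draws = rnorm(length(phi),m,sqrt(V))  # one draw from each kernel
  c(mean.particles=mean(phi),mean.kernels=mean(draws),
    var.particles=var(phi),var.kernels=var(draws))
}
# check.lw.moments(rnorm(100000,0.8,0.2))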
##############################################################################
# Particle learning: optimal filtering + sufficient statistics
##############################################################################
APF4 = function(y,x,V,W,f0,F0){
  n = length(y)
  M = length(x)
  xs = matrix(0,M,n)
  phis = matrix(0,M,n)
  s1s = matrix(0,M,n)
  s2s = matrix(0,M,n)
  S1 = rep(0,M)
  S2 = rep(0,M)
  # draw phi from its prior N(f0,F0) (S1 = S2 = 0 at time zero)
  VV = 1/(1/F0+S1/W)
  mm = VV*(f0/F0+S2/W)
  phi = rnorm(M,mm,sqrt(VV))
  for (t in 1:n){
    # compute predictive (fully adapted) weights and resample
    w = dnorm(y[t],phi*x,sqrt(V+W))
    k = sample(1:M,size=M,replace=TRUE,prob=w)
    xx = x[k]
    phi1 = phi[k]
    S11 = S1[k]
    S22 = S2[k]
    # propagate from the exact conditional p(x[t]|x[t-1],phi,y[t])
    VV = 1/(1/V+1/W)
    mm = VV*(y[t]/V+phi1*xx/W)
    x = rnorm(M,mm,sqrt(VV))
    # update the sufficient statistics
    S1 = S11 + xx^2
    S2 = S22 + xx*x
    # sample phi from its full conditional
    VV = 1/(1/F0+S1/W)
    mm = VV*(f0/F0+S2/W)
    phi = rnorm(M,mm,sqrt(VV))
    # store the particles
    phis[,t] = phi
    xs[,t] = x
    s1s[,t] = S1
    s2s[,t] = S2
  }
  return(list(xs=xs,s1s=s1s,s2s=s2s,phis=phis))
}

##############################################################################
# Bayesian inference via MCMC (FFBS: forward filtering, backward sampling)
##############################################################################
ffbs = function(y,m0,C0,phi,V,W){
  n = length(y)
  mf = rep(0,n)
  Cf = rep(0,n)
  m = m0
  C = C0
  # forward filtering
  for (t in 1:n){
    a = phi*m
    R = phi^2*C+W
    C = 1/(1/R+1/V)
    m = C*(a/R+y[t]/V)
    Cf[t] = C
    mf[t] = m
  }
  # backward sampling
  x = rep(0,n)
  x[n] = rnorm(1,mf[n],sqrt(Cf[n]))
  for (t in (n-1):1){
    var = 1/(1/Cf[t]+phi^2/W)
    mean = var*(mf[t]/Cf[t]+phi*x[t+1]/W)
    x[t] = rnorm(1,mean,sqrt(var))
  }
  return(x)
}

###############################################################
# Simulating some data
###############################################################
set.seed(31415)
n = 100
phi = 0.8
W = 1
V = 2
x0 = 0
x = rep(0,n)
x[1] = rnorm(1,phi*x0,sqrt(W))
for (t in 2:n) x[t] = rnorm(1,phi*x[t-1],sqrt(W))
y = rnorm(n,x,sqrt(V))
x.true = x
# optional outliers:
#y[11] = 20
#y[12] = 0
#y[13] = 20
plot(y,type="b",xlab="Time")

###############################################################
# Posterior inference
###############################################################
# Priors:
# x0  ~ N(m0,C0)
# phi ~ N(phi0,Vphi)
m0 = 0
C0 = 9
phi0 = 0.9
Vphi = 1.0

# Particle filters
set.seed(1234)
M = 10000
x = rnorm(M,m0,sqrt(C0))
phi = rnorm(M,phi0,sqrt(Vphi))
run1 = APF1(y,x,phi,V,W)
qphi1 = t(apply(run1$phis,2,quantile,c(0.1,0.5,0.9)))
qx1 = t(apply(run1$xs,2,quantile,c(0.1,0.5,0.9)))
par(mfrow=c(1,2))
ts.plot(qphi1)
abline(h=0.8,col=2)
ts.plot(qx1)
lines(x.true,col=2)
particle.degeneracy1 = 100*run1$unique/M

set.seed(1234)
M = 10000
sd = 0.01
x = rnorm(M,m0,sqrt(C0))
phi = rnorm(M,phi0,sqrt(Vphi))
run2 = APF2(y,x,phi,V,W,sd)
qphi2 = t(apply(run2$phis,2,quantile,c(0.1,0.5,0.9)))
qx2 = t(apply(run2$xs,2,quantile,c(0.1,0.5,0.9)))
par(mfrow=c(1,2))
ts.plot(qphi2)
abline(h=0.8,col=2)
ts.plot(qx2)
lines(x.true,col=2)
particle.degeneracy2 = 100*run2$unique/M

set.seed(1234)
M = 10000
a = 0.9
x = rnorm(M,m0,sqrt(C0))
phi = rnorm(M,phi0,sqrt(Vphi))
run3 = APF3(y,x,phi,V,W,a)
qphi3 = t(apply(run3$phis,2,quantile,c(0.1,0.5,0.9)))
qx3 = t(apply(run3$xs,2,quantile,c(0.1,0.5,0.9)))
par(mfrow=c(1,2))
ts.plot(qphi3)
abline(h=0.8,col=2)
ts.plot(qx3)
lines(x.true,col=2)
particle.degeneracy3 = 100*run3$unique/M

set.seed(1234)
M = 10000
x = rnorm(M,m0,sqrt(C0))
run4 = APF4(y,x,V,W,phi0,Vphi)
qphi4 = t(apply(run4$phis,2,quantile,c(0.1,0.5,0.9)))
qx4 = t(apply(run4$xs,2,quantile,c(0.1,0.5,0.9)))
par(mfrow=c(1,2))
ts.plot(qphi4)
abline(h=0.8,col=2)
ts.plot(qx4)
lines(x.true,col=2)
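##############################################################################
# Added sketch (not in the original code): with phi fixed, this model is
# linear and Gaussian, so the filtering moments are exact via the Kalman
# recursions (the same forward pass used inside ffbs above). Holding phi at
# the true value 0.8 gives a benchmark for the state quantiles plotted above.
##############################################################################
kalman = function(y,m0,C0,phi,V,W){
  n = length(y)
  mf = rep(0,n)
  Cf = rep(0,n)
  m = m0
  C = C0
  for (t in 1:n){
    a = phi*m            # predicted mean
    R = phi^2*C+W        # predicted variance
    C = 1/(1/R+1/V)      # filtered variance
    m = C*(a/R+y[t]/V)   # filtered mean
    mf[t] = m
    Cf[t] = C
  }
  return(list(mf=mf,Cf=Cf))
}
# kf = kalman(y,m0,C0,0.8,V,W)
# lines(kf$mf,col=6)  # overlay the exact filtered mean on the qx4 panel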
# comparison
par(mfrow=c(1,1))
ts.plot(qphi1,ylim=range(qphi1,qphi2,qphi3,qphi4))
for (j in 1:3) lines(qphi2[,j],col=2)
for (j in 1:3) lines(qphi3[,j],col=3)
for (j in 1:3) lines(qphi4[,j],col=4)
legend("bottomright",legend=c("Particle degeneracy","Artificial dynamics","LW","PL"),col=1:4,lty=1)

degeneracy = cbind(particle.degeneracy1,particle.degeneracy2,particle.degeneracy3)
ts.plot(degeneracy,col=1:3,ylim=range(0,100))
legend("topright",legend=c("Particle degeneracy","Artificial dynamics","LW"),col=1:3,lty=1)

# MCMC
phi1 = 1.0
M0 = 10000   # burn-in
M = 10000    # posterior draws kept
L = 1        # thinning
niter = M0+M*L
draws = matrix(0,niter,n+1)
for (iter in 1:niter){
  # sampling the states
  x1 = ffbs(y,m0,C0,phi1,V,W)
  # sampling phi from its full conditional (transitions x[t-1] -> x[t], t = 2..n)
  var = 1/(1/Vphi+sum(x1[1:(n-1)]^2)/W)
  mean = var*(phi0/Vphi+sum(x1[1:(n-1)]*x1[2:n])/W)
  phi1 = rnorm(1,mean,sqrt(var))
  draws[iter,] = c(phi1,x1)
}
ind = seq((M0+1),niter,by=L)
draws = draws[ind,]

# Comparisons
par(mfrow=c(1,1))
plot(density(draws[,1]),xlab=expression(phi),main="",ylim=c(0,10))
lines(density(run1$phis[,n]),col=2)
lines(density(run2$phis[,n]),col=3)
lines(density(run3$phis[,n]),col=4)
lines(density(run4$phis[,n]),col=6)
legend("topleft",legend=c("MCMC","Particle degeneracy","Artificial dynamics","LW","PL"),col=c(1:4,6),lty=1)

# Studying the Monte Carlo error
################################
par(mfrow=c(2,2))
set.seed(1234)
M = 10000
x = rnorm(M,m0,sqrt(C0))
phi = rnorm(M,phi0,sqrt(Vphi))
run1 = APF1(y,x,phi,V,W)
qphi1 = t(apply(run1$phis,2,quantile,c(0.1,0.5,0.9)))
ts.plot(qphi1,col=1:3,main="Particle degeneracy",ylim=c(-1,2))
for (i in 1:10){
  run1 = APF1(y,x,phi,V,W)
  qphi1 = t(apply(run1$phis,2,quantile,c(0.1,0.5,0.9)))
  for (j in 1:3) lines(qphi1[,j],col=j)
}

set.seed(1234)
M = 10000
sd = 0.01
x = rnorm(M,m0,sqrt(C0))
phi = rnorm(M,phi0,sqrt(Vphi))
run2 = APF2(y,x,phi,V,W,sd)
qphi2 = t(apply(run2$phis,2,quantile,c(0.1,0.5,0.9)))
ts.plot(qphi2,col=1:3,main="Artificial dynamics",ylim=c(-1,2))
for (i in 1:10){
  run2 = APF2(y,x,phi,V,W,sd)
  qphi2 = t(apply(run2$phis,2,quantile,c(0.1,0.5,0.9)))
  for (j in 1:3) lines(qphi2[,j],col=j)
}

set.seed(1234)
M = 1000
a = 0.9
x = rnorm(M,m0,sqrt(C0))
phi = rnorm(M,phi0,sqrt(Vphi))
run3 = APF3(y,x,phi,V,W,a)
qphi3 = t(apply(run3$phis,2,quantile,c(0.1,0.5,0.9)))
ts.plot(qphi3,col=1:3,main="Liu-West filter",ylim=c(-1,2))
for (i in 1:10){
  run3 = APF3(y,x,phi,V,W,a)
  qphi3 = t(apply(run3$phis,2,quantile,c(0.1,0.5,0.9)))
  for (j in 1:3) lines(qphi3[,j],col=j)
}

set.seed(1234)
M = 1000
x = rnorm(M,m0,sqrt(C0))
run4 = APF4(y,x,V,W,phi0,Vphi)
qphi4 = t(apply(run4$phis,2,quantile,c(0.1,0.5,0.9)))
ts.plot(qphi4,col=1:3,main="Particle learning",ylim=c(-1,2))
for (i in 1:10){
  run4 = APF4(y,x,V,W,phi0,Vphi)
  qphi4 = t(apply(run4$phis,2,quantile,c(0.1,0.5,0.9)))
  for (j in 1:3) lines(qphi4[,j],col=j)
}
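##############################################################################
# Added sketch (not in the original code): the replications above are only
# plotted. A small wrapper that stores the time-n posterior median of phi
# across replications gives a scalar Monte Carlo error measure per filter.
# Illustrative only; it reuses the priors above, with LW and PL at M = 1000
# as in the last two panels.
##############################################################################
mc.sd = function(nrep=10,M=1000){
  meds = matrix(0,nrep,2)
  for (r in 1:nrep){
    x0s = rnorm(M,m0,sqrt(C0))
    phi0s = rnorm(M,phi0,sqrt(Vphi))
    meds[r,1] = median(APF3(y,x0s,phi0s,V,W,a=0.9)$phis[,n])
    meds[r,2] = median(APF4(y,x0s,V,W,phi0,Vphi)$phis[,n])
  }
  apply(meds,2,sd)  # Monte Carlo sd of the final-time median (LW, PL)
}
# mc.sd()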