############################################################################################
#
#  STANDARD NORMAL LINEAR FACTOR MODEL - BAYESIAN INFERENCE
#
############################################################################################
#
# Author : Hedibert Freitas Lopes
#          Graduate School of Business
#          University of Chicago
#          5807 South Woodlawn Avenue
#          Chicago, Illinois, 60637
# Email  : hlopes@ChicagoGSB.edu
#
############################################################################################

# The full-conditional sampling routines sampling.f(), sampling.sigma2() and
# sampling.beta() are defined in the sourced file; they read the prior
# hyperparameters (m0, C0, a, b, ...) and the data y from the global environment.
source("factormodel-routines-R.txt")

# Simulated data: n = 100 observations of 6-dimensional vectors from a
# one-factor model  y_t = b f_t + e_t,  e_t ~ N(0, S)
# --------------------------------------------------------------------
set.seed(12375)
n      <- 100
p      <- 6
k.true <- 1
b.true <- matrix(c(0.99, 0.98, 0.95, 0.92, 0.89, 0.80), p, k.true)
S.true <- diag(c(0.01, 0.05, 0.10, 0.15, 0.2, 0.3))
sigma2.true <- diag(S.true)
f.true <- rnorm(n)
e.true <- matrix(rnorm(n * p), n, p) %*% sqrt(S.true)
y      <- f.true %*% t(b.true) + e.true

# Variance decomposition (percentage) - the percentage of each variable's
# variance that is explained by the common factor
VD <- round(diag(b.true %*% t(b.true)) / diag(b.true %*% t(b.true) + S.true), 2)
VD <- cbind(VD, 1 - VD)
VD

# Prior specification
# -------------------
# beta_i ~ N(m0, C0); sigma2_i ~ IG(a, b) with prior mean E and spread D.
m0   <- 0.0
C0   <- 1.0
c0   <- sqrt(C0)
iC0  <- 1 / C0
m0c0 <- m0 / c0
m0C0 <- m0 * iC0
E <- mean(diag(S.true))
D <- 0.25
a <- (E / D)^2 + 2
b <- (a - 1) * E

# FITTING THE ONE-FACTOR MODEL - GIBBS SAMPLER
# --------------------------------------------
set.seed(23733)
M0 <- 2000  # burn-in length
M  <- 2000  # length of the chain, after burn-in

# Initial values (chain started at the true parameter values)
beta   <- b.true
sigma2 <- diag(S.true)

# Storage variables
betas   <- matrix(0, M, p)
sigma2s <- matrix(0, M, p)
mfactor <- 0.0
VD1     <- array(0, c(M, p, 2))

# Algorithm: cycle through the full conditionals of (f, sigma2, beta),
# discard the first M0 draws and store the remaining M draws together
# with the implied variance decompositions.
for (iter in 1:(M0 + M)) {
  f      <- sampling.f(beta, sigma2)
  sigma2 <- sampling.sigma2(f, beta)
  beta   <- sampling.beta(f, sigma2)
  if (iter > M0) {
    mfactor <- mfactor + f
    sigma2s[iter - M0, ] <- sigma2
    betas[iter - M0, ]   <- beta[, 1]
    V <- diag(beta %*% t(beta))  # communalities at the current draw
    O <- V + sigma2              # total variances
    VD1[iter - M0, , 1] <- V / O
    VD1[iter - M0, , 2] <- sigma2 / O
  }
}
mfactor <- mfactor / M  # posterior mean of the common factor path

# Posterior summaries for the factor loadings:
# trace plot, autocorrelation and histogram for each beta_i
# (titles now carry the variable index i, consistent with the other panels)
par(mfrow = c(p, 3))
for (i in seq_len(p)) {
  ts.plot(betas[, i], xlab = "iterations", ylab = "",
          main = paste("beta", i, sep = ""))
  abline(h = b.true[i, 1], col = 2)
  acf(betas[, i], main = "")
  hist(betas[, i], xlab = "", ylab = "", main = "", prob = TRUE)
  abline(v = b.true[i, 1], col = 2)
}

# Posterior summaries for the idiosyncratic variances sigma2_i
par(mfrow = c(p, 3))
for (i in seq_len(p)) {
  ts.plot(sigma2s[, i], xlab = "iterations", ylab = "",
          main = paste("sigma2", i, sep = ""))
  abline(h = sigma2.true[i], col = 2)
  acf(sigma2s[, i], main = "")
  hist(sigma2s[, i], xlab = "", ylab = "", main = "", prob = TRUE)
  abline(v = sigma2.true[i], col = 2)
}

# Posterior summaries for the variance decompositions
par(mfrow = c(p, 3))
for (i in seq_len(p)) {
  ts.plot(VD1[, i, 1], xlab = "iteration", ylab = "",
          main = paste("VD", i, sep = ""))
  abline(h = VD[i, 1], col = 2)
  acf(VD1[, i, 1], main = "")
  hist(VD1[, i, 1], xlab = "", ylab = "", main = "", prob = TRUE)
  abline(v = VD[i, 1], col = 2)
}

# True versus estimated (posterior mean) common factor
par(mfrow = c(2, 1))
ts.plot(f.true, xlab = "time", ylab = "", main = "")
lines(mfactor, col = 2)
plot(f.true, mfactor, xlab = "True common factor",
     ylab = "Estimated common factor")
abline(0, 1)  # 45-degree reference line