############################################################################################
#
#  STANDARD NORMAL LINEAR FACTOR MODEL - BAYESIAN INFERENCE
#
############################################################################################
#
# Author : Hedibert Freitas Lopes
#          Graduate School of Business
#          University of Chicago
#          5807 South Woodlawn Avenue
#          Chicago, Illinois, 60637
# Email  : hlopes@ChicagoGSB.edu
#
############################################################################################

# Prior evaluation
# Input: theta
# Output: evaluation of the prior at theta
# Description: theta is a matrix with k+1 columns, where the first k columns correspond
#              to the factor loadings matrix and the (k+1)-th column corresponds to the
#              idiosyncratic variances.
prior = function(theta){
  k = ncol(theta)-1
  sum(dnorm(theta[,1:k],m0,c0,log=T)) +
    sum(dgamma(1/theta[,k+1],a,b,log=T)) - 2*sum(log(theta[,k+1])) -
    k*pnorm(m0C0,log=T)
}

# Likelihood evaluation
# Input: theta
# Output: evaluation of the likelihood at theta
# Description: theta is a matrix with k+1 columns, where the first k columns correspond
#              to the factor loadings matrix and the (k+1)-th column corresponds to the
#              idiosyncratic variances. It is assumed that the data is in matrix y.
like = function(theta){
  k = ncol(theta)-1
  S = theta[,1:k]%*%t(theta[,1:k])+diag(theta[,k+1])
  U = chol(solve(S))                # upper triangular, t(U)%*%U = solve(S)
  z = y%*%t(U)                      # whitened data: each row of z is N(0,I)
  return(sum(dnorm(z,log=T))+nrow(y)*sum(log(diag(U))))  # adds log|S|^(-1/2) once per observation
}

# Draw from a normal truncated at zero, i.e., only positive values are drawn
# Input: n - number of draws
#        m - mean of the untruncated normal
#        s - standard deviation of the untruncated normal
# Output: n draws
rtnorm = function(n,m,s){
  u = runif(n)
  A = pnorm(-m/s)
  return(m+s*qnorm(u*(1-A)+A))
}

# Evaluates at x the density of a normal truncated at zero
# Input: x - coordinate
#        m - mean of the untruncated normal
#        s - standard deviation of the untruncated normal
# Output: density evaluation
dtnorm = function(x,m,s){
  dnorm(x,m,s)/(1-pnorm(0,m,s))
}
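############################################################################################
# Quick check (not part of the original script): a minimal sketch comparing draws from
# rtnorm with the dtnorm density.  The sample size and the (m,s) values below are
# arbitrary illustrative choices.
############################################################################################
set.seed(1010)
draws = rtnorm(10000,0.5,1.0)
hist(draws,breaks=50,prob=TRUE,xlab="x",main="rtnorm draws vs dtnorm density")
xs = seq(0.001,max(draws),length=200)
lines(xs,dtnorm(xs,0.5,1.0),lwd=2)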
# Draw from a truncated multivariate normal where the truncation
# appears only in the last element of the random vector.
# Input: n - number of draws
#        m - mean vector of the untruncated multivariate normal
#        S - covariance matrix of the untruncated multivariate normal
# Output: n draws
rmtnorm = function(n,m,S){
  k    = length(m)
  m1   = m[1:(k-1)]
  S1   = S[1:(k-1),1:(k-1)]
  draw = matrix(rnorm(n*(k-1)),n,k-1)%*%chol(S1)
  x1   = matrix(m1,n,k-1,byrow=T) + draw
  V    = solve(S1)
  S2   = S[k,k]-(S[k,1:(k-1)]%*%V%*%S[1:(k-1),k])[1,1]
  m2   = m[k] + draw%*%V%*%S[k,1:(k-1)]
  x2   = rtnorm(n,m2,sqrt(S2))
  return(cbind(x1,x2))
}

# Draw from a multivariate normal
# Input: n - number of draws
#        m - mean vector of the multivariate normal
#        S - covariance matrix of the multivariate normal
# Output: n draws
rmnorm = function(n,m,S){
  p = length(m)
  return(matrix(m,n,p,byrow=T)+matrix(rnorm(n*p),n,p)%*%chol(S))
}

# Full conditional of the diagonal elements of the idiosyncratic variance matrix
# (the data matrix y, its dimensions n and p, and the hyperparameters a and b are
#  taken from the global environment)
sampling.sigma2 = function(f,beta){
  e = y-f%*%t(beta)
  return(1/rgamma(p,a+n/2,b+diag(t(e)%*%e)/2))
}

# Full conditional of the common factors
sampling.f = function(beta,sigma2){
  iV = diag(1/sigma2)
  n  = nrow(y)
  p  = nrow(beta)
  k  = ncol(beta)
  if (k==1){
    O = 1/(1+t(beta)%*%iV%*%beta)[1,1]
    return(y%*%iV%*%beta*O + sqrt(O)*matrix(rnorm(n*k),n,k))
  }else{
    O = solve(diag(1,k)+t(beta)%*%iV%*%beta)
    return(y%*%iV%*%beta%*%O + matrix(rnorm(n*k),n,k)%*%chol(O))
  }
}

# Full conditional of the factor loadings matrix
# (y, p, the hyperparameters iC0 and m0C0, and the current loadings matrix beta are
#  taken from the global environment)
sampling.beta = function(f,sigma2){
  k = ncol(f)
  # sampling beta[1,1]
  # ------------------
  var  = 1/(iC0+sum(f[,1]^2)/sigma2[1])
  mean = var*(m0C0+sum(f[,1]*y[,1])/sigma2[1])
  beta[1,1] = rtnorm(1,mean,sqrt(var))
  if (k==1){
    for (i in 2:p){
      var  = 1/(iC0+sum(f[,1]^2)/sigma2[i])
      mean = var*(m0C0+sum(f[,1]*y[,i])/sigma2[i])
      beta[i,1] = rtnorm(1,mean,sqrt(var))
    }
  }
  if (k>1){
    for (i in 2:k){
      var  = solve(diag(iC0,i)+t(f[,1:i])%*%f[,1:i]/sigma2[i])
      mean = var%*%(m0C0+t(f[,1:i])%*%y[,i]/sigma2[i])
      beta[i,1:i] = rmtnorm(1,mean,var)     # last element (beta[i,i]) truncated at zero
    }
    for (i in (k+1):p){
      var  = solve(diag(iC0,k)+t(f)%*%f/sigma2[i])
      mean = var%*%(m0C0+t(f)%*%y[,i]/sigma2[i])
      beta[i,1:k] = rmnorm(1,mean,var)
    }
  }
  return(beta)
}
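############################################################################################
# Example usage (not part of the original script): a minimal sketch of a Gibbs sampler that
# cycles through sampling.f, sampling.beta and sampling.sigma2 for a one-factor (k=1) model.
# The simulated data, the hyperparameter values (m0, c0, a, b) and the chain lengths below
# are illustrative assumptions, not the author's original settings; the samplers read y, n,
# p, a, b, iC0, m0C0 and the current beta from the global environment, so those objects are
# created here.
############################################################################################
set.seed(12345)

# Simulated data: p=5 series, n=200 observations, k=1 common factor (assumed values)
n           = 200
p           = 5
k           = 1
beta.true   = matrix(c(0.99,0.90,0.80,0.70,0.60),p,k)
sigma2.true = rep(0.25,p)
f.true      = matrix(rnorm(n*k),n,k)
y           = f.true%*%t(beta.true)+matrix(rnorm(n*p),n,p)%*%diag(sqrt(sigma2.true))

# Hyperparameters (illustrative choices)
m0   = 0.0          # prior mean of each factor loading
c0   = 1.0          # prior standard deviation of each factor loading
iC0  = 1/c0^2       # prior precision of each factor loading
m0C0 = m0*iC0       # precision-weighted prior mean
a    = 2.1          # shape of the gamma prior on the precisions 1/sigma2
b    = 1.1          # rate of the gamma prior on the precisions 1/sigma2

# Initial values
beta   = matrix(0.5,p,k)
sigma2 = rep(1,p)

# Gibbs sampler: M0 burn-in draws followed by M stored draws
M0 = 1000
M  = 2000
draws.beta   = matrix(0,M,p*k)
draws.sigma2 = matrix(0,M,p)
for (iter in 1:(M0+M)){
  f      = sampling.f(beta,sigma2)
  beta   = sampling.beta(f,sigma2)
  sigma2 = sampling.sigma2(f,beta)
  if (iter>M0){
    draws.beta[iter-M0,]   = beta
    draws.sigma2[iter-M0,] = sigma2
  }
}

# Posterior means against the values used to simulate the data
print(round(cbind(true=c(beta.true),post.mean=apply(draws.beta,2,mean)),3))
print(round(cbind(true=sigma2.true,post.mean=apply(draws.sigma2,2,mean)),3))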