##############################################################################
#
# Simple normal linear regression - Gibbs sampler
#
##############################################################################

# ---------------------------------------------------------------------------
# Simulate data from y_i ~ N(beta * x_i, sigma^2)  (regression through origin)
# ---------------------------------------------------------------------------
set.seed(12345)
n <- 200
beta.true <- 0.8
sigma.true <- 0.5
x <- rnorm(n)
y <- rnorm(n, beta.true * x, sigma.true)
plot(x, y)

# OLS/ML inference
# ----------------
fit.ols <- lm(y ~ x - 1)              # no-intercept fit, matching the DGP
beta.ols <- coef(fit.ols)             # coef() avoids partial matching of $coef
sigma.ols <- summary(fit.ols)$sigma   # residual standard error (ML-style scale)
c(beta.ols, sigma.ols)

# Bayesian inference via Gibbs sampler
# ------------------------------------

# Prior of beta ~ N(beta0, tau02)
beta0 <- 0
tau02 <- 9

# Prior of sigma2 ~ IG(alpha0, gamma0).
# E and DP are the desired mean and standard deviation of the inverse-gamma
# prior; alpha0 and gamma0 are derived from them using
#   E    = gamma0 / (alpha0 - 1)
#   DP^2 = E^2 / (alpha0 - 2)
E <- 0.5
DP <- 1.5
alpha0 <- 2 + E^2 / DP^2
gamma0 <- (alpha0 - 1) * E

# Visualize the priors with Monte Carlo draws.
# An IG(a, g) draw is the reciprocal of a Gamma(a, rate = g) draw.
betas <- rnorm(10000, beta0, sqrt(tau02))
sigma2 <- 1 / rgamma(10000, alpha0, gamma0)
par(mfrow = c(1, 2))
hist(betas, xlab = "beta", main = "", ylab = "Prior density of beta")
# Fixed axis label: we plot sqrt(sigma2), i.e. sigma, not sigma2.
hist(sqrt(sigma2), xlab = "sigma", main = "", ylab = "Prior density of sigma")

# Full conditional of sigma2 | beta, y : IG(alpha0 + n/2, gamma0 + SSR/2),
# where SSR is the residual sum of squares at the current beta.
# Relies on globals x, y, n, alpha0, gamma0 (script-level convention).
draw.full.sigma2 <- function(beta) {
  ystar <- y - beta * x
  alpha1 <- alpha0 + n / 2
  gamma1 <- gamma0 + sum(ystar^2) / 2
  1 / rgamma(1, alpha1, gamma1)       # IG draw via reciprocal gamma
}

# Full conditional of beta | sigma2, y : N(beta1, tau12) — standard
# conjugate-normal precision-weighted update.
# Relies on globals x, y, beta0, tau02 (script-level convention).
draw.full.beta <- function(sigma2) {
  tau12 <- 1 / (1 / tau02 + sum(x^2) / sigma2)
  beta1 <- tau12 * (beta0 / tau02 + sum(x * y) / sigma2)
  rnorm(1, beta1, sqrt(tau12))
}

# Initial value for beta and Gibbs sampler set up
set.seed(2355)
burnin <- 1000
M <- 10000
niter <- burnin + M
draws <- matrix(0, niter, 2)          # preallocated: col 1 = beta, col 2 = sigma
beta <- 0
for (iter in seq_len(niter)) {
  sigma2 <- draw.full.sigma2(beta)
  beta <- draw.full.beta(sigma2)
  draws[iter, ] <- c(beta, sqrt(sigma2))   # store sigma, not sigma2
}

# Discard burn-in and compute posterior means
draws1 <- draws[(burnin + 1):niter, ]
beta.post <- mean(draws1[, 1])
sigma.post <- mean(draws1[, 2])

# Trace plots (red dashed line marks the end of burn-in)
par(mfrow = c(2, 2))
ts.plot(draws[, 1], main = "beta", ylab = "", xlab = "Gibbs sampler iteration")
abline(v = burnin, col = 2, lwd = 3, lty = 2)
ts.plot(draws[, 2], main = "sigma", ylab = "", xlab = "Gibbs sampler iteration")
abline(v = burnin, col = 2, lwd = 3, lty = 2)
# Annotate the sigma trace plot (the most recently drawn panel):
# left region = burn-in, right region = retained draws.
legend("topleft",legend="Burn-in",bty="n")
legend("topright",legend="Keep for inference",bty="n")

# Marginal posterior of beta, with the true value (red), the MLE (green),
# and the posterior mean (blue) marked as points on the x-axis.
hist(draws1[,1],prob=TRUE,main="",xlab="",ylab="Posterior density of beta")
points(beta.true,0,col=2,pch=16,cex=1.2)
points(beta.ols,0,col=3,pch=16,cex=1.2)
points(beta.post,0,col=4,pch=16,cex=1.2)

# Marginal posterior of sigma, same color coding as above.
hist(draws1[,2],prob=TRUE,main="",xlab="",ylab="Posterior density of sigma")
points(sigma.true,0,col=2,pch=16,cex=1.2)
points(sigma.ols,0,col=3,pch=16,cex=1.2)
points(sigma.post,0,col=4,pch=16,cex=1.2)
legend("topleft",legend=c("TRUE","MLE","BAYES"),col=2:4,bty="n",pch=16,cex=1.2)

# Joint posterior scatter of (beta, sigma) from the retained draws,
# with the three reference points overlaid in larger markers.
par(mfrow=c(1,1))
plot(draws1,xlab="beta",ylab="sigma",main="Joint posterior (beta,sigma)",pch=16,col=grey(0.7))
points(beta.true,sigma.true,col=2,pch=16,cex=2)
points(beta.ols,sigma.ols,col=3,pch=16,cex=2)
points(beta.post,sigma.post,col=4,pch=16,cex=2)
legend("topleft",legend=c("TRUE","MLE","BAYES"),col=2:4,bty="n",pch=16,cex=1.2)