Simulating some linear regression data

First we simulate \(n\) pairs of observations, \((y_i,x_i)\), as follows \[ x_i \sim N(0,1) \ \ \ \mbox{and} \ \ \ y_i|x_i \sim N(\alpha+\beta x_i,\sigma^2), \] and randomly select \(d\) regressors \(x_i\) to be missing. For convenience, we rearrange the pairs \((y_i,x_i)\) so that the last \(d\) are the missing cases.

# Fix the RNG seed for reproducibility.
# (The original script called set.seed() twice in a row; the first call,
# set.seed(3141593), was immediately overridden by set.seed(12345) and was
# dead code, so it has been removed — the RNG stream is unchanged.)
set.seed(12345)
n     = 10                    # number of observations
d     = 5                     # number of regressors treated as missing
x1    = rnorm(n)              # regressors x_i ~ N(0,1)
alpha = 0                     # true intercept
beta  = 1                     # true slope
sig   = 1                     # true error standard deviation
true  = c(alpha,beta,sig)     # true parameter vector (used in later plots)
y1    = alpha+beta*x1+rnorm(n,0,sig)  # y_i|x_i ~ N(alpha+beta*x_i, sig^2)
# Randomly choose which d regressors are missing
missing = sort(sample(1:n,size=d,replace=FALSE))
# Rearrange the pairs so the d missing cases come last
y = c(y1[-missing],y1[missing])
x = c(x1[-missing],x1[missing])
 
# Scatter plot of all n pairs; the d cases whose x's will be treated as
# missing are highlighted in blue (their true simulated values are shown).
par(mfrow=c(1,1))
plot(x,y,pch=16)
points(x[(n-d+1):n],y[(n-d+1):n],col=4,pch=16,cex=2)
# OLS fit using all n observations (blue) vs only the n-d complete cases (black)
abline(lm(y~x)$coef,col=4,lwd=2)
abline(lm(y[1:(n-d)]~x[1:(n-d)])$coef,col=1,lwd=2)
# Legend label fixed: "n-ds obs" -> "n-d obs"
legend("topleft",legend=c("OLS fit n-d obs",
"OLS fit n obs"),col=c(1,4),lwd=2)

OLS estimation

# OLS using all n pairs (here x still contains the true, simulated values
# of the regressors that will later be treated as missing)
ols = lm(y~x)
summary(ols)
## 
## Call:
## lm(formula = y ~ x)
## 
## Residuals:
##      Min       1Q   Median       3Q      Max 
## -0.95785 -0.54544 -0.07149  0.13093  1.76813 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)  
## (Intercept)   0.2486     0.2757   0.902   0.3936  
## x             0.7189     0.3520   2.042   0.0754 .
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.8593 on 8 degrees of freedom
## Multiple R-squared:  0.3427, Adjusted R-squared:  0.2605 
## F-statistic: 4.171 on 1 and 8 DF,  p-value: 0.07541
# OLS using only the n-d complete cases (the first n-d pairs after the
# rearrangement above)
ols = lm(y[1:(n-d)]~x[1:(n-d)])
summary(ols)
## 
## Call:
## lm(formula = y[1:(n - d)] ~ x[1:(n - d)])
## 
## Residuals:
##        1        2        3        4        5 
##  0.07834 -0.01394 -0.04091 -0.10670  0.08322 
## 
## Coefficients:
##              Estimate Std. Error t value Pr(>|t|)   
## (Intercept)  -0.41399    0.04482  -9.237  0.00268 **
## x[1:(n - d)]  0.31526    0.04486   7.027  0.00592 **
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.09366 on 3 degrees of freedom
## Multiple R-squared:  0.9427, Adjusted R-squared:  0.9236 
## F-statistic: 49.39 on 1 and 3 DF,  p-value: 0.005919

Bayesian inference

We will assume the values are missing completely at random and perform Bayesian inference where the unknowns are \[ \theta = (\alpha,\beta,\sigma^2,x_{n-d+1},\ldots,x_n), \] with independent prior \[ p(\theta) = p(\alpha)p(\beta)p(\sigma^2)\prod_{i=1}^{d} p(x_{n-d+i}), \] where \(\alpha \sim N(a_0,A_0)\), \(\beta \sim N(b_0,B_0)\), \(\sigma^2 \sim IG(c_0,d_0)\) and \(x_{n-d+i} \sim N(0,1)\), for \(i=1,\ldots,d\).

# Prior hyperparameters
a0 = 0   # alpha ~ N(a0, A0)
A0 = 1
b0 = 0   # beta ~ N(b0, B0)
B0 = 1
c0 = 3   # sigma^2 ~ IG(c0, d0)
d0 = 4

Gibbs sampler

The MCMC scheme cycles through the following \(d+3\) full conditionals: \[\begin{eqnarray*} p(\alpha|y_{1:n},x_{1:n},\beta,\sigma^2)\\ p(\beta|y_{1:n},x_{1:n},\alpha,\sigma^2)\\ p(\sigma^2|y_{1:n},x_{1:n},\alpha,\beta)\\ p(x_i|y_{1:n},x_{-i},\alpha,\beta,\sigma^2), \end{eqnarray*}\] for \(i=n-d+1,\ldots,n\). Show that these full conditionals are as follows. \[\begin{eqnarray*} [\alpha] &\sim& N\left\{\left(\frac{1}{A_0}+\frac{n}{\sigma^2}\right)^{-1}\left( \frac{a_0}{A_0}+\frac{\sum_{i=1}^n (y_i-\beta x_i)}{\sigma^2} \right) ;\left(\frac{1}{A_0}+\frac{n}{\sigma^2}\right)^{-1}\right\}\\ [\beta] &\sim& N\left\{\left(\frac{1}{B_0}+\frac{\sum_{i=1}^n x_i^2}{\sigma^2}\right)^{-1}\left( \frac{b_0}{B_0}+\frac{\sum_{i=1}^n (y_i-\alpha)x_i}{\sigma^2} \right) ;\left(\frac{1}{B_0}+\frac{\sum_{i=1}^n x_i^2}{\sigma^2}\right)^{-1}\right\}\\ [\sigma^2] &\sim& IG\left\{c_0+\frac{n}{2};d_0+\frac{\sum_{i=1}^n (y_i-\alpha-\beta x_i)^2}{2}\right\}\\ [x_i] &\sim& N\left\{ \left(1+\frac{\beta^2}{\sigma^2}\right)^{-1}\frac{\beta(y_i-\alpha)}{\sigma^2}; \left(1+\frac{\beta^2}{\sigma^2}\right)^{-1}\right\}, \qquad i=n-d+1,\ldots,n. \end{eqnarray*}\]

# Initial values for the Gibbs sampler
xmiss = rep(mean(x),d)   # start the missing x's at the overall mean of x
alpha = 0
beta  = 0

# Markov chain parameters
M0    = 10000            # burn-in iterations (discarded below)
M     = 1000             # iterations kept for posterior inference
niter = M0+M
draws = matrix(0,niter,3+d)   # columns: alpha, beta, sigma, and the d missing x's


# Gibbs sampler: at each iteration, cycle through the full conditionals of
# sig2, alpha, beta and the d missing regressors (derived above)
for (iter in 1:niter){
  # Current "complete-data" regressor vector: observed x's plus imputed x's
  xx    = c(x[1:(n-d)],xmiss)
  
  # Sampling sig2 ~ IG(c0 + n/2, d0 + sum of squared residuals / 2)
  # (rgamma uses shape/rate, so 1/rgamma gives the inverse-gamma draw)
  sig2 = 1/rgamma(1,c0+n/2,d0+(sum((y-alpha-beta*xx)^2))/2)

  # Sampling alpha from its normal full conditional N(a1, A1)
  A1 = 1/(1/A0+n/sig2)
  a1 = A1*(a0/A0+sum(y-beta*xx)/sig2)
  alpha = rnorm(1,a1,sqrt(A1))

  # Sampling beta from its normal full conditional N(b1, B1)
  B1 = 1/(1/B0+sum(xx^2)/sig2)
  b1 = B1*(b0/B0+sum((y-alpha)*xx)/sig2)
  beta = rnorm(1,b1,sqrt(B1))

  # Sampling the missing x's: N(0,1) prior combined with the likelihood
  # term gives variance vx = (1 + beta^2/sig2)^{-1} and mean
  # vx * beta*(y_i - alpha)/sig2; all d draws are made in one vectorized call
  vx = 1/(1+beta^2/sig2)
  mx   = vx*(beta*(y[(n-d+1):n]-alpha)/sig2)
  xmiss = rnorm(d,mx,sqrt(vx))

  # Storing the Monte Carlo chains (note: sigma = sqrt(sig2) is stored)
  draws[iter,] = c(alpha,beta,sqrt(sig2),xmiss)
}
# Discard the M0 burn-in iterations; keep the final M draws
draws = draws[(M0+1):niter,]

Trace plots, acfs and marginal posteriors

# Trace plots, ACFs and marginal posterior histograms for alpha, beta, sigma.
# NOTE: the original code called abline(v=M0,...) after each trace plot to
# mark the end of burn-in, but `draws` already excludes the M0 burn-in
# iterations (its x-axis runs 1..M), so a vertical line at v=M0=10000 fell
# outside the plotting region and was never visible; those calls were removed.
par(mfrow=c(3,3))
ts.plot(draws[,1],xlab="Iterations",ylab="",main=expression(alpha))
acf(draws[,1],main="")
hist(draws[,1],prob=TRUE,main="",xlab=expression(alpha))
abline(v=true[1],col=2,lwd=3)   # true value of alpha

ts.plot(draws[,2],xlab="Iterations",ylab="",main=expression(beta))
acf(draws[,2],main="")
hist(draws[,2],prob=TRUE,main="",xlab=expression(beta))
abline(v=true[2],col=2,lwd=3)   # true value of beta

ts.plot(draws[,3],xlab="Iterations",ylab="",main=expression(sigma))
acf(draws[,3],main="")
hist(draws[,3],prob=TRUE,main="",xlab=expression(sigma))
abline(v=true[3],col=2,lwd=3)   # true value of sigma

Bivariate marginal posteriors

# Pairwise scatter plots of the posterior draws of (alpha, beta, sigma).
# Label fixed: the first parameter is alpha, not mu.
pairs(draws[,1:3],labels=c(expression(alpha),expression(beta),expression(sigma)))

Estimating the missing values

# Posterior quartiles (25%, 50%, 75%) of each of the d missing regressors
qx = t(apply(draws[,4:(3+d)],2,quantile,c(0.25,0.5,0.75)))
par(mfrow=c(1,1))
plot(x,y,pch=16,xlim=range(x,qx))
idx = (n-d+1):n
points(x[idx],y[idx],col=4,pch=16,cex=1.2)   # true (simulated) missing x's
points(qx[,2],y[idx],col=6,pch=16,cex=1.2)   # posterior medians
# segments() is vectorized over its coordinates, so all d interquartile
# bars are drawn in a single call (no explicit loop needed)
segments(qx[,1],y[idx],qx[,3],y[idx],col=6)

Comparing linear predictors

# Pointwise posterior quartile bands of the regression line alpha + beta*x
# over a grid of x values, compared with the two OLS fits.
ngrid = 100                              # grid size (was hard-coded as 100)
xxx = seq(min(x),max(x),length=ngrid)
# fx[j,i] = alpha_j + beta_j * xxx[i] for posterior draw j; outer() replaces
# the original column-by-column loop (draws[,1] recycles down each column).
fx  = outer(draws[,2],xxx) + draws[,1]
qfx = t(apply(fx,2,quantile,c(0.25,0.5,0.75)))

par(mfrow=c(1,1))
plot(x,y,pch=16,xlim=range(x,qx))
points(x[(n-d+1):n],y[(n-d+1):n],col=4,pch=16,cex=1.2)
lines(xxx,qfx[,1],col=2,lty=2,lwd=2)     # posterior 25% quantile
lines(xxx,qfx[,2],col=2,lwd=2)           # posterior median
lines(xxx,qfx[,3],col=2,lty=2,lwd=2)     # posterior 75% quantile
abline(lm(y[1:(n-d)]~x[1:(n-d)])$coef,lwd=2)
abline(lm(y~x)$coef,col=4,lwd=2)
# Legend label fixed: "n-ds obs" -> "n-d obs"
legend("topleft",legend=c("OLS fit n-d obs","OLS fit n obs","Bayes"),col=c(1,4,2),lwd=2)

LS0tCnRpdGxlOiAiTGluZWFyIHJlZ3Jlc3Npb24gd2l0aCBhIGZldyAkeCRzIG1pc3NpbmciCnN1YnRpdGxlOiAiQmF5ZXNpYW4gaW5mZXJlbmNlIHZpYSBHaWJicyBzYW1wbGVyIgphdXRob3I6ICJIZWRpYmVydCBGcmVpdGFzIExvcGVzIgpkYXRlOiAiMi8yOC8yMDIyIgpvdXRwdXQ6CiAgaHRtbF9kb2N1bWVudDoKICAgIHRvYzogdHJ1ZQogICAgdG9jX2RlcHRoOiAyCiAgICBjb2RlX2Rvd25sb2FkOiB5ZXMKLS0tCgpgYGB7ciBzZXR1cCwgaW5jbHVkZT1GQUxTRX0Ka25pdHI6Om9wdHNfY2h1bmskc2V0KGVjaG8gPSBUUlVFKQpgYGAKCiMgU2ltdWxhdGluZyBzb21lIGxpbmVhciByZWdyZXNzaW9uIGRhdGEKRmlyc3Qgd2Ugc2ltdWxhdGVkICRuJCBwYWlycyBvZiBvYnNlcnZhdGlvbnMsICQoeV9pLHhfaSkkIGFzIGZvbGxvd3MKJCQKeF9pIFxzaW0gTigwLDEpIFwgXCBcIFxtYm94e2FuZH0gXCBcIFwgeV9pfHhfaSBcc2ltIE4oXGFscGhhK1xiZXRhIHhfaSxcc2lnbWFeMiksCiQkCmFuZCByYW5kb21seSBzZWxlY3QgJGQkIHJlZ3Jlc3NvcmVzICR4X2kkIGFzIG1pc3NpbmcuICBGb3IgY29udmVuaWVuY2UsIHdlIHJlYXJyYW5nZSB0aGUgcGFpcnMgJCh5X2kseF9pKSQgc28gdGhlIGxhc3QgJGQkIGFyZSB0aGUgbWlzc2luZyBjYXNlcy4KCmBgYHtyIGZpZy53aWR0aD04LCBmaWcuaGVpZ2h0PTZ9CnNldC5zZWVkKDMxNDE1OTMpCnNldC5zZWVkKDEyMzQ1KQpuICAgICA9IDEwCmQgICAgID0gNQp4MSAgICA9IHJub3JtKG4pCmFscGhhID0gMApiZXRhICA9IDEKc2lnICAgPSAxCnRydWUgID0gYyhhbHBoYSxiZXRhLHNpZykKeTEgICAgPSBhbHBoYStiZXRhKngxK3Jub3JtKG4sMCxzaWcpCm1pc3NpbmcgPSBzb3J0KHNhbXBsZSgxOm4sc2l6ZT1kLHJlcGxhY2U9RkFMU0UpKQp5ID0gYyh5MVstbWlzc2luZ10seTFbbWlzc2luZ10pCnggPSBjKHgxWy1taXNzaW5nXSx4MVttaXNzaW5nXSkKIApwYXIobWZyb3c9YygxLDEpKQpwbG90KHgseSxwY2g9MTYpCnBvaW50cyh4WyhuLWQrMSk6bl0seVsobi1kKzEpOm5dLGNvbD00LHBjaD0xNixjZXg9MikKYWJsaW5lKGxtKHl+eCkkY29lZixjb2w9NCxsd2Q9MikKYWJsaW5lKGxtKHlbMToobi1kKV1+eFsxOihuLWQpXSkkY29lZixjb2w9MSxsd2Q9MikKbGVnZW5kKCJ0b3BsZWZ0IixsZWdlbmQ9YygiT0xTIGZpdCBuLWRzIG9icyIsCiJPTFMgZml0IG4gb2JzIiksY29sPWMoMSw0KSxsd2Q9MikKYGBgCgojIE9MUyBlc3RpbWF0aW9uCgpgYGB7cn0Kb2xzID0gbG0oeX54KQpzdW1tYXJ5KG9scykKCm9scyA9IGxtKHlbMToobi1kKV1+eFsxOihuLWQpXSkKc3VtbWFyeShvbHMpCmBgYAoKIyBCYXllc2lhbiBpbmZlcmVuY2UgCldlIHdpbGwgYXNzdW1lIHRoZSBtaXNzaW5nIHZhbHVlcyBhcmUgY29tcGxldGVseSBhdCByYW5kb20gYW5kIHBlcmZvcm0gQmF5ZXNpYW4gaW5mZXJlbmNlIHdoZXJlIHRoZSB1bmtub3ducyBhcmUKJCQKXHRoZXRhID0gKFxhbHBoYSxcYmV0YSxcc2lnbWFeMix4X3tk
KzF9LFxsZG90cyx4X24pLAokJAp3aXRoIGluZGVwZW5kZW50IHByaW9yCiQkCnAoXHRoZXRhKSA9IHAoXGFscGhhKXAoXGJldGEpcChcc2lnbWFeMilccHJvZF97aT0obi1kKSsxfV5uIHAoeF97bi1kK2l9KSwKJCQKd2hlcmUgJFxtdSBcc2ltIE4oYV8wLEFfMCkkLCAkXGJldGEgXHNpbSBOKGJfMCxCXzApJCwgJFxzaWdtYV4yIFxzaW0gSUcoY18wLGRfMCkkIGFuZCAkeF97bi1kK2l9IFxzaW0gTigwLDEpJCwgZm9yICRpPTEsXGxkb3RzLGQkLgoKYGBge3J9CmEwID0gMApBMCA9IDEKYjAgPSAwCkIwID0gMQpjMCA9IDMKZDAgPSA0CmBgYAoKIyMgR2liYnMgc2FtcGxlcgpUaGUgTUNNQyBzY2hlbWUgY3ljbGVzIHRocm91Z2ggdGhlIGZvbGxvd2luZyAkZCszJCBmdWxsIGNvbmRpdGlvbmFsczoKXGJlZ2lue2VxbmFycmF5Kn0KcChcYWxwaGF8eV97MTpufSx4X3sxOm59LFxiZXRhLFxzaWdtYV4yKVxcCnAoXGJldGF8eV97MTpufSx4X3sxOm59LFxiZXRhLFxzaWdtYV4yKVxcCnAoXHNpZ21hXjJ8eV97MTpufSx4X3sxOm59LFxiZXRhLFxzaWdtYV4yKVxcCnAoeF9pfHlfezE6bn0seF97LWl9LFxhbHBoYSxcYmV0YSxcc2lnbWFeMiksClxlbmR7ZXFuYXJyYXkqfQpmb3IgJGk9bi1kKzEsXGxkb3RzLG4kLiAgU2hvdyB0aGF0IHRoZXNlIGZ1bGwgY29uZGl0aW9uYWxzIGFyZSBhcyBmb2xsb3dzLgpcYmVnaW57ZXFuYXJyYXkqfQpbXGFscGhhXSAmXHNpbSYgTlxsZWZ0XHtcbGVmdChcZnJhY3sxfXtBXzB9K1xmcmFje259e1xzaWdtYV4yfVxyaWdodCleey0xfVxsZWZ0KApcZnJhY3thXzB9e0FfMH0rXGZyYWN7XHN1bV97aT0xfV5uICh5X2ktXGJldGEgeF9pKX17XHNpZ21hXjJ9ClxyaWdodCkKO1xsZWZ0KFxmcmFjezF9e0FfMH0rXGZyYWN7bn17XHNpZ21hXjJ9XHJpZ2h0KV57LTF9XHJpZ2h0XH1cXApbXGJldGFdICZcc2ltJiBOXGxlZnRce1xsZWZ0KFxmcmFjezF9e0JfMH0rXGZyYWN7XHN1bV97aT0xfV5uIHhfaV4yfXtcc2lnbWFeMn1ccmlnaHQpXnstMX1cbGVmdCgKXGZyYWN7Yl8wfXtCXzB9K1xmcmFje1xzdW1fe2k9MX1ebiAoeV9pLVxhbHBoYSl4X2l9e1xzaWdtYV4yfQpccmlnaHQpCjtcbGVmdChcZnJhY3sxfXtCXzB9K1xmcmFje1xzdW1fe2k9MX1ebiB4X2leMn17XHNpZ21hXjJ9XHJpZ2h0KV57LTF9XHJpZ2h0XH1cXApbXHNpZ21hXjJdICZcc2ltJiBJR1xsZWZ0XHtjXzArXGZyYWN7bn17Mn07ZF8wK1xmcmFje1xzdW1fe2k9MX1ebiAoeV9pLVxhbHBoYS1cYmV0YSB4X2kpL1xzaWdtYV4yfXsyfVxyaWdodFx9XFwKW3hfaV0gJlxzaW0mIE5cbGVmdFx7ClxsZWZ0KDErXGZyYWN7XGJldGFeMn17XHNpZ21hXjJ9XHJpZ2h0KV57LTF9XGJldGEoeV9pLVxhbHBoYSk7ClxsZWZ0KDErXGZyYWN7XGJldGFeMn17XHNpZ21hXjJ9XHJpZ2h0KV57LTF9XHJpZ2h0XH0sIFxxcXVhZCBpPW4tZCsxLFxsZG90cyxuLgpcZW5ke2VxbmFycmF5Kn0KCmBgYHtyfQojIEluaXRpYWwgdmFsdWVzCnhtaXNzID0gcmVwKG1lYW4oeCksZCkKYWxw
aGEgPSAwCmJldGEgID0gMAoKIyBNYXJrb3YgY2hhaW4gcGFyYW1ldGVycwpNMCAgICA9IDEwMDAwCk0gICAgID0gMTAwMApuaXRlciA9IE0wK00KZHJhd3MgPSBtYXRyaXgoMCxuaXRlciwzK2QpCgoKIyBHaWJicyBzYW1wbGVyCmZvciAoaXRlciBpbiAxOm5pdGVyKXsKICB4eCAgICA9IGMoeFsxOihuLWQpXSx4bWlzcykKICAKICAjIFNhbXBsaW5nIHNpZzIKICBzaWcyID0gMS9yZ2FtbWEoMSxjMCtuLzIsZDArKHN1bSgoeS1hbHBoYS1iZXRhKnh4KV4yKSkvMikKCiAgIyBTYW1wbGluZyBhbHBoYQogIEExID0gMS8oMS9BMCtuL3NpZzIpCiAgYTEgPSBBMSooYTAvQTArc3VtKHktYmV0YSp4eCkvc2lnMikKICBhbHBoYSA9IHJub3JtKDEsYTEsc3FydChBMSkpCgogICMgU2FtcGxpbmcgYmV0YQogIEIxID0gMS8oMS9CMCtzdW0oeHheMikvc2lnMikKICBiMSA9IEIxKihiMC9CMCtzdW0oKHktYWxwaGEpKnh4KS9zaWcyKQogIGJldGEgPSBybm9ybSgxLGIxLHNxcnQoQjEpKQoKICAjIFNhbXBsaW5nIG1pc3NpbmcgeCdzCiAgdnggPSAxLygxK2JldGFeMi9zaWcyKQogIG14ICAgPSB2eCooYmV0YSooeVsobi1kKzEpOm5dLWFscGhhKS9zaWcyKQogIHhtaXNzID0gcm5vcm0oZCxteCxzcXJ0KHZ4KSkKCiAgIyBTdG9yaW5nIHRoZSBNb250ZSBDYXJsbyBjaGFpbnMKICBkcmF3c1tpdGVyLF0gPSBjKGFscGhhLGJldGEsc3FydChzaWcyKSx4bWlzcykKfQpkcmF3cyA9IGRyYXdzWyhNMCsxKTpuaXRlcixdCmBgYAoKIyMjIFRyYWNlIHBsb3RzLCBhY2ZzIGFuZCBtYXJnaW5hbCBwb3N0ZXJpb3JzCmBgYHtyIGZpZy53aWR0aD0xMiwgZmlnLmhlaWdodD0xMH0KcGFyKG1mcm93PWMoMywzKSkKdHMucGxvdChkcmF3c1ssMV0seGxhYj0iSXRlcmF0aW9ucyIseWxhYj0iIixtYWluPWV4cHJlc3Npb24oYWxwaGEpKQphYmxpbmUodj1NMCxjb2w9Mixsd2Q9MikKYWNmKGRyYXdzWywxXSxtYWluPSIiKQpoaXN0KGRyYXdzWywxXSxwcm9iPVRSVUUsbWFpbj0iIix4bGFiPWV4cHJlc3Npb24oYWxwaGEpKQphYmxpbmUodj10cnVlWzFdLGNvbD0yLGx3ZD0zKQoKdHMucGxvdChkcmF3c1ssMl0seGxhYj0iSXRlcmF0aW9ucyIseWxhYj0iIixtYWluPWV4cHJlc3Npb24oYmV0YSkpCmFibGluZSh2PU0wLGNvbD0yLGx3ZD0yKQphY2YoZHJhd3NbLDJdLG1haW49IiIpCmhpc3QoZHJhd3NbLDJdLHByb2I9VFJVRSxtYWluPSIiLHhsYWI9ZXhwcmVzc2lvbihiZXRhKSkKYWJsaW5lKHY9dHJ1ZVsyXSxjb2w9Mixsd2Q9MykKCnRzLnBsb3QoZHJhd3NbLDNdLHhsYWI9Ikl0ZXJhdGlvbnMiLHlsYWI9IiIsbWFpbj1leHByZXNzaW9uKHNpZ21hKSkKYWJsaW5lKHY9TTAsY29sPTIsbHdkPTIpCmFjZihkcmF3c1ssM10sbWFpbj0iIikKaGlzdChkcmF3c1ssM10scHJvYj1UUlVFLG1haW49IiIseGxhYj1leHByZXNzaW9uKHNpZ21hKSkKYWJsaW5lKHY9dHJ1ZVszXSxjb2w9Mixsd2Q9MykKYGBgCgojIyMgQml2YXJpYXRlIG1hcmdpbmFsIHBvc3RlcmlvcnMKYGBge3IgZmln
LndpZHRoPTEyLCBmaWcuaGVpZ2h0PTEwfQpwYWlycyhkcmF3c1ssMTozXSxsYWJlbHM9YyhleHByZXNzaW9uKG11KSxleHByZXNzaW9uKGJldGEpLGV4cHJlc3Npb24oc2lnbWEpKSkKYGBgCgojIyMgRXN0aW1hdGluZyB0aGUgbWlzc2luZyB2YWx1ZXMKYGBge3IgZmlnLndpZHRoPTgsIGZpZy5oZWlnaHQ9Nn0KcXggPSB0KGFwcGx5KGRyYXdzWyw0OigzK2QpXSwyLHF1YW50aWxlLGMoMC4yNSwwLjUsMC43NSkpKQpwYXIobWZyb3c9YygxLDEpKQpwbG90KHgseSxwY2g9MTYseGxpbT1yYW5nZSh4LHF4KSkKcG9pbnRzKHhbKG4tZCsxKTpuXSx5WyhuLWQrMSk6bl0sY29sPTQscGNoPTE2LGNleD0xLjIpCnBvaW50cyhxeFssMl0seVsobi1kKzEpOm5dLGNvbD02LHBjaD0xNixjZXg9MS4yKQpmb3IgKGkgaW4gMTpkKQogIHNlZ21lbnRzKHF4W2ksMV0seVtuLWQraV0scXhbaSwzXSx5W24tZCtpXSxjb2w9NikKYGBgCgoKIyMjIENvbXBhcmluZyBsaW5lYXIgcHJlZGljdG9ycwpgYGB7ciBmaWcud2lkdGg9OCwgZmlnLmhlaWdodD02fQp4eHggPSBzZXEobWluKHgpLG1heCh4KSxsZW5ndGg9MTAwKQpmeCAgPSBtYXRyaXgoMCxNLDEwMCkKZm9yIChpIGluIDE6MTAwKQogIGZ4WyxpXSA9IGRyYXdzWywxXStkcmF3c1ssMl0qeHh4W2ldCnFmeCA9IHQoYXBwbHkoZngsMixxdWFudGlsZSxjKDAuMjUsMC41LDAuNzUpKSkKCnBhcihtZnJvdz1jKDEsMSkpCnBsb3QoeCx5LHBjaD0xNix4bGltPXJhbmdlKHgscXgpKQpwb2ludHMoeFsobi1kKzEpOm5dLHlbKG4tZCsxKTpuXSxjb2w9NCxwY2g9MTYsY2V4PTEuMikKbGluZXMoeHh4LHFmeFssMV0sY29sPTIsbHR5PTIsbHdkPTIpCmxpbmVzKHh4eCxxZnhbLDJdLGNvbD0yLGx3ZD0yKQpsaW5lcyh4eHgscWZ4WywzXSxjb2w9MixsdHk9Mixsd2Q9MikKYWJsaW5lKGxtKHlbMToobi1kKV1+eFsxOihuLWQpXSkkY29lZixsd2Q9MikKYWJsaW5lKGxtKHl+eCkkY29lZixjb2w9NCxsd2Q9MikKbGVnZW5kKCJ0b3BsZWZ0IixsZWdlbmQ9YygiT0xTIGZpdCBuLWRzIG9icyIsIk9MUyBmaXQgbiBvYnMiLCJCYXllcyIpLGNvbD1jKDEsNCwyKSxsd2Q9MikKYGBgCgo=