1 Weight vs height

We collected data from 15 volunteers (my students and myself) and below we summarize the data information graphically and entertain two competing simple linear regression models: i) Gaussian errors, ii) Student’s \(t\) errors.

# Height (cm) and weight (kg) for 15 volunteers; the last observation
# (height 196, weight hedi = 115) is the outlier studied below.
hedi = 115
n    = 15
data = matrix(c(178, 70,
                175, 80,
                184, 82,
                200, 89,
                183, 80,
                173, 65,
                185, 70,
                162, 70,
                190, 85,
                174, 80,
                180, 87,
                165, 74,
                176, 72,
                174, 63,
                196, hedi),
              nrow = n, ncol = 2, byrow = TRUE)

# Split the two columns into named vectors for convenience.
height = data[, 1]
weight = data[, 2]

# Scatter plot of the raw data; the outlier (last observation) is marked in red.
par(mfrow = c(1, 1))
plot(height, weight, xlab = "Height (in cm)", ylab = "Weight (in kg)")
points(height[n], weight[n], col = 2, pch = 16)
# Fitted OLS lines: black excludes the outlier, red includes it.
fit.excl = lm(weight[1:(n - 1)] ~ height[1:(n - 1)])
fit.incl = lm(weight ~ height)
abline(fit.excl, lwd = 2)
abline(fit.incl, col = 2, lwd = 2)

1.1 Fitted lines

Below we use the lm (linear model) function from R to run MLE/OLS for the regression \[ \mbox{weight}_i = \alpha + \beta \ \mbox{height}_i + \varepsilon_i \qquad\qquad \varepsilon_i \ \ \mbox{iid} \ \ N(0,\sigma^2). \] The two fitted lines, including and excluding the outlier (observation 15), are presented in the above picture.

Let us explore how the outlier affects the linear regression fits.

  • The adjusted \(R^2\) goes from 33% (excluding the outlier) to 43% (including it).

  • The height coefficient is estimated at \(0.52\) with p-value at \(0.02\) for the null hypothesis \(H_0:\beta=0\), when excluding the outlier.

  • The height coefficient is estimated at \(0.84\) with p-value at \(0.005\) for the null hypothesis \(H_0:\beta=0\), when including the outlier.

  • Therefore, at the 1% level, one would reject the null hypothesis when the outlier is included and fail to reject when the outlier is excluded.

  • The estimates of \(\sigma\) are \(6.7\) (excluding the outlier) and \(9.7\) (including the outlier), i.e., a substantial increase in the variability of the unexplained error when the outlier is included.

# OLS fit EXCLUDING the outlier (first n - 1 = 14 observations).
summary(lm(weight[1:(n-1)]~height[1:(n-1)]))
## 
## Call:
## lm(formula = weight[1:(n - 1)] ~ height[1:(n - 1)])
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -10.889  -5.197   1.994   4.308  10.011 
## 
## Coefficients:
##                   Estimate Std. Error t value Pr(>|t|)  
## (Intercept)       -16.0385    34.2319  -0.469   0.6478  
## height[1:(n - 1)]   0.5168     0.1915   2.699   0.0194 *
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 6.726 on 12 degrees of freedom
## Multiple R-squared:  0.3777, Adjusted R-squared:  0.3258 
## F-statistic: 7.283 on 1 and 12 DF,  p-value: 0.01936
# OLS fit INCLUDING the outlier (all n = 15 observations): note the larger
# slope (0.84 vs 0.52) and larger residual standard error (9.66 vs 6.73).
summary(lm(weight~height))
## 
## Call:
## lm(formula = weight ~ height)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -13.262  -7.108  -1.589   5.960  22.536 
## 
## Coefficients:
##             Estimate Std. Error t value Pr(>|t|)   
## (Intercept) -71.5004    44.5970  -1.603  0.13289   
## height        0.8366     0.2478   3.375  0.00497 **
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 9.66 on 13 degrees of freedom
## Multiple R-squared:  0.4671, Adjusted R-squared:  0.4261 
## F-statistic: 11.39 on 1 and 13 DF,  p-value: 0.004971

2 Bayesian inference

# Gaussian log-likelihood for the simple linear regression
# y_i = alpha + beta * x_i + e_i, with e_i iid N(0, sig^2).
# Returns the sum of the pointwise log densities.
loglike.g = function(alpha, beta, sig, x, y) {
  mu = alpha + beta * x
  sum(dnorm(y, mean = mu, sd = sig, log = TRUE))
}
# Student's t log-likelihood for y_i = alpha + beta * x_i + e_i, where the
# errors follow a t distribution with `nu` degrees of freedom (read from the
# calling environment; requires nu > 2) rescaled so that Var(e_i) = sig^2,
# i.e. e_i = tau * t_nu with tau = sqrt((nu - 2) / nu) * sig.
#
# Fix: the change-of-variable (Jacobian) term must use the number of
# observations length(y); the original used the global sample size n, which
# is wrong when the function is evaluated on the reduced data (x1, y1) of
# length n - 1, biasing the "without outlier" marginal likelihood.
loglike.t = function(alpha, beta, sig, x, y) {
  tau = sqrt((nu - 2) * sig^2 / nu)
  sum(dt((y - alpha - beta * x) / tau, df = nu, log = TRUE)) - length(y) * log(tau)
}


# Data vectors for the two analyses: (x, y) uses all n observations,
# (x1, y1) drops the outlier (observation n).
y  = weight
x  = height
y1 = y[seq_len(n - 1)]
x1 = x[seq_len(n - 1)]

nu = 3       # degrees of freedom for the Student's t error model
M  = 100000  # number of draws from the prior

# Independent uniform prior draws for (alpha, beta, sigma).
alpha.d = runif(M, -250, 150)
beta.d  = runif(M, 0, 2)
sig.d   = runif(M, 1, 20)

# Log-likelihood of each prior draw; column 1 = Gaussian, column 2 = Student's t.
# (w, ww) are based on the full data, (w1, ww1) exclude the outlier.
w  = matrix(0, M, 2)
w1 = matrix(0, M, 2)
for (draw in seq_len(M)) {
  a = alpha.d[draw]
  b = beta.d[draw]
  s = sig.d[draw]
  w[draw, ]  = c(loglike.g(a, b, s, x, y), loglike.t(a, b, s, x, y))
  w1[draw, ] = c(loglike.g(a, b, s, x1, y1), loglike.t(a, b, s, x1, y1))
}

# Raw-scale likelihood weights, kept for the marginal-likelihood estimates later.
ww  = exp(w)
ww1 = exp(w1)

# Subtract each column's maximum before exponentiating, for numerical
# stability; the resampling below only needs the weights up to a constant.
w[, 1]  = exp(w[, 1] - max(w[, 1]))
w[, 2]  = exp(w[, 2] - max(w[, 2]))
w1[, 1] = exp(w1[, 1] - max(w1[, 1]))
w1[, 2] = exp(w1[, 2] - max(w1[, 2]))

# Sampling-importance-resampling: draw indices proportional to each weight column.
ind.n  = sample(seq_len(M), size = M, replace = TRUE, prob = w[, 1])
ind.t  = sample(seq_len(M), size = M, replace = TRUE, prob = w[, 2])
ind1.n = sample(seq_len(M), size = M, replace = TRUE, prob = w1[, 1])
ind1.t = sample(seq_len(M), size = M, replace = TRUE, prob = w1[, 2])

# Approximate posterior draws of (alpha, beta, sigma) via the resampled
# indices: `pars` uses the full data, `pars1` excludes the outlier;
# slice 1 = Gaussian model, slice 2 = Student's t model.
draws = cbind(alpha.d, beta.d, sig.d)

pars = array(0, c(2, M, 3))
pars[1, , ] = draws[ind.n, ]
pars[2, , ] = draws[ind.t, ]

pars1 = array(0, c(2, M, 3))
pars1[1, , ] = draws[ind1.n, ]
pars1[2, , ] = draws[ind1.t, ]

# Labels reused by the plotting sections below.
names = c("alpha", "beta", "sigma")
model = c("Gaussian errors", "Student's t errors")

2.1 Marginal posterior distributions (with outlier)

# Marginal posterior histograms, full data: rows = models, columns = parameters.
par(mfrow = c(2, 3))
for (m in 1:2) {
  for (p in 1:3) {
    hist(pars[m, , p], prob = TRUE, main = names[p], xlab = model[m])
  }
}

2.2 Marginal posterior distributions (without outlier)

# Marginal posterior histograms with the outlier removed: rows = models,
# columns = parameters.
par(mfrow = c(2, 3))
for (m in 1:2) {
  for (p in 1:3) {
    hist(pars1[m, , p], prob = TRUE, main = names[p], xlab = model[m])
  }
}

2.3 Comparison

# With-vs-without-outlier boxplots, one panel per (model, parameter) pair,
# on a common y-scale per parameter.
par(mfrow = c(2, 3))
for (m in 1:2) {
  for (p in 1:3) {
    boxplot(pars[m, , p], pars1[m, , p],
            main = names[p], xlab = model[m], outline = FALSE,
            ylim = range(pars[, , p], pars1[, , p]),
            names = c("with outlier", "without outlier"))
  }
}

# All four fits (Gaussian/t, with/without outlier) side by side,
# one panel per parameter.
par(mfrow = c(3, 1))
for (p in 1:3) {
  boxplot(pars[1, , p], pars1[1, , p], pars[2, , p], pars1[2, , p],
          main = names[p], outline = FALSE,
          ylim = range(pars[, , p], pars1[, , p]),
          names = c("G with outlier", "G without outlier",
                    "t with outlier", "t without outlier"))
}

2.4 Predictive, Bayes factor and posterior model probability

2.4.1 With outlier

The Bayes factor is around 0.57, which indicates that the Gaussian and Student's \(t\) models are quite similar in terms of model adequacy. The posterior model probabilities are roughly 1/3 for the Gaussian model and 2/3 for the Student's \(t\) model.

# Monte Carlo estimate of each model's marginal likelihood (prior predictive):
# the average of the raw likelihood weights over the uniform prior draws.
pred  = apply(ww,2,mean)
pmp   = pred/sum(pred)   # posterior model probabilities (equal prior model odds)
B12   = pred[1]/pred[2]  # Bayes factor: Gaussian (model 1) vs Student's t (model 2)

pred
## [1] 3.191276e-27 5.511371e-27
B12
## [1] 0.5790348
pmp
## [1] 0.3667018 0.6332982

2.4.2 Without outlier

When removing the outlier, the Bayes factor is around 10, which indicates that the Gaussian model is moderately better than the Student's \(t\) model. The posterior model probabilities are roughly 90% for the Gaussian model and 10% for the Student's \(t\) model. In other words, the presence of the outlier gives the Student's \(t\) model extra fitting strength. That strength disappears when the outlier is removed, and the Gaussian model suffices as a linear model.

# Same marginal-likelihood / Bayes-factor computation, now with the outlier
# removed (weights ww1 computed from x1, y1).
pred1 = apply(ww1,2,mean)
pmp1  = pred1/sum(pred1)   # posterior model probabilities (equal prior model odds)
B121  = pred1[1]/pred1[2]  # Bayes factor: Gaussian vs Student's t, outlier excluded

pred1
## [1] 7.521713e-24 7.705170e-25
B121
## [1] 9.761904
pmp1
## [1] 0.90707964 0.09292036
LS0tCnRpdGxlOiAiU2ltcGxlIGxpbmVhciByZWdyZXNzaW9uIgphdXRob3I6ICJHYXVzc2lhbiB2cyBTdHVkZW50J3MgdCBlcnJvcnMiCmRhdGU6ICIwNS8wNi8yMDI1IgpvdXRwdXQ6CiAgaHRtbF9kb2N1bWVudDoKICAgIHRvYzogdHJ1ZQogICAgdG9jX2RlcHRoOiAzCiAgICB0b2NfY29sbGFwc2VkOiB0cnVlCiAgICBjb2RlX2Rvd25sb2FkOiB5ZXMKICAgIG51bWJlcl9zZWN0aW9uczogdHJ1ZQotLS0KCiMgV2VpZ2h0IHZzIGhlaWdodAoKV2UgY29sbGVjdGVkIGRhdGEgZnJvbSAxNSB2b2x1bnRlZXJzIChteSBzdHVkZW50cyBhbmQgbXlzZWxmKSBhbmQgYmVsb3cgd2Ugc3VtbWFyaXplIHRoZSBkYXRhIGluZm9ybWF0aW9uIGdyYXBoaWNhbGx5IGFuZCBlbnRlcnRhaW4gdHdvIGNvbXBldGluZyBzaW1wbGUgbGluZWFyIHJlZ3Jlc3Npb24gbW9kZWxzOiAgaSkgR2F1c3NpYW4gZXJyb3JzLCBpaSkgU3R1ZGVudCdzICR0JCBlcnJvcnMuCgpgYGB7ciBmaWcud2lkdGg9NiwgZmlnLmhlaWdodD02LCBmaWcuYWxpZ249J2NlbnRlcid9CmhlZGkgPSAxMTUKbiAgICA9IDE1CmRhdGEgPSBtYXRyaXgoYygKMTc4LDcwLAoxNzUsODAsCjE4NCw4MiwKMjAwLDg5LAoxODMsODAsCjE3Myw2NSwKMTg1LDcwLAoxNjIsNzAsCjE5MCw4NSwKMTc0LDgwLAoxODAsODcsCjE2NSw3NCwKMTc2LDcyLAoxNzQsNjMsCjE5NixoZWRpKSxuLDIsYnlyb3c9VFJVRSkKCmhlaWdodCA9IGRhdGFbLDFdCndlaWdodCA9IGRhdGFbLDJdCgpwYXIobWZyb3c9YygxLDEpKQpwbG90KGhlaWdodCx3ZWlnaHQseGxhYj0iSGVpZ2h0IChpbiBjbSkiLHlsYWI9IldlaWdodCAoaW4ga2cpIikKcG9pbnRzKGhlaWdodFtuXSx3ZWlnaHRbbl0sY29sPTIscGNoPTE2KQphYmxpbmUobG0od2VpZ2h0WzE6KG4tMSldfmhlaWdodFsxOihuLTEpXSksbHdkPTIpCmFibGluZShsbSh3ZWlnaHR+aGVpZ2h0KSxjb2w9Mixsd2Q9MikKYGBgCgojIyBGaXR0ZWQgbGluZXMKQmVsb3cgd2UgdXNlIHRoZSAqbG0qIChsaW5lYXIgbW9kZWwpIGZ1bmN0aW9uIGZyb20gUiB0byBydW4gTUxFL09MUyBmb3IgdGhlIHJlZ3Jlc3Npb24KJCQKXG1ib3h7d2VpZ2h0fV9pID0gXGFscGhhICsgXGJldGEgXCBcbWJveHtoZWlnaHR9X2kgKyBcdmFyZXBzaWxvbl9pIFxxcXVhZFxxcXVhZCBcdmFyZXBzaWxvbl9pIFwgXCBcbWJveHtpaWR9IFwgXCBOKDAsXHNpZ21hXjIpLgokJApUaGUgdHdvIGZpdHRlZCBsaW5lcywgaW5jbHVkaW5nIGFuZCBleGNsdWluZyB0aGUgb3V0bGllciAob2JzZXJ2YXRpb24gMTUpIGFyZSBwcmVzZW50ZWQgaW4gdGhlIGFib3ZlIHBpY3R1cmUuICAKCkxldCB1cyBleHBsb3JlIGhvdyB0aGUgb3V0bGllciBhZmZlY3RzIHRoZSBsaW5lYXIgcmVncmVzc2lvbiBmaXRzLiAgCgoqIFRoZSBhZGp1c3RlZCAkUl4yJCBnb2VzIGZyb20gMzMlIHRvIDQzJSAoaW5jbHVkaW5nIG91dGxpZXIpLiAgCgoqIFRoZSBoZWlnaHQgY29lZmZpY2llbnQgaXMgZXN0aW1hdGVkIGF0ICQwLjUyJCB3aXRo
IHAtdmFsdWUgYXQgJDAuMDIkIGZvciB0aGUgbnVsbCBoeXBvdGhlc2lzICRIXzA6XGJldGE9MCQsIHdoZW4gZXhjbHVkaW5nIHRoZSBvdXRsaWVyLiAgCgoqIFRoZSBoZWlnaHQgY29lZmZpY2llbnQgaXMgZXN0aW1hdGVkIGF0ICQwLjg0JCB3aXRoIHAtdmFsdWUgYXQgJDAuMDA1JCBmb3IgdGhlIG51bGwgaHlwb3RoZXNpcyAkSF8wOlxiZXRhPTAkLCB3aGVuIGluY2x1ZGluZyB0aGUgb3V0bGllci4gCgoqIFRoZXJlZm9yZSwgYXQgdGhlIDElIGxldmVsLCBvbmUgd291bGQgcmVqZWN0ZWQgdGhlIG51bGwgaHlwb3RoZXNpcyB3aGVuIHRoZSBvdXRsaWVyIGlzIGluY2x1ZGVkIGFuZCBmYWlsIHRvIHJlamVjdCB3aGVuIHRoZSBvdWxpZXIgaXMgZXhjbHVkZWQuIAoKKiBUaGUgZXN0aW1hdGVzIG9mICRcc2lnbWEkIGFyZSAkNi43JCAoZXhjbHVkaW5nIHRoZSBvdXRsaWVyKSBhbmQgJDkuNyQgKGluY2x1ZGluZyB0aGUgb3V0bGllciksIGllLiBhIHNpZ25pZmljYW50IGluY3JlYXNlIGluIHRoZSB2YXJpYWJpbGl0eSBvZiB0aGUgdW5leHBsYWluZWQgZXJyb3Igd2hlbiB0aGUgb3V0bGllciBpcyBpbmNsdWRlZC4KCmBgYHtyfQpzdW1tYXJ5KGxtKHdlaWdodFsxOihuLTEpXX5oZWlnaHRbMToobi0xKV0pKQpzdW1tYXJ5KGxtKHdlaWdodH5oZWlnaHQpKQpgYGAKCiMgQmF5ZXNpYW4gaW5mZXJlbmNlCmBgYHtyfQpsb2dsaWtlLmcgPSBmdW5jdGlvbihhbHBoYSxiZXRhLHNpZyx4LHkpewogIHN1bShkbm9ybSh5LGFscGhhK2JldGEqeCxzaWcsbG9nPVRSVUUpKQp9CmxvZ2xpa2UudCA9IGZ1bmN0aW9uKGFscGhhLGJldGEsc2lnLHgseSl7Cgl0YXUgPSBzcXJ0KChudS0yKSpzaWdeMi9udSkKICBzdW0oZHQoKHktYWxwaGEtYmV0YSp4KS90YXUsZGY9bnUsbG9nPVRSVUUpKS1uKmxvZyh0YXUpCn0KCgp5ICA9IHdlaWdodAp4ICA9IGhlaWdodAp5MSA9IHlbMToobi0xKV0KeDEgPSB4WzE6KG4tMSldCgpudSAgICAgID0gMwpNICAgICAgID0gMTAwMDAwCmFscGhhLmQgPSBydW5pZihNLC0yNTAsMTUwKQpiZXRhLmQgID0gcnVuaWYoTSwwLDIpCnNpZy5kICAgPSBydW5pZihNLDEsMjApCncgICAgICAgPSBtYXRyaXgoMCxNLDIpCncxICAgICAgPSBtYXRyaXgoMCxNLDIpCmZvciAoaSBpbiAxOk0pewogIHdbaSwxXSA9IGxvZ2xpa2UuZyhhbHBoYS5kW2ldLGJldGEuZFtpXSxzaWcuZFtpXSx4LHkpCiAgd1tpLDJdID0gbG9nbGlrZS50KGFscGhhLmRbaV0sYmV0YS5kW2ldLHNpZy5kW2ldLHgseSkKICB3MVtpLDFdID0gbG9nbGlrZS5nKGFscGhhLmRbaV0sYmV0YS5kW2ldLHNpZy5kW2ldLHgxLHkxKQogIHcxW2ksMl0gPSBsb2dsaWtlLnQoYWxwaGEuZFtpXSxiZXRhLmRbaV0sc2lnLmRbaV0seDEseTEpCn0Kd3cgID0gZXhwKHcpCnd3MSA9IGV4cCh3MSkKd1ssMV0gICA9IGV4cCh3WywxXS1tYXgod1ssMV0pKQp3WywyXSAgID0gZXhwKHdbLDJdLW1heCh3WywyXSkpCncxWywxXSAgPSBleHAodzFbLDFdLW1heCh3MVssMV0pKQp3MVssMl0gID0g
ZXhwKHcxWywyXS1tYXgodzFbLDJdKSkKaW5kLm4gICA9IHNhbXBsZSgxOk0sc2l6ZT1NLHJlcGxhY2U9VFJVRSxwcm9iPXdbLDFdKQppbmQudCAgID0gc2FtcGxlKDE6TSxzaXplPU0scmVwbGFjZT1UUlVFLHByb2I9d1ssMl0pCmluZDEubiAgPSBzYW1wbGUoMTpNLHNpemU9TSxyZXBsYWNlPVRSVUUscHJvYj13MVssMV0pCmluZDEudCAgPSBzYW1wbGUoMTpNLHNpemU9TSxyZXBsYWNlPVRSVUUscHJvYj13MVssMl0pCgpwYXJzID0gYXJyYXkoMCxjKDIsTSwzKSkKcGFyc1sxLCxdID0gY2JpbmQoYWxwaGEuZFtpbmQubl0sYmV0YS5kW2luZC5uXSxzaWcuZFtpbmQubl0pCnBhcnNbMiwsXSA9IGNiaW5kKGFscGhhLmRbaW5kLnRdLGJldGEuZFtpbmQudF0sc2lnLmRbaW5kLnRdKQoKcGFyczEgPSBhcnJheSgwLGMoMixNLDMpKQpwYXJzMVsxLCxdID0gY2JpbmQoYWxwaGEuZFtpbmQxLm5dLGJldGEuZFtpbmQxLm5dLHNpZy5kW2luZDEubl0pCnBhcnMxWzIsLF0gPSBjYmluZChhbHBoYS5kW2luZDEudF0sYmV0YS5kW2luZDEudF0sc2lnLmRbaW5kMS50XSkKCm5hbWVzID0gYygiYWxwaGEiLCJiZXRhIiwic2lnbWEiKQptb2RlbCA9IGMoIkdhdXNzaWFuIGVycm9ycyIsIlN0dWRlbnQncyB0IGVycm9ycyIpCmBgYAoKIyMgTWFyZ2luYWwgcG9zdGVyaW9yIGRpc3RyaWJ1dGlvbnMgKHdpdGggb3V0bGllcikKYGBge3IgZmlnLndpZHRoPTEwLCBmaWcuaGVpZ2h0PTYsIGZpZy5hbGlnbj0nY2VudGVyJ30KcGFyKG1mcm93PWMoMiwzKSkKZm9yIChqIGluIDE6MikKICBmb3IgKGkgaW4gMTozKQogICAgaGlzdChwYXJzW2osLGldLHByb2I9VFJVRSxtYWluPW5hbWVzW2ldLHhsYWI9bW9kZWxbal0pCmBgYAoKIyMgTWFyZ2luYWwgcG9zdGVyaW9yIGRpc3RyaWJ1dGlvbnMgKHdpdGhvdXQgb3V0bGllcikKYGBge3IgZmlnLndpZHRoPTEwLCBmaWcuaGVpZ2h0PTYsIGZpZy5hbGlnbj0nY2VudGVyJ30KcGFyKG1mcm93PWMoMiwzKSkKZm9yIChqIGluIDE6MikKICBmb3IgKGkgaW4gMTozKQogICAgaGlzdChwYXJzMVtqLCxpXSxwcm9iPVRSVUUsbWFpbj1uYW1lc1tpXSx4bGFiPW1vZGVsW2pdKQpgYGAKCiMjIENvbXBhcmlzb24KYGBge3IgZmlnLndpZHRoPTEwLCBmaWcuaGVpZ2h0PTgsIGZpZy5hbGlnbj0nY2VudGVyJ30KcGFyKG1mcm93PWMoMiwzKSkKZm9yIChqIGluIDE6MikKICBmb3IgKGkgaW4gMTozKQogICAgYm94cGxvdChwYXJzW2osLGldLHBhcnMxW2osLGldLG1haW49bmFtZXNbaV0seGxhYj1tb2RlbFtqXSxvdXRsaW5lPUZBTFNFLAogICAgICAgICAgICB5bGltPXJhbmdlKHBhcnNbLCxpXSxwYXJzMVssLGldKSxuYW1lcz1jKCJ3aXRoIG91dGxpZXIiLCJ3aXRob3V0IG91dGxpZXIiKSkKYGBgCgpgYGB7ciBmaWcud2lkdGg9NiwgZmlnLmhlaWdodD0xMiwgZmlnLmFsaWduPSdjZW50ZXInfQpwYXIobWZyb3c9YygzLDEpKQpmb3IgKGkgaW4gMTozKQogICAgYm94cGxvdChwYXJzWzEsLGldLHBhcnMxWzEsLGldLHBhcnNbMiwsaV0scGFy
czFbMiwsaV0sbWFpbj1uYW1lc1tpXSxvdXRsaW5lPUZBTFNFLAogICAgICAgICAgICB5bGltPXJhbmdlKHBhcnNbLCxpXSxwYXJzMVssLGldKSwKICAgICAgICAgICAgbmFtZXM9YygiRyB3aXRoIG91dGxpZXIiLCJHIHdpdGhvdXQgb3V0bGllciIsInQgd2l0aCBvdXRsaWVyIiwidCB3aXRob3V0IG91dGxpZXIiKSkKYGBgCgojIyBQcmVkaWN0aXZlLCBCYXllcyBmYWN0b3IgYW5kIHBvc3RlcmlvciBtb2RlbCBwcm9iYWJpbGl0eQoKIyMjIFdpdGggb3V0bGllcgpUaGUgQmF5ZXMgZmFjdG9yIGlzIGFyb3VuZCAwLjU3LCB3aGljaCBpbmRpY2F0ZXMgdGhhdCBib3RoIEdhdXNzaWFuIGFuZCBTdHVkZW50J3QgcXVpdGUgc2ltaWxhciBpbiB0ZXJtcyBvZiBtb2RlbCBhZGVxdWFjeS4gIFRoZSBwb3N0ZXJpb3IgbW9kZWwgcHJvYmFiaWxpdGllcyBhcmUgcm91Z2hseSAxLzMgZm9yIHRoZSBHYXVzc2lhbiBtb2RlbCBhbmQgMi8zIGZvciB0aGUgU3R1ZGVudCdzICR0JCBtb2RlbC4KCmBgYHtyfQpwcmVkICA9IGFwcGx5KHd3LDIsbWVhbikKcG1wICAgPSBwcmVkL3N1bShwcmVkKQpCMTIgICA9IHByZWRbMV0vcHJlZFsyXQoKcHJlZApCMTIKcG1wCmBgYAoKIyMjIFdpdGhvdXQgb3V0bGllcgoKV2hlbiByZW1vdmluZyB0aGUgb3V0bGllciwgdGhlIEJheWVzIGZhY3RvciBpcyBhcm91bmQgMTAsIHdoaWNoIGluZGljYXRlcyB0aGF0IHRoZSBHYXVzc2lhbiBtb2RlbCBpcyBtb2RlcmF0ZWx5IGJldHRlciB0aGFuIHRoZVN0dWRlbnQndCBtb2RlbC4gIFRoZSBwb3N0ZXJpb3IgbW9kZWwgcHJvYmFiaWxpdGllcyBhcmUgcm91Z2hseSA5MCUgZm9yIHRoZSBHYXVzc2lhbiBtb2RlbCBhbmQgMTAlIGZvciB0aGUgU3R1ZGVudCdzICR0JCBtb2RlbC4gIEluIHdvcmRzLCB0aGUgcHJlc2VuY2Ugb2YgdGhlIG91dGxpZXIgZ2l2ZXMgdGhlIFN0dWRlbnQncyAkdCQgbW9kZWwgYSBmaXR0aW5nIHN0cmVuZ2guICBTdWNoIHN0cmVuZ2ggZGlzYXBwZWFyIHdoZW4gdGhlIG91dGxpZXIgaXMgcmVtb3ZlZCBhbmQgdGhlIEdhdXNzaWFuIG1vZGVsIHN1ZmZpY2VzIGFzIGEgbGluZWFyIG1vZGVsLgoKYGBge3J9CnByZWQxID0gYXBwbHkod3cxLDIsbWVhbikKcG1wMSAgPSBwcmVkMS9zdW0ocHJlZDEpCkIxMjEgID0gcHJlZDFbMV0vcHJlZDFbMl0KCnByZWQxCkIxMjEKcG1wMQpgYGAK