Loading the data (downloaded from Yahoo Finance)
pbr.daily = read.csv("PBR-daily.csv",sep=",")
# Percent simple returns from the adjusted closing price (column 6), then demeaned
r.d = 100*(exp(diff(log(pbr.daily[,6])))-1)
r.d = r.d-mean(r.d)
head(pbr.daily)
## Date Open High Low Close Adj.Close Volume
## 1 2000-08-10 6.312500 7.156250 6.125000 7.156250 2.793920 143929200
## 2 2000-08-11 6.843750 7.281250 6.750000 7.140625 2.787819 25139200
## 3 2000-08-14 7.031250 7.218750 7.000000 7.109375 2.775619 6935200
## 4 2000-08-15 7.109375 7.359375 7.015625 7.140625 2.787819 9078800
## 5 2000-08-16 7.203125 7.500000 7.125000 7.328125 2.861023 11728000
## 6 2000-08-17 7.390625 7.781250 7.359375 7.687500 3.001328 7148000
tail(pbr.daily)
## Date Open High Low Close Adj.Close Volume
## 5103 2020-11-19 9.13 9.27 9.08 9.20 9.20 15046700
## 5104 2020-11-20 9.13 9.21 9.01 9.07 9.07 14427600
## 5105 2020-11-23 9.32 9.38 9.21 9.35 9.35 25540200
## 5106 2020-11-24 9.85 10.09 9.74 10.00 10.00 43398800
## 5107 2020-11-25 9.95 10.16 9.84 10.03 10.03 22298400
## 5108 2020-11-27 9.72 9.94 9.70 9.94 9.94 16310700
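Since \(\exp(\Delta\log p_t)=p_t/p_{t-1}\), the transformation above is just the percent simple return; a quick sanity check (an added sketch, not part of the original analysis):
# Verify that exp(diff(log(p))) - 1 matches the ordinary simple return
p = pbr.daily[,6]
all.equal(100*(exp(diff(log(p)))-1), 100*(p[-1]/p[-length(p)]-1))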
par(mfrow=c(1,1))
n = length(r.d)
ind = trunc(seq(1,n,length.out=10))
plot(pbr.daily[,6],xlab="Days",ylab="Price",axes=FALSE,type="l")
axis(2);box();axis(1,at=ind,labels=pbr.daily[1+ind,1])
par(mfrow=c(1,1))
ts.plot(r.d,xlab="Days",ylab="Return")
par(mfrow=c(1,1))
hist(r.d,prob=TRUE,breaks=seq(-65,65,by=1),xlim=c(-40,40),xlab="",main="")
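The returns show the heavy tails typical of daily equity data; as an added visual check (not in the original output), a normal density with the same standard deviation can be overlaid:
hist(r.d,prob=TRUE,breaks=seq(-65,65,by=1),xlim=c(-40,40),xlab="",main="")
# Overlay a matching normal density to highlight the excess kurtosis
curve(dnorm(x,mean=0,sd=sd(r.d)),add=TRUE,col=2,lwd=2)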
Bayesian GARCH(1,1) with Student’s t errors
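In standard notation (the exact innovation scaling follows the bayesGARCH package), the GARCH(1,1) model with Student's t errors for the demeaned returns is
\[ r_t = \varepsilon_t \sqrt{h_t}, \qquad \varepsilon_t \sim t_\nu, \qquad h_t = \alpha_0 + \alpha_1 r_{t-1}^2 + \beta\, h_{t-1}, \]
with \(\alpha_0>0\), \(\alpha_1,\beta\ge 0\), and covariance stationarity when \(\alpha_1+\beta<1\).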
M0 = 10000 # to be discarded (burn-in)
M = 10000 # kept for posterior inference
niter = M0+M
library(bayesGARCH)
time.garch = system.time({
  fit.garch = bayesGARCH(r.d,mu.alpha=c(0,0),Sigma.alpha=1000*diag(1,2),
                         mu.beta=0,Sigma.beta=1000,lambda=0.01,delta=2,
                         control=list(n.chain=1,l.chain=niter,refresh=100))
})
## chain: 1 iteration: 100 parameters: 0.2264 0.0955 0.8757 26.217
## chain: 1 iteration: 200 parameters: 0.1946 0.0768 0.9027 7.2502
## chain: 1 iteration: 300 parameters: 0.2244 0.0846 0.8908 6.8931
## ... (progress printed every 100 iterations; output truncated) ...
## chain: 1 iteration: 19900 parameters: 0.1645 0.0818 0.9044 5.8754
## chain: 1 iteration: 20000 parameters: 0.2023 0.0794 0.8985 6.089
time.garch
## user system elapsed
## 100.199 12.158 112.456
# Keep the M post-burn-in draws of chain 1; columns are (alpha0, alpha1, beta, nu)
draws.garch = fit.garch$chain1
keep = (M0+1):niter
draws.garch = draws.garch[keep,]
# Persistence; retain only covariance-stationary draws (alpha1 + beta < 1)
pers = draws.garch[,2]+draws.garch[,3]
ind = pers < 1
# Unconditional standard deviation: sqrt(nu/(nu-2) * alpha0/(1-alpha1-beta))
un.sd = sqrt(draws.garch[ind,4]/(draws.garch[ind,4]-2)*
             draws.garch[ind,1]/(1-draws.garch[ind,2]-draws.garch[ind,3]))
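A quick numerical summary of the posterior (an added sketch, not part of the original output):
# Posterior quantiles of (alpha0, alpha1, beta, nu)
garch.summ = t(apply(draws.garch,2,quantile,probs=c(0.025,0.5,0.975)))
round(garch.summ,4)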
# Time-varying standard deviations and their quantiles
n = length(r.d)
# One conditional-variance path per retained draw, initialized at h[,1] = 0
h = matrix(0,M,n)
for (t in 2:n)
  h[,t] = draws.garch[,1]+draws.garch[,2]*(r.d[t-1]^2)+draws.garch[,3]*h[,t-1]
qh = t(apply(sqrt(h),2,quantile,c(0.025,0.5,0.975)))
Trace plots and autocorrelations
par(mfrow=c(2,4))
# Burn-in was already discarded above, so the whole plotted range is posterior output
ts.plot(draws.garch[,1],xlab="iterations",main=expression(alpha[0]),ylab="")
ts.plot(draws.garch[,2],xlab="iterations",main=expression(alpha[1]),ylab="")
ts.plot(draws.garch[,3],xlab="iterations",main=expression(beta),ylab="")
ts.plot(draws.garch[,4],xlab="iterations",main=expression(nu),ylab="",ylim=c(0,20))
for (i in 1:4)
  acf(draws.garch[,i],main="")
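Mixing can also be quantified numerically; a minimal sketch using the coda package (an addition, assuming it is installed):
library(coda)
# Effective sample size of each parameter chain
effectiveSize(mcmc(draws.garch))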
Marginal posteriors of \((\alpha_0,\alpha_1,\beta,\nu)\)
par(mfrow=c(2,2))
hist(draws.garch[,1],prob=TRUE,xlab="",main=expression(alpha[0]))
hist(draws.garch[,2],prob=TRUE,xlab="",main=expression(alpha[1]))
hist(draws.garch[,3],prob=TRUE,xlab="",main=expression(beta))
hist(draws.garch[,4],prob=TRUE,xlab="",main=expression(nu))
Persistence \(\alpha_1+\beta\) and unconditional st.dev. \(\sqrt{\tfrac{\nu}{\nu-2}\,\tfrac{\alpha_0}{1-\alpha_1-\beta}}\)
par(mfrow=c(1,2))
plot(density(pers),xlab="",main=expression(alpha[1]+beta))
# A few near-nonstationary draws give huge unconditional st.dev.; truncate at 10 for display
plot(density(un.sd[un.sd<10]),xlim=c(2.5,8),xlab="",main="Unconditional st.dev.")
Time-varying standard deviations
par(mfrow=c(1,1))
ts.plot(qh[10:n,c(1,3,2)],col=c(3,3,2),ylim=c(0,15),xlab="Days",
        ylab="Time-varying standard deviation")
# Scaled absolute returns plotted as spikes for reference
lines(0.075*abs(r.d[10:n]),type="h")
title("Posterior quantiles - 2.5%, 50%, 97.5%\n Absolute returns")
Bayesian SV-AR(1) with Student’s t errors
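Following the parameterization used by the stochvol package, the return at time \(t\) has a latent log-variance \(h_t\) that follows a stationary AR(1):
\[ r_t = \exp(h_t/2)\,\varepsilon_t, \qquad \varepsilon_t \sim t_\nu, \qquad h_t = \mu + \phi\,(h_{t-1}-\mu) + \sigma\,\eta_t, \qquad \eta_t \sim N(0,1). \]
Unlike the GARCH recursion, the volatility path is not a deterministic function of past returns, so the \(h_t\) are sampled along with \((\mu,\phi,\sigma,\nu)\).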
library(stochvol)
time.sv = system.time({
  fit.sv = svtsample(r.d,draws=M,burnin=M0)
})
## Done!
## Summarizing posterior draws...
time.sv
## user system elapsed
## 56.086 5.596 62.813
# Parameter draws (columns: mu, phi, sigma, nu) and latent log-variances
draws.sv = fit.sv$para[[1]]
logvol = fit.sv$latent[[1]]
# Standard deviations with the Student-t scale correction sqrt(nu/(nu-2))
sd.sv = exp(logvol/2)*sqrt(draws.sv[,4]/(draws.sv[,4]-2))
qvol = t(apply(sd.sv,2,quantile,c(0.025,0.5,0.975)))
Marginal posteriors of \((\mu,\phi,\sigma,\nu)\)
par(mfrow=c(2,2))
hist(draws.sv[,1],prob=TRUE,xlab="",main=expression(mu))
hist(draws.sv[,2],prob=TRUE,xlab="",main=expression(phi))
hist(draws.sv[,3],prob=TRUE,xlab="",main=expression(sigma))
hist(draws.sv[,4],prob=TRUE,xlab="",main=expression(nu))
Time-varying standard deviations
par(mfrow=c(1,1))
ts.plot(qvol[10:n,c(1,3,2)],col=c(3,3,2),ylim=c(0,15),xlab="Days",ylab="Time-varying standard deviation")
lines(0.075*abs(r.d[10:n]),type="h")
title("Posterior quantiles - 2.5%, 50%, 97.5%\n Absolute returns")