Installing packages
#install.packages("fGarch")
library(tidyquant)
## Loading required package: lubridate
##
## Attaching package: 'lubridate'
## The following objects are masked from 'package:base':
##
## date, intersect, setdiff, union
## Loading required package: PerformanceAnalytics
## Loading required package: xts
## Loading required package: zoo
##
## Attaching package: 'zoo'
## The following objects are masked from 'package:base':
##
## as.Date, as.Date.numeric
##
## Attaching package: 'PerformanceAnalytics'
## The following object is masked from 'package:graphics':
##
## legend
## Loading required package: quantmod
## Loading required package: TTR
## Registered S3 method overwritten by 'quantmod':
## method from
## as.zoo.data.frame zoo
## ══ Need to Learn tidyquant? ════════════════════════════════════════════════════
## Business Science offers a 1-hour course - Learning Lab #9: Performance Analysis & Portfolio Optimization with tidyquant!
## </> Learn more at: https://university.business-science.io/p/learning-labs-pro </>
library(ggplot2)
# stringsAsFactors = FALSE is already the default since R 4.0; kept so the
# script behaves identically on older R installations.
options(stringsAsFactors = FALSE)
library(fGarch)
## Loading required package: timeDate
##
## Attaching package: 'timeDate'
## The following objects are masked from 'package:PerformanceAnalytics':
##
## kurtosis, skewness
## Loading required package: timeSeries
##
## Attaching package: 'timeSeries'
## The following object is masked from 'package:zoo':
##
## time<-
## Loading required package: fBasics
##
## Attaching package: 'fBasics'
## The following object is masked from 'package:TTR':
##
## volatility
library(bayesGARCH)
library(stochvol)
Getting PBR data from August 10th, 2000 up to the last Friday, March 5th, 2021.
getSymbols("PBR",from='2000-08-10',to='2021-03-05',warnings=FALSE,auto.assign=TRUE)
## 'getSymbols' currently uses auto.assign=TRUE by default, but will
## use auto.assign=FALSE in 0.5-0. You will still be able to use
## 'loadSymbols' to automatically load data. getOption("getSymbols.env")
## and getOption("getSymbols.auto.assign") will still be checked for
## alternate defaults.
##
## This message is shown once per session and may be disabled by setting
## options("getSymbols.warning4.0"=FALSE). See ?getSymbols for details.
## [1] "PBR"
# Build standardized log-returns from the adjusted close (column 6 of the
# getSymbols output) and plot prices next to the standardized returns.
n <- nrow(PBR)
price <- as.numeric(PBR[, 6])
# diff(log(price)) is the idiomatic equivalent of log(price[2:n]/price[1:(n-1)]).
ret <- diff(log(price))
# Center and scale to zero mean / unit variance; sd(x) == sqrt(var(x)) for
# a vector, so this reproduces the original standardization exactly.
ret <- ret - mean(ret)
ret <- ret / sd(ret)
n <- length(ret)
par(mfrow=c(1,2))
ts.plot(price,xlab="August 10th 2000 - March 5th 2021",ylab="Price")
ts.plot(ret,xlab="August 10th 2000 - March 5th 2021",ylab="Log returns (standardized)")
Fitting classical GARCH(1,1) with Student's t errors (cond.dist = "std")
fit.garch = garchFit(~garch(1,1),data=ret,cond.dist="std",trace=F,include.mean=FALSE)
## Warning: Using formula(x) is deprecated when x is a character vector of length > 1.
## Consider formula(paste(x, collapse = " ")) instead.
# Pull the residuals and the fitted conditional standard deviations out of
# the fGarch S4 object, standardize the returns by the estimated volatility,
# and print the model summary.
sigmat.garch <- fit.garch@sigma.t
res.garch <- fit.garch@residuals
ret.std.garch <- ret / sigmat.garch
fit.garch
##
## Title:
## GARCH Modelling
##
## Call:
## garchFit(formula = ~garch(1, 1), data = ret, cond.dist = "std",
## include.mean = FALSE, trace = F)
##
## Mean and Variance Equation:
## data ~ garch(1, 1)
## <environment: 0x7f98c7369320>
## [data = ret]
##
## Conditional Distribution:
## std
##
## Coefficient(s):
## omega alpha1 beta1 shape
## 0.014364 0.073478 0.910218 7.025885
##
## Std. Errors:
## based on Hessian
##
## Error Analysis:
## Estimate Std. Error t value Pr(>|t|)
## omega 0.014364 0.003164 4.539 5.65e-06 ***
## alpha1 0.073478 0.009216 7.973 1.55e-15 ***
## beta1 0.910218 0.011001 82.739 < 2e-16 ***
## shape 7.025885 0.621874 11.298 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Log Likelihood:
## -6371.019 normalized: -1.231829
##
## Description:
## Mon Mar 8 19:53:42 2021 by user:
Fitting Bayesian GARCH(1,1) with Student’s t errors
# MCMC settings: M0 burn-in iterations followed by M posterior draws.
M0 <- 20000
M <- 20000
niter <- M0 + M
# Indices of the post-burn-in draws. NOTE(review): `range` shadows
# base::range() from here on; the name is kept because later code that
# subsets the chain refers to it.
range <- (M0 + 1):niter
# Bayesian GARCH(1,1) with Student's t errors; vague priors on the GARCH
# coefficients and a translated-exponential prior (lambda, delta) on nu.
run <- bayesGARCH(ret,
                  mu.alpha = c(0, 0),
                  Sigma.alpha = 1000 * diag(1, 2),
                  mu.beta = 0,
                  Sigma.beta = 1000,
                  lambda = 0.01,
                  delta = 2,
                  control = list(n.chain = 1, l.chain = niter, refresh = 100))
## chain: 1 iteration: 100 parameters: 0.0184 0.0919 0.8878 35.3165
## chain: 1 iteration: 200 parameters: 0.022 0.0776 0.8917 9.6044
## chain: 1 iteration: 300 parameters: 0.0162 0.079 0.9048 6.5168
## chain: 1 iteration: 400 parameters: 0.0232 0.0901 0.8825 6.4116
## chain: 1 iteration: 500 parameters: 0.0219 0.0883 0.8826 8.4539
## chain: 1 iteration: 600 parameters: 0.0212 0.0876 0.8892 7.2853
## chain: 1 iteration: 700 parameters: 0.0208 0.0992 0.8778 6.883
## chain: 1 iteration: 800 parameters: 0.0103 0.0707 0.9182 6.6779
## chain: 1 iteration: 900 parameters: 0.0147 0.0713 0.9103 6.4248
## chain: 1 iteration: 1000 parameters: 0.0163 0.0804 0.9012 7.8198
## chain: 1 iteration: 1100 parameters: 0.0183 0.0877 0.8996 6.1911
## chain: 1 iteration: 1200 parameters: 0.0161 0.0903 0.8904 8.1392
## chain: 1 iteration: 1300 parameters: 0.0144 0.0779 0.9083 6.7252
## chain: 1 iteration: 1400 parameters: 0.0187 0.0817 0.8993 6.6121
## chain: 1 iteration: 1500 parameters: 0.0196 0.0885 0.8875 7.7898
## chain: 1 iteration: 1600 parameters: 0.0176 0.0797 0.8984 8.1122
## chain: 1 iteration: 1700 parameters: 0.0185 0.0941 0.8865 7.7207
## chain: 1 iteration: 1800 parameters: 0.0212 0.0957 0.8829 6.9665
## chain: 1 iteration: 1900 parameters: 0.0148 0.0701 0.9093 7.8009
## chain: 1 iteration: 2000 parameters: 0.0164 0.0799 0.9063 6.0473
## chain: 1 iteration: 2100 parameters: 0.0235 0.101 0.8753 6.4787
## chain: 1 iteration: 2200 parameters: 0.0237 0.1009 0.8771 6.6877
## chain: 1 iteration: 2300 parameters: 0.0164 0.0765 0.905 8.3347
## chain: 1 iteration: 2400 parameters: 0.0163 0.0756 0.9085 6.3259
## chain: 1 iteration: 2500 parameters: 0.02 0.0834 0.8952 6.8086
## chain: 1 iteration: 2600 parameters: 0.0177 0.0653 0.9105 7.4426
## chain: 1 iteration: 2700 parameters: 0.0195 0.0881 0.8941 6.5672
## chain: 1 iteration: 2800 parameters: 0.0152 0.0762 0.9057 7.4264
## chain: 1 iteration: 2900 parameters: 0.0162 0.0727 0.9087 8.5095
## chain: 1 iteration: 3000 parameters: 0.0214 0.0841 0.8885 10.4063
## chain: 1 iteration: 3100 parameters: 0.02 0.0994 0.8862 8.5602
## chain: 1 iteration: 3200 parameters: 0.0166 0.0847 0.8963 7.128
## chain: 1 iteration: 3300 parameters: 0.0157 0.0789 0.9025 7.9158
## chain: 1 iteration: 3400 parameters: 0.0189 0.0838 0.89 7.3574
## chain: 1 iteration: 3500 parameters: 0.0253 0.1062 0.8619 8.7378
## chain: 1 iteration: 3600 parameters: 0.0169 0.0789 0.9008 6.5071
## chain: 1 iteration: 3700 parameters: 0.0182 0.0763 0.8995 7.499
## chain: 1 iteration: 3800 parameters: 0.0177 0.0826 0.8958 6.9817
## chain: 1 iteration: 3900 parameters: 0.0124 0.0834 0.907 6.9248
## chain: 1 iteration: 4000 parameters: 0.0232 0.0924 0.8798 7.3867
## chain: 1 iteration: 4100 parameters: 0.0234 0.0881 0.8838 7.4651
## chain: 1 iteration: 4200 parameters: 0.0162 0.0746 0.9069 6.5147
## chain: 1 iteration: 4300 parameters: 0.0226 0.081 0.8909 7.3533
## chain: 1 iteration: 4400 parameters: 0.0188 0.0861 0.8898 6.3921
## chain: 1 iteration: 4500 parameters: 0.02 0.0825 0.8971 7.0723
## chain: 1 iteration: 4600 parameters: 0.0188 0.0888 0.8901 7.6555
## chain: 1 iteration: 4700 parameters: 0.0175 0.0931 0.8867 6.4168
## chain: 1 iteration: 4800 parameters: 0.0159 0.0976 0.8932 6.6689
## chain: 1 iteration: 4900 parameters: 0.0204 0.0887 0.8905 5.9925
## chain: 1 iteration: 5000 parameters: 0.0151 0.0792 0.9096 6.1825
## chain: 1 iteration: 5100 parameters: 0.0138 0.0798 0.9078 7.427
## chain: 1 iteration: 5200 parameters: 0.0176 0.0709 0.9069 6.4808
## chain: 1 iteration: 5300 parameters: 0.018 0.0921 0.8897 6.6053
## chain: 1 iteration: 5400 parameters: 0.0147 0.0862 0.8976 7.7477
## chain: 1 iteration: 5500 parameters: 0.0155 0.0811 0.9023 7.1597
## chain: 1 iteration: 5600 parameters: 0.0226 0.0841 0.8858 7.5011
## chain: 1 iteration: 5700 parameters: 0.0132 0.0754 0.9094 7.3006
## chain: 1 iteration: 5800 parameters: 0.0186 0.0804 0.8994 6.9169
## chain: 1 iteration: 5900 parameters: 0.0188 0.0894 0.8914 6.9204
## chain: 1 iteration: 6000 parameters: 0.016 0.0898 0.8963 7.0536
## chain: 1 iteration: 6100 parameters: 0.017 0.0751 0.9055 7.7596
## chain: 1 iteration: 6200 parameters: 0.0144 0.0777 0.904 7.1953
## chain: 1 iteration: 6300 parameters: 0.0137 0.0779 0.911 6.4385
## chain: 1 iteration: 6400 parameters: 0.0171 0.0683 0.9059 7.861
## chain: 1 iteration: 6500 parameters: 0.0135 0.0898 0.8988 7.5787
## chain: 1 iteration: 6600 parameters: 0.0309 0.0991 0.8647 8.0189
## chain: 1 iteration: 6700 parameters: 0.024 0.089 0.8821 6.3202
## chain: 1 iteration: 6800 parameters: 0.0216 0.1121 0.8709 6.3978
## chain: 1 iteration: 6900 parameters: 0.013 0.0702 0.9161 6.7393
## chain: 1 iteration: 7000 parameters: 0.0147 0.093 0.8925 7.7948
## chain: 1 iteration: 7100 parameters: 0.0208 0.0923 0.8849 6.5892
## chain: 1 iteration: 7200 parameters: 0.0154 0.0725 0.9091 6.7597
## chain: 1 iteration: 7300 parameters: 0.02 0.08 0.899 6.4602
## chain: 1 iteration: 7400 parameters: 0.019 0.0871 0.8964 7.1104
## chain: 1 iteration: 7500 parameters: 0.0127 0.0609 0.9188 7.4018
## chain: 1 iteration: 7600 parameters: 0.0207 0.0853 0.8914 6.7754
## chain: 1 iteration: 7700 parameters: 0.0186 0.0924 0.8893 6.9878
## chain: 1 iteration: 7800 parameters: 0.0256 0.0822 0.8895 6.2545
## chain: 1 iteration: 7900 parameters: 0.0148 0.0817 0.9043 6.3426
## chain: 1 iteration: 8000 parameters: 0.023 0.0894 0.8864 6.9668
## chain: 1 iteration: 8100 parameters: 0.023 0.0942 0.8854 6.7138
## chain: 1 iteration: 8200 parameters: 0.0237 0.0929 0.8793 7.3971
## chain: 1 iteration: 8300 parameters: 0.0219 0.1056 0.874 7.7657
## chain: 1 iteration: 8400 parameters: 0.0191 0.0956 0.8849 7.0387
## chain: 1 iteration: 8500 parameters: 0.0117 0.072 0.9166 7.1844
## chain: 1 iteration: 8600 parameters: 0.0183 0.0839 0.8913 8.7669
## chain: 1 iteration: 8700 parameters: 0.0218 0.0862 0.888 7.4652
## chain: 1 iteration: 8800 parameters: 0.016 0.0948 0.8961 6.4227
## chain: 1 iteration: 8900 parameters: 0.0165 0.0704 0.9106 6.9333
## chain: 1 iteration: 9000 parameters: 0.0193 0.081 0.8959 7.2596
## chain: 1 iteration: 9100 parameters: 0.0178 0.0846 0.9001 6.4697
## chain: 1 iteration: 9200 parameters: 0.0228 0.0781 0.8878 7.445
## chain: 1 iteration: 9300 parameters: 0.0175 0.0724 0.9082 6.9099
## chain: 1 iteration: 9400 parameters: 0.0175 0.0773 0.9066 6.3536
## chain: 1 iteration: 9500 parameters: 0.0152 0.0737 0.9103 7.3125
## chain: 1 iteration: 9600 parameters: 0.0191 0.0885 0.8883 7.1588
## chain: 1 iteration: 9700 parameters: 0.0146 0.0689 0.9142 8.0848
## chain: 1 iteration: 9800 parameters: 0.0185 0.0786 0.9018 6.7864
## chain: 1 iteration: 9900 parameters: 0.0208 0.0936 0.8851 7.1682
## chain: 1 iteration: 10000 parameters: 0.028 0.0732 0.8893 7.622
## chain: 1 iteration: 10100 parameters: 0.0147 0.085 0.9021 7.3847
## chain: 1 iteration: 10200 parameters: 0.0186 0.0852 0.8961 6.7639
## chain: 1 iteration: 10300 parameters: 0.0127 0.0741 0.9112 7.8055
## chain: 1 iteration: 10400 parameters: 0.021 0.0767 0.9004 6.2651
## chain: 1 iteration: 10500 parameters: 0.0212 0.0862 0.8948 6.3655
## chain: 1 iteration: 10600 parameters: 0.0193 0.0831 0.8941 6.7567
## chain: 1 iteration: 10700 parameters: 0.0186 0.0901 0.8946 6.5382
## chain: 1 iteration: 10800 parameters: 0.0192 0.0971 0.8821 6.1945
## chain: 1 iteration: 10900 parameters: 0.015 0.0787 0.9006 9.5528
## chain: 1 iteration: 11000 parameters: 0.0125 0.0768 0.911 6.3726
## chain: 1 iteration: 11100 parameters: 0.0169 0.1042 0.8916 5.7729
## chain: 1 iteration: 11200 parameters: 0.0162 0.0841 0.9014 6.6608
## chain: 1 iteration: 11300 parameters: 0.0183 0.0868 0.8935 7.671
## chain: 1 iteration: 11400 parameters: 0.0257 0.1074 0.8695 7.444
## chain: 1 iteration: 11500 parameters: 0.0157 0.0714 0.905 7.9797
## chain: 1 iteration: 11600 parameters: 0.02 0.0791 0.8988 7.2808
## chain: 1 iteration: 11700 parameters: 0.0233 0.0993 0.8743 6.6178
## chain: 1 iteration: 11800 parameters: 0.0184 0.0805 0.9 6.308
## chain: 1 iteration: 11900 parameters: 0.0295 0.1041 0.8587 7.8932
## chain: 1 iteration: 12000 parameters: 0.0229 0.0869 0.882 7.0838
## chain: 1 iteration: 12100 parameters: 0.028 0.1108 0.8574 8.4519
## chain: 1 iteration: 12200 parameters: 0.0254 0.0831 0.889 7.7302
## chain: 1 iteration: 12300 parameters: 0.025 0.0806 0.8894 6.5781
## chain: 1 iteration: 12400 parameters: 0.0217 0.1044 0.8767 6.4773
## chain: 1 iteration: 12500 parameters: 0.0179 0.0952 0.8933 5.75
## chain: 1 iteration: 12600 parameters: 0.019 0.0733 0.9074 6.771
## chain: 1 iteration: 12700 parameters: 0.0161 0.097 0.8943 6.8672
## chain: 1 iteration: 12800 parameters: 0.0198 0.0887 0.8893 7.608
## chain: 1 iteration: 12900 parameters: 0.0201 0.0826 0.8976 6.9304
## chain: 1 iteration: 13000 parameters: 0.0226 0.0842 0.8928 6.3383
## chain: 1 iteration: 13100 parameters: 0.0173 0.0929 0.8914 6.482
## chain: 1 iteration: 13200 parameters: 0.0289 0.0913 0.8766 6.5323
## chain: 1 iteration: 13300 parameters: 0.014 0.0643 0.9196 6.5803
## chain: 1 iteration: 13400 parameters: 0.0167 0.089 0.8892 7.2599
## chain: 1 iteration: 13500 parameters: 0.0169 0.082 0.8986 7.4701
## chain: 1 iteration: 13600 parameters: 0.0221 0.0811 0.8919 7.5856
## chain: 1 iteration: 13700 parameters: 0.0184 0.076 0.9013 7.3954
## chain: 1 iteration: 13800 parameters: 0.0184 0.0836 0.8952 6.5541
## chain: 1 iteration: 13900 parameters: 0.0149 0.0624 0.9175 8.5589
## chain: 1 iteration: 14000 parameters: 0.0271 0.0823 0.8894 6.2896
## chain: 1 iteration: 14100 parameters: 0.0214 0.0863 0.8908 6.8002
## chain: 1 iteration: 14200 parameters: 0.0153 0.0671 0.9128 7.3155
## chain: 1 iteration: 14300 parameters: 0.0191 0.0885 0.8924 6.8537
## chain: 1 iteration: 14400 parameters: 0.0218 0.0874 0.8861 7.6469
## chain: 1 iteration: 14500 parameters: 0.0147 0.0834 0.9021 7.2637
## chain: 1 iteration: 14600 parameters: 0.0161 0.0761 0.9077 6.3542
## chain: 1 iteration: 14700 parameters: 0.0151 0.0788 0.9055 6.6474
## chain: 1 iteration: 14800 parameters: 0.0171 0.0795 0.9023 6.4424
## chain: 1 iteration: 14900 parameters: 0.022 0.0848 0.8898 8.061
## chain: 1 iteration: 15000 parameters: 0.0182 0.0889 0.889 7.3661
## chain: 1 iteration: 15100 parameters: 0.0233 0.0896 0.8841 6.7475
## chain: 1 iteration: 15200 parameters: 0.0168 0.0849 0.8975 7.2025
## chain: 1 iteration: 15300 parameters: 0.0163 0.0903 0.8959 6.5394
## chain: 1 iteration: 15400 parameters: 0.0172 0.077 0.9056 6.82
## chain: 1 iteration: 15500 parameters: 0.0195 0.089 0.8877 7.1666
## chain: 1 iteration: 15600 parameters: 0.0144 0.0856 0.9022 7.0034
## chain: 1 iteration: 15700 parameters: 0.0256 0.0977 0.8796 7.6222
## chain: 1 iteration: 15800 parameters: 0.0183 0.0662 0.9096 7.5564
## chain: 1 iteration: 15900 parameters: 0.019 0.0928 0.8882 6.6647
## chain: 1 iteration: 16000 parameters: 0.0205 0.0856 0.8907 7.0991
## chain: 1 iteration: 16100 parameters: 0.0183 0.087 0.8973 6.4231
## chain: 1 iteration: 16200 parameters: 0.0181 0.0899 0.8918 7.296
## chain: 1 iteration: 16300 parameters: 0.0243 0.0897 0.8868 7.0977
## chain: 1 iteration: 16400 parameters: 0.0208 0.0924 0.8902 5.8919
## chain: 1 iteration: 16500 parameters: 0.0192 0.0899 0.8889 7.3315
## chain: 1 iteration: 16600 parameters: 0.0213 0.0878 0.8877 6.529
## chain: 1 iteration: 16700 parameters: 0.0202 0.0942 0.8852 7.2828
## chain: 1 iteration: 16800 parameters: 0.0187 0.092 0.8892 6.9935
## chain: 1 iteration: 16900 parameters: 0.0193 0.0778 0.8979 7.6626
## chain: 1 iteration: 17000 parameters: 0.0249 0.0957 0.8803 6.4653
## chain: 1 iteration: 17100 parameters: 0.0227 0.095 0.8807 6.2361
## chain: 1 iteration: 17200 parameters: 0.023 0.0794 0.8922 6.6175
## chain: 1 iteration: 17300 parameters: 0.0226 0.0901 0.8826 6.6889
## chain: 1 iteration: 17400 parameters: 0.0135 0.0871 0.9003 7.3037
## chain: 1 iteration: 17500 parameters: 0.0178 0.0801 0.9016 5.9231
## chain: 1 iteration: 17600 parameters: 0.0247 0.0819 0.8935 6.7571
## chain: 1 iteration: 17700 parameters: 0.0203 0.0916 0.8941 6.8672
## chain: 1 iteration: 17800 parameters: 0.0236 0.0961 0.8722 6.8231
## chain: 1 iteration: 17900 parameters: 0.0196 0.0945 0.8837 6.5845
## chain: 1 iteration: 18000 parameters: 0.0195 0.0892 0.8889 7.3639
## chain: 1 iteration: 18100 parameters: 0.0253 0.0968 0.8724 7.0313
## chain: 1 iteration: 18200 parameters: 0.0201 0.0852 0.8939 6.6733
## chain: 1 iteration: 18300 parameters: 0.0214 0.087 0.8943 6.0712
## chain: 1 iteration: 18400 parameters: 0.0205 0.0949 0.8862 7.1715
## chain: 1 iteration: 18500 parameters: 0.0202 0.0818 0.891 7.7413
## chain: 1 iteration: 18600 parameters: 0.0191 0.0912 0.8877 6.8846
## chain: 1 iteration: 18700 parameters: 0.0165 0.0795 0.9083 5.9858
## chain: 1 iteration: 18800 parameters: 0.0172 0.0933 0.8881 6.3789
## chain: 1 iteration: 18900 parameters: 0.021 0.077 0.8954 7.0827
## chain: 1 iteration: 19000 parameters: 0.0224 0.0791 0.8907 7.0317
## chain: 1 iteration: 19100 parameters: 0.0149 0.082 0.9055 7.1869
## chain: 1 iteration: 19200 parameters: 0.0233 0.0808 0.8915 7.202
## chain: 1 iteration: 19300 parameters: 0.0196 0.0868 0.8943 6.3357
## chain: 1 iteration: 19400 parameters: 0.0218 0.0826 0.8943 8.3397
## chain: 1 iteration: 19500 parameters: 0.0179 0.0753 0.9033 7.1131
## chain: 1 iteration: 19600 parameters: 0.019 0.0785 0.8943 8.6508
## chain: 1 iteration: 19700 parameters: 0.0199 0.0913 0.8884 6.2427
## chain: 1 iteration: 19800 parameters: 0.0225 0.1028 0.8846 5.3345
## chain: 1 iteration: 19900 parameters: 0.027 0.0873 0.8775 6.3024
## chain: 1 iteration: 20000 parameters: 0.0171 0.0964 0.8865 7.275
## chain: 1 iteration: 20100 parameters: 0.0175 0.0813 0.8977 7.0372
## chain: 1 iteration: 20200 parameters: 0.0163 0.0782 0.9049 7.5633
## chain: 1 iteration: 20300 parameters: 0.0179 0.0926 0.8933 5.5832
## chain: 1 iteration: 20400 parameters: 0.0167 0.0787 0.9102 6.333
## chain: 1 iteration: 20500 parameters: 0.0223 0.0768 0.8925 7.9738
## chain: 1 iteration: 20600 parameters: 0.0115 0.0686 0.9204 5.9232
## chain: 1 iteration: 20700 parameters: 0.0143 0.0901 0.9016 6.9907
## chain: 1 iteration: 20800 parameters: 0.0182 0.0926 0.8864 6.1482
## chain: 1 iteration: 20900 parameters: 0.0182 0.072 0.9031 6.9349
## chain: 1 iteration: 21000 parameters: 0.0156 0.0638 0.9169 6.1995
## chain: 1 iteration: 21100 parameters: 0.0174 0.073 0.9025 8.8571
## chain: 1 iteration: 21200 parameters: 0.0175 0.0957 0.8866 7.3427
## chain: 1 iteration: 21300 parameters: 0.019 0.0889 0.8915 6.1682
## chain: 1 iteration: 21400 parameters: 0.0218 0.0832 0.8889 7.888
## chain: 1 iteration: 21500 parameters: 0.0285 0.0913 0.8784 6.0778
## chain: 1 iteration: 21600 parameters: 0.0185 0.0825 0.8931 7.0537
## chain: 1 iteration: 21700 parameters: 0.0188 0.0822 0.895 6.5842
## chain: 1 iteration: 21800 parameters: 0.0195 0.0837 0.8957 6.0787
## chain: 1 iteration: 21900 parameters: 0.0158 0.081 0.9032 6.8661
## chain: 1 iteration: 22000 parameters: 0.0185 0.097 0.8875 6.9786
## chain: 1 iteration: 22100 parameters: 0.0275 0.0862 0.8825 7.3856
## chain: 1 iteration: 22200 parameters: 0.0192 0.0938 0.8917 5.4004
## chain: 1 iteration: 22300 parameters: 0.0217 0.0822 0.8939 8.0896
## chain: 1 iteration: 22400 parameters: 0.0119 0.0756 0.9147 6.001
## chain: 1 iteration: 22500 parameters: 0.0191 0.0844 0.896 6.3938
## chain: 1 iteration: 22600 parameters: 0.0193 0.0788 0.8959 7.6833
## chain: 1 iteration: 22700 parameters: 0.018 0.075 0.9019 8.5478
## chain: 1 iteration: 22800 parameters: 0.0168 0.0851 0.8945 7.5884
## chain: 1 iteration: 22900 parameters: 0.018 0.0759 0.9003 7.5646
## chain: 1 iteration: 23000 parameters: 0.0256 0.0958 0.8759 6.9211
## chain: 1 iteration: 23100 parameters: 0.017 0.0716 0.9089 6.9733
## chain: 1 iteration: 23200 parameters: 0.0167 0.0799 0.9052 6.1425
## chain: 1 iteration: 23300 parameters: 0.0121 0.0864 0.9049 6.3704
## chain: 1 iteration: 23400 parameters: 0.0162 0.0765 0.9079 6.5997
## chain: 1 iteration: 23500 parameters: 0.0264 0.0975 0.8694 6.9066
## chain: 1 iteration: 23600 parameters: 0.0206 0.0783 0.8938 7.2351
## chain: 1 iteration: 23700 parameters: 0.0254 0.094 0.8785 6.7583
## chain: 1 iteration: 23800 parameters: 0.0157 0.0828 0.899 7.1795
## chain: 1 iteration: 23900 parameters: 0.0204 0.0756 0.8978 7.1097
## chain: 1 iteration: 24000 parameters: 0.0203 0.0958 0.8815 6.9085
## chain: 1 iteration: 24100 parameters: 0.0171 0.1032 0.8792 6.6615
## chain: 1 iteration: 24200 parameters: 0.0197 0.0865 0.8948 6.9915
## chain: 1 iteration: 24300 parameters: 0.0216 0.0856 0.8853 8.4173
## chain: 1 iteration: 24400 parameters: 0.0215 0.0995 0.877 6.3425
## chain: 1 iteration: 24500 parameters: 0.0294 0.0965 0.8725 6.0117
## chain: 1 iteration: 24600 parameters: 0.0229 0.0945 0.8752 7.6762
## chain: 1 iteration: 24700 parameters: 0.023 0.0989 0.8798 7.1398
## chain: 1 iteration: 24800 parameters: 0.019 0.0893 0.8914 6.0682
## chain: 1 iteration: 24900 parameters: 0.0207 0.0849 0.898 6.597
## chain: 1 iteration: 25000 parameters: 0.0239 0.101 0.8767 6.8343
## chain: 1 iteration: 25100 parameters: 0.0152 0.0794 0.9033 7.8719
## chain: 1 iteration: 25200 parameters: 0.018 0.0886 0.8934 6.5918
## chain: 1 iteration: 25300 parameters: 0.0202 0.0858 0.8904 6.274
## chain: 1 iteration: 25400 parameters: 0.0217 0.0964 0.8809 7.4795
## chain: 1 iteration: 25500 parameters: 0.0192 0.0901 0.8919 7.8035
## chain: 1 iteration: 25600 parameters: 0.015 0.0816 0.9053 5.8544
## chain: 1 iteration: 25700 parameters: 0.0234 0.0905 0.8834 7.2568
## chain: 1 iteration: 25800 parameters: 0.0182 0.0853 0.8965 6.493
## chain: 1 iteration: 25900 parameters: 0.0152 0.0879 0.8949 7.3323
## chain: 1 iteration: 26000 parameters: 0.0233 0.0898 0.8846 7.5166
## chain: 1 iteration: 26100 parameters: 0.0251 0.0979 0.8708 7.0956
## chain: 1 iteration: 26200 parameters: 0.0179 0.0859 0.8953 7.2335
## chain: 1 iteration: 26300 parameters: 0.0247 0.1004 0.877 6.9602
## chain: 1 iteration: 26400 parameters: 0.017 0.1 0.8864 6.2572
## chain: 1 iteration: 26500 parameters: 0.0221 0.0768 0.9025 5.9899
## chain: 1 iteration: 26600 parameters: 0.0187 0.079 0.8995 6.2342
## chain: 1 iteration: 26700 parameters: 0.0154 0.0683 0.9121 7.2622
## chain: 1 iteration: 26800 parameters: 0.0122 0.083 0.9061 6.32
## chain: 1 iteration: 26900 parameters: 0.0134 0.0808 0.9062 6.6835
## chain: 1 iteration: 27000 parameters: 0.0171 0.0859 0.9024 6.2784
## chain: 1 iteration: 27100 parameters: 0.0211 0.0828 0.8941 6.4668
## chain: 1 iteration: 27200 parameters: 0.0215 0.0789 0.8937 7.7133
## chain: 1 iteration: 27300 parameters: 0.0184 0.096 0.8813 7.3878
## chain: 1 iteration: 27400 parameters: 0.0226 0.0786 0.8946 6.8568
## chain: 1 iteration: 27500 parameters: 0.0145 0.0806 0.9 6.9348
## chain: 1 iteration: 27600 parameters: 0.0219 0.0777 0.8975 7.4163
## chain: 1 iteration: 27700 parameters: 0.0149 0.0744 0.9054 8.2137
## chain: 1 iteration: 27800 parameters: 0.0206 0.0911 0.882 6.7475
## chain: 1 iteration: 27900 parameters: 0.0202 0.0829 0.894 6.6793
## chain: 1 iteration: 28000 parameters: 0.0176 0.0836 0.8996 6.985
## chain: 1 iteration: 28100 parameters: 0.0234 0.1034 0.8719 6.7
## chain: 1 iteration: 28200 parameters: 0.0184 0.0801 0.9031 5.2569
## chain: 1 iteration: 28300 parameters: 0.0237 0.0861 0.8854 7.1287
## chain: 1 iteration: 28400 parameters: 0.0205 0.0939 0.8853 6.5221
## chain: 1 iteration: 28500 parameters: 0.0126 0.0808 0.9061 7.5212
## chain: 1 iteration: 28600 parameters: 0.021 0.0804 0.8919 7.0196
## chain: 1 iteration: 28700 parameters: 0.0147 0.0644 0.9177 7.5008
## chain: 1 iteration: 28800 parameters: 0.0193 0.0777 0.8958 7.1555
## chain: 1 iteration: 28900 parameters: 0.0207 0.0867 0.8898 6.176
## chain: 1 iteration: 29000 parameters: 0.0175 0.0825 0.9006 6.2539
## chain: 1 iteration: 29100 parameters: 0.0162 0.078 0.9047 6.4982
## chain: 1 iteration: 29200 parameters: 0.0296 0.1211 0.8491 6.9932
## chain: 1 iteration: 29300 parameters: 0.0207 0.0796 0.8958 6.5612
## chain: 1 iteration: 29400 parameters: 0.0208 0.0797 0.8957 7.8255
## chain: 1 iteration: 29500 parameters: 0.0187 0.0748 0.8993 6.7783
## chain: 1 iteration: 29600 parameters: 0.0129 0.0773 0.9085 7.0167
## chain: 1 iteration: 29700 parameters: 0.015 0.069 0.913 6.5511
## chain: 1 iteration: 29800 parameters: 0.0214 0.0987 0.8809 7.404
## chain: 1 iteration: 29900 parameters: 0.0207 0.0911 0.8842 7.1031
## chain: 1 iteration: 30000 parameters: 0.0149 0.0948 0.8908 6.9007
## chain: 1 iteration: 30100 parameters: 0.0266 0.112 0.8728 5.3898
## chain: 1 iteration: 30200 parameters: 0.0242 0.0834 0.8889 7.547
## chain: 1 iteration: 30300 parameters: 0.014 0.0757 0.9122 6.6867
## chain: 1 iteration: 30400 parameters: 0.0245 0.0835 0.888 5.9694
## chain: 1 iteration: 30500 parameters: 0.0215 0.0825 0.8909 8.0634
## chain: 1 iteration: 30600 parameters: 0.02 0.0944 0.8843 6.9216
## chain: 1 iteration: 30700 parameters: 0.0154 0.0715 0.9105 6.845
## chain: 1 iteration: 30800 parameters: 0.0222 0.0793 0.897 6.3855
## chain: 1 iteration: 30900 parameters: 0.0167 0.0843 0.8968 6.8066
## chain: 1 iteration: 31000 parameters: 0.0164 0.0753 0.9038 6.635
## chain: 1 iteration: 31100 parameters: 0.0214 0.1007 0.8803 7.9168
## chain: 1 iteration: 31200 parameters: 0.0233 0.0859 0.8856 6.8739
## chain: 1 iteration: 31300 parameters: 0.0231 0.086 0.8865 7.1858
## chain: 1 iteration: 31400 parameters: 0.0146 0.0779 0.9046 7.338
## chain: 1 iteration: 31500 parameters: 0.0161 0.0862 0.8978 6.8508
## chain: 1 iteration: 31600 parameters: 0.0189 0.0705 0.9076 6.8919
## chain: 1 iteration: 31700 parameters: 0.0212 0.0945 0.8787 7.1235
## chain: 1 iteration: 31800 parameters: 0.014 0.0835 0.9033 7.2325
## chain: 1 iteration: 31900 parameters: 0.015 0.0807 0.9029 6.4475
## chain: 1 iteration: 32000 parameters: 0.0193 0.0688 0.9091 6.9435
## chain: 1 iteration: 32100 parameters: 0.0153 0.0876 0.8969 6.9351
## chain: 1 iteration: 32200 parameters: 0.0149 0.0783 0.9042 6.8831
## chain: 1 iteration: 32300 parameters: 0.0187 0.0825 0.8977 7.6116
## chain: 1 iteration: 32400 parameters: 0.0165 0.076 0.9066 6.7066
## chain: 1 iteration: 32500 parameters: 0.0263 0.1049 0.8651 7.3817
## chain: 1 iteration: 32600 parameters: 0.0264 0.0878 0.8831 6.3064
## chain: 1 iteration: 32700 parameters: 0.0234 0.0842 0.8892 5.7806
## chain: 1 iteration: 32800 parameters: 0.0204 0.0792 0.8979 6.1894
## chain: 1 iteration: 32900 parameters: 0.0196 0.1023 0.872 6.8794
## chain: 1 iteration: 33000 parameters: 0.0187 0.0873 0.8907 8.4784
## chain: 1 iteration: 33100 parameters: 0.0149 0.0723 0.9112 6.805
## chain: 1 iteration: 33200 parameters: 0.0212 0.0973 0.882 6.9778
## chain: 1 iteration: 33300 parameters: 0.0262 0.1017 0.8689 6.651
## chain: 1 iteration: 33400 parameters: 0.0225 0.0826 0.8904 7.0231
## chain: 1 iteration: 33500 parameters: 0.0193 0.0948 0.8852 7.2503
## chain: 1 iteration: 33600 parameters: 0.0205 0.0828 0.8906 6.6439
## chain: 1 iteration: 33700 parameters: 0.014 0.0852 0.904 5.9315
## chain: 1 iteration: 33800 parameters: 0.0179 0.0787 0.903 6.8586
## chain: 1 iteration: 33900 parameters: 0.0243 0.0804 0.8954 6.033
## chain: 1 iteration: 34000 parameters: 0.0288 0.0994 0.8693 7.188
## chain: 1 iteration: 34100 parameters: 0.0366 0.1119 0.8467 6.8191
## chain: 1 iteration: 34200 parameters: 0.0166 0.069 0.915 6.4204
## chain: 1 iteration: 34300 parameters: 0.0155 0.08 0.9022 6.9536
## chain: 1 iteration: 34400 parameters: 0.0131 0.0845 0.9024 7.069
## chain: 1 iteration: 34500 parameters: 0.0269 0.0834 0.8834 7.3151
## chain: 1 iteration: 34600 parameters: 0.0184 0.09 0.8903 6.826
## chain: 1 iteration: 34700 parameters: 0.0176 0.0891 0.8915 7.8212
## chain: 1 iteration: 34800 parameters: 0.0169 0.1007 0.8887 6.1616
## chain: 1 iteration: 34900 parameters: 0.0228 0.0873 0.8872 7.2338
## chain: 1 iteration: 35000 parameters: 0.0095 0.0782 0.9137 7.8505
## chain: 1 iteration: 35100 parameters: 0.0197 0.0752 0.9021 7.6552
## chain: 1 iteration: 35200 parameters: 0.0322 0.0993 0.868 6.6112
## chain: 1 iteration: 35300 parameters: 0.0199 0.0876 0.8939 6.0948
## chain: 1 iteration: 35400 parameters: 0.0155 0.0754 0.9059 6.9941
## chain: 1 iteration: 35500 parameters: 0.0212 0.0666 0.9055 6.8122
## chain: 1 iteration: 35600 parameters: 0.0215 0.0806 0.8933 6.4992
## chain: 1 iteration: 35700 parameters: 0.0294 0.0829 0.8826 6.3174
## chain: 1 iteration: 35800 parameters: 0.0223 0.0834 0.8941 6.4895
## chain: 1 iteration: 35900 parameters: 0.0181 0.0837 0.8929 7.494
## chain: 1 iteration: 36000 parameters: 0.0215 0.0738 0.8995 7.6455
## chain: 1 iteration: 36100 parameters: 0.0155 0.0818 0.8982 7.6586
## chain: 1 iteration: 36200 parameters: 0.0157 0.0865 0.8997 6.588
## chain: 1 iteration: 36300 parameters: 0.0232 0.0932 0.8793 7.0725
## chain: 1 iteration: 36400 parameters: 0.0216 0.0964 0.8809 7.0362
## chain: 1 iteration: 36500 parameters: 0.0233 0.0919 0.8825 6.8832
## chain: 1 iteration: 36600 parameters: 0.0254 0.1031 0.8731 6.7786
## chain: 1 iteration: 36700 parameters: 0.0182 0.0841 0.8986 7.6317
## chain: 1 iteration: 36800 parameters: 0.0187 0.0833 0.8937 7.2267
## chain: 1 iteration: 36900 parameters: 0.0201 0.0921 0.8851 7.2865
## chain: 1 iteration: 37000 parameters: 0.0167 0.0771 0.8997 7.9645
## chain: 1 iteration: 37100 parameters: 0.0189 0.0758 0.9021 7.4154
## chain: 1 iteration: 37200 parameters: 0.0174 0.0788 0.9001 7.0519
## chain: 1 iteration: 37300 parameters: 0.0165 0.0779 0.8998 6.9297
## chain: 1 iteration: 37400 parameters: 0.0231 0.0893 0.8831 6.9039
## chain: 1 iteration: 37500 parameters: 0.0177 0.086 0.8876 7.9961
## chain: 1 iteration: 37600 parameters: 0.0158 0.0871 0.8995 7.1768
## chain: 1 iteration: 37700 parameters: 0.0191 0.0861 0.8886 7.0619
## chain: 1 iteration: 37800 parameters: 0.0218 0.087 0.8903 6.2887
## chain: 1 iteration: 37900 parameters: 0.0179 0.0838 0.8976 6.2284
## chain: 1 iteration: 38000 parameters: 0.0161 0.0904 0.8935 7.0468
## chain: 1 iteration: 38100 parameters: 0.0159 0.0881 0.8939 6.3614
## chain: 1 iteration: 38200 parameters: 0.0184 0.0738 0.905 8.6873
## chain: 1 iteration: 38300 parameters: 0.0204 0.0932 0.8822 7.8663
## chain: 1 iteration: 38400 parameters: 0.0191 0.0892 0.8917 6.6698
## chain: 1 iteration: 38500 parameters: 0.0263 0.1083 0.8658 6.2562
## chain: 1 iteration: 38600 parameters: 0.0213 0.0857 0.895 6.6511
## chain: 1 iteration: 38700 parameters: 0.0185 0.0832 0.8962 7.6717
## chain: 1 iteration: 38800 parameters: 0.0108 0.0582 0.9284 7.0847
## chain: 1 iteration: 38900 parameters: 0.015 0.0838 0.9016 6.7896
## chain: 1 iteration: 39000 parameters: 0.0171 0.087 0.895 7.001
## chain: 1 iteration: 39100 parameters: 0.0219 0.0941 0.8771 7.2947
## chain: 1 iteration: 39200 parameters: 0.0198 0.0769 0.8964 6.9042
## chain: 1 iteration: 39300 parameters: 0.0211 0.0874 0.887 7.548
## chain: 1 iteration: 39400 parameters: 0.0191 0.0752 0.9009 7.1351
## chain: 1 iteration: 39500 parameters: 0.0138 0.0637 0.9188 7.668
## chain: 1 iteration: 39600 parameters: 0.0128 0.075 0.9116 7.0451
## chain: 1 iteration: 39700 parameters: 0.0194 0.077 0.9041 7.4772
## chain: 1 iteration: 39800 parameters: 0.0228 0.0895 0.8849 7.0985
## chain: 1 iteration: 39900 parameters: 0.0226 0.0874 0.8865 7.7237
## chain: 1 iteration: 40000 parameters: 0.0191 0.096 0.8862 7.4437
# Posterior summary for (alpha0, alpha1, beta, nu): drop the burn-in, then
# report posterior mean, posterior sd, a "t value", and a two-sided p-value.
draws <- run$chain1[range, ]
m <- apply(draws, 2, mean)
se <- sqrt(apply(draws, 2, var))
# BUG FIX: the two-sided p-value must use |t|. The original
# 2*(1-pnorm(m/se)) returns values > 1 whenever an estimate is negative
# (all estimates are positive here, so the printed table is unchanged).
tab <- cbind(m, se, m/se, 2 * pnorm(-abs(m/se)))
colnames(tab) <- c("Estimate", "Std. Error", "t value", "Pr(>|t|)")
tab
## Estimate Std. Error t value Pr(>|t|)
## alpha0 0.01920888 0.003782748 5.078021 3.813865e-07
## alpha1 0.08502667 0.010199548 8.336317 0.000000e+00
## beta 0.89402444 0.012119983 73.764495 0.000000e+00
## nu 6.98379531 0.601074287 11.618856 0.000000e+00
# For every posterior draw, reconstruct the GARCH(1,1) conditional-variance
# path sigma^2_t = alpha0 + alpha1 * r_{t-1}^2 + beta * sigma^2_{t-1},
# starting each path at 1 (the returns have unit sample variance).
sig2s <- matrix(1, M, n)
for (t in 2:n) {
  sig2s[, t] <- draws[, 1] + draws[, 2] * ret[t - 1]^2 + draws[, 3] * sig2s[, t - 1]
}
# Pointwise 2.5%, 50% and 97.5% posterior quantiles of the conditional sd.
q.sig <- t(apply(sqrt(sig2s), 2, quantile, c(0.025, 0.5, 0.975)))
# Standardize the returns by the posterior median volatility.
ret.std.bgarch <- ret / q.sig[, 2]
Comparing Bayesian and non-Bayesian GARCH(1,1) approaches
# Compare the classical (red, fGarch) and Bayesian (black, posterior median)
# GARCH(1,1) volatility paths: full sample from day 100 on the left,
# last 500 trading days on the right.
par(mfrow=c(1,2))
ts.plot(q.sig[100:n,2],xlab="August 10th 2000 - March 5th 2021",ylab="Standard deviation")
lines(sigmat.garch[100:n],col=2)
title("GARCH(1,1) with Student's t errors\nClassical and Bayesian")
ts.plot(q.sig[(n-500):n,2],xlab="Last 500 days",ylab="Standard deviation")
lines(sigmat.garch[(n-500):n],col=2)
legend("topleft",legend=c("GARCH(1,1)","Bayesian GARCH(1,1)"),col=2:1,lty=1,bty="n")
Fitting SV-AR(1)
fit.svar1 = svtsample(ret,draws=M,burnin=M0,thin=1)
## Done!
## Summarizing posterior draws...
# Posterior summary of the SV-AR(1) parameters (mu, phi, sigma, nu).
params <- fit.svar1$para[[1]][, 1:4]
m <- apply(params, 2, mean)
se <- sqrt(apply(params, 2, var))
# BUG FIX: a two-sided p-value must use |t|. The original
# 2*(1-pnorm(m/se)) produced Pr(>|t|) ~= 2.0 for the negative estimate of
# mu (visible in the printed table below) — an impossible p-value.
tab <- cbind(m, se, m/se, 2 * pnorm(-abs(m/se)))
colnames(tab) <- c("Estimate", "Std. Error", "t value", "Pr(>|t|)")
tab
## Estimate Std. Error t value Pr(>|t|)
## mu -0.4190952 0.129966962 -3.224629 1.998739e+00
## phi 0.9842135 0.003798037 259.137425 0.000000e+00
## sigma 0.1387539 0.014296698 9.705311 0.000000e+00
## nu 12.1267402 2.303886021 5.263602 1.412597e-07
# Map each sampled latent log-variance path h_t to a standard deviation via
# exp(h_t / 2), then take pointwise 2.5%/50%/97.5% posterior quantiles and
# standardize the returns by the posterior median volatility.
q.vol <- t(apply(exp(fit.svar1$latent[[1]] / 2), 2, quantile, c(0.025, 0.5, 0.975)))
ret.std.bsvar1 <- ret / q.vol[, 2]
More comparisons
# Three-way comparison of the volatility estimates: classical GARCH (red),
# Bayesian GARCH posterior median (black), Bayesian SV-AR(1) posterior
# median (blue). Left: near-full sample; right: last 1000 trading days.
par(mfrow=c(1,2))
ts.plot(q.sig[10:n,2],xlab="August 10th 2000 - March 5th 2021",ylab="Standard deviation")
lines(sigmat.garch[10:n],col=2)
lines(q.vol[10:n,2],col=4)
title("GARCH(1,1) with Student's t errors\nClassical and Bayesian")
ts.plot(q.sig[(n-1000):n,2],xlab="Last 1000 days",ylab="Standard deviation")
lines(sigmat.garch[(n-1000):n],col=2)
lines(q.vol[(n-1000):n,2],col=4)
legend("topleft",legend=c("GARCH(1,1)","Bayesian GARCH(1,1)","Bayesian SV-AR(1)"),col=c(2,1,4),lty=1,bty="n")
Checking whether the standardized residuals are iid Student's t
# 3x3 diagnostic grid for the three models' standardized returns.
# Row 1: time-series plots; row 2: histograms with a standard normal density
# overlaid (red) for reference; row 3: normal QQ-plots.
par(mfrow=c(3,3),mai=c(0.6,0.6,0.2,0.2))
ts.plot(ret.std.garch,ylab="Standardized returns",main="Student's t GARCH(1,1)")
ts.plot(ret.std.bgarch,ylab="Standardized returns",main="Bayesian Student's t GARCH(1,1)")
ts.plot(ret.std.bsvar1,ylab="Standardized returns",main="Bayesian Student's t SV-AR(1)")
breaks = seq(-10,10,length=30)
hist(ret.std.garch,prob=TRUE,breaks=breaks,xlab="",main="")
curve(dnorm,-10,10,add=TRUE,col=2,n=1000)
hist(ret.std.bgarch,prob=TRUE,breaks=breaks,xlab="",main="")
curve(dnorm,-10,10,add=TRUE,col=2,n=1000)
hist(ret.std.bsvar1,prob=TRUE,breaks=breaks,xlab="",main="")
curve(dnorm,-10,10,add=TRUE,col=2,n=1000)
qqnorm(ret.std.garch,xlim=c(-6,6))
qqline(ret.std.garch,col=2)
qqnorm(ret.std.bgarch)
qqline(ret.std.bgarch,col=2)
qqnorm(ret.std.bsvar1)
qqline(ret.std.bsvar1,col=2)