# First 55 observations of the first three series in the etf_vix dataset
etf <- etf_vix[1:55, 1:3]
# Split-------------------------------
h <- 5                         # test-set size (forecast horizon)
etf_eval <- divide_ts(etf, h)  # split into train and test sets
etf_train <- etf_eval$train
etf_test <- etf_eval$test
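As an illustrative check (a sketch, assuming divide_ts() holds out the last h observations as the test set; no output shown here):
# Expected sizes under that assumption: 55 - 5 = 50 training rows, 5 test rows
nrow(etf_train)
nrow(etf_test)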
Models with Stochastic Volatilities
By specifying cov_spec = set_sv(), var_bayes() and vhar_bayes() fit VAR-SV and VHAR-SV with shrinkage priors, respectively.
- coef_spec: prior for the coefficients; the following shrinkage priors are available (see the sketch after this list)
  - Minnesota prior
    - BVAR: set_bvar()
    - BVHAR: set_bvhar() and set_weight_bvhar()
  - SSVS prior: set_ssvs()
  - Horseshoe prior: set_horseshoe()
  - NG prior: set_ng()
  - DL prior: set_dl()
- cov_spec: prior settings for SV, set_sv()
- intercept: prior for the constant term, set_intercept()
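For example, the VAR-SV counterpart mentioned above can be fitted with var_bayes(). The call below is a minimal sketch and is not run here; the lag-order argument p is an assumption, while coef_spec, cov_spec, and include_mean mirror the vhar_bayes() calls used later in this section.
# Sketch (not run): VAR(1)-SV with a Minnesota coefficient prior
fit_var_sv <- var_bayes(
  etf_train,
  p = 1,                   # VAR lag order (assumed argument name)
  num_chains = 2,
  num_iter = 20,
  coef_spec = set_bvar(),  # Minnesota prior for BVAR coefficients
  cov_spec = set_sv(),     # stochastic volatility covariance
  include_mean = FALSE
)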
set_sv()
#> Model Specification for SV with Cholesky Prior
#>
#> Parameters: Contemporaneous coefficients, State variance, Initial state
#> Prior: Cholesky
#> ========================================================
#> Setting for 'shape':
#> [1] rep(3, dim)
#>
#> Setting for 'scale':
#> [1] rep(0.01, dim)
#>
#> Setting for 'initial_mean':
#> [1] rep(1, dim)
#>
#> Setting for 'initial_prec':
#> [1] 0.1 * diag(dim)
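The printed settings suggest how the SV prior can be tuned. The line below is a hedged sketch, assuming set_sv() accepts arguments named after the printed settings (shape, scale, initial_mean, initial_prec); the resulting object would then be passed as cov_spec in var_bayes() or vhar_bayes() just like the default set_sv().
# Sketch: a more diffuse prior on the log-volatility state variance
# (argument names assumed from the printout above)
sv_diffuse <- set_sv(shape = 3, scale = 0.1)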
SSVS
(fit_ssvs <- vhar_bayes(etf_train, num_chains = 2, num_iter = 20, coef_spec = set_ssvs(), cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun"))
#> Call:
#> vhar_bayes(y = etf_train, num_chains = 2, num_iter = 20, coef_spec = set_ssvs(),
#> cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun")
#>
#> BVHAR with Stochastic Volatility
#> Fitted by Gibbs sampling
#> Number of chains: 2
#> Total number of iteration: 20
#> Number of burn-in: 10
#> ====================================================
#>
#> Parameter Record:
#> # A draws_df: 10 iterations, 2 chains, and 177 variables
#> phi[1] phi[2] phi[3] phi[4] phi[5] phi[6] phi[7] phi[8]
#> 1 -0.2809 -0.435 -0.132 0.0164 1.632 0.6737 1.506 0.4338
#> 2 -0.6398 -0.671 -1.804 -0.2749 2.726 1.4386 1.466 0.2481
#> 3 -0.1688 -0.194 -0.273 0.4175 1.932 0.5967 1.070 -0.0451
#> 4 0.0548 -0.344 -0.509 0.7781 0.789 0.1965 0.977 -0.1877
#> 5 -0.1710 -0.260 -1.072 -0.2136 2.007 0.2803 -0.482 -0.4731
#> 6 -0.0850 -0.251 -0.780 0.1651 1.843 0.0452 -0.751 -0.7045
#> 7 -0.4064 -0.299 -0.621 0.3135 1.570 0.6197 1.189 0.2086
#> 8 -0.1958 -0.306 -0.756 -0.0284 2.209 0.8177 0.786 -0.1558
#> 9 -0.4582 -0.509 -1.145 -0.1341 2.061 0.6314 0.395 0.0669
#> 10 -0.3015 -0.323 -1.456 -0.4003 2.746 0.1895 0.527 -0.3645
#> # ... with 10 more draws, and 169 more variables
#> # ... hidden reserved variables {'.chain', '.iteration', '.draw'}
Horseshoe
(fit_hs <- vhar_bayes(etf_train, num_chains = 2, num_iter = 20, coef_spec = set_horseshoe(), cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun"))
#> Call:
#> vhar_bayes(y = etf_train, num_chains = 2, num_iter = 20, coef_spec = set_horseshoe(),
#> cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun")
#>
#> BVHAR with Stochastic Volatility
#> Fitted by Gibbs sampling
#> Number of chains: 2
#> Total number of iteration: 20
#> Number of burn-in: 10
#> ====================================================
#>
#> Parameter Record:
#> # A draws_df: 10 iterations, 2 chains, and 211 variables
#> phi[1] phi[2] phi[3] phi[4] phi[5] phi[6] phi[7] phi[8]
#> 1 -0.0304 -0.0309 0.4039 -0.04124 -0.1909 0.0206 0.0882 0.033078
#> 2 0.0177 -0.0218 0.1822 0.00925 0.9670 -0.0115 0.3481 0.247849
#> 3 -0.0891 -0.0189 -0.0337 0.01162 0.4190 0.0393 0.1657 -0.035753
#> 4 -0.1159 -0.0660 -0.1383 -0.05007 0.2089 0.4171 -0.1424 0.129915
#> 5 -0.1867 -0.0577 -0.1094 0.06793 0.0804 0.4281 -0.1954 0.067185
#> 6 -0.2408 -0.0842 0.0510 -0.08670 0.3418 -0.0325 -0.6220 -0.044931
#> 7 -0.2050 -0.1970 -0.0949 -0.58709 0.0457 0.3472 -0.5688 0.036126
#> 8 -0.1667 -0.0684 0.0127 0.37730 -0.1097 -0.0351 -0.7691 -0.055198
#> 9 -0.2836 -0.0533 0.1063 -0.06070 0.4966 0.1201 -0.3582 0.000847
#> 10 -0.0980 -0.1000 0.0668 -0.06924 0.2454 0.2899 -0.1129 0.055355
#> # ... with 10 more draws, and 203 more variables
#> # ... hidden reserved variables {'.chain', '.iteration', '.draw'}
Normal-Gamma prior
(fit_ng <- vhar_bayes(etf_train, num_chains = 2, num_iter = 20, coef_spec = set_ng(), cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun"))
#> Call:
#> vhar_bayes(y = etf_train, num_chains = 2, num_iter = 20, coef_spec = set_ng(),
#> cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun")
#>
#> BVHAR with Stochastic Volatility
#> Fitted by Metropolis-within-Gibbs
#> Number of chains: 2
#> Total number of iteration: 20
#> Number of burn-in: 10
#> ====================================================
#>
#> Parameter Record:
#> # A draws_df: 10 iterations, 2 chains, and 184 variables
#> phi[1] phi[2] phi[3] phi[4] phi[5] phi[6] phi[7] phi[8]
#> 1 0.0163 -0.2097 -0.2567 -0.0240 1.18921 0.0884 -0.00159 -0.25690
#> 2 0.0638 -0.0326 -0.2059 0.0562 0.61534 0.0264 0.05115 0.02492
#> 3 0.0547 -0.1303 0.1697 0.1383 -0.20499 0.0158 -0.00102 -0.01696
#> 4 0.2011 -0.0707 0.3999 -0.2971 -0.57085 0.3386 -0.06278 0.00699
#> 5 0.0901 -0.0309 -0.0367 -0.1016 -0.04590 0.2839 0.17475 0.02876
#> 6 0.4087 0.0346 0.0456 -0.0183 0.01097 0.3827 0.00866 0.11522
#> 7 0.3760 -0.0289 0.0864 0.0130 -0.01013 0.3841 0.00161 -0.07671
#> 8 0.7592 -0.0359 0.3242 0.0197 -0.05092 0.4131 0.12073 0.16257
#> 9 0.3201 -0.0498 0.0851 0.0352 0.21481 0.5987 -0.10256 -0.06171
#> 10 0.2380 -0.0225 0.1437 0.0886 0.00587 0.5508 -0.26221 -0.37486
#> # ... with 10 more draws, and 176 more variables
#> # ... hidden reserved variables {'.chain', '.iteration', '.draw'}
Dirichlet-Laplace prior
(fit_dl <- vhar_bayes(etf_train, num_chains = 2, num_iter = 20, coef_spec = set_dl(), cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun"))
#> Call:
#> vhar_bayes(y = etf_train, num_chains = 2, num_iter = 20, coef_spec = set_dl(),
#> cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun")
#>
#> BVHAR with Stochastic Volatility
#> Fitted by Gibbs sampling
#> Number of chains: 2
#> Total number of iteration: 20
#> Number of burn-in: 10
#> ====================================================
#>
#> Parameter Record:
#> # A draws_df: 10 iterations, 2 chains, and 178 variables
#> phi[1] phi[2] phi[3] phi[4] phi[5] phi[6] phi[7]
#> 1 0.00748 0.13291 0.04639 0.011642 0.2212 0.01788 0.24556
#> 2 -0.02351 -0.07930 0.01324 -0.008135 0.6338 -0.01306 -0.06538
#> 3 -0.00167 -0.03925 0.02584 -0.005772 -0.3211 -0.00127 0.11515
#> 4 -0.01316 -0.01056 0.06144 -0.002228 -0.4073 0.00070 -0.14095
#> 5 -0.01329 -0.00228 -0.00488 -0.001493 -0.1545 -0.00529 0.03903
#> 6 0.00558 -0.01154 -0.03222 -0.003665 -0.0167 0.02611 0.03366
#> 7 0.06363 -0.00909 -0.02406 0.000727 0.1396 0.09105 -0.00148
#> 8 -0.22652 0.03128 0.13404 0.000510 -0.1946 0.00773 0.00545
#> 9 0.10193 -0.10966 0.00554 -0.000764 0.2897 0.00304 -0.00380
#> 10 0.04194 0.00683 -0.00989 -0.000337 0.0232 0.00636 0.01094
#> phi[8]
#> 1 0.005781
#> 2 -0.034383
#> 3 0.020210
#> 4 -0.062570
#> 5 0.025049
#> 6 -0.021208
#> 7 0.005351
#> 8 -0.000236
#> 9 0.003069
#> 10 0.000639
#> # ... with 10 more draws, and 170 more variables
#> # ... hidden reserved variables {'.chain', '.iteration', '.draw'}
Bayesian visualization
autoplot() also provides Bayesian visualization. type = "trace" gives an MCMC trace plot.
autoplot(fit_hs, type = "trace", regex_pars = "tau")
type = "dens"
draws MCMC density plot.
autoplot(fit_hs, type = "dens", regex_pars = "tau")