etf <- etf_vix[1:55, 1:3]
# Split-------------------------------
h <- 5
etf_eval <- divide_ts(etf, h)
etf_train <- etf_eval$train
etf_test <- etf_eval$test
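A quick sanity check of the split; the expected dimensions below assume divide_ts() reserves the last h observations as the test set.
# Sanity check (assumption: the test set holds the last h rows)
dim(etf_train) # expected: (55 - h) rows, 3 columns
dim(etf_test)  # expected: h rows, 3 columns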
Models with Stochastic Volatilities
By specifying cov_spec = set_sv(), var_bayes() and vhar_bayes() fit VAR-SV and VHAR-SV models with shrinkage priors, respectively (a minimal VAR-SV sketch follows the argument list below).
- Shrinkage priors for the coefficients, specified through bayes_spec:
  - Minnesota prior
    - BVAR: set_bvar()
    - BVHAR: set_bvhar() and set_weight_bvhar()
  - SSVS prior: set_ssvs()
  - Horseshoe prior: set_horseshoe()
  - NG prior: set_ng()
  - DL prior: set_dl()
- cov_spec: prior settings for SV, set_sv()
- intercept: prior for the constant term, set_intercept()
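As a minimal sketch of the VAR-SV counterpart mentioned above, the call below combines the Minnesota prior set_bvar() with cov_spec = set_sv(). The lag p = 1 and the tiny iteration count are illustrative choices only, not recommendations.
# Illustrative only: VAR(1)-SV with a Minnesota prior (p = 1 and num_iter = 20 are toy settings)
fit_var_sv <- var_bayes(
  etf_train,
  p = 1,
  num_chains = 2,
  num_iter = 20,
  bayes_spec = set_bvar(),
  cov_spec = set_sv(),
  include_mean = FALSE
)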
set_sv()
#> Model Specification for SV with Cholesky Prior
#>
#> Parameters: Contemporaneous coefficients, State variance, Initial state
#> Prior: Cholesky
#> ========================================================
#> Setting for 'shape':
#> [1] rep(3, dim)
#>
#> Setting for 'scale':
#> [1] rep(0.01, dim)
#>
#> Setting for 'initial_mean':
#> [1] rep(1, dim)
#>
#> Setting for 'initial_prec':
#> [1] 0.1 * diag(dim)
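The defaults printed above can be overridden when constructing the spec; the sketch below assumes the argument names mirror the printed settings, which is an assumption rather than a documented guarantee.
# Assumption: arguments are named after the printed settings (shape, scale, initial_mean, initial_prec)
sv_prior <- set_sv(shape = 5, scale = 0.05)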
SSVS
(fit_ssvs <- vhar_bayes(etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_ssvs(), cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun"))
#> Call:
#> vhar_bayes(y = etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_ssvs(),
#> cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun")
#>
#> BVHAR with Stochastic Volatility
#> Fitted by Gibbs sampling
#> Number of chains: 2
#> Total number of iteration: 20
#> Number of burn-in: 10
#> ====================================================
#>
#> Parameter Record:
#> # A draws_df: 10 iterations, 2 chains, and 177 variables
#> phi[1] phi[2] phi[3] phi[4] phi[5] phi[6] phi[7] phi[8]
#> 1 1.2465 -0.00917 0.415 -0.120 0.533 0.442 0.420 -0.2513
#> 2 0.6531 -0.05136 -0.379 -0.120 -0.121 0.284 -0.774 -0.1123
#> 3 0.0864 -0.06643 -0.804 -0.490 2.191 0.567 -0.335 0.1842
#> 4 0.5385 -0.06517 -0.917 -1.175 1.032 0.683 -0.149 0.0976
#> 5 0.1531 0.05269 -0.492 0.180 -0.558 0.656 -0.278 -0.5563
#> 6 0.4953 0.01371 -1.215 0.521 -0.951 0.376 -0.397 -0.2256
#> 7 0.4870 -0.09283 -0.657 -0.473 -0.448 1.078 0.839 -0.1052
#> 8 0.3117 -0.04717 -0.995 -0.636 1.655 0.601 0.487 0.3885
#> 9 -0.0168 -0.04631 -0.579 0.191 0.860 1.111 0.471 0.5940
#> 10 0.0938 -0.09232 -0.175 0.219 -0.278 0.507 0.346 -0.2830
#> # ... with 10 more draws, and 169 more variables
#> # ... hidden reserved variables {'.chain', '.iteration', '.draw'}
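Forecasts from the fitted object can then be compared with the held-out etf_test; the sketch below assumes the usual predict() interface with an n_ahead horizon argument.
# Assumption: predict() on the fit accepts n_ahead for the forecast horizon
pred_ssvs <- predict(fit_ssvs, n_ahead = h)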
Horseshoe
(fit_hs <- vhar_bayes(etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_horseshoe(), cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun"))
#> Call:
#> vhar_bayes(y = etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_horseshoe(),
#> cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun")
#>
#> BVHAR with Stochastic Volatility
#> Fitted by Gibbs sampling
#> Number of chains: 2
#> Total number of iteration: 20
#> Number of burn-in: 10
#> ====================================================
#>
#> Parameter Record:
#> # A draws_df: 10 iterations, 2 chains, and 211 variables
#> phi[1] phi[2] phi[3] phi[4] phi[5] phi[6] phi[7] phi[8]
#> 1 0.00465 0.0569 -2.2686 -1.4700 15.6637 0.160 -1.3673 -0.47342
#> 2 -2.40957 -0.0863 0.8434 1.4342 5.7074 0.545 1.2580 -0.26365
#> 3 1.16841 0.2085 0.9358 -2.1197 2.1369 0.835 1.4724 0.32221
#> 4 1.57401 0.1359 0.2506 -0.3531 1.1812 0.345 -0.1984 -0.01213
#> 5 -0.80178 -0.1171 -0.3594 0.0103 1.0799 0.617 -0.1493 -0.02766
#> 6 0.54058 -0.0357 0.1973 -0.0840 1.4997 0.443 0.4348 -0.05712
#> 7 0.00755 0.4536 0.0290 0.1963 0.1793 0.264 0.0442 -0.00812
#> 8 0.04790 0.5025 0.0539 0.3126 0.1700 0.588 0.0119 -0.00283
#> 9 -0.08957 -0.1260 0.0270 0.0702 -0.0178 0.578 0.0588 0.05864
#> 10 0.03248 0.0406 -0.0928 -0.0638 0.0268 0.699 -0.1317 -0.04953
#> # ... with 10 more draws, and 203 more variables
#> # ... hidden reserved variables {'.chain', '.iteration', '.draw'}
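The print above indicates that the posterior record is stored as a draws_df, so tools from the posterior package can be used for convergence diagnostics. The coercion below is an assumption (bvhar may or may not register an as_draws_df() method for these fit objects); if it is not available, the chain record stored inside the fit can be passed instead.
# Assumption: as_draws_df() can coerce the fit, as suggested by the draws_df print above
library(posterior)
draws_hs <- as_draws_df(fit_hs)
summarise_draws(draws_hs, "rhat", "ess_bulk") # R-hat and bulk ESS per parameter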
Normal-Gamma prior
(fit_ng <- vhar_bayes(etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_ng(), cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun"))
#> Call:
#> vhar_bayes(y = etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_ng(),
#> cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun")
#>
#> BVHAR with Stochastic Volatility
#> Fitted by Metropolis-within-Gibbs
#> Number of chains: 2
#> Total number of iteration: 20
#> Number of burn-in: 10
#> ====================================================
#>
#> Parameter Record:
#> # A draws_df: 10 iterations, 2 chains, and 184 variables
#> phi[1] phi[2] phi[3] phi[4] phi[5] phi[6] phi[7] phi[8]
#> 1 0.4638 0.1606 0.0844 -0.1915 0.259 0.356 1.64e-01 -2.88e-02
#> 2 0.0710 -0.1656 -0.0143 -0.3296 0.453 0.400 -1.79e-01 3.65e-02
#> 3 0.1809 -0.1013 0.0294 0.2004 0.602 0.126 -4.18e-02 -6.75e-01
#> 4 0.0133 -0.0716 0.0369 0.0529 0.312 0.119 3.37e-02 -1.41e-02
#> 5 0.0431 -0.1053 0.1147 -0.0693 0.451 0.293 1.54e-03 5.71e-03
#> 6 0.0778 -0.1379 -0.0899 0.1655 0.327 0.131 -3.27e-03 -3.96e-03
#> 7 0.1980 -0.0811 0.0401 0.5071 0.555 0.632 -2.58e-03 9.80e-04
#> 8 0.3349 0.0201 -0.0710 0.1560 0.516 0.545 9.23e-04 9.47e-04
#> 9 0.3529 -0.0218 0.0742 0.6892 -0.238 0.480 7.95e-05 8.12e-04
#> 10 0.5153 -0.0371 -0.0380 0.8726 -0.443 0.400 -2.79e-05 3.47e-05
#> # ... with 10 more draws, and 176 more variables
#> # ... hidden reserved variables {'.chain', '.iteration', '.draw'}
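Note the sampler line: the Normal-Gamma prior is fitted by Metropolis-within-Gibbs rather than plain Gibbs sampling. The tiny num_iter = 20 used throughout is only to keep this document fast; a more realistic run would look like the hedged sketch below, which assumes vhar_bayes() also exposes num_burn and thinning arguments.
# Illustrative settings for a real analysis (assumed arguments: num_burn, thinning)
fit_ng_long <- vhar_bayes(
  etf_train,
  num_chains = 2,
  num_iter = 2000,
  num_burn = 1000,
  thinning = 5,
  bayes_spec = set_ng(),
  cov_spec = set_sv(),
  include_mean = FALSE,
  minnesota = "longrun"
)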
Dirichlet-Laplace prior
(fit_dl <- vhar_bayes(etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_dl(), cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun"))
#> Call:
#> vhar_bayes(y = etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_dl(),
#> cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun")
#>
#> BVHAR with Stochastic Volatility
#> Fitted by Gibbs sampling
#> Number of chains: 2
#> Total number of iteration: 20
#> Number of burn-in: 10
#> ====================================================
#>
#> Parameter Record:
#> # A draws_df: 10 iterations, 2 chains, and 178 variables
#> phi[1] phi[2] phi[3] phi[4] phi[5] phi[6] phi[7] phi[8]
#> 1 -0.396 -0.0518 0.00972 -0.03567 1.21e+00 0.160 0.028289 0.37581
#> 2 -0.244 -0.0504 -0.30627 0.03251 1.63e+00 0.690 0.042976 0.73965
#> 3 0.785 0.0291 0.17402 0.14733 -9.92e-01 0.669 -0.000300 0.33097
#> 4 -0.403 -0.0239 -0.21419 0.33020 2.23e-01 0.488 0.000664 0.24997
#> 5 0.525 -0.0365 0.18465 0.17428 -4.49e-04 0.979 0.094548 0.04076
#> 6 0.893 -0.0547 0.34789 -0.10916 -6.70e-03 0.749 0.097342 -0.05944
#> 7 0.972 -0.0218 0.25069 -0.04589 -1.55e-05 0.764 0.093599 -0.00509
#> 8 0.526 -0.0140 0.11863 0.03277 5.42e-06 0.937 -0.103855 -0.04236
#> 9 0.739 0.0460 0.19684 0.00876 -3.26e-06 0.879 -0.093956 -0.06880
#> 10 0.439 0.0132 0.14968 0.01402 1.72e-05 0.966 0.339628 -0.08877
#> # ... with 10 more draws, and 170 more variables
#> # ... hidden reserved variables {'.chain', '.iteration', '.draw'}
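With the held-out etf_test from the split above, out-of-sample accuracy can be compared across the priors; the sketch below assumes predict() takes n_ahead and that an rmse() evaluation helper accepts a forecast object and the test data.
# Assumption: predict(fit, n_ahead) and rmse(forecast, test) are available as sketched
pred_dl <- predict(fit_dl, n_ahead = h)
rmse(pred_dl, etf_test)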
Bayesian visualization
autoplot() also provides Bayesian visualization.
type = "trace" gives an MCMC trace plot.
autoplot(fit_hs, type = "trace", regex_pars = "tau")
type = "dens"
draws MCMC density plot.
autoplot(fit_hs, type = "dens", regex_pars = "tau")