library(bvhar)
# First 55 observations of three volatility index series
etf <- etf_vix[1:55, 1:3]
# Split-------------------------------
h <- 5
etf_eval <- divide_ts(etf, h)
etf_train <- etf_eval$train
etf_test <- etf_eval$test
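
divide_ts() keeps the last h observations as the test set, so this split should leave 50 rows for training and 5 for evaluation. A quick sketch to confirm the dimensions:

dim(etf_train)
dim(etf_test)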

Models with Stochastic Volatilities

By specifying cov_spec = set_sv(), var_bayes() and vhar_bayes() fit VAR-SV and VHAR-SV models with shrinkage priors, respectively.

set_sv()
#> Model Specification for SV with Cholesky Prior
#> 
#> Parameters: Contemporaneous coefficients, State variance, Initial state
#> Prior: Cholesky
#> ========================================================
#> Setting for 'shape':
#> [1]  rep(3, dim)
#> 
#> Setting for 'scale':
#> [1]  rep(0.01, dim)
#> 
#> Setting for 'initial_mean':
#> [1]  rep(1, dim)
#> 
#> Setting for 'initial_prec':
#> [1]  0.1 * diag(dim)
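
The VAR counterpart works the same way. As a rough sketch (the lag order p = 1, the tiny chain settings, and the SSVS prior are illustrative choices, not recommendations):

# Sketch: VAR(1)-SV with an SSVS shrinkage prior
fit_var_sv <- var_bayes(
  etf_train,
  p = 1,
  num_chains = 2,
  num_iter = 20,
  bayes_spec = set_ssvs(),
  cov_spec = set_sv(),
  include_mean = FALSE
)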

SSVS

(fit_ssvs <- vhar_bayes(etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_ssvs(), cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun"))
#> Call:
#> vhar_bayes(y = etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_ssvs(), 
#>     cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun")
#> 
#> BVHAR with Stochastic Volatility
#> Fitted by Gibbs sampling
#> Number of chains: 2
#> Total number of iteration: 20
#> Number of burn-in: 10
#> ====================================================
#> 
#> Parameter Record:
#> # A draws_df: 10 iterations, 2 chains, and 177 variables
#>       phi[1]    phi[2]   phi[3]   phi[4]   phi[5]  phi[6]   phi[7]  phi[8]
#> 1    0.71105   0.00356  -0.2354   1.4005   0.5242   0.377  -0.5504  -0.626
#> 2    0.62412   0.03416  -0.1300   1.4909  -0.3586   0.449  -0.7490  -0.841
#> 3    0.61407   0.17488   0.2433   0.6047   0.4285   0.645   0.3232  -0.394
#> 4    0.41066   0.02684   0.5377   1.2127  -0.0233   0.320  -0.4488  -0.420
#> 5    0.62156  -0.17664  -0.3490  -0.0559  -0.2779   0.379  -0.3396  -0.673
#> 6    0.14841  -0.06202  -0.4995   0.0806   0.6769   0.263  -1.4350  -0.898
#> 7    0.22024  -0.02836  -0.0231   0.1458   0.4921   0.594  -0.7086  -0.273
#> 8   -0.23065  -0.15851  -0.0929  -0.7372  -0.6345   0.140  -0.4590  -0.522
#> 9   -0.00424  -0.02825   0.1001   0.0354   0.4155   0.928   0.2185  -0.165
#> 10  -0.10142  -0.09113   0.4434   0.2203  -0.5776   1.030   0.0774  -0.442
#> # ... with 10 more draws, and 169 more variables
#> # ... hidden reserved variables {'.chain', '.iteration', '.draw'}

Horseshoe

(fit_hs <- vhar_bayes(etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_horseshoe(), cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun"))
#> Call:
#> vhar_bayes(y = etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_horseshoe(), 
#>     cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun")
#> 
#> BVHAR with Stochastic Volatility
#> Fitted by Gibbs sampling
#> Number of chains: 2
#> Total number of iteration: 20
#> Number of burn-in: 10
#> ====================================================
#> 
#> Parameter Record:
#> # A draws_df: 10 iterations, 2 chains, and 211 variables
#>      phi[1]    phi[2]   phi[3]   phi[4]  phi[5]  phi[6]   phi[7]   phi[8]
#> 1    1.9047   0.10675  -3.0428  -1.4967  13.881  -0.021  -1.7332  -0.3254
#> 2   -1.1849  -0.14683   0.0659   1.3729   4.433   1.027   1.4436  -0.2240
#> 3    0.5951   0.32890   1.0851  -1.8757   0.787   1.256   2.2909   0.3748
#> 4    0.9921   0.18625   0.2809  -0.6060   1.338   0.625  -0.0329   0.0596
#> 5   -0.0508  -0.13928  -0.1525  -0.0089   1.492   1.026  -0.1330  -0.0765
#> 6    0.7467  -0.00729   0.2119  -0.1068   1.244   0.853   0.4273  -0.1003
#> 7    0.3877   0.37315   0.0221   0.2599   0.787   0.794   0.2481  -0.0294
#> 8    0.2332   0.50406   0.1071   0.3790   0.643   0.925   0.0102  -0.0212
#> 9    0.0456  -0.12203   0.0302   0.1127   0.225   0.914   0.1009   0.1686
#> 10   0.0165   0.02265  -0.0299  -0.0358   0.229   0.903  -0.3033  -0.0313
#> # ... with 10 more draws, and 203 more variables
#> # ... hidden reserved variables {'.chain', '.iteration', '.draw'}
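
The parameter record printed above is a posterior draws_df, so the posterior package can summarise individual parameters. A sketch, assuming the record is stored in the fit's param element; here the horseshoe global shrinkage parameter tau, which is also visualised in the last section:

library(posterior)
# Summarise draws whose names contain "tau" (assumes fit_hs$param holds the draws_df)
summarise_draws(subset_draws(fit_hs$param, variable = "tau", regex = TRUE))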

Normal-Gamma prior

(fit_ng <- vhar_bayes(etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_ng(), cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun"))
#> Call:
#> vhar_bayes(y = etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_ng(), 
#>     cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun")
#> 
#> BVHAR with Stochastic Volatility
#> Fitted by Metropolis-within-Gibbs
#> Number of chains: 2
#> Total number of iteration: 20
#> Number of burn-in: 10
#> ====================================================
#> 
#> Parameter Record:
#> # A draws_df: 10 iterations, 2 chains, and 184 variables
#>     phi[1]     phi[2]    phi[3]   phi[4]   phi[5]  phi[6]     phi[7]    phi[8]
#> 1    1.202  -0.130154  -0.44412  -1.7310   0.1257   0.658   3.46e-04  -0.03047
#> 2    0.619   0.167340   0.36261  -1.3824  -0.1771   0.731   3.93e-04   0.05016
#> 3    0.415   0.042126   0.02108   0.2520   0.0522   0.671  -4.02e-05   0.03148
#> 4    0.108   0.033015   0.06590   0.2362  -0.3805   0.678   5.09e-05   0.01909
#> 5    0.349   0.013670  -0.09707  -0.0881  -0.1075   0.518   1.34e-04  -0.00656
#> 6    0.373   0.026465  -0.06382  -0.4124   0.1256   0.976   3.29e-04   0.00163
#> 7    0.322   0.029662  -0.08880   0.0321  -0.7434   0.561  -4.25e-05  -0.00283
#> 8    0.314   0.029183   0.00239   0.2924  -0.2573   0.605   3.91e-05  -0.02124
#> 9    0.308   0.026041  -0.01243   0.4730  -0.1726   0.541  -6.47e-07   0.01404
#> 10   0.454  -0.000169  -0.04022   0.2197  -0.0313   0.620  -2.47e-06  -0.00761
#> # ... with 10 more draws, and 176 more variables
#> # ... hidden reserved variables {'.chain', '.iteration', '.draw'}
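
With a Metropolis-within-Gibbs sampler and only 20 iterations, convergence diagnostics are worth checking before interpreting anything. A sketch using the posterior package (again assuming the draws_df sits in the fit's param element); the rhat and ess_bulk columns flag problematic parameters:

library(posterior)
# Rhat and bulk ESS for the VHAR coefficients phi (assumes fit_ng$param holds the draws_df)
summarise_draws(subset_draws(fit_ng$param, variable = "phi", regex = TRUE))[, c("variable", "rhat", "ess_bulk")]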

Dirichlet-Laplace prior

(fit_dl <- vhar_bayes(etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_dl(), cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun"))
#> Call:
#> vhar_bayes(y = etf_train, num_chains = 2, num_iter = 20, bayes_spec = set_dl(), 
#>     cov_spec = set_sv(), include_mean = FALSE, minnesota = "longrun")
#> 
#> BVHAR with Stochastic Volatility
#> Fitted by Gibbs sampling
#> Number of chains: 2
#> Total number of iteration: 20
#> Number of burn-in: 10
#> ====================================================
#> 
#> Parameter Record:
#> # A draws_df: 10 iterations, 2 chains, and 178 variables
#>      phi[1]   phi[2]     phi[3]   phi[4]    phi[5]  phi[6]     phi[7]
#> 1    0.2632   0.1791   2.49e-03   0.4872  -0.00156  0.1455  -7.18e-05
#> 2    0.0848  -0.0244  -1.68e-03   0.2407   0.00170  0.0362   2.79e-04
#> 3    0.1294  -0.0485   9.04e-04   0.8641  -0.00592  0.0380   1.42e-04
#> 4   -0.2709  -0.0628   1.02e-04   1.1526   0.00883  0.0776  -1.19e-04
#> 5    0.1380   0.1746  -1.34e-04  -0.0250  -0.00772  0.0302   5.03e-05
#> 6    0.1348   0.1153  -8.01e-05   0.0613  -0.01438  0.0422  -2.54e-04
#> 7    0.1686   0.0739  -1.28e-04   0.5711   0.01369  0.0848   1.24e-04
#> 8   -0.2162   0.1849   1.25e-05   0.6660  -0.04205  0.1473  -9.35e-04
#> 9    0.4234   0.1615   1.34e-05   0.0454   0.02103  0.3921  -1.20e-03
#> 10   0.3035   0.2117  -2.71e-05   0.1038  -0.01199  0.3140   7.84e-04
#>        phi[8]
#> 1   -0.036299
#> 2    0.167970
#> 3    0.030604
#> 4   -0.076460
#> 5   -0.040128
#> 6    0.008823
#> 7   -0.009037
#> 8    0.003507
#> 9    0.000806
#> 10  -0.000761
#> # ... with 10 more draws, and 170 more variables
#> # ... hidden reserved variables {'.chain', '.iteration', '.draw'}

Bayesian visualization

autoplot() also supports Bayesian visualization. type = "trace" draws an MCMC trace plot.

autoplot(fit_hs, type = "trace", regex_pars = "tau")

type = "dens" draws an MCMC density plot.

autoplot(fit_hs, type = "dens", regex_pars = "tau")