using MortalityTables
using Turing
using DataFramesMeta
using MCMCChains
using LinearAlgebra
using CairoMakie
using StatsBase
30 Bayesian Mortality Modeling
Alec Loudenback
“After a year of intense mental struggle, however, [Arthur Bailey] realized to his consternation that actuarial sledgehammering worked. He even preferred [the Bayesian underpinnings of credibility theory] to the elegance of frequentism. He positively liked formulae that described ‘actual data. . . . I realized that the hard-shelled underwriters were recognizing certain facts of life neglected by the statistical theorists.’ He wanted to give more weight to a large volume of data than to the frequentists’ small sample; doing so felt surprisingly ‘logical and reasonable.’ He concluded that only a ‘suicidal’ actuary would use Fisher’s method of maximum likelihood, which assigned a zero probability to nonevents.” - Sharon Bertsch McGrayne, Excerpt From The Theory That Would Not Die
30.1 Chapter Overview
An example of using a Bayesian MCMC approach to fitting a mortality curve to sample data, with multi-level models and censored data.
30.2 Generating fake data
The problem of interest is to look at mortality rates, which are given in terms of exposures (whether or not a life experienced a death in a given year).
We’ll grab some example rates from an insurance table, which has a “selection” component: when someone enters observation, their mortality is path dependent — someone who started being observed at age 50 will have a different risk/mortality rate at age 55 than someone who started being observed at age 45.
Additionally, there may be other groupings of interest, such as:
- high/medium/low risk classification
- sex
- group (e.g. company, data source, etc.)
- type of insurance product offered
The example data will start with only the risk classification above.
# Simulate an inforce block of `n` policies, each with a random issue age
# (30–70) and a random risk classification (1 = low, 2 = medium, 3 = high).
n = 10_000

inforce = map(1:n) do i
    (
        issue_age=rand(30:70),
        risk_level=rand(1:3),
    )
end
10000-element Vector{@NamedTuple{issue_age::Int64, risk_level::Int64}}:
(issue_age = 64, risk_level = 3)
(issue_age = 56, risk_level = 2)
(issue_age = 47, risk_level = 1)
(issue_age = 41, risk_level = 1)
(issue_age = 39, risk_level = 2)
(issue_age = 56, risk_level = 2)
(issue_age = 33, risk_level = 2)
(issue_age = 36, risk_level = 3)
(issue_age = 67, risk_level = 2)
(issue_age = 30, risk_level = 1)
⋮
(issue_age = 33, risk_level = 1)
(issue_age = 43, risk_level = 1)
(issue_age = 44, risk_level = 1)
(issue_age = 59, risk_level = 3)
(issue_age = 66, risk_level = 1)
(issue_age = 46, risk_level = 1)
(issue_age = 64, risk_level = 2)
(issue_age = 34, risk_level = 3)
(issue_age = 43, risk_level = 1)
# Industry mortality table used to generate the synthetic experience data below.
base_table = MortalityTables.table("2001 VBT Residual Standard Select and Ultimate - Male Nonsmoker, ANB")
"""
    tabular_mortality(params, issue_age, att_age, risk_level)

Return the annual mortality rate for a life at attained age `att_age`, looked
up from the ultimate rates of `params` (a mortality table) and scaled by the
risk class: level 1 is 70% of the table rate, level 2 the table rate, and any
other level 150%. `issue_age` is accepted for interface consistency with other
assumption functions but is not used by this tabular assumption.
"""
function tabular_mortality(params, issue_age, att_age, risk_level)
    q = params.ultimate[att_age]
    # map the risk classification to a multiplicative adjustment of the base rate
    factor = if risk_level == 1
        0.7
    elseif risk_level == 2
        1.0
    else
        1.5
    end
    return q * factor
end
tabular_mortality (generic function with 1 method)
"""
    model_outcomes(inforce, assumption, assumption_params; n_years=5)

Simulate up to `n_years` of mortality experience for each policy in `inforce`,
using `assumption(assumption_params, issue_age, att_age, risk_level)` to look
up the annual mortality rate. Returns a `DataFrame` with one row per
policy-year of exposure (`issue_age`, `risk_level`, `att_age`, `exposures`,
`death`); years after a simulated death are dropped.
"""
function model_outcomes(inforce, assumption, assumption_params; n_years=5)

    outcomes = map(inforce) do pol
        alive = 1
        sim = map(1:n_years) do t
            att_age = pol.issue_age + t - 1
            q = assumption(
                assumption_params,
                pol.issue_age,
                att_age,
                pol.risk_level
            )
            if rand() < q
                out = (att_age=att_age, exposures=alive, death=1)
                alive = 0
                out
            else
                (att_age=att_age, exposures=alive, death=0)
            end
        end
        # keep only the years where the policy was still alive at the start
        filter!(x -> x.exposures == 1, sim)
    end

    df = DataFrame(inforce)
    df.outcomes = outcomes
    df = flatten(df, :outcomes)

    # unpack the per-year named tuples into ordinary columns
    df.att_age = [x.att_age for x in df.outcomes]
    df.death = [x.death for x in df.outcomes]
    df.exposures = [x.exposures for x in df.outcomes]
    select!(df, Not(:outcomes))
end
exposures = model_outcomes(inforce, tabular_mortality, base_table)

# Aggregate the simulated experience by issue age and attained age...
data = combine(groupby(exposures, [:issue_age, :att_age])) do subdf
    (exposures=nrow(subdf),
        deaths=sum(subdf.death),
        fraction=sum(subdf.death) / nrow(subdf))
end

# ...and also split by risk level, for the multi-level models below.
data2 = combine(groupby(exposures, [:issue_age, :att_age, :risk_level])) do subdf
    (exposures=nrow(subdf),
        deaths=sum(subdf.death),
        fraction=sum(subdf.death) / nrow(subdf))
end
Row | issue_age | att_age | risk_level | exposures | deaths | fraction |
---|---|---|---|---|---|---|
Int64 | Int64 | Int64 | Int64 | Int64 | Float64 | |
1 | 30 | 30 | 1 | 93 | 0 | 0.0 |
2 | 30 | 30 | 2 | 79 | 0 | 0.0 |
3 | 30 | 30 | 3 | 71 | 0 | 0.0 |
4 | 30 | 31 | 1 | 93 | 0 | 0.0 |
5 | 30 | 31 | 2 | 79 | 0 | 0.0 |
6 | 30 | 31 | 3 | 71 | 0 | 0.0 |
7 | 30 | 32 | 1 | 93 | 0 | 0.0 |
8 | 30 | 32 | 2 | 79 | 0 | 0.0 |
9 | 30 | 32 | 3 | 71 | 0 | 0.0 |
10 | 30 | 33 | 1 | 93 | 0 | 0.0 |
11 | 30 | 33 | 2 | 79 | 0 | 0.0 |
12 | 30 | 33 | 3 | 71 | 1 | 0.0140845 |
13 | 30 | 34 | 1 | 93 | 0 | 0.0 |
⋮ | ⋮ | ⋮ | ⋮ | ⋮ | ⋮ | ⋮ |
604 | 70 | 71 | 1 | 84 | 2 | 0.0238095 |
605 | 70 | 71 | 2 | 82 | 6 | 0.0731707 |
606 | 70 | 71 | 3 | 93 | 3 | 0.0322581 |
607 | 70 | 72 | 1 | 82 | 0 | 0.0 |
608 | 70 | 72 | 2 | 76 | 1 | 0.0131579 |
609 | 70 | 72 | 3 | 90 | 5 | 0.0555556 |
610 | 70 | 73 | 1 | 82 | 3 | 0.0365854 |
611 | 70 | 73 | 2 | 75 | 2 | 0.0266667 |
612 | 70 | 73 | 3 | 85 | 2 | 0.0235294 |
613 | 70 | 74 | 1 | 79 | 3 | 0.0379747 |
614 | 70 | 74 | 2 | 73 | 3 | 0.0410959 |
615 | 70 | 74 | 3 | 83 | 4 | 0.0481928 |
30.3 1: A single binomial parameter model
Estimate \(q\), the average mortality rate, not accounting for any variation within the population/sample. Our model is defined as:
\[ q \sim \text{Beta}(1,1) \]
\[ \text{deaths}_i \sim \text{Binomial}(\text{exposures}_i, q) \]
# Model 1: a single binomial probability `q`, shared by the whole population,
# with a flat Beta(1, 1) prior. Each row's deaths are observed as a draw from
# Binomial(exposures, q).
@model function mortality(data, deaths)
    q ~ Beta(1, 1)
    for i = 1:nrow(data)
        deaths[i] ~ Binomial(data.exposures[i], q)
    end
end

m1 = mortality(data, data.deaths)
DynamicPPL.Model{typeof(mortality), (:data, :deaths), (), (), Tuple{DataFrame, Vector{Int64}}, Tuple{}, DynamicPPL.DefaultContext}(mortality, (data = 205×5 DataFrame Row │ issue_age att_age exposures deaths fraction │ Int64 Int64 Int64 Int64 Float64 ─────┼─────────────────────────────────────────────────── 1 │ 30 30 243 0 0.0 2 │ 30 31 243 0 0.0 3 │ 30 32 243 0 0.0 4 │ 30 33 243 1 0.00411523 5 │ 30 34 242 0 0.0 6 │ 31 31 233 0 0.0 7 │ 31 32 233 0 0.0 8 │ 31 33 233 0 0.0 ⋮ │ ⋮ ⋮ ⋮ ⋮ ⋮ 199 │ 69 72 231 7 0.030303 200 │ 69 73 224 3 0.0133929 201 │ 70 70 264 5 0.0189394 202 │ 70 71 259 11 0.042471 203 │ 70 72 248 6 0.0241935 204 │ 70 73 242 7 0.0289256 205 │ 70 74 235 10 0.0425532 190 rows omitted, deaths = [0, 0, 0, 1, 0, 0, 0, 0, 0, 0 … 9, 5, 9, 7, 3, 5, 11, 6, 7, 10]), NamedTuple(), DynamicPPL.DefaultContext())
30.3.1 Sampling from the posterior
We use a No-U-Turn-Sampler (NUTS) technique to sample multiple chains at once:
# Sample four chains in parallel with NUTS, 400 draws each.
num_chains = 4
chain = sample(m1, NUTS(), MCMCThreads(), 400, num_chains)
Chains MCMC chain (400×13×4 Array{Float64, 3}): Iterations = 201:1:600 Number of chains = 4 Samples per chain = 400 Wall duration = 2.07 seconds Compute duration = 8.22 seconds parameters = q internals = lp, n_steps, is_accept, acceptance_rate, log_density, hamiltonian_energy, hamiltonian_energy_error, max_hamiltonian_energy_error, tree_depth, numerical_error, step_size, nom_step_size Summary Statistics parameters mean std mcse ess_bulk ess_tail rhat ⋯ Symbol Float64 Float64 Float64 Float64 Float64 Float64 ⋯ q 0.0078 0.0004 0.0000 622.1803 1164.2892 1.0037 ⋯ 1 column omitted Quantiles parameters 2.5% 25.0% 50.0% 75.0% 97.5% Symbol Float64 Float64 Float64 Float64 Float64 q 0.0070 0.0075 0.0077 0.0080 0.0085
Here, we have asked for the outcomes to be modeled via a single parameter for the population. We see that the posterior distribution of \(q\) is very close to the overall population mortality rate:
sum(data.deaths) / sum(data.exposures)
0.007750994237480724
However, we can see that the sampling of possible posterior parameters doesn’t really fit the data very well since our model was so simplified. The lines represent the posterior binomial probability.
This is saying that for the observed data, if there really is just a single probability p
that governs the true process that came up with the data, there’s a pretty narrow range of values it could possibly be:
let
    # marker size scaled by the square root of exposures so area tracks volume
    data_weight = sqrt.(data.exposures) / 2
    f = Figure(title="Parametric Bayesian Mortality")
    ax = Axis(f[1, 1],
        xlabel="age",
        ylabel="mortality rate",
        limits=(nothing, nothing, -0.01, 0.10),
    )
    scatter!(ax,
        data.att_age,
        data.fraction,
        markersize=data_weight,
        color=(:blue, 0.5),
        label="Experience data point (size indicates relative exposure quantity)",)

    # show n samples from the posterior plotted on the graph
    n = 300
    ages = sort!(unique(data.att_age))

    q_posterior = sample(chain, n)[:q]

    for i in 1:n
        hlines!(ax, [q_posterior[i]], color=(:grey, 0.1))
    end

    # 5th/95th percentile bands of simulated outcomes per data row
    # (computed here; not drawn on this figure)
    sim05 = Float64[]
    sim95 = Float64[]
    for r in eachrow(data)
        outcomes = map(1:n) do i
            rand(Binomial(r.exposures, q_posterior[i]), 500)
        end
        push!(sim05, quantile(Iterators.flatten(outcomes), 0.05) / r.exposures)
        push!(sim95, quantile(Iterators.flatten(outcomes), 0.95) / r.exposures)
    end
    f
end
# Draw 300 posterior samples of `q` from the chain (shown for illustration).
let
    n = 300
    q_posterior = sample(chain, n)[:q]
end
2-dimensional AxisArray{Float64,2,...} with axes:
:iter, 1:300
:chain, 1:1
And data, a 300×1 Matrix{Float64}:
0.008239504674789839
0.008016095645438703
0.007403851968467612
0.007623915943098581
0.007669275858358716
0.007950749810407665
0.007806347482436929
0.008126267749599294
0.008567127642336405
0.007267505832110341
⋮
0.00806618399067977
0.007390293557548343
0.007769305315884761
0.007944377201984263
0.008087919354888375
0.007607295149704525
0.007320275713880411
0.008577543361475867
0.007920714755584887
30.4 2. Parametric model
In this example, we utilize a MakehamBeard parameterization because it’s already very similar in form to a logistic function. This is important because our desired output is a probability (ie the probability of a death at a given age), so the value must be constrained to be in the interval between zero and one.
The prior values for a
,b
,c
, and k
are chosen to constrain the hazard (mortality) rate to be between zero and one.
This isn’t an ideal parameterization (e.g. we aren’t including information about the select underwriting period), but it is an example of utilizing Bayesian techniques on life experience data.
# Model 2: a parametric MakehamBeard hazard with priors chosen to keep the
# hazard (mortality) rate within [0, 1]. `c` is fixed at zero here.
@model function mortality2(data, deaths)
    a ~ Exponential(0.1)
    b ~ Exponential(0.1)
    c = 0.0
    k ~ truncated(Exponential(1), 1, Inf)

    # use the variables to create a parametric mortality model
    m = MortalityTables.MakehamBeard(; a, b, c, k)

    # loop through the rows of the dataframe to let Turing observe the data
    # and how consistent the parameters are with the data
    for i = 1:nrow(data)
        age = data.att_age[i]
        q = MortalityTables.hazard(m, age)
        deaths[i] ~ Binomial(data.exposures[i], q)
    end
end
mortality2 (generic function with 2 methods)
We combine the model with the data and sample from the posterior using a similar call as before:
m2 = mortality2(data, data.deaths)

chain2 = sample(m2, NUTS(), MCMCThreads(), 400, num_chains)
Chains MCMC chain (400×15×4 Array{Float64, 3}): Iterations = 201:1:600 Number of chains = 4 Samples per chain = 400 Wall duration = 75.26 seconds Compute duration = 94.01 seconds parameters = a, b, k internals = lp, n_steps, is_accept, acceptance_rate, log_density, hamiltonian_energy, hamiltonian_energy_error, max_hamiltonian_energy_error, tree_depth, numerical_error, step_size, nom_step_size Summary Statistics parameters mean std mcse ess_bulk ess_tail rhat e ⋯ Symbol Float64 Float64 Float64 Float64 Float64 Float64 ⋯ a 0.7711 1.3360 0.6613 7.0294 16.1783 1.5873 ⋯ b 1.3286 2.1300 1.0543 7.3498 37.2565 1.5368 ⋯ k 2.0395 0.8958 0.0319 603.8296 556.1799 1.5879 ⋯ 1 column omitted Quantiles parameters 2.5% 25.0% 50.0% 75.0% 97.5% Symbol Float64 Float64 Float64 Float64 Float64 a 0.0000 0.0000 0.0000 0.7712 3.0846 b 0.0872 0.0964 0.1020 1.3441 5.0168 k 1.0517 1.4151 1.9909 2.2066 4.5593
30.4.1 Plotting samples from the posterior
We can see that the sampling of possible posterior parameters fits the data well:
let
    data_weight = sqrt.(data.exposures) / 2

    p = scatter(
        data.att_age,
        data.fraction,
        markersize=data_weight,
        alpha=0.5,
        label="Experience data point (size indicates relative exposure quantity)",
        axis=(
            xlabel="age",
            limits=(nothing, nothing, -0.01, 0.10),
            ylabel="mortality rate",
            title="Parametric Bayesian Mortality"
        )
    )
    # show n samples from the posterior plotted on the graph
    n = 300
    ages = sort!(unique(data.att_age))

    for i in 1:n
        # one posterior draw of (a, b, k); c is fixed at zero as in the model
        s = sample(chain2, 1)
        a = only(s[:a])
        b = only(s[:b])
        k = only(s[:k])
        c = 0
        m = MortalityTables.MakehamBeard(; a, b, c, k)
        lines!(ages, age -> MortalityTables.hazard(m, age), alpha=0.1, label="")
    end
    p
end
let
    data_weight = sqrt.(data.exposures) / 2
    f = Figure(title="Parametric Bayesian Mortality")
    ax = Axis(f[1, 1],
        xlabel="age",
        ylabel="mortality rate",
        limits=(nothing, nothing, -0.01, 0.10),
    )
    scatter!(ax,
        data.att_age,
        data.fraction,
        markersize=data_weight,
        color=(:blue, 0.5),
        label="Experience data point (size indicates relative exposure quantity)",)

    # show n samples from the posterior plotted on the graph
    n = 300
    ages = sort!(unique(data.att_age))

    for i in 1:n
        # one posterior draw of (a, b, k); c is fixed at zero as in the model
        s = sample(chain2, 1)
        a = only(s[:a])
        b = only(s[:b])
        k = only(s[:k])
        c = 0
        m = MortalityTables.MakehamBeard(; a, b, c, k)
        qs = MortalityTables.hazard.(m, ages)
        lines!(ax, ages, qs, color=(:grey, 0.1))
    end
    f
end
Recall that the lines are not plotting the possible outcomes of the claims rates, but the mean claims rate for the given age.
30.5 3. Multi-level model
This model extends the prior to create a multi-level model. Each risk class (risk_level
) gets its own \(a\) parameter in the MakehamBeard
model. The prior for \(a_i\) is determined by the hyper-parameter \(\bar{a}\).
# Model 3: multi-level — each risk class gets its own `a`, drawn from a shared
# Exponential hyper-prior with mean `ā`.
@model function mortality3(data, deaths)
    risk_levels = length(levels(data.risk_level))
    b ~ Exponential(0.1)
    ā ~ Exponential(0.1)
    a ~ filldist(Exponential(ā), risk_levels)
    c = 0
    k ~ truncated(Exponential(1), 1, Inf)

    # use the variables to create a parametric mortality model
    # loop through the rows of the dataframe to let Turing observe the data
    # and how consistent the parameters are with the data
    for i = 1:nrow(data)
        risk = data.risk_level[i]

        m = MortalityTables.MakehamBeard(; a=a[risk], b, c, k)
        age = data.att_age[i]
        q = MortalityTables.hazard(m, age)
        deaths[i] ~ Binomial(data.exposures[i], q)
    end
end
m3 = mortality3(data2, data2.deaths)

chain3 = sample(m3, NUTS(), 1000)

summarize(chain3)
parameters mean std mcse ess_bulk ess_tail rhat e ⋯ Symbol Float64 Float64 Float64 Float64 Float64 Float64 ⋯ b 0.0993 0.0061 0.0004 270.9465 232.9463 1.0046 ⋯ ā 0.0001 0.0002 0.0000 481.0640 389.5900 1.0007 ⋯ a[1] 0.0000 0.0000 0.0000 273.9402 277.4559 1.0027 ⋯ a[2] 0.0000 0.0000 0.0000 285.8069 281.7313 1.0019 ⋯ a[3] 0.0000 0.0000 0.0000 279.6675 325.5816 1.0029 ⋯ k 2.0396 1.0407 0.0410 393.0509 270.1086 1.0002 ⋯ 1 column omitted
let data = data2
    data_weight = sqrt.(data.exposures)
    color_i = data.risk_level
    cm = CairoMakie.Makie.wong_colors()

    p, ax, _ = scatter(
        data.att_age,
        data.fraction,
        markersize=data_weight,
        alpha=0.5,
        color=[(CairoMakie.Makie.wong_colors()[c], 0.7) for c in color_i],
        colormap=CairoMakie.Makie.wong_colors(),
        label="Experience data point (size indicates relative exposure quantity)",
        axis=(
            xlabel="age",
            limits=(nothing, nothing, -0.01, 0.10),
            ylabel="mortality rate",
            title="Parametric Bayesian Mortality"
        )
    )
    # show n samples from the posterior plotted on the graph
    n = 100

    ages = sort!(unique(data.att_age))
    for r in 1:3
        for i in 1:n
            # one posterior draw of (a[r], b, k); c is fixed at zero as in the model
            s = sample(chain3, 1)
            a = only(s[Symbol("a[$r]")])
            b = only(s[:b])
            k = only(s[:k])
            c = 0
            m = MortalityTables.MakehamBeard(; a, b, c, k)
            lines!(ages, age -> MortalityTables.hazard(m, age), label="risk level $r", alpha=0.2, color=(CairoMakie.Makie.wong_colors()[r], 0.2))
        end
    end
    axislegend(ax, merge=true)
    p
end
Again, the lines are not plotting the possible outcomes of the claims rates, but the mean claims rate for the given age and risk class.
30.6 Handling non-unit exposures
The key is to use the Poisson distribution: its rate parameter accepts non-integer (fractional) exposures, and it closely approximates the Binomial when mortality rates are small.
# Model 4: same multi-level structure as model 3, but with a Beta prior on `c`
# and a Poisson likelihood so that non-unit (fractional) exposures are handled.
@model function mortality4(data, deaths)
    risk_levels = length(levels(data.risk_level))
    b ~ Exponential(0.1)
    ā ~ Exponential(0.1)
    a ~ filldist(Exponential(ā), risk_levels)
    c ~ Beta(4, 18)
    k ~ truncated(Exponential(1), 1, Inf)

    # use the variables to create a parametric mortality model
    # loop through the rows of the dataframe to let Turing observe the data
    # and how consistent the parameters are with the data
    for i = 1:nrow(data)
        risk = data.risk_level[i]

        m = MortalityTables.MakehamBeard(; a=a[risk], b, c, k)
        age = data.att_age[i]
        q = MortalityTables.hazard(m, age)
        deaths[i] ~ Poisson(data.exposures[i] * q)
    end
end
m4 = mortality4(data2, data2.deaths)

chain4 = sample(m4, NUTS(), 1000)
Chains MCMC chain (1000×19×1 Array{Float64, 3}): Iterations = 501:1:1500 Number of chains = 1 Samples per chain = 1000 Wall duration = 28.81 seconds Compute duration = 28.81 seconds parameters = b, ā, a[1], a[2], a[3], c, k internals = lp, n_steps, is_accept, acceptance_rate, log_density, hamiltonian_energy, hamiltonian_energy_error, max_hamiltonian_energy_error, tree_depth, numerical_error, step_size, nom_step_size Summary Statistics parameters mean std mcse ess_bulk ess_tail rhat e ⋯ Symbol Float64 Float64 Float64 Float64 Float64 Float64 ⋯ b 0.1094 0.0087 0.0006 237.8430 273.1789 1.0020 ⋯ ā 0.0000 0.0001 0.0000 319.6064 466.7254 0.9990 ⋯ a[1] 0.0000 0.0000 0.0000 233.0641 299.3879 1.0004 ⋯ a[2] 0.0000 0.0000 0.0000 235.3657 293.8352 1.0034 ⋯ a[3] 0.0000 0.0000 0.0000 239.0613 292.8381 1.0002 ⋯ c 0.0007 0.0003 0.0000 377.3150 436.0285 1.0002 ⋯ k 2.1617 1.0938 0.0406 584.9246 553.0357 0.9993 ⋯ 1 column omitted Quantiles parameters 2.5% 25.0% 50.0% 75.0% 97.5% Symbol Float64 Float64 Float64 Float64 Float64 b 0.0943 0.1033 0.1088 0.1149 0.1273 ā 0.0000 0.0000 0.0000 0.0000 0.0002 a[1] 0.0000 0.0000 0.0000 0.0000 0.0000 a[2] 0.0000 0.0000 0.0000 0.0000 0.0000 a[3] 0.0000 0.0000 0.0000 0.0000 0.0000 c 0.0002 0.0005 0.0006 0.0009 0.0013 k 1.0561 1.3681 1.8131 2.6336 5.0902
# Posterior-mean `a` for each risk class, and the factors relative to class 2.
risk_factors4 = [mean(chain4[Symbol("a[$f]")]) for f in 1:3]

risk_factors4 ./ risk_factors4[2]
let data = data2
    data_weight = sqrt.(data.exposures) / 2
    color_i = data.risk_level

    p, ax, _ = scatter(
        data.att_age,
        data.fraction,
        markersize=data_weight,
        alpha=0.5,
        color=color_i,
        label="Experience data point (size indicates relative exposure quantity)",
        axis=(xlabel="age",
            limits=(nothing, nothing, -0.01, 0.10),
            ylabel="mortality rate",
            title="Parametric Bayesian Mortality"
        )
    )
    # show n samples from the posterior plotted on the graph
    n = 100

    ages = sort!(unique(data.att_age))
    for r in 1:3
        for i in 1:n
            # one posterior draw of (a[r], b, k); c fixed at zero for plotting
            s = sample(chain4, 1)
            a = only(s[Symbol("a[$r]")])
            b = only(s[:b])
            k = only(s[:k])
            c = 0
            m = MortalityTables.MakehamBeard(; a, b, c, k)
            lines!(ages, age -> MortalityTables.hazard(m, age), label="risk level $r", alpha=0.2, color=(CairoMakie.Makie.wong_colors()[r], 0.2))
        end
    end
    axislegend(ax, merge=true)
    p
end
30.7 Model Predictions
We can generate predictive estimates by passing a vector of missing
in place of the outcome variables and then calling predict
.
We get a table of values where each row is the prediction implied by the corresponding chain sample, and the columns are the predicted value for each of the outcomes in our original dataset.
= predict(mortality4(data2, fill(missing, length(data2.deaths))), chain4) preds
Chains MCMC chain (1000×615×1 Array{Float64, 3}): Iterations = 1:1:1000 Number of chains = 1 Samples per chain = 1000 parameters = deaths[1], deaths[2], deaths[3], deaths[4], deaths[5], deaths[6], deaths[7], deaths[8], deaths[9], deaths[10], deaths[11], deaths[12], deaths[13], deaths[14], deaths[15], deaths[16], deaths[17], deaths[18], deaths[19], deaths[20], deaths[21], deaths[22], deaths[23], deaths[24], deaths[25], deaths[26], deaths[27], deaths[28], deaths[29], deaths[30], deaths[31], deaths[32], deaths[33], deaths[34], deaths[35], deaths[36], deaths[37], deaths[38], deaths[39], deaths[40], deaths[41], deaths[42], deaths[43], deaths[44], deaths[45], deaths[46], deaths[47], deaths[48], deaths[49], deaths[50], deaths[51], deaths[52], deaths[53], deaths[54], deaths[55], deaths[56], deaths[57], deaths[58], deaths[59], deaths[60], deaths[61], deaths[62], deaths[63], deaths[64], deaths[65], deaths[66], deaths[67], deaths[68], deaths[69], deaths[70], deaths[71], deaths[72], deaths[73], deaths[74], deaths[75], deaths[76], deaths[77], deaths[78], deaths[79], deaths[80], deaths[81], deaths[82], deaths[83], deaths[84], deaths[85], deaths[86], deaths[87], deaths[88], deaths[89], deaths[90], deaths[91], deaths[92], deaths[93], deaths[94], deaths[95], deaths[96], deaths[97], deaths[98], deaths[99], deaths[100], deaths[101], deaths[102], deaths[103], deaths[104], deaths[105], deaths[106], deaths[107], deaths[108], deaths[109], deaths[110], deaths[111], deaths[112], deaths[113], deaths[114], deaths[115], deaths[116], deaths[117], deaths[118], deaths[119], deaths[120], deaths[121], deaths[122], deaths[123], deaths[124], deaths[125], deaths[126], deaths[127], deaths[128], deaths[129], deaths[130], deaths[131], deaths[132], deaths[133], deaths[134], deaths[135], deaths[136], deaths[137], deaths[138], deaths[139], deaths[140], deaths[141], deaths[142], deaths[143], deaths[144], deaths[145], deaths[146], deaths[147], deaths[148], deaths[149], deaths[150], deaths[151], deaths[152], 
deaths[153], deaths[154], deaths[155], deaths[156], deaths[157], deaths[158], deaths[159], deaths[160], deaths[161], deaths[162], deaths[163], deaths[164], deaths[165], deaths[166], deaths[167], deaths[168], deaths[169], deaths[170], deaths[171], deaths[172], deaths[173], deaths[174], deaths[175], deaths[176], deaths[177], deaths[178], deaths[179], deaths[180], deaths[181], deaths[182], deaths[183], deaths[184], deaths[185], deaths[186], deaths[187], deaths[188], deaths[189], deaths[190], deaths[191], deaths[192], deaths[193], deaths[194], deaths[195], deaths[196], deaths[197], deaths[198], deaths[199], deaths[200], deaths[201], deaths[202], deaths[203], deaths[204], deaths[205], deaths[206], deaths[207], deaths[208], deaths[209], deaths[210], deaths[211], deaths[212], deaths[213], deaths[214], deaths[215], deaths[216], deaths[217], deaths[218], deaths[219], deaths[220], deaths[221], deaths[222], deaths[223], deaths[224], deaths[225], deaths[226], deaths[227], deaths[228], deaths[229], deaths[230], deaths[231], deaths[232], deaths[233], deaths[234], deaths[235], deaths[236], deaths[237], deaths[238], deaths[239], deaths[240], deaths[241], deaths[242], deaths[243], deaths[244], deaths[245], deaths[246], deaths[247], deaths[248], deaths[249], deaths[250], deaths[251], deaths[252], deaths[253], deaths[254], deaths[255], deaths[256], deaths[257], deaths[258], deaths[259], deaths[260], deaths[261], deaths[262], deaths[263], deaths[264], deaths[265], deaths[266], deaths[267], deaths[268], deaths[269], deaths[270], deaths[271], deaths[272], deaths[273], deaths[274], deaths[275], deaths[276], deaths[277], deaths[278], deaths[279], deaths[280], deaths[281], deaths[282], deaths[283], deaths[284], deaths[285], deaths[286], deaths[287], deaths[288], deaths[289], deaths[290], deaths[291], deaths[292], deaths[293], deaths[294], deaths[295], deaths[296], deaths[297], deaths[298], deaths[299], deaths[300], deaths[301], deaths[302], deaths[303], deaths[304], deaths[305], 
deaths[306], deaths[307], deaths[308], deaths[309], deaths[310], deaths[311], deaths[312], deaths[313], deaths[314], deaths[315], deaths[316], deaths[317], deaths[318], deaths[319], deaths[320], deaths[321], deaths[322], deaths[323], deaths[324], deaths[325], deaths[326], deaths[327], deaths[328], deaths[329], deaths[330], deaths[331], deaths[332], deaths[333], deaths[334], deaths[335], deaths[336], deaths[337], deaths[338], deaths[339], deaths[340], deaths[341], deaths[342], deaths[343], deaths[344], deaths[345], deaths[346], deaths[347], deaths[348], deaths[349], deaths[350], deaths[351], deaths[352], deaths[353], deaths[354], deaths[355], deaths[356], deaths[357], deaths[358], deaths[359], deaths[360], deaths[361], deaths[362], deaths[363], deaths[364], deaths[365], deaths[366], deaths[367], deaths[368], deaths[369], deaths[370], deaths[371], deaths[372], deaths[373], deaths[374], deaths[375], deaths[376], deaths[377], deaths[378], deaths[379], deaths[380], deaths[381], deaths[382], deaths[383], deaths[384], deaths[385], deaths[386], deaths[387], deaths[388], deaths[389], deaths[390], deaths[391], deaths[392], deaths[393], deaths[394], deaths[395], deaths[396], deaths[397], deaths[398], deaths[399], deaths[400], deaths[401], deaths[402], deaths[403], deaths[404], deaths[405], deaths[406], deaths[407], deaths[408], deaths[409], deaths[410], deaths[411], deaths[412], deaths[413], deaths[414], deaths[415], deaths[416], deaths[417], deaths[418], deaths[419], deaths[420], deaths[421], deaths[422], deaths[423], deaths[424], deaths[425], deaths[426], deaths[427], deaths[428], deaths[429], deaths[430], deaths[431], deaths[432], deaths[433], deaths[434], deaths[435], deaths[436], deaths[437], deaths[438], deaths[439], deaths[440], deaths[441], deaths[442], deaths[443], deaths[444], deaths[445], deaths[446], deaths[447], deaths[448], deaths[449], deaths[450], deaths[451], deaths[452], deaths[453], deaths[454], deaths[455], deaths[456], deaths[457], deaths[458], 
deaths[459], deaths[460], deaths[461], deaths[462], deaths[463], deaths[464], deaths[465], deaths[466], deaths[467], deaths[468], deaths[469], deaths[470], deaths[471], deaths[472], deaths[473], deaths[474], deaths[475], deaths[476], deaths[477], deaths[478], deaths[479], deaths[480], deaths[481], deaths[482], deaths[483], deaths[484], deaths[485], deaths[486], deaths[487], deaths[488], deaths[489], deaths[490], deaths[491], deaths[492], deaths[493], deaths[494], deaths[495], deaths[496], deaths[497], deaths[498], deaths[499], deaths[500], deaths[501], deaths[502], deaths[503], deaths[504], deaths[505], deaths[506], deaths[507], deaths[508], deaths[509], deaths[510], deaths[511], deaths[512], deaths[513], deaths[514], deaths[515], deaths[516], deaths[517], deaths[518], deaths[519], deaths[520], deaths[521], deaths[522], deaths[523], deaths[524], deaths[525], deaths[526], deaths[527], deaths[528], deaths[529], deaths[530], deaths[531], deaths[532], deaths[533], deaths[534], deaths[535], deaths[536], deaths[537], deaths[538], deaths[539], deaths[540], deaths[541], deaths[542], deaths[543], deaths[544], deaths[545], deaths[546], deaths[547], deaths[548], deaths[549], deaths[550], deaths[551], deaths[552], deaths[553], deaths[554], deaths[555], deaths[556], deaths[557], deaths[558], deaths[559], deaths[560], deaths[561], deaths[562], deaths[563], deaths[564], deaths[565], deaths[566], deaths[567], deaths[568], deaths[569], deaths[570], deaths[571], deaths[572], deaths[573], deaths[574], deaths[575], deaths[576], deaths[577], deaths[578], deaths[579], deaths[580], deaths[581], deaths[582], deaths[583], deaths[584], deaths[585], deaths[586], deaths[587], deaths[588], deaths[589], deaths[590], deaths[591], deaths[592], deaths[593], deaths[594], deaths[595], deaths[596], deaths[597], deaths[598], deaths[599], deaths[600], deaths[601], deaths[602], deaths[603], deaths[604], deaths[605], deaths[606], deaths[607], deaths[608], deaths[609], deaths[610], deaths[611], 
deaths[612], deaths[613], deaths[614], deaths[615] internals = Summary Statistics parameters mean std mcse ess_bulk ess_tail rhat ⋯ Symbol Float64 Float64 Float64 Float64 Float64 Float64 ⋯ deaths[1] 0.0900 0.3033 0.0094 1013.0749 989.0565 0.9994 ⋯ deaths[2] 0.0730 0.2928 0.0088 1119.8522 1011.0226 1.0009 ⋯ deaths[3] 0.0870 0.2924 0.0088 1093.7167 1004.0241 0.9998 ⋯ deaths[4] 0.0910 0.3013 0.0106 777.1510 740.6722 0.9990 ⋯ deaths[5] 0.0810 0.2802 0.0087 1036.4805 1008.0890 1.0001 ⋯ deaths[6] 0.0930 0.3040 0.0092 1084.7584 1012.2033 0.9999 ⋯ deaths[7] 0.1010 0.3113 0.0108 830.5045 837.9896 1.0020 ⋯ deaths[8] 0.0900 0.2966 0.0091 1048.2790 1004.0241 1.0010 ⋯ deaths[9] 0.1030 0.3233 0.0125 655.8523 627.5553 0.9991 ⋯ deaths[10] 0.0800 0.2893 0.0092 990.8771 993.5349 0.9991 ⋯ deaths[11] 0.0950 0.3067 0.0098 984.6381 987.1671 0.9990 ⋯ deaths[12] 0.1050 0.3288 0.0105 976.2048 977.9657 0.9990 ⋯ deaths[13] 0.0910 0.2980 0.0103 817.7579 789.0939 0.9999 ⋯ deaths[14] 0.0980 0.3041 0.0095 1031.4175 1008.0890 0.9994 ⋯ deaths[15] 0.0940 0.3182 0.0105 903.1610 898.3565 0.9994 ⋯ deaths[16] 0.0770 0.2778 0.0099 781.5725 769.2176 0.9996 ⋯ deaths[17] 0.0740 0.2694 0.0086 995.2880 1004.0241 0.9993 ⋯ ⋮ ⋮ ⋮ ⋮ ⋮ ⋮ ⋮ ⋱ 1 column and 598 rows omitted Quantiles parameters 2.5% 25.0% 50.0% 75.0% 97.5% Symbol Float64 Float64 Float64 Float64 Float64 deaths[1] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[2] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[3] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[4] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[5] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[6] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[7] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[8] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[9] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[10] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[11] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[12] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[13] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[14] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[15] 0.0000 0.0000 
0.0000 0.0000 1.0000 deaths[16] 0.0000 0.0000 0.0000 0.0000 1.0000 deaths[17] 0.0000 0.0000 0.0000 0.0000 1.0000 ⋮ ⋮ ⋮ ⋮ ⋮ ⋮ 598 rows omitted