# To run the chains on multiple worker processes, uncomment the next two lines:
#using Distributed
#@everywhere using MambaModels
using MambaModels, MCMCChains
Data
globe_toss = Dict{Symbol, Any}(
  :w => [6, 7, 5, 6, 6],   # observed successes per replicate
  :n => [9, 9, 9, 9, 9]    # number of trials per replicate
)
globe_toss[:N] = length(globe_toss[:w]);   # number of replicates
5
Model Specification
model = Model(
  w = Stochastic(1,
    (n, p, N) ->
      UnivariateDistribution[Binomial(n[i], p) for i in 1:N],
    false                              # data node: not monitored
  ),
  p = Stochastic(() -> Beta(1, 1))     # uniform Beta(1, 1) prior on p
);
Object of type "Model"
-------------------------------------------------------------------------------
w:
An unmonitored node of type "0-element ArrayStochastic{1}"
Float64[]
-------------------------------------------------------------------------------
p:
A monitored node of type "ScalarStochastic"
NaN
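In statistical notation, the model specified above is

w[i] ~ Binomial(n[i], p),   i = 1, ..., N
p    ~ Beta(1, 1)

The false argument in the w node marks it as unmonitored, so only p is monitored and summarized.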
Initial Values
inits = [
Dict(:w => globe_toss[:w], :n => globe_toss[:n], :p => 0.5),
Dict(:w => globe_toss[:w], :n => globe_toss[:n], :p => rand(Beta(1, 1)))
];
2-element Array{Dict{Symbol,Any},1}:
Dict(:w=>[6, 7, 5, 6, 6],:p=>0.5,:n=>[9, 9, 9, 9, 9])
Dict(:w=>[6, 7, 5, 6, 6],:p=>0.202661,:n=>[9, 9, 9, 9, 9])
Sampling Scheme
scheme = [NUTS(:p)]
setsamplers!(model, scheme);
Object of type "Model"
-------------------------------------------------------------------------------
w:
An unmonitored node of type "0-element ArrayStochastic{1}"
Float64[]
-------------------------------------------------------------------------------
p:
A monitored node of type "ScalarStochastic"
NaN
MCMC Simulations
chn = mcmc(model, globe_toss, inits, 10000, burnin=2500, thin=1, chains=2);
MCMC Simulation of 10000 Iterations x 2 Chains...
Chain 1: 100% [0:00:00 of 0:00:01 remaining]
Chain 2: 100% [0:00:00 of 0:00:01 remaining]
Object of type "ModelChains"
Iterations = 2501:10000
Thinning interval = 1
Chains = 1,2
Samples per chain = 7500
[0.650524; 0.650524; … ; 0.818101; 0.469521]
[0.64673; 0.684643; … ; 0.673103; 0.673103]
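With two chains, the Gelman–Rubin diagnostic provides a between-chain convergence check before the draws are summarized. A minimal sketch, assuming Mamba's gelmandiag function is available through MambaModels:

gelmandiag(chn)   # potential scale reduction factors; values near 1.0 suggest convergence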
Describe draws
describe(chn)
Iterations = 2501:10000
Thinning interval = 1
Chains = 1,2
Samples per chain = 7500
Empirical Posterior Estimates:
Mean SD Naive SE MCSE ESS
p 0.66071519 0.068371674 0.0005582524 0.0006200058 7500
Quantiles:
2.5% 25.0% 50.0% 75.0% 97.5%
p 0.5198508 0.6144447 0.6629172 0.70996386 0.78285723
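Because the Beta(1, 1) prior is conjugate to the binomial likelihood, the exact posterior is available in closed form and serves as a cross-check on the MCMC estimates above. A minimal sketch using Distributions.jl (not part of the original script):

using Distributions

w, n = globe_toss[:w], globe_toss[:n]
post = Beta(1 + sum(w), 1 + sum(n .- w))   # conjugate update: Beta(31, 16)
mean(post), std(post)                      # ≈ (0.6596, 0.0684), matching the estimates above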
Convert to an MCMCChains.Chains object
chn2 = MCMCChains.Chains(chn.value, Symbol.(chn.names))
Object of type Chains, with data of type 7500×1×2 Array{Union{Missing, Float64},3}
Log evidence = 0.0
Iterations = 1:7500
Thinning interval = 1
Chains = Chain1, Chain2
Samples per chain = 7500
parameters = p
parameters
Mean SD Naive SE MCSE ESS
p 0.6607 0.0684 0.0006 0.0006 7500
Describe the MCMCChains.Chains object
MCMCChains.describe(chn2)
Log evidence = 0.0
Iterations = 1:7500
Thinning interval = 1
Chains = Chain1, Chain2
Samples per chain = 7500
parameters = p
Empirical Posterior Estimates:
====================================
parameters
Mean SD Naive SE MCSE ESS
p 0.6607 0.0684 0.0006 0.0006 7500
Quantiles:
====================================
parameters
2.5% 25.0% 50.0% 75.0% 97.5%
p 0.4111 0.6144 0.6629 0.71 0.8786
Plot chn2
MCMCChains.plot(chn2)
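To write the trace and density plots to disk, the current figure can be saved with Plots.savefig. A sketch, assuming a Plots.jl backend is loaded and with an arbitrary (hypothetical) filename:

using Plots

MCMCChains.plot(chn2)        # trace and density plots for p
savefig("m2.1m_plot.png")    # hypothetical filename; saves the most recent plot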
End of 02/m2.1m.jl
This page was generated using Literate.jl.