using Distributed, Gadfly
using Mamba

[ Info: Loading DataFrames support into Gadfly.jl
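Distributed is presumably loaded so that the chains could be sampled on separate worker processes; no workers are added on this page, so the two chains below run one after the other. A minimal worker setup, not shown here, might look like the following sketch.

# Assumption: hypothetical worker setup, not part of the original page.
# Adding processes before loading the model lets Mamba distribute chains
# across workers; Mamba must be loaded on every process.
addprocs(2)
@everywhere using Mamba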
Data

globe_toss = Dict{Symbol, Any}(
:w => [6, 7, 5, 6, 6],
:n => [9, 9, 9, 9, 9]
)
globe_toss[:N] = length(globe_toss[:w])
5
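As a quick sanity check (an addition, not part of the original page), the raw proportion of water observations already indicates where the posterior for p should concentrate.

# Assumption: illustrative check, not part of the original page.
# 30 water observations out of 45 tosses gives a sample proportion of 2/3,
# which the posterior mean of p estimated below should land near.
sum(globe_toss[:w]) / sum(globe_toss[:n])  # ≈ 0.667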
Model Specification

model = Model(
w = Stochastic(1,
(n, p, N) ->
UnivariateDistribution[Binomial(n[i], p) for i in 1:N],
false
),
p = Stochastic(() -> Beta(1, 1))
);
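Because the Beta(1, 1) prior is conjugate to the binomial likelihood, the exact posterior is available in closed form and makes a useful reference for the MCMC output. This check is an addition and does not appear on the original page.

# Assumption: illustrative check, not part of the original page.
# With a Beta(1, 1) prior, the posterior is Beta(1 + Σw, 1 + Σn - Σw),
# here Beta(31, 16), whose mean should match the sampled estimate of p.
w_total = sum(globe_toss[:w])   # 30
n_total = sum(globe_toss[:n])   # 45
exact_posterior = Beta(1 + w_total, 1 + n_total - w_total)
mean(exact_posterior)           # ≈ 0.6596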
Initial Values

inits = [
Dict(:w => globe_toss[:w], :n => globe_toss[:n], :p => 0.5),
Dict(:w => globe_toss[:w], :n => globe_toss[:n], :p => rand(Beta(1, 1)))
]
2-element Array{Dict{Symbol,Any},1}:
Dict(:w=>[6, 7, 5, 6, 6],:p=>0.5,:n=>[9, 9, 9, 9, 9])
Dict(:w=>[6, 7, 5, 6, 6],:p=>0.134808,:n=>[9, 9, 9, 9, 9])
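Each element of inits supplies the starting values for one chain. As an aside not found on the original page, additional chains could be initialized by drawing fresh starting points for p from its prior.

# Assumption: illustrative sketch, not part of the original page.
# Extra chains reuse the data and draw a new starting value for p
# from its Beta(1, 1) prior.
extra_inits = [
    Dict(:w => globe_toss[:w], :n => globe_toss[:n], :p => rand(Beta(1, 1)))
    for _ in 1:2
]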
Sampling Scheme

scheme = [NUTS(:p)]
setsamplers!(model, scheme);
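NUTS is only one of the samplers Mamba provides. As an aside not used on the original page, a univariate slice sampler could be assigned to p instead; the sketch below assumes Mamba's Slice constructor taking a parameter symbol and a proposal width.

# Assumption: alternative sampling scheme, not part of the original page.
# A slice sampler with proposal width 1.0 is a simple substitute for NUTS
# on a single bounded parameter.
scheme_alt = [Slice(:p, 1.0)]
# setsamplers!(model, scheme_alt)  # would replace the NUTS scheme set above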
MCMC Simulations

sim = mcmc(model, globe_toss, inits, 10000, burnin=2500, thin=1, chains=2)
MCMC Simulation of 10000 Iterations x 2 Chains...
Chain 1: 100% [0:00:00 of 0:00:03 remaining]
Chain 2: 100% [0:00:00 of 0:00:01 remaining]
Object of type "Mamba.ModelChains"
Iterations = 2501:10000
Thinning interval = 1
Chains = 1,2
Samples per chain = 7500
[0.57806; 0.57806; … ; 0.60297; 0.539374]
[0.606336; 0.606336; … ; 0.546535; 0.546535]
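Before summarizing the draws it is worth checking convergence and inspecting the chains visually. Neither step appears on the original page; the sketch below assumes Mamba's built-in Gelman-Rubin diagnostic and its Gadfly-based plotting functions.

# Assumption: additional diagnostics, not part of the original page.
# gelmandiag compares the two chains (potential scale reduction factors
# near 1 suggest convergence); plot builds trace and density panels,
# which draw writes to an SVG file via Gadfly.
gelmandiag(sim)
p_plots = plot(sim)
draw(p_plots, filename="globe_toss_plots.svg")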
Describe draws

describe(sim)
Iterations = 2501:10000
Thinning interval = 1
Chains = 1,2
Samples per chain = 7500
Empirical Posterior Estimates:
Mean SD Naive SE MCSE ESS
p 0.6589088 0.06839548 0.0005584468 0.00062540144 7500
Quantiles:
2.5% 25.0% 50.0% 75.0% 97.5%
p 0.5160799 0.6135111 0.6627785 0.7068447 0.78476273

This page was generated using Literate.jl.