Skip to content

Commit

Permalink
Move number of samples argument(s) into algorithms
Browse files Browse the repository at this point in the history
  • Loading branch information
oschulz committed Dec 3, 2020
1 parent 7077399 commit 063a204
Show file tree
Hide file tree
Showing 38 changed files with 173 additions and 273 deletions.
1 change: 1 addition & 0 deletions docs/src/internal_api.md
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ Order = [:macro, :function]

```@docs
BAT.AbstractProposalDist
BAT.AbstractSampleGenerator
BAT.BasicMvStatistics
BAT.DataSet
BAT.HMIData
Expand Down
1 change: 0 additions & 1 deletion docs/src/stable_api.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,6 @@ AbstractDensity
AbstractDensityTransformTarget
AbstractMCMCWeightingScheme
AbstractPosteriorDensity
AbstractSampleGenerator
AbstractTransformedDensity
AbstractTransformToInfinite
AbstractTransformToUnitspace
Expand Down
23 changes: 5 additions & 18 deletions docs/src/tutorial_lit.jl
Original file line number Diff line number Diff line change
Expand Up @@ -244,16 +244,10 @@ posterior = PosteriorDensity(likelihood, prior)
#nb ENV["JULIA_DEBUG"] = "BAT"
#jl ENV["JULIA_DEBUG"] = "BAT"

# Let's use 4 MCMC chains and require 10^5 unique samples from each chain
# (after tuning/burn-in):
# Now we can generate a set of MCMC samples via [`bat_sample`](@ref). We'll
# use 4 MCMC chains with 10^5 MC steps in each chain (after tuning/burn-in):

nsamples = 10^4
#md nothing # hide


# Now we can generate a set of MCMC samples via [`bat_sample`](@ref):

samples = bat_sample(posterior, nsamples, MCMCSampling(sampler = MetropolisHastings(), nchains = 4)).result
samples = bat_sample(posterior, MCMCSampling(sampler = MetropolisHastings(), nsteps = 10^4, nchains = 4)).result
#md nothing # hide
#nb nothing # hide

Expand Down Expand Up @@ -447,12 +441,13 @@ convergence = BrooksGelmanConvergence()

samples = bat_sample(
rng, posterior,
nsamples,
MCMCSampling(
sampler = MetropolisHastings(
weighting = RepetitionWeighting(),
tuning = tuning
),
nchains = 4,
nsteps = 10^5,
init = init,
burnin = burnin,
convergence = convergence,
Expand All @@ -464,11 +459,3 @@ samples = bat_sample(
).result
#md nothing # hide
#nb nothing # hide

# However, in many use cases, simply using the default options via
#
# ```julia
# samples = bat_sample(posterior, nsamples).result
# ```
#
# will often be sufficient.
8 changes: 4 additions & 4 deletions examples/benchmarks/benchmarks.jl
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,10 @@ function setup_benchmark()
include("run_benchmark_ND.jl")
end

function do_benchmarks(;algorithm=MetropolisHastings(), n_samples=10^5, n_chains=8)
#run_1D_benchmark(algorithm=algorithm, n_samples=n_samples, n_chains=n_chains)
run_2D_benchmark(algorithm=algorithm, n_samples=n_samples, n_chains=n_chains)
run_ND_benchmark(n_dim=2:2:20,algorithm=MetropolisHastings(), n_samples=2*10^5, n_chains=4)
function do_benchmarks(;algorithm=MetropolisHastings(), n_steps=10^5, n_chains=8)
#run_1D_benchmark(algorithm=algorithm, n_steps=n_steps, n_chains=n_chains)
run_2D_benchmark(algorithm=algorithm, n_steps=n_steps, n_chains=n_chains)
run_ND_benchmark(n_dim=2:2:20,algorithm=MetropolisHastings(), n_steps=2*10^5, n_chains=4)
run_ks_ahmc_vs_mh(n_dim=20:5:35)
end

Expand Down
2 changes: 1 addition & 1 deletion examples/benchmarks/functions_1D.jl
Original file line number Diff line number Diff line change
Expand Up @@ -19,5 +19,5 @@ testfunctions_1D = Dict(
analytical_stats_name=["mode","mean","var"] #could be taken into NamedTuple for easier additions but would need to be implemented into calcs anyway
sample_stats=[Vector{Float64}(undef,length(analytical_stats_name)) for i in 1:length(testfunctions_1D)]

run_stats_names = ["nsamples","nchains","Times"]
run_stats_names = ["nsteps","nchains","Times"]
run_stats=[Vector{Float64}(undef,length(run_stats_names)) for i in 1:length(testfunctions_1D)]
2 changes: 1 addition & 1 deletion examples/benchmarks/functions_2D.jl
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
stats_names2D = ["mode","mean","var"]
run_stats_names2D = ["nsamples","nchains","Times"]
run_stats_names2D = ["nsteps","nchains","Times"]
##########################################multi normal###########################################
sig = Matrix{Float64}([1.5^2 1.5*2.5*0.4 ; 1.5*2.5*0.4 2.5^2])
analytical_stats_gauss2D = Vector{Any}(undef,length(stats_names2D))
Expand Down
4 changes: 2 additions & 2 deletions examples/benchmarks/run_benchmark_1D.jl
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
function run_1D_benchmark(;algorithm=MetropolisHastings(), n_samples=10^5, n_chains=8)
function run_1D_benchmark(;algorithm=MetropolisHastings(), n_steps=10^5, n_chains=8)
for i in 1:length(testfunctions_1D)
sample_stats_all = run1D(
collect(keys(testfunctions_1D))[i], #There might be a nicer way but I need the name to save the plots
testfunctions_1D,
sample_stats[i],
run_stats[i],
algorithm,
n_samples,
n_steps,
n_chains
)
end
Expand Down
4 changes: 2 additions & 2 deletions examples/benchmarks/run_benchmark_2D.jl
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
function run_2D_benchmark(;algorithm = MetropolisHastings(),n_chains = 8,n_samples = 10^5)
function run_2D_benchmark(;algorithm = MetropolisHastings(),n_chains = 8,n_steps = 10^5)
for i in 1:length(testfunctions_2D)
sample_stats_all = run2D(
collect(keys(testfunctions_2D))[i], #There might be a nicer way but I need the name to save the plots
testfunctions_2D,
sample_stats2D[i],
run_stats2D[i],
algorithm,
n_samples,
n_steps,
n_chains
)
end
Expand Down
24 changes: 13 additions & 11 deletions examples/benchmarks/run_benchmark_ND.jl
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ function run_ND_benchmark(;
n_dim = 2:2:20,
algorithm = MetropolisHastings(),
n_chains = 4,
n_samples = 4*10^5,
n_steps = 4*10^5,
time_benchmark = true,
ks_test_benchmark = true,
ahmi_benchmark = true,
Expand Down Expand Up @@ -244,16 +244,17 @@ function run_ND_benchmark(;
for j in 1:length(testfunctions)

dis = testfunctions[collect(keys(testfunctions))[j]]
iid_sample = bat_sample(dis, n_samples*n_chains).result
iid_sample = bat_sample(dis, MCMCSampling(sampler = MetropolisHastings(), nsteps = n_steps, nchains = n_chains)).result

mcmc_sample = nothing
tbf = time()
if isa(algorithm,BAT.MetropolisHastings)
mcmc_sample = bat_sample(
dis, n_samples * n_chains,
dis,
MCMCSampling(
sampler = algorithm,
nchains = n_chains,
nsteps = n_steps,
init = init,
burnin = burnin,
convergence = convergence,
Expand All @@ -263,8 +264,8 @@ function run_ND_benchmark(;
).result
elseif isa(algorithm,BAT.HamiltonianMC)
mcmc_sample = bat_sample(
dis, n_samples*n_chains,
MCMCSampling(sampler = algorithm)
dis,
MCMCSampling(sampler = algorithm, nchains = n_chains, nsteps = n_steps)
).result
end
taf = time()
Expand All @@ -275,10 +276,11 @@ function run_ND_benchmark(;
tbf = time()
if isa(algorithm,BAT.MetropolisHastings)
bat_sample(
dis, n_samples * n_chains,
dis,
MCMCSampling(
sampler = algorithm,
nchains = n_chains,
nsteps = n_steps,
init = init,
burnin = burnin,
convergence = convergence,
Expand All @@ -288,8 +290,8 @@ function run_ND_benchmark(;
).result
elseif isa(algorithm,BAT.HamiltonianMC)
bat_sample(
dis, n_samples*n_chains,
MCMCSampling(sampler = algorithm)
dis,
MCMCSampling(sampler = algorithm, nchains = n_chains, nsteps = n_steps)
).result
end
taf = time()
Expand Down Expand Up @@ -357,8 +359,8 @@ function run_ND_benchmark(;
return [ks_test,ahmi,times]
end

function run_ks_ahmc_vs_mh(;n_dim=20:5:35,n_samples=2*10^5, n_chains=4)
ks_res_ahmc = run_ND_benchmark(n_dim=n_dim,algorithm=HamiltonianMC(), n_samples=n_samples, n_chains=n_chains, time_benchmark=false,ahmi_benchmark=false,hmc_benchmark=true)[1]
ks_res_mh = run_ND_benchmark(n_dim=n_dim,algorithm=MetropolisHastings(), n_samples=n_samples, n_chains=n_chains, time_benchmark=false,ahmi_benchmark=false,hmc_benchmark=true)[1]
function run_ks_ahmc_vs_mh(;n_dim=20:5:35,n_steps=2*10^5, n_chains=4)
ks_res_ahmc = run_ND_benchmark(n_dim=n_dim,algorithm=HamiltonianMC(), n_steps=n_steps, n_chains=n_chains, time_benchmark=false,ahmi_benchmark=false,hmc_benchmark=true)[1]
ks_res_mh = run_ND_benchmark(n_dim=n_dim,algorithm=MetropolisHastings(), n_steps=n_steps, n_chains=n_chains, time_benchmark=false,ahmi_benchmark=false,hmc_benchmark=true)[1]
plot_ks_values_ahmc_vs_mh(ks_res_ahmc,ks_res_mh,n_dim)
end
22 changes: 11 additions & 11 deletions examples/benchmarks/utils.jl
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ function plot1D(
end

nun = convert(Int64,floor(sum(hunnorm.weights)/10))
unweighted_samples = bat_sample(samples, nun).result
unweighted_samples = bat_sample(samples, OrderedResampling(nsamples = nun)).result
hunnorm = fit(Histogram, [BAT.flatview(unweighted_samples.v)...],binning)

edges = hunnorm.edges[1]
Expand Down Expand Up @@ -183,7 +183,7 @@ function plot1D(
end
length(sample_stats) != 4 ? push!(sample_stats,chi2) : sample_stats[4] = chi2

iid_sample = bat_sample(testfunctions[name].posterior,length([BAT.flatview(samples.v)...])).result
iid_sample = bat_sample(testfunctions[name].posterior, IIDSampling(nsamples = length([BAT.flatview(samples.v)...]))).result
if(testfunctions[name].ks[1] > 999)
testfunctions[name].ks[1]=bat_compare(samples,iid_sample).result.ks_p_values[1]
end
Expand All @@ -201,24 +201,24 @@ function run1D(
sample_stats::Vector{Float64},
run_stats::Vector{Float64},
algorithm::BAT.AbstractSamplingAlgorithm,
n_samples::Integer,
n_steps::Integer,
n_chains::Integer,
n_runs=1
)

sample_stats_all = []
samples, chains = bat_sample(testfunctions[key].posterior, n_samples * n_chains, MCMCSampling(sampler = algorithm, nchains = n_chains))
samples, chains = bat_sample(testfunctions[key].posterior, MCMCSampling(sampler = algorithm, nchains = n_chains, nsteps = n_steps))
for i in 1:n_runs
time_before = time()
samples, chains = bat_sample(testfunctions[key].posterior, n_samples * n_chains, MCMCSampling(sampler = algorithm, nchains = n_chains))
samples, chains = bat_sample(testfunctions[key].posterior, MCMCSampling(sampler = algorithm, nchains = n_chains, nsteps = n_steps))
time_after = time()

h = plot1D(samples,testfunctions,key,sample_stats)# posterior, key, analytical_stats,sample_stats)

sample_stats[1] = mode(samples)[1]
sample_stats[2] = mean(samples)[1]
sample_stats[3] = var(samples)[1]
run_stats[1] = n_samples
run_stats[1] = n_steps
run_stats[2] = n_chains
run_stats[3] = time_after-time_before
push!(sample_stats_all,sample_stats)
Expand Down Expand Up @@ -432,16 +432,16 @@ function run2D(
sample_stats::Vector{Any},
run_stats::Vector{Any},
algorithm::MCMCAlgorithm,
n_samples::Integer,
n_steps::Integer,
n_chains::Integer,
n_runs=1)

sample_stats_all = []

samples, stats = bat_sample(testfunctions[key].posterior, n_samples * n_chains, MCMCSampling(sampler = algorithm, nchains = n_chains))
samples, stats = bat_sample(testfunctions[key].posterior, MCMCSampling(sampler = algorithm, nchains = n_chains, nsteps = n_steps))
for i in 1:n_runs
time_before = time()
samples, stats = bat_sample(testfunctions[key].posterior, n_samples * n_chains, MCMCSampling(sampler = algorithm, nchains = n_chains))
samples, stats = bat_sample(testfunctions[key].posterior, MCMCSampling(sampler = algorithm, nchains = n_chains, nsteps = n_steps))
time_after = time()

h = plot2D(samples, testfunctions, key, sample_stats)
Expand All @@ -450,7 +450,7 @@ function run2D(
sample_stats[2] = mean(samples).data
sample_stats[3] = var(samples).data

run_stats[1] = n_samples
run_stats[1] = n_steps
run_stats[2] = n_chains
run_stats[3] = time_after-time_before
push!(sample_stats_all,sample_stats)
Expand All @@ -473,7 +473,7 @@ function make_2D_results(testfunctions::Dict,sample_stats2D::Vector{Vector{Any}}
push!(ahmi_val,round.(v.ahmi,digits=3))
end

run_stats_names2D = ["nsamples","nchains","Times"]
run_stats_names2D = ["nsteps","nchains","Times"]
stats_names2D = ["mode","mean","var"]
comparison = ["target","test","diff (abs)","diff (rel)"]
header = Vector{Any}(undef,length(stats_names2D)*length(comparison)+3)
Expand Down
2 changes: 1 addition & 1 deletion examples/dev-internal/ahmi_example.jl
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ bounds = BAT.HyperRectBounds(lo_bounds, hi_bounds, BAT.reflective_bounds)


#BAT.jl samples
bat_samples = bat_sample(PosteriorDensity(model, bounds), (10^5, 8), algorithm).result
bat_samples = bat_sample(PosteriorDensity(model, bounds), algorithm).result
data = BAT.HMIData(bat_samples)
BAT.hm_integrate!(data)

Expand Down
6 changes: 3 additions & 3 deletions examples/dev-internal/hmc_with_trafo.jl
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
include(joinpath(dirname(dirname(@__DIR__)), "docs", "src", "tutorial_lit.jl"))

samples_mh = bat_sample(posterior, 10^5, MCMCSampling(sampler = MetropolisHastings())).result
samples_mh = bat_sample(posterior, MCMCSampling(sampler = MetropolisHastings(), nsteps = 10^5)).result

posterior_is, trafo_is = bat_transform(PriorToGaussian(), posterior, PriorSubstitution())
posterior_is2, trafo_is2 = bat_transform(PriorToGaussian(), posterior, FullDensityTransform())

samples_is = bat_sample(posterior_is, 10^5, MCMCSampling(sampler = HamiltonianMC())).result
samples_is2 = bat_sample(posterior_is2, 10^5, MCMCSampling(sampler = HamiltonianMC())).result
samples_is = bat_sample(posterior_is, MCMCSampling(sampler = HamiltonianMC(), nsteps = 10^5)).result
samples_is2 = bat_sample(posterior_is2, MCMCSampling(sampler = HamiltonianMC(), nsteps = 10^5)).result

samples = inv(trafo_is).(samples_is)
samples2 = inv(trafo_is2).(samples_is2)
Expand Down
4 changes: 2 additions & 2 deletions examples/dev-internal/output_examples.jl
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@ prior = BAT.NamedTupleDist(

posterior = PosteriorDensity(likelihood, prior);

samples, chains = bat_sample(posterior, 10^5, MCMCSampling(sampler = MetropolisHastings()));
#samples = bat_sample(posterior, 10^5, SobolSampler()).result;
samples, chains = bat_sample(posterior, MCMCSampling(sampler = MetropolisHastings(), nsteps = 10^5));
#samples = bat_sample(posterior, SobolSampler(nsamples = 10^5)).result;

sd = SampledDensity(posterior, samples, generator=BAT.MCMCSampleGenerator(chains))
display(sd)
Expand Down
2 changes: 1 addition & 1 deletion examples/dev-internal/plotting_examples.jl
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ prior = BAT.NamedTupleDist(

posterior = PosteriorDensity(likelihood, prior);

samples, chains = bat_sample(posterior, 10^5, MCMCSampling(sampler = MetropolisHastings()));
samples, chains = bat_sample(posterior, MCMCSampling(sampler = MetropolisHastings(), nsteps = 10^5));

# ## Set up plotting
# Set up plotting using the [Plots.jl](https://github.com/JuliaPlots/Plots.jl) package:
Expand Down
18 changes: 0 additions & 18 deletions src/algodefaults/default_sampling_algorithm.jl
Original file line number Diff line number Diff line change
Expand Up @@ -9,21 +9,3 @@ bat_default(::typeof(bat_sample), ::Val{:algorithm}, ::AnyIIDSampleable) = IIDSa
bat_default(::typeof(bat_sample), ::Val{:algorithm}, ::DensitySampleVector) = OrderedResampling()

bat_default(::typeof(bat_sample), ::Val{:algorithm}, ::AbstractDensity) = MCMCSampling()


#=
For HamiltonianMC
#!!!!!!!!!!!!!!!! N samples steps evals
# MCMCBurninStrategy for HamiltonianMC
function MCMCBurninStrategy(algorithm::HamiltonianMC, nsamples::Integer, max_nsteps::Integer, tuner_config::MCMCTuningAlgorithm)
max_nsamples_per_cycle = nsamples
max_nsteps_per_cycle = max_nsteps
MCMCBurninStrategy(
max_nsamples_per_cycle = max_nsamples_per_cycle,
max_nsteps_per_cycle = max_nsteps_per_cycle,
max_ncycles = 1
)
end
=#
Loading

0 comments on commit 063a204

Please sign in to comment.