From 0d1c66e5116262502cd1d9bc417d1781d841f8c1 Mon Sep 17 00:00:00 2001 From: Mauro Werder Date: Wed, 2 May 2018 20:32:54 +0200 Subject: [PATCH] Made dynamichmc_inference more versatile: choice of alg, kwargs --- src/dynamichmc_inference.jl | 28 ++++++++++++++++------------ test/dynamicHMC.jl | 4 ++-- 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/src/dynamichmc_inference.jl b/src/dynamichmc_inference.jl index de6413be..4f9a7ec3 100644 --- a/src/dynamichmc_inference.jl +++ b/src/dynamichmc_inference.jl @@ -4,15 +4,17 @@ struct DynamicHMCPosterior a_prior t ϵ_dist + alg + kwargs end function (P::DynamicHMCPosterior)(θ) #println(θ) - @unpack problem, data, a_prior, t, ϵ_dist = P + @unpack problem, data, a_prior, t, ϵ_dist, alg, kwargs = P a = θ try prob = problem_new_parameters(problem, a) - sol = solve(prob, Tsit5()) + sol = solve(prob, alg; kwargs...) ℓ = sum(sum(logpdf.(ϵ_dist, sol(t) .- data[:, i])) for (i, t) in enumerate(t)) catch @@ -24,13 +26,14 @@ function (P::DynamicHMCPosterior)(θ) logpdf_sum = 0 for i in length(a) logpdf_sum += logpdf(a_prior[i], a[i]) - end + end ℓ + logpdf_sum end -function dynamichmc_inference(prob::DEProblem,data,priors,t,transformations;σ = 0.01,ϵ=0.001,initial=Float64[],iterations=1000) - P = DynamicHMCPosterior(prob, data, priors, t, Normal(0.0, σ)) - +function dynamichmc_inference(prob::DEProblem, alg, t, data, priors, transformations; + σ=0.01, ϵ=0.001, initial=Float64[], iterations=1000, kwargs...) 
+ P = DynamicHMCPosterior(prob, data, priors, t, Normal(0.0, σ), alg, kwargs) + transformations_tuple = Tuple(transformations) parameter_transformation = TransformationTuple(transformations_tuple) # assuming a > 0 PT = TransformLogLikelihood(P, parameter_transformation) @@ -49,6 +52,7 @@ function dynamichmc_inference(prob::DEProblem,data,priors,t,transformations;σ = push!(lower_bound,minimum(i)) push!(upper_bound,maximum(i)) end + optimized = Optim.minimizer(optimize(a -> -P(a),initial,lower_bound,upper_bound,Fminbox{GradientDescent}())) inverse_transforms = Float64[] for i in 1:length(initial) @@ -56,11 +60,11 @@ function dynamichmc_inference(prob::DEProblem,data,priors,t,transformations;σ = push!(inverse_transforms,inverse(para, (optimized[i], ))[1]) end #println(inverse_transforms) - sample, _ = NUTS_init_tune_mcmc(PTG, - inverse_transforms, - iterations,ϵ=ϵ) - + sample, NUTS_tuned = NUTS_init_tune_mcmc(PTG, + inverse_transforms, + iterations,ϵ=ϵ) + posterior = ungrouping_map(Vector, get_transformation(PT) ∘ get_position, sample) - (posterior, sample, _) -end \ No newline at end of file + return posterior, sample, NUTS_tuned +end diff --git a/test/dynamicHMC.jl b/test/dynamicHMC.jl index 9fd93b55..5c6511e3 100644 --- a/test/dynamicHMC.jl +++ b/test/dynamicHMC.jl @@ -17,7 +17,7 @@ sol = solve(prob1,Tsit5()) randomized = VectorOfArray([(sol(t[i]) + σ * randn(2)) for i in 1:length(t)]) data = convert(Array,randomized) -bayesian_result = dynamichmc_inference(prob1, data, [Normal(1.5, 1)], t, [bridge(ℝ, ℝ⁺, )]) +bayesian_result = dynamichmc_inference(prob1, Tsit5(), t, data, [Normal(1.5, 1)], [bridge(ℝ, ℝ⁺, )]) @test mean(bayesian_result[1][1]) ≈ 1.5 atol=1e-1 f1 = @ode_def_nohes LotkaVolterraTest4 begin @@ -35,7 +35,7 @@ data = convert(Array,randomized) priors = [Truncated(Normal(1.5,0.01),0,2),Truncated(Normal(1.0,0.01),0,1.5), Truncated(Normal(3.0,0.01),0,4),Truncated(Normal(1.0,0.01),0,2)] -bayesian_result = dynamichmc_inference(prob1, data, priors, t, [bridge(ℝ, 
ℝ⁺, ),bridge(ℝ, ℝ⁺, ),bridge(ℝ, ℝ, ),bridge(ℝ, ℝ⁺, )]) +bayesian_result = dynamichmc_inference(prob1, Tsit5(), t, data, priors, [bridge(ℝ, ℝ⁺, ),bridge(ℝ, ℝ⁺, ),bridge(ℝ, ℝ, ),bridge(ℝ, ℝ⁺, )]) @test mean(bayesian_result[1][1]) ≈ 1.5 atol=1e-1 @test mean(bayesian_result[1][2]) ≈ 1.0 atol=1e-1 @test mean(bayesian_result[1][3]) ≈ 3.0 atol=1e-1