Skip to content

Commit

Permalink
Merge pull request #41 from mauro3/m3/dyn_hmc-update
Browse files Browse the repository at this point in the history
Made `dynamichmc_inference` more versatile: choice of `alg`, `kwargs`
  • Loading branch information
ChrisRackauckas committed May 2, 2018
2 parents 2ec9405 + 0d1c66e commit ecc1fa5
Show file tree
Hide file tree
Showing 2 changed files with 18 additions and 14 deletions.
28 changes: 16 additions & 12 deletions src/dynamichmc_inference.jl
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,17 @@ struct DynamicHMCPosterior
a_prior
t
ϵ_dist
alg
kwargs
end

function (P::DynamicHMCPosterior)(θ)
#println(θ)
@unpack problem, data, a_prior, t, ϵ_dist = P
@unpack problem, data, a_prior, t, ϵ_dist, alg, kwargs = P
a = θ
try
prob = problem_new_parameters(problem, a)
sol = solve(prob, Tsit5())
sol = solve(prob, alg; kwargs...)
ℓ = sum(sum(logpdf.(ϵ_dist, sol(t) .- data[:, i]))
for (i, t) in enumerate(t))
catch
Expand All @@ -24,13 +26,14 @@ function (P::DynamicHMCPosterior)(θ)
logpdf_sum = 0
for i in length(a)
logpdf_sum += logpdf(a_prior[i], a[i])
end
end
ℓ + logpdf_sum
end

function dynamichmc_inference(prob::DEProblem,data,priors,t,transformations;σ = 0.01,ϵ=0.001,initial=Float64[],iterations=1000)
P = DynamicHMCPosterior(prob, data, priors, t, Normal(0.0, σ))

function dynamichmc_inference(prob::DEProblem, alg, t, data, priors, transformations;
σ=0.01, ϵ=0.001, initial=Float64[], iterations=1000, kwargs...)
P = DynamicHMCPosterior(prob, data, priors, t, Normal(0.0, σ), alg, kwargs)

transformations_tuple = Tuple(transformations)
parameter_transformation = TransformationTuple(transformations_tuple) # assuming a > 0
PT = TransformLogLikelihood(P, parameter_transformation)
Expand All @@ -49,18 +52,19 @@ function dynamichmc_inference(prob::DEProblem,data,priors,t,transformations;σ =
push!(lower_bound,minimum(i))
push!(upper_bound,maximum(i))
end

optimized = Optim.minimizer(optimize(a -> -P(a),initial,lower_bound,upper_bound,Fminbox{GradientDescent}()))
inverse_transforms = Float64[]
for i in 1:length(initial)
para = TransformationTuple(transformations[i])
push!(inverse_transforms,inverse(para, (optimized[i], ))[1])
end
#println(inverse_transforms)
sample, _ = NUTS_init_tune_mcmc(PTG,
inverse_transforms,
iterations,ϵ=ϵ)
sample, NUTS_tuned = NUTS_init_tune_mcmc(PTG,
inverse_transforms,
iterations,ϵ=ϵ)

posterior = ungrouping_map(Vector, get_transformation(PT) ∘ get_position, sample)

(posterior, sample, _)
end
return posterior, sample, NUTS_tuned
end
4 changes: 2 additions & 2 deletions test/dynamicHMC.jl
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ sol = solve(prob1,Tsit5())
randomized = VectorOfArray([(sol(t[i]) + σ * randn(2)) for i in 1:length(t)])
data = convert(Array,randomized)

bayesian_result = dynamichmc_inference(prob1, data, [Normal(1.5, 1)], t, [bridge(ℝ, ℝ⁺, )])
bayesian_result = dynamichmc_inference(prob1, Tsit5(), t, data, [Normal(1.5, 1)], [bridge(ℝ, ℝ⁺, )])
@test mean(bayesian_result[1][1]) ≈ 1.5 atol=1e-1

f1 = @ode_def_nohes LotkaVolterraTest4 begin
Expand All @@ -35,7 +35,7 @@ data = convert(Array,randomized)
priors = [Truncated(Normal(1.5,0.01),0,2),Truncated(Normal(1.0,0.01),0,1.5),
Truncated(Normal(3.0,0.01),0,4),Truncated(Normal(1.0,0.01),0,2)]

bayesian_result = dynamichmc_inference(prob1, data, priors, t, [bridge(ℝ, ℝ⁺, ),bridge(ℝ, ℝ⁺, ),bridge(ℝ, ℝ, ),bridge(ℝ, ℝ⁺, )])
bayesian_result = dynamichmc_inference(prob1, Tsit5(), t, data, priors, [bridge(ℝ, ℝ⁺, ),bridge(ℝ, ℝ⁺, ),bridge(ℝ, ℝ, ),bridge(ℝ, ℝ⁺, )])
@test mean(bayesian_result[1][1]) ≈ 1.5 atol=1e-1
@test mean(bayesian_result[1][2]) ≈ 1.0 atol=1e-1
@test mean(bayesian_result[1][3]) ≈ 3.0 atol=1e-1
Expand Down

0 comments on commit ecc1fa5

Please sign in to comment.