Skip to content

Commit

Permalink
Fix more bugs
Browse files Browse the repository at this point in the history
  • Loading branch information
penelopeysm committed Feb 10, 2025
1 parent 74d3bdb commit 567e087
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 3 deletions.
2 changes: 1 addition & 1 deletion src/logdensityfunction.jl
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@
function LogDensityProblems.logdensity_and_gradient(
f::LogDensityFunction, θ::AbstractVector, adtype::ADTypes.AbstractADType
)
# Ensure we concretise the elements of the params.
# θ = map(identity, θ) # TODO: Is this needed?
θ = map(identity, θ) # TODO: Is this needed?
prep = DI.prepare_gradient(_flipped_logdensity, adtype, θ, DI.Constant(f))
return DI.value_and_gradient(_flipped_logdensity, prep, adtype, θ, DI.Constant(f))
end
3 changes: 1 addition & 2 deletions test/ad.jl
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
@testset "AD: ForwardDiff, ReverseDiff, and Mooncake" begin
@testset "$(m.f)" for m in DynamicPPL.TestUtils.DEMO_MODELS
f = DynamicPPL.LogDensityFunction(m)
rand_param_values = DynamicPPL.TestUtils.rand_prior_true(m)
vns = DynamicPPL.TestUtils.varnames(m)
varinfos = DynamicPPL.TestUtils.setup_varinfos(m, rand_param_values, vns)
Expand All @@ -11,7 +10,7 @@
# reference: https://github.com/TuringLang/DynamicPPL.jl/pull/571#issuecomment-1924304489
θ = convert(Vector{Float64}, varinfo[:])
# Calculate reference logp + gradient of logp using ForwardDiff
default_adtype = ADTypes.AutoForwardDiff(; chunksize=0)
default_adtype = ADTypes.AutoForwardDiff()
ref_logp, ref_grad = LogDensityProblems.logdensity_and_gradient(
f, θ, default_adtype
)
Expand Down

0 comments on commit 567e087

Please sign in to comment.