Revert "fix + test for compiled ReverseDiff without linking (#2097)"
This reverts commit b5a07b7.
torfjelde authored Oct 6, 2023
1 parent b5a07b7 commit 9f3527e
Showing 3 changed files with 2 additions and 16 deletions.
2 changes: 1 addition & 1 deletion Project.toml
@@ -1,6 +1,6 @@
 name = "Turing"
 uuid = "fce5fe82-541a-59a6-adf8-730c64b5f9a0"
-version = "0.29.3"
+version = "0.29.2"
 
 [deps]
 AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001"
2 changes: 1 addition & 1 deletion src/essential/ad.jl
@@ -118,7 +118,7 @@ end
 for cache in (:true, :false)
     @eval begin
         function LogDensityProblemsAD.ADgradient(::ReverseDiffAD{$cache}, ℓ::Turing.LogDensityFunction)
-            return LogDensityProblemsAD.ADgradient(Val(:ReverseDiff), ℓ; compile=Val($cache), x=DynamicPPL.getparams(ℓ))
+            return LogDensityProblemsAD.ADgradient(Val(:ReverseDiff), ℓ; compile=Val($cache))
         end
     end
 end
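For context on the reverted call: the x keyword of LogDensityProblemsAD's ReverseDiff backend supplies the input at which the compiled tape is recorded, which is what commit b5a07b7 passed and this revert removes. A minimal sketch of that backend in isolation, assuming a hypothetical toy density MyDensity that is not part of this diff:

using LogDensityProblems, LogDensityProblemsAD, ReverseDiff

# Hypothetical toy log density implementing the LogDensityProblems interface.
struct MyDensity end
LogDensityProblems.logdensity(::MyDensity, x) = -sum(abs2, x) / 2
LogDensityProblems.dimension(::MyDensity) = 2
LogDensityProblems.capabilities(::Type{MyDensity}) = LogDensityProblems.LogDensityOrder{0}()

x0 = [0.5, -0.5]

# compile=Val(true) records a ReverseDiff tape once and reuses it for every
# gradient call; the x keyword (the argument this commit reverts) fixes the
# input at which that tape is recorded.
∇ℓ = LogDensityProblemsAD.ADgradient(Val(:ReverseDiff), MyDensity(); compile=Val(true), x=x0)
LogDensityProblems.logdensity_and_gradient(∇ℓ, x0)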
14 changes: 0 additions & 14 deletions test/essential/ad.jl
@@ -198,18 +198,4 @@
         end
     end
 end
-
-    @testset "ReverseDiff compiled without linking" begin
-        f = DynamicPPL.LogDensityFunction(gdemo_default)
-        θ = DynamicPPL.getparams(f)
-
-        f_rd = LogDensityProblemsAD.ADgradient(Turing.Essential.ReverseDiffAD{false}(), f)
-        f_rd_compiled = LogDensityProblemsAD.ADgradient(Turing.Essential.ReverseDiffAD{true}(), f)
-
-        ℓ, ℓ_grad = LogDensityProblems.logdensity_and_gradient(f_rd, θ)
-        ℓ_compiled, ℓ_grad_compiled = LogDensityProblems.logdensity_and_gradient(f_rd_compiled, θ)
-
-        @test ℓ == ℓ_compiled
-        @test ℓ_grad == ℓ_grad_compiled
-    end
 end
