Re-introduce regression test
penelopeysm committed Nov 29, 2024
1 parent 408a6c6 commit e552ca5
Showing 1 changed file with 36 additions and 0 deletions.
36 changes: 36 additions & 0 deletions test/ad.jl
@@ -25,4 +25,40 @@
end
end
end

@testset "Turing#2151: ReverseDiff compilation & eltype(vi, spl)" begin
# Failing model
t = 1:0.05:8
σ = 0.3
y = @. rand(sin(t) + Normal(0, σ))
@model function state_space(y, TT, ::Type{T}=Float64) where {T}
# Priors
α ~ Normal(y[1], 0.001)
τ ~ Exponential(1)
η ~ filldist(Normal(0, 1), TT-1)
σ ~ Exponential(1)
# create latent variable
x = Vector{T}(undef, TT)
x[1] = α
for t in 2:TT
x[t] = x[t-1] + η[t-1] * τ
end
# measurement model
y ~ MvNormal(x, σ^2 * I)
return x
end
model = state_space(y, length(t))

        # Dummy sampling algorithm for testing. The test case can only be
        # replicated with a custom sampler; it doesn't work with SampleFromPrior().
        struct MyAlg end
        DynamicPPL.getspace(::DynamicPPL.Sampler{MyAlg}) = ()
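        # Forward `assume` to the sampler-free method so the dummy sampler
        # needs no sampling logic of its own.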
        DynamicPPL.assume(rng, ::DynamicPPL.Sampler{MyAlg}, dist, vn, vi) =
            DynamicPPL.assume(dist, vn, vi)

        # Compiling the ReverseDiff tape used to fail here
        spl = Sampler(MyAlg())
        vi = VarInfo(model)
        ldf = DynamicPPL.LogDensityFunction(vi, model, SamplingContext(spl))
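        # `isa Any` is trivially true; the test passes as long as constructing
        # the compiled gradient below does not throw.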
        @test LogDensityProblemsAD.ADgradient(AutoReverseDiff(; compile=true), ldf) isa Any
    end
end
