Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Turing integration tests #1813

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions .github/workflows/CI.yml
Original file line number Diff line number Diff line change
Expand Up @@ -239,6 +239,7 @@ jobs:
- ubuntu-latest
test:
- DynamicExpressions
- Turing
steps:
- uses: actions/checkout@v4
- uses: julia-actions/setup-julia@v1
Expand All @@ -248,8 +249,8 @@ jobs:
- uses: julia-actions/julia-buildpkg@v1
- name: "Run tests"
run: |
julia --color=yes --project=test/integration -e 'using Pkg; Pkg.develop([PackageSpec(; path) for path in (".", "lib/EnzymeCore")]); Pkg.instantiate()'
julia --color=yes --project=test/integration --threads=auto --check-bounds=yes test/integration/${{ matrix.test }}.jl
julia --color=yes --project=test/integration/${{ matrix.test }} -e 'using Pkg; Pkg.develop([PackageSpec(; path) for path in (".", "lib/EnzymeCore")]); Pkg.instantiate()'
julia --color=yes --project=test/integration/${{ matrix.test }} --threads=auto --check-bounds=yes test/integration/${{ matrix.test }}/runtests.jl
shell: bash
docs:
name: Documentation
Expand Down
21 changes: 21 additions & 0 deletions test/integration/Turing/Project.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
[deps]
AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001"
AbstractPPL = "7a57a42e-76ec-4ea3-a279-07e840d6d9cf"
Bijectors = "76274a88-744f-5084-9051-94815aaf08c4"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
DynamicPPL = "366bfd00-2699-11ea-058f-f148b4cae6d8"
Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869"
FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000"
LogDensityProblems = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c"
LogDensityProblemsAD = "996a588d-648d-4e1f-a8f0-a84b347e47b1"

[compat]
AbstractMCMC = "=5.3.0"
AbstractPPL = "=0.8.4"
Bijectors = "=0.13.18"
Distributions = "=0.25.111"
DynamicPPL = "=0.28.4"
FiniteDifferences = "0.12.32"
LogDensityProblems = "=2.1.1"
LogDensityProblemsAD = "=1.10.0"
99 changes: 99 additions & 0 deletions test/integration/Turing/runtests.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
# Integration tests exercising Enzyme's automatic differentiation on
# Turing/DynamicPPL models, comparing gradients against finite differences.
module TuringIntegrationTests

using Distributions: Distributions
using DynamicPPL: DynamicPPL
using Enzyme: Enzyme
using FiniteDifferences: FiniteDifferences
using LinearAlgebra: LinearAlgebra
using Random: randn
using Test: @test, @testset

# NOTE(review): runtime activity is enabled globally as a workaround so these
# models differentiate without activity-analysis errors — the TODO below
# suggests it may become unnecessary in a future Enzyme release.
# TODO(mhauru) Could we at some point make do without this?
Enzyme.API.runtimeActivity!(true)

"""
    build_turing_problem(model)

Convert a DynamicPPL `model` into a log-density function over the linked
(unconstrained) parameter space, and return it together with a random point
at which it can be evaluated.
"""
function build_turing_problem(model)
    varinfo = DynamicPPL.VarInfo(model)
    linked_varinfo = DynamicPPL.link(varinfo, model)
    context = DynamicPPL.DefaultContext()
    problem = DynamicPPL.LogDensityFunction(linked_varinfo, model, context)
    dim = DynamicPPL.LogDensityProblems.dimension(problem)
    logdensity = Base.Fix1(DynamicPPL.LogDensityProblems.logdensity, problem)
    return logdensity, randn(dim)
end

"""
    test_grad(f, x; rtol=1e-6, atol=1e-6)

Check that Enzyme's forward- and reverse-mode gradients of `f` at `x` both
agree with a finite-difference reference computed via FiniteDifferences.
"""
function test_grad(f, x; rtol=1e-6, atol=1e-6)
    fdm = FiniteDifferences.central_fdm(5, 1)
    reference = FiniteDifferences.grad(fdm, f, x)[1]
    wrapped_f = Enzyme.Const(f)
    # TODO(mhauru) The Val(1) works around https://github.com/EnzymeAD/Enzyme.jl/issues/1807
    forward_grad = collect(Enzyme.gradient(Enzyme.Forward, wrapped_f, x, Val(1)))
    reverse_grad = Enzyme.gradient(Enzyme.Reverse, wrapped_f, x)
    @test forward_grad ≈ reference rtol = rtol atol = atol
    @test reverse_grad ≈ reference rtol = rtol atol = atol
    return nothing
end

# Turing models to test with. These come from Turing's test suite.
models = collect(DynamicPPL.TestUtils.DEMO_MODELS)

# Add some other models that use features that have previously been problematic for Enzyme.

# Dirichlet-distributed parameter combined with a manually accumulated
# log-probability contribution via @addlogprob!.
DynamicPPL.@model function MvDirichletWithManualAccumulation(w, doc)
β ~ DynamicPPL.filldist(Distributions.Dirichlet([1.0, 1.0]), 2)
log_product = log.(β)
DynamicPPL.@addlogprob! sum(log_product[CartesianIndex.(w, doc)])
end

push!(models, MvDirichletWithManualAccumulation([1, 1, 1, 1], [1, 1, 2, 2]))

# Parameter living on the Cholesky manifold (LKJCholesky prior).
# NOTE(review): LKJCholesky is defined in Distributions; confirm DynamicPPL
# actually re-exports it, otherwise this should be Distributions.LKJCholesky.
DynamicPPL.@model function demo_lkjchol(d::Int=2)
x ~ DynamicPPL.LKJCholesky(d, 1.0)
return (x=x,)
end

push!(models, demo_lkjchol())

# Matrix-valued support: a Wishart-distributed parameter.
DynamicPPL.@model function hmcmatrixsup()
return v ~ Distributions.Wishart(7, [1 0.5; 0.5 1])
end

push!(models, hmcmatrixsup())

# Broadcasted observation (.~) against a lazily transposed matrix argument.
DynamicPPL.@model function mvnormal_with_transpose(x=transpose([1.5 2.0;]))
m ~ Distributions.MvNormal(LinearAlgebra.Diagonal([1.0, 1.0]))
x .~ Distributions.MvNormal(m, LinearAlgebra.Diagonal([1.0, 1.0]))
return nothing
end

push!(models, mvnormal_with_transpose())

# Type-parameter argument used to allocate and zero-fill a working matrix,
# then sample one of its columns in place.
DynamicPPL.@model function mvnorm_with_argtype(::Type{TV}=Matrix{Float64}) where {TV}
P0 = vcat([0.1 0.0], [0.0 0.1])
x = TV(undef, 2, 2)
fill!(x, zero(eltype(x)))
x[:, 2] ~ Distributions.MvNormal(x[:, 1], P0)
return nothing
end

push!(models, mvnorm_with_argtype())

# Test each model in turn, checking Enzyme's gradient against FiniteDifferences.
@testset "Turing integration tests" begin
@testset "$(typeof(model.f))" for model in models
f, x = build_turing_problem(model)
test_grad(f, x)
end
end

end
Loading