diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 18248789..11db133f 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -8,5 +8,225 @@ steps: cuda: "*" command: | julia --color=yes --project -e 'using Pkg; Pkg.add("CUDA"); Pkg.add("NLPModels"); Pkg.add("NLPModelsTest"); Pkg.instantiate()' - julia --color=yes --project -e 'include("test/gpu.jl")' + julia --color=yes --project -e 'include("test/gpu/nvidia.jl")' timeout_in_minutes: 30 + + - label: "CPUs -- ForwardDiff.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + Pkg.add("OptimizationProblems") + Pkg.add("ForwardDiff") + Pkg.instantiate() + include("test/backend/ForwardDiff.jl")' + timeout_in_minutes: 30 + + - label: "CPUs -- ReverseDiff.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + Pkg.add("OptimizationProblems") + Pkg.add("ReverseDiff") + Pkg.instantiate() + include("test/backend/ReverseDiff.jl")' + timeout_in_minutes: 30 + + - label: "CPUs -- Enzyme.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + Pkg.add("OptimizationProblems") + Pkg.add("Enzyme") + Pkg.instantiate() + include("test/backend/Enzyme.jl")' + timeout_in_minutes: 30 + + - label: "CPUs -- Zygote.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + Pkg.add("OptimizationProblems") + Pkg.add("Zygote") + Pkg.instantiate() + include("test/backend/Zygote.jl")' + timeout_in_minutes: 30 + + - label: "CPUs -- Mooncake.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + Pkg.add("OptimizationProblems") + Pkg.add("Mooncake") + Pkg.instantiate() + include("test/backend/Mooncake.jl")' + timeout_in_minutes: 30 + + - label: "CPUs -- Diffractor.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + Pkg.add("OptimizationProblems") + Pkg.add("Diffractor") + Pkg.instantiate() + include("test/backend/Diffractor.jl")' + timeout_in_minutes: 30 + + - label: "CPUs -- Tracker.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + Pkg.add("OptimizationProblems") + Pkg.add("Tracker") + Pkg.instantiate() + include("test/backend/Tracker.jl")' + timeout_in_minutes: 30 + + - label: "CPUs -- Symbolics.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + Pkg.add("OptimizationProblems") + Pkg.add("Symbolics") + Pkg.instantiate() + include("test/backend/Symbolics.jl")' + timeout_in_minutes: 30 + + - label: "CPUs -- ChainRules.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + 
Pkg.add("OptimizationProblems") + Pkg.add("ChainRules") + Pkg.instantiate() + include("test/backend/ChainRules.jl")' + timeout_in_minutes: 30 + + - label: "CPUs -- FastDifferentiation.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + Pkg.add("OptimizationProblems") + Pkg.add("FastDifferentiation") + Pkg.instantiate() + include("test/backend/FastDifferentiation.jl")' + timeout_in_minutes: 30 + + - label: "CPUs -- FiniteDiff.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + Pkg.add("OptimizationProblems") + Pkg.add("FiniteDiff") + Pkg.instantiate() + include("test/backend/FiniteDiff.jl")' + timeout_in_minutes: 30 + + - label: "CPUs -- FiniteDifferences.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + Pkg.add("OptimizationProblems") + Pkg.add("FiniteDifferences") + Pkg.instantiate() + include("test/backend/FiniteDifferences.jl")' + timeout_in_minutes: 30 + + - label: "CPUs -- PolyesterForwardDiff.jl" + plugins: + - JuliaCI/julia#v1: + version: "1.10" + agents: + queue: "juliaecosystem" + os: "linux" + arch: "x86_64" + command: | + julia --color=yes --project -e ' + using Pkg + Pkg.add("OptimizationProblems") + Pkg.add("PolyesterForwardDiff") + Pkg.instantiate() + include("test/backend/PolyesterForwardDiff.jl")' diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index e3911cae..dbc8462b 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -28,7 +28,7 @@ jobs: with: version: ${{ matrix.version }} arch: ${{ matrix.arch }} - - uses: actions/cache@v1 + - uses: actions/cache@v4 env: cache-name: cache-artifacts with: diff --git a/Project.toml b/Project.toml index f7b60817..2fab24d0 100644 --- a/Project.toml +++ b/Project.toml @@ -4,21 +4,21 @@ version = "0.8.7" [deps] ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b" +DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63" ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" NLPModels = "a4795742-8479-5a88-8948-cc11e1c8c1a6" -Requires = "ae029012-a4dd-5104-9daa-d747884805df" ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" SparseConnectivityTracer = "9f842d2f-2579-4b1d-911e-f412cf18a3f5" SparseMatrixColorings = "0a514795-09f3-496d-8182-132a7b665d35" [compat] -ADTypes = "1.2.1" -ForwardDiff = "0.9.0, 0.10.0" -NLPModels = "0.18, 0.19, 0.20, 0.21" -Requires = "1" -ReverseDiff = "1" +ADTypes = "1.9.0" +DifferentiationInterface = "0.6.1" +ForwardDiff = "0.10.36" +NLPModels = "0.21.3" +ReverseDiff = "1.15.3" SparseConnectivityTracer = "0.6.1" SparseMatrixColorings = "0.4.0" -julia = "^1.6" +julia = "1.6" diff --git a/docs/Project.toml b/docs/Project.toml index 2d003cbd..57ff01f6 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -16,10 +16,10 @@ Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" DataFrames = "1" Documenter = "1.0" ManualNLPModels = "0.1" -NLPModels = "0.21" -NLPModelsJuMP = "0.13" -OptimizationProblems = "0.8" +NLPModels = "0.21.3" +NLPModelsJuMP = "0.13.2" +OptimizationProblems = "0.9" Percival = "0.7" Plots = "1" SolverBenchmark = "0.6" -Zygote = "0.6.62" +Zygote = "0.6.70" diff --git 
a/docs/src/backend.md b/docs/src/backend.md index 7e073675..22f3743c 100644 --- a/docs/src/backend.md +++ b/docs/src/backend.md @@ -1,23 +1,25 @@ # How to switch backend in ADNLPModels `ADNLPModels` allows the use of different backends to compute the derivatives required within NLPModel API. -It uses `ForwardDiff.jl`, `ReverseDiff.jl`, and more via optional depencies. +It uses `ForwardDiff.jl`, `ReverseDiff.jl`, and more via optional dependencies. The backend information is in a structure [`ADNLPModels.ADModelBackend`](@ref) in the attribute `adbackend` of a `ADNLPModel`, it can also be accessed with [`get_adbackend`](@ref). The functions used internally to define the NLPModel API and the possible backends are defined in the following table: -| Functions | FowardDiff backends | ReverseDiff backends | Zygote backends | Enzyme backend | Sparse backend | -| --------- | ------------------- | -------------------- | --------------- | -------------- | -------------- | -| `gradient` and `gradient!` | `ForwardDiffADGradient`/`GenericForwardDiffADGradient` | `ReverseDiffADGradient`/`GenericReverseDiffADGradient` | `ZygoteADGradient` | `EnzymeADGradient` | -- | -| `jacobian` | `ForwardDiffADJacobian` | `ReverseDiffADJacobian` | `ZygoteADJacobian` | -- | `SparseADJacobian` | -| `hessian` | `ForwardDiffADHessian` | `ReverseDiffADHessian` | `ZygoteADHessian` | -- | `SparseADHessian`/`SparseReverseADHessian` | -| `Jprod` | `ForwardDiffADJprod`/`GenericForwardDiffADJprod` | `ReverseDiffADJprod`/`GenericReverseDiffADJprod` | `ZygoteADJprod` | -- | -- | -| `Jtprod` | `ForwardDiffADJtprod`/`GenericForwardDiffADJtprod` | `ReverseDiffADJtprod`/`GenericReverseDiffADJtprod` | `ZygoteADJtprod` | -- | -- | -| `Hvprod` | `ForwardDiffADHvprod`/`GenericForwardDiffADHvprod` | `ReverseDiffADHvprod`/`GenericReverseDiffADHvprod` | -- | -- | -- | -| `directional_second_derivative` | `ForwardDiffADGHjvprod` | -- | -- | -- | -- | - -The functions `hess_structure!`, `hess_coord!`, `jac_structure!` and `jac_coord!` defined in `ad.jl` are generic to all the backends for now. 
+| package | ForwardDiff.jl | ReverseDiff.jl | Enzyme.jl | Zygote.jl | Mooncake.jl | Diffractor.jl | Tracker.jl | Symbolics.jl | ChainRules.jl | FastDifferentiation.jl | FiniteDiff.jl | FiniteDifferences.jl | PolyesterForwardDiff.jl | +|--------|----------------|----------------|-----------|-----------|-------------|---------------|------------|--------------|----------------------|------------------------|---------------|----------------------|-------------------------| +| $\nabla f(x)$ | `ForwardDiffADGradient` | `ReverseDiffADGradient` | `EnzymeADGradient` | `ZygoteADGradient` | `MooncakeADGradient` | `DiffractorADGradient` | `TrackerADGradient` | `SymbolicsADGradient` | `ChainRulesADGradient` | `FastDifferentiationADGradient` | `FiniteDiffADGradient` | `FiniteDifferencesADGradient` | `PolyesterForwardDiffADGradient` | +| $J_c(x)*v$ | `ForwardDiffADJprod` | `ReverseDiffADJprod` | `EnzymeADJprod` | `ZygoteADJprod` | `MooncakeADJprod` | `DiffractorADJprod` | `TrackerADJprod` | `SymbolicsADJprod` | `ChainRulesADJprod` | `FastDifferentiationADJprod` | `FiniteDiffADJprod` | `FiniteDifferencesADJprod` | `PolyesterForwardDiffADJprod` | +| $J^T_c(x)*v$ | `ForwardDiffADJtprod` | `ReverseDiffADJtprod` | `EnzymeADJtprod` | `ZygoteADJtprod` | `MooncakeADJtprod` | `DiffractorADJtprod` | `TrackerADJtprod` | `SymbolicsADJtprod` | `ChainRulesADJtprod` | `FastDifferentiationADJtprod` | `FiniteDiffADJtprod` | `FiniteDifferencesADJtprod` | `PolyesterForwardDiffADJtprod` | +| $J_c(x)$ | `ForwardDiffADJacobian` | `ReverseDiffADJacobian` | `EnzymeADJacobian` | `ZygoteADJacobian` | `MooncakeADJacobian` | `DiffractorADJacobian` | `TrackerADJacobian` | `SymbolicsADJacobian` | `ChainRulesADJacobian` | `FastDifferentiationADJacobian` | `FiniteDiffADJacobian` | `FiniteDifferencesADJacobian` | `PolyesterForwardDiffADJacobian` | +| $\nabla^2 \mathcal{L}(x)*v$ | `ForwardDiffADHvprod` | `ReverseDiffADHvprod` | `EnzymeADHvprod` | `ZygoteADHvprod` | `MooncakeADHvprod` | `DiffractorADHvprod` | `TrackerADHvprod` | `SymbolicsADHvprod` | `ChainRulesADHvprod` | `FastDifferentiationADHvprod` | `FiniteDiffADHvprod` | `FiniteDifferencesADHvprod` | `PolyesterForwardDiffADHvprod` | +| $\nabla^2 \mathcal{L}(x)$ | `ForwardDiffADHessian` | `ReverseDiffADHessian` | `EnzymeADHessian` | `ZygoteADHessian` | `MooncakeADHessian` | `DiffractorADHessian` | `TrackerADHessian` | `SymbolicsADHessian` | `ChainRulesADHessian` | `FastDifferentiationADHessian` | `FiniteDiffADHessian` | `FiniteDifferencesADHessian` | `PolyesterForwardDiffADHessian` | + +$\mathcal{L}(x)$ denotes the Lagrangian $f(x) + \lambda^T c(x)$. +Except for the backends based on `ForwardDiff.jl` and `ReverseDiff.jl`, all other backends require the associated AD package to be manually installed by the user to work. +Note that the Jacobians and Hessians computed by the backends above are dense. +The backends `SparseADJacobian`, `SparseADHessian`, and `SparseReverseADHessian` should be used instead if sparse Jacobians and Hessians are required. 
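As a quick illustration of the table above (a minimal sketch, not part of the patch: it assumes the `backend` keyword of `ADNLPModel` accepts the predefined backend names registered in `src/predefined_backend.jl`, and that the corresponding AD package is installed — the test file `test/backend/utils.jl` in this patch exercises exactly that keyword through `OptimizationProblems.ADNLPProblems`):

```julia
using ADNLPModels, NLPModels
using Zygote  # the AD package must be installed and loaded by the user for the :Zygote backends

f(x) = (x[1] - 1)^2 + 100 * (x[2] - x[1]^2)^2
x0 = [-1.2, 1.0]

# Select the whole Zygote family of backends at once; individual backends may also
# be passed directly, e.g. gradient_backend = ADNLPModels.ForwardDiffADGradient.
nlp = ADNLPModel(f, x0, backend = :Zygote)

grad(nlp, x0)  # gradient computed through DifferentiationInterface + Zygote
```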
```@example ex1 using ADNLPModels diff --git a/src/ADNLPModels.jl b/src/ADNLPModels.jl index a50d1005..772f117c 100644 --- a/src/ADNLPModels.jl +++ b/src/ADNLPModels.jl @@ -4,14 +4,16 @@ module ADNLPModels using LinearAlgebra, SparseArrays # external -using ADTypes: ADTypes, AbstractColoringAlgorithm, AbstractSparsityDetector +import DifferentiationInterface +using ADTypes: ADTypes, AbstractADType, AbstractColoringAlgorithm, AbstractSparsityDetector, AutoEnzyme, AutoZygote +using ADTypes: AutoForwardDiff, AutoReverseDiff, AutoMooncake, AutoDiffractor, AutoTracker, AutoSymbolics +using ADTypes: AutoChainRules, AutoFastDifferentiation, AutoFiniteDiff, AutoFiniteDifferences, AutoPolyesterForwardDiff using SparseConnectivityTracer: TracerSparsityDetector using SparseMatrixColorings using ForwardDiff, ReverseDiff # JSO using NLPModels -using Requires abstract type AbstractADNLPModel{T, S} <: AbstractNLPModel{T, S} end abstract type AbstractADNLSModel{T, S} <: AbstractNLSModel{T, S} end @@ -27,8 +29,7 @@ include("sparse_hessian.jl") include("forward.jl") include("reverse.jl") -include("enzyme.jl") -include("zygote.jl") +include("di.jl") include("predefined_backend.jl") include("nlp.jl") diff --git a/src/di.jl b/src/di.jl new file mode 100644 index 00000000..9900ec9d --- /dev/null +++ b/src/di.jl @@ -0,0 +1,256 @@ +for (ADGradient, fbackend) in ((:EnzymeADGradient , :AutoEnzyme ), + (:ZygoteADGradient , :AutoZygote ), + # (:ForwardDiffADGradient , :AutoForwardDiff ), + # (:ReverseDiffADGradient , :AutoReverseDiff ), + (:MooncakeADGradient , :AutoMooncake ), + (:DiffractorADGradient , :AutoDiffractor ), + (:TrackerADGradient , :AutoTracker ), + (:SymbolicsADGradient , :AutoSymbolics ), + (:ChainRulesADGradient , :AutoChainRules ), + (:FastDifferentiationADGradient , :AutoFastDifferentiation ), + (:FiniteDiffADGradient , :AutoFiniteDiff ), + (:FiniteDifferencesADGradient , :AutoFiniteDifferences ), + (:PolyesterForwardDiffADGradient, :AutoPolyesterForwardDiff)) + @eval begin + + struct $ADGradient{B, E} <: ADBackend + backend::B + prep::E + end + + function $ADGradient( + nvar::Integer, + f, + ncon::Integer = 0, + c::Function = (args...) -> []; + x0::AbstractVector = rand(nvar), + kwargs..., + ) + backend = $fbackend() + prep = DifferentiationInterface.prepare_gradient(f, backend, x0) + return $ADGradient(backend, prep) + end + + function gradient(b::$ADGradient, f, x) + g = DifferentiationInterface.gradient(f, b.prep, b.backend, x) + return g + end + + function gradient!(b::$ADGradient, g, f, x) + DifferentiationInterface.gradient!(f, g, b.prep, b.backend, x) + return g + end + + end +end + +for (ADJprod, fbackend) in ((:EnzymeADJprod , :AutoEnzyme ), + (:ZygoteADJprod , :AutoZygote ), + # (:ForwardDiffADJprod , :AutoForwardDiff ), + # (:ReverseDiffADJprod , :AutoReverseDiff ), + (:MooncakeADJprod , :AutoMooncake ), + (:DiffractorADJprod , :AutoDiffractor ), + (:TrackerADJprod , :AutoTracker ), + (:SymbolicsADJprod , :AutoSymbolics ), + (:ChainRulesADJprod , :AutoChainRules ), + (:FastDifferentiationADJprod , :AutoFastDifferentiation ), + (:FiniteDiffADJprod , :AutoFiniteDiff ), + (:FiniteDifferencesADJprod , :AutoFiniteDifferences ), + (:PolyesterForwardDiffADJprod, :AutoPolyesterForwardDiff)) + @eval begin + + struct $ADJprod{B, E} <: ADBackend + backend::B + prep::E + end + + function $ADJprod( + nvar::Integer, + f, + ncon::Integer = 0, + c::Function = (args...) 
-> []; + x0::AbstractVector = rand(nvar), + kwargs..., + ) + backend = $fbackend() + dy = similar(x0, ncon) + dx = similar(x0, nvar) + prep = DifferentiationInterface.prepare_pushforward(c, dy, backend, x0, dx) + return $ADJprod(backend, prep) + end + + function Jprod!(b::$ADJprod, Jv, c, x, v, ::Val) + DifferentiationInterface.pushforward!(c, Jv, b.prep, b.backend, x, v) + return Jv + end + + end +end + +for (ADJtprod, fbackend) in ((:EnzymeADJtprod , :AutoEnzyme ), + (:ZygoteADJtprod , :AutoZygote ), + # (:ForwardDiffADJtprod , :AutoForwardDiff ), + # (:ReverseDiffADJtprod , :AutoReverseDiff ), + (:MooncakeADJtprod , :AutoMooncake ), + (:DiffractorADJtprod , :AutoDiffractor ), + (:TrackerADJtprod , :AutoTracker ), + (:SymbolicsADJtprod , :AutoSymbolics ), + (:ChainRulesADJtprod , :AutoChainRules ), + (:FastDifferentiationADJtprod , :AutoFastDifferentiation ), + (:FiniteDiffADJtprod , :AutoFiniteDiff ), + (:FiniteDifferencesADJtprod , :AutoFiniteDifferences ), + (:PolyesterForwardDiffADJtprod, :AutoPolyesterForwardDiff)) + @eval begin + + struct $ADJtprod{B, E} <: ADBackend + backend::B + prep::E + end + + function $ADJtprod( + nvar::Integer, + f, + ncon::Integer = 0, + c::Function = (args...) -> []; + x0::AbstractVector = rand(nvar), + kwargs..., + ) + backend = $fbackend() + dx = similar(x0, nvar) + dy = similar(x0, ncon) + prep = DifferentiationInterface.prepare_pullback(c, dx, backend, x0, dy) + return $ADJtprod(backend, prep) + end + + function Jtprod!(b::$ADJtprod, Jtv, c, x, v, ::Val) + DifferentiationInterface.pullback!(c, Jtv, b.prep, b.backend, x, v) + return Jtv + end + + end +end + +for (ADJacobian, fbackend) in ((:EnzymeADJacobian , :AutoEnzyme ), + (:ZygoteADJacobian , :AutoZygote ), + # (:ForwardDiffADJacobian , :AutoForwardDiff ), + # (:ReverseDiffADJacobian , :AutoReverseDiff ), + (:MooncakeADJacobian , :AutoMooncake ), + (:DiffractorADJacobian , :AutoDiffractor ), + (:TrackerADJacobian , :AutoTracker ), + (:SymbolicsADJacobian , :AutoSymbolics ), + (:ChainRulesADJacobian , :AutoChainRules ), + (:FastDifferentiationADJacobian , :AutoFastDifferentiation ), + (:FiniteDiffADJacobian , :AutoFiniteDiff ), + (:FiniteDifferencesADJacobian , :AutoFiniteDifferences ), + (:PolyesterForwardDiffADJacobian, :AutoPolyesterForwardDiff)) + @eval begin + + struct $ADJacobian{B, E} <: ADBackend + backend::B + prep::E + end + + function $ADJacobian( + nvar::Integer, + f, + ncon::Integer = 0, + c::Function = (args...) 
-> []; + x0::AbstractVector = rand(nvar), + kwargs..., + ) + backend = $fbackend() + y = similar(x0, ncon) + prep = DifferentiationInterface.prepare_jacobian(c, y, backend, x0) + return $ADJacobian(backend, prep) + end + + function jacobian(b::$ADJacobian, c, x) + J = DifferentiationInterface.jacobian(c, b.prep, b.backend, x) + return J + end + + end +end + +for (ADHvprod, fbackend) in ((:EnzymeADHvprod , :AutoEnzyme ), + (:ZygoteADHvprod , :AutoZygote ), + # (:ForwardDiffADHvprod , :AutoForwardDiff ), + # (:ReverseDiffADHvprod , :AutoReverseDiff ), + (:MooncakeADHvprod , :AutoMooncake ), + (:DiffractorADHvprod , :AutoDiffractor ), + (:TrackerADHvprod , :AutoTracker ), + (:SymbolicsADHvprod , :AutoSymbolics ), + (:ChainRulesADHvprod , :AutoChainRules ), + (:FastDifferentiationADHvprod , :AutoFastDifferentiation ), + (:FiniteDiffADHvprod , :AutoFiniteDiff ), + (:FiniteDifferencesADHvprod , :AutoFiniteDifferences ), + (:PolyesterForwardDiffADHvprod, :AutoPolyesterForwardDiff)) + @eval begin + + struct $ADHvprod{B, E} <: ADBackend + backend::B + prep::E + end + + function $ADHvprod( + nvar::Integer, + f, + ncon::Integer = 0, + c::Function = (args...) -> []; + x0::AbstractVector = rand(nvar), + kwargs..., + ) + backend = $fbackend() + tx = similar(x0) + prep = DifferentiationInterface.prepare_hvp(f, backend, x0, tx) + return $ADHvprod(backend, prep) + end + + function Hvprod!(b::$ADHvprod, Hv, f, x, v, ::Val) + DifferentiationInterface.hvp!(f, Hv, b.prep, b.backend, x, v) + return Hv + end + + end +end + +for (ADHessian, fbackend) in ((:EnzymeADHessian , :AutoEnzyme ), + (:ZygoteADHessian , :AutoZygote ), + # (:ForwardDiffADHessian , :AutoForwardDiff ), + # (:ReverseDiffADHessian , :AutoReverseDiff ), + (:MooncakeADHessian , :AutoMooncake ), + (:DiffractorADHessian , :AutoDiffractor ), + (:TrackerADHessian , :AutoTracker ), + (:SymbolicsADHessian , :AutoSymbolics ), + (:ChainRulesADHessian , :AutoChainRules ), + (:FastDifferentiationADHessian , :AutoFastDifferentiation ), + (:FiniteDiffADHessian , :AutoFiniteDiff ), + (:FiniteDifferencesADHessian , :AutoFiniteDifferences ), + (:PolyesterForwardDiffADHessian, :AutoPolyesterForwardDiff)) + @eval begin + + struct $ADHessian{B, E} <: ADBackend + backend::B + prep::E + end + + function $ADHessian( + nvar::Integer, + f, + ncon::Integer = 0, + c::Function = (args...) -> []; + x0::AbstractVector = rand(nvar), + kwargs..., + ) + backend = $fbackend() + prep = DifferentiationInterface.prepare_hessian(f, backend, x0) + return $ADHessian(backend, prep) + end + + function hessian(b::$ADHessian, f, x) + H = DifferentiationInterface.hessian(f, b.prep, b.backend, x) + return H + end + + end +end diff --git a/src/enzyme.jl b/src/enzyme.jl deleted file mode 100644 index db5133fe..00000000 --- a/src/enzyme.jl +++ /dev/null @@ -1,21 +0,0 @@ -struct EnzymeADGradient <: ADNLPModels.ADBackend end - -function EnzymeADGradient( - nvar::Integer, - f, - ncon::Integer = 0, - c::Function = (args...) 
-> []; - x0::AbstractVector = rand(nvar), - kwargs..., -) - return EnzymeADGradient() -end - -@init begin - @require Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" begin - function ADNLPModels.gradient!(::EnzymeADGradient, g, f, x) - Enzyme.autodiff(Enzyme.Reverse, f, Enzyme.Duplicated(x, g)) # gradient!(Reverse, g, f, x) - return g - end - end -end diff --git a/src/predefined_backend.jl b/src/predefined_backend.jl index 740fde7e..f58a1fae 100644 --- a/src/predefined_backend.jl +++ b/src/predefined_backend.jl @@ -14,7 +14,7 @@ default_backend = Dict( ) optimized = Dict( - :gradient_backend => ReverseDiffADGradient, # EnzymeADGradient + :gradient_backend => ReverseDiffADGradient, :hprod_backend => ReverseDiffADHvprod, :jprod_backend => ForwardDiffADJprod, :jtprod_backend => ReverseDiffADJtprod, @@ -43,7 +43,217 @@ generic = Dict( :hessian_residual_backend => ForwardDiffADHessian, ) -predefined_backend = Dict(:default => default_backend, :optimized => optimized, :generic => generic) +ForwardDiff_backend = Dict( + :gradient_backend => ForwardDiffADGradient, + :jprod_backend => ForwardDiffADJprod, + :jtprod_backend => ForwardDiffADJtprod, + :hprod_backend => ForwardDiffADHvprod, + :jacobian_backend => ForwardDiffADJacobian, + :hessian_backend => ForwardDiffADHessian, + :ghjvprod_backend => EmptyADbackend, + :jprod_residual_backend => ForwardDiffADJprod, + :jtprod_residual_backend => ForwardDiffADJtprod, + :hprod_residual_backend => ForwardDiffADHvprod, + :jacobian_residual_backend => ForwardDiffADJacobian, + :hessian_residual_backend => ForwardDiffADHessian +) + +ReverseDiff_backend = Dict( + :gradient_backend => ReverseDiffADGradient, + :jprod_backend => ReverseDiffADJprod, + :jtprod_backend => ReverseDiffADJtprod, + :hprod_backend => ReverseDiffADHvprod, + :jacobian_backend => ReverseDiffADJacobian, + :hessian_backend => ReverseDiffADHessian, + :ghjvprod_backend => EmptyADbackend, + :jprod_residual_backend => ReverseDiffADJprod, + :jtprod_residual_backend => ReverseDiffADJtprod, + :hprod_residual_backend => ReverseDiffADHvprod, + :jacobian_residual_backend => ReverseDiffADJacobian, + :hessian_residual_backend => ReverseDiffADHessian +) + +Enzyme_backend = Dict( + :gradient_backend => EnzymeADGradient, + :jprod_backend => EnzymeADJprod, + :jtprod_backend => EnzymeADJtprod, + :hprod_backend => EnzymeADHvprod, + :jacobian_backend => EnzymeADJacobian, + :hessian_backend => EnzymeADHessian, + :ghjvprod_backend => EmptyADbackend, + :jprod_residual_backend => EnzymeADJprod, + :jtprod_residual_backend => EnzymeADJtprod, + :hprod_residual_backend => EnzymeADHvprod, + :jacobian_residual_backend => EnzymeADJacobian, + :hessian_residual_backend => EnzymeADHessian +) + +Zygote_backend = Dict( + :gradient_backend => ZygoteADGradient, + :jprod_backend => ZygoteADJprod, + :jtprod_backend => ZygoteADJtprod, + :hprod_backend => ZygoteADHvprod, + :jacobian_backend => ZygoteADJacobian, + :hessian_backend => ZygoteADHessian, + :ghjvprod_backend => EmptyADbackend, + :jprod_residual_backend => ZygoteADJprod, + :jtprod_residual_backend => ZygoteADJtprod, + :hprod_residual_backend => ZygoteADHvprod, + :jacobian_residual_backend => ZygoteADJacobian, + :hessian_residual_backend => ZygoteADHessian +) + +Mooncake_backend = Dict( + :gradient_backend => MooncakeADGradient, + :jprod_backend => MooncakeADJprod, + :jtprod_backend => MooncakeADJtprod, + :hprod_backend => MooncakeADHvprod, + :jacobian_backend => MooncakeADJacobian, + :hessian_backend => MooncakeADHessian, + :ghjvprod_backend => EmptyADbackend, + 
:jprod_residual_backend => MooncakeADJprod, + :jtprod_residual_backend => MooncakeADJtprod, + :hprod_residual_backend => MooncakeADHvprod, + :jacobian_residual_backend => MooncakeADJacobian, + :hessian_residual_backend => MooncakeADHessian +) + +Diffractor_backend = Dict( + :gradient_backend => DiffractorADGradient, + :jprod_backend => DiffractorADJprod, + :jtprod_backend => DiffractorADJtprod, + :hprod_backend => DiffractorADHvprod, + :jacobian_backend => DiffractorADJacobian, + :hessian_backend => DiffractorADHessian, + :ghjvprod_backend => EmptyADbackend, + :jprod_residual_backend => DiffractorADJprod, + :jtprod_residual_backend => DiffractorADJtprod, + :hprod_residual_backend => DiffractorADHvprod, + :jacobian_residual_backend => DiffractorADJacobian, + :hessian_residual_backend => DiffractorADHessian +) + +Tracker_backend = Dict( + :gradient_backend => TrackerADGradient, + :jprod_backend => TrackerADJprod, + :jtprod_backend => TrackerADJtprod, + :hprod_backend => TrackerADHvprod, + :jacobian_backend => TrackerADJacobian, + :hessian_backend => TrackerADHessian, + :ghjvprod_backend => EmptyADbackend, + :jprod_residual_backend => TrackerADJprod, + :jtprod_residual_backend => TrackerADJtprod, + :hprod_residual_backend => TrackerADHvprod, + :jacobian_residual_backend => TrackerADJacobian, + :hessian_residual_backend => TrackerADHessian +) + +Symbolics_backend = Dict( + :gradient_backend => SymbolicsADGradient, + :jprod_backend => SymbolicsADJprod, + :jtprod_backend => SymbolicsADJtprod, + :hprod_backend => SymbolicsADHvprod, + :jacobian_backend => SymbolicsADJacobian, + :hessian_backend => SymbolicsADHessian, + :ghjvprod_backend => EmptyADbackend, + :jprod_residual_backend => SymbolicsADJprod, + :jtprod_residual_backend => SymbolicsADJtprod, + :hprod_residual_backend => SymbolicsADHvprod, + :jacobian_residual_backend => SymbolicsADJacobian, + :hessian_residual_backend => SymbolicsADHessian +) + +ChainRules_backend = Dict( + :gradient_backend => ChainRulesADGradient, + :jprod_backend => ChainRulesADJprod, + :jtprod_backend => ChainRulesADJtprod, + :hprod_backend => ChainRulesADHvprod, + :jacobian_backend => ChainRulesADJacobian, + :hessian_backend => ChainRulesADHessian, + :ghjvprod_backend => EmptyADbackend, + :jprod_residual_backend => ChainRulesADJprod, + :jtprod_residual_backend => ChainRulesADJtprod, + :hprod_residual_backend => ChainRulesADHvprod, + :jacobian_residual_backend => ChainRulesADJacobian, + :hessian_residual_backend => ChainRulesADHessian +) + +FastDifferentiation_backend = Dict( + :gradient_backend => FastDifferentiationADGradient, + :jprod_backend => FastDifferentiationADJprod, + :jtprod_backend => FastDifferentiationADJtprod, + :hprod_backend => FastDifferentiationADHvprod, + :jacobian_backend => FastDifferentiationADJacobian, + :hessian_backend => FastDifferentiationADHessian, + :ghjvprod_backend => EmptyADbackend, + :jprod_residual_backend => FastDifferentiationADJprod, + :jtprod_residual_backend => FastDifferentiationADJtprod, + :hprod_residual_backend => FastDifferentiationADHvprod, + :jacobian_residual_backend => FastDifferentiationADJacobian, + :hessian_residual_backend => FastDifferentiationADHessian +) + +FiniteDiff_backend = Dict( + :gradient_backend => FiniteDiffADGradient, + :jprod_backend => FiniteDiffADJprod, + :jtprod_backend => FiniteDiffADJtprod, + :hprod_backend => FiniteDiffADHvprod, + :jacobian_backend => FiniteDiffADJacobian, + :hessian_backend => FiniteDiffADHessian, + :ghjvprod_backend => EmptyADbackend, + :jprod_residual_backend => 
FiniteDiffADJprod, + :jtprod_residual_backend => FiniteDiffADJtprod, + :hprod_residual_backend => FiniteDiffADHvprod, + :jacobian_residual_backend => FiniteDiffADJacobian, + :hessian_residual_backend => FiniteDiffADHessian +) + +FiniteDifferences_backend = Dict( + :gradient_backend => FiniteDifferencesADGradient, + :jprod_backend => FiniteDifferencesADJprod, + :jtprod_backend => FiniteDifferencesADJtprod, + :hprod_backend => FiniteDifferencesADHvprod, + :jacobian_backend => FiniteDifferencesADJacobian, + :hessian_backend => FiniteDifferencesADHessian, + :ghjvprod_backend => EmptyADbackend, + :jprod_residual_backend => FiniteDifferencesADJprod, + :jtprod_residual_backend => FiniteDifferencesADJtprod, + :hprod_residual_backend => FiniteDifferencesADHvprod, + :jacobian_residual_backend => FiniteDifferencesADJacobian, + :hessian_residual_backend => FiniteDifferencesADHessian +) + +PolyesterForwardDiff_backend = Dict( + :gradient_backend => PolyesterForwardDiffADGradient, + :jprod_backend => PolyesterForwardDiffADJprod, + :jtprod_backend => PolyesterForwardDiffADJtprod, + :hprod_backend => PolyesterForwardDiffADHvprod, + :jacobian_backend => PolyesterForwardDiffADJacobian, + :hessian_backend => PolyesterForwardDiffADHessian, + :ghjvprod_backend => EmptyADbackend, + :jprod_residual_backend => PolyesterForwardDiffADJprod, + :jtprod_residual_backend => PolyesterForwardDiffADJtprod, + :hprod_residual_backend => PolyesterForwardDiffADHvprod, + :jacobian_residual_backend => PolyesterForwardDiffADJacobian, + :hessian_residual_backend => PolyesterForwardDiffADHessian +) + +predefined_backend = Dict(:default => default_backend, + :optimized => optimized, + :generic => generic, + :ForwardDiff => ForwardDiff_backend, + :ReverseDiff => ReverseDiff_backend, + :Enzyme => Enzyme_backend, + :Zygote => Zygote_backend, + :Mooncake => Mooncake_backend, + :Diffractor => Diffractor_backend, + :Tracker => Tracker_backend, + :Symbolics => Symbolics_backend, + :ChainRules => ChainRules_backend, + :FastDifferentiation => FastDifferentiation_backend, + :FiniteDiff => FiniteDiff_backend, + :FiniteDifferences => FiniteDifferences_backend, + :PolyesterForwardDiff => PolyesterForwardDiff_backend) """ get_default_backend(meth::Symbol, backend::Symbol; kwargs...) diff --git a/src/zygote.jl b/src/zygote.jl deleted file mode 100644 index 63358a7e..00000000 --- a/src/zygote.jl +++ /dev/null @@ -1,119 +0,0 @@ -struct ZygoteADGradient <: ADBackend end -struct ZygoteADJacobian <: ImmutableADbackend - nnzj::Int -end -struct ZygoteADHessian <: ImmutableADbackend - nnzh::Int -end -struct ZygoteADJprod <: ImmutableADbackend end -struct ZygoteADJtprod <: ImmutableADbackend end - -@init begin - @require Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" begin - # See https://fluxml.ai/Zygote.jl/latest/limitations/ - function get_immutable_c(nlp::ADModel) - function c(x; nnln = nlp.meta.nnln) - c = Zygote.Buffer(x, nnln) - nlp.c!(c, x) - return copy(c) - end - return c - end - get_c(nlp::ADModel, ::ImmutableADbackend) = get_immutable_c(nlp) - - function get_immutable_F(nls::AbstractADNLSModel) - function F(x; nequ = nls.nls_meta.nequ) - Fx = Zygote.Buffer(x, nequ) - nls.F!(Fx, x) - return copy(Fx) - end - return F - end - get_F(nls::AbstractADNLSModel, ::ImmutableADbackend) = get_immutable_F(nls) - - function ZygoteADGradient( - nvar::Integer, - f, - ncon::Integer = 0, - c::Function = (args...) 
-> []; - kwargs..., - ) - return ZygoteADGradient() - end - function gradient(::ZygoteADGradient, f, x) - g = Zygote.gradient(f, x)[1] - return g === nothing ? zero(x) : g - end - function gradient!(::ZygoteADGradient, g, f, x) - _g = Zygote.gradient(f, x)[1] - g .= _g === nothing ? 0 : _g - end - - function ZygoteADJacobian( - nvar::Integer, - f, - ncon::Integer = 0, - c::Function = (args...) -> []; - kwargs..., - ) - @assert nvar > 0 - nnzj = nvar * ncon - return ZygoteADJacobian(nnzj) - end - function jacobian(::ZygoteADJacobian, f, x) - return Zygote.jacobian(f, x)[1] - end - - function ZygoteADHessian( - nvar::Integer, - f, - ncon::Integer = 0, - c::Function = (args...) -> []; - kwargs..., - ) - @assert nvar > 0 - nnzh = nvar * (nvar + 1) / 2 - return ZygoteADHessian(nnzh) - end - function hessian(b::ZygoteADHessian, f, x) - return jacobian( - ForwardDiffADJacobian(length(x), f, x0 = x), - x -> gradient(ZygoteADGradient(), f, x), - x, - ) - end - - function ZygoteADJprod( - nvar::Integer, - f, - ncon::Integer = 0, - c::Function = (args...) -> []; - kwargs..., - ) - return ZygoteADJprod() - end - function Jprod!(::ZygoteADJprod, Jv, f, x, v, ::Val) - Jv .= vec(Zygote.jacobian(t -> f(x + t * v), 0)[1]) - return Jv - end - - function ZygoteADJtprod( - nvar::Integer, - f, - ncon::Integer = 0, - c::Function = (args...) -> []; - kwargs..., - ) - return ZygoteADJtprod() - end - function Jtprod!(::ZygoteADJtprod, Jtv, f, x, v, ::Val) - g = Zygote.gradient(x -> dot(f(x), v), x)[1] - if g === nothing - Jtv .= zero(x) - else - Jtv .= g - end - return Jtv - end - end -end diff --git a/test/Project.toml b/test/Project.toml index d028ecd8..1e33fac8 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -1,6 +1,5 @@ [deps] -CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" -Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9" +DifferentiationInterface = "a0c0ee7d-e4b9-4e03-894e-1c5f64a51d63" ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" ManualNLPModels = "30dfa513-9b2f-4fb3-9796-781eabac1617" @@ -11,16 +10,13 @@ ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" SparseMatrixColorings = "0a514795-09f3-496d-8182-132a7b665d35" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" -Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" [compat] -CUDA = "4, 5" -Enzyme = "0.10, 0.11, 0.12" -ForwardDiff = "0.10" +DifferentiationInterface = "0.6.1" +ForwardDiff = "0.10.36" ManualNLPModels = "0.1" -NLPModels = "0.21" +NLPModels = "0.21.3" NLPModelsModifiers = "0.7" NLPModelsTest = "0.10" -ReverseDiff = "1" +ReverseDiff = "1.15.3" SparseMatrixColorings = "0.4.0" -Zygote = "0.6" diff --git a/test/backend/ChainRules.jl b/test/backend/ChainRules.jl new file mode 100644 index 00000000..f617d6b4 --- /dev/null +++ b/test/backend/ChainRules.jl @@ -0,0 +1,5 @@ +using ADNLPModels, ChainRules + +include("utils.jl") + +test_adbackend(:ChainRules) diff --git a/test/backend/Diffractor.jl b/test/backend/Diffractor.jl new file mode 100644 index 00000000..3b71cdb2 --- /dev/null +++ b/test/backend/Diffractor.jl @@ -0,0 +1,5 @@ +using ADNLPModels, Diffractor + +include("utils.jl") + +test_adbackend(:Diffractor) diff --git a/test/backend/Enzyme.jl b/test/backend/Enzyme.jl new file mode 100644 index 00000000..3d4be068 --- /dev/null +++ b/test/backend/Enzyme.jl @@ -0,0 +1,49 @@ +using ADNLPModels, Enzyme + +include("utils.jl") + +test_adbackend(:Enzyme) + +# list_excluded_enzyme = [ +# "brybnd", +# "clplatea", +# "clplateb", +# 
"clplatec", +# "curly", +# "curly10", +# "curly20", +# "curly30", +# "elec", +# "fminsrf2", +# "hs101", +# "hs117", +# "hs119", +# "hs86", +# "integreq", +# "ncb20", +# "ncb20b", +# "palmer1c", +# "palmer1d", +# "palmer2c", +# "palmer3c", +# "palmer4c", +# "palmer5c", +# "palmer5d", +# "palmer6c", +# "palmer7c", +# "palmer8c", +# "sbrybnd", +# "tetra", +# "tetra_duct12", +# "tetra_duct15", +# "tetra_duct20", +# "tetra_foam5", +# "tetra_gear", +# "tetra_hook", +# "threepk", +# "triangle", +# "triangle_deer", +# "triangle_pacman", +# "triangle_turtle", +# "watson", +# ] diff --git a/test/backend/FastDifferentiation.jl b/test/backend/FastDifferentiation.jl new file mode 100644 index 00000000..eaa0d1c8 --- /dev/null +++ b/test/backend/FastDifferentiation.jl @@ -0,0 +1,5 @@ +using ADNLPModels, FastDifferentiation + +include("utils.jl") + +test_adbackend(:FastDifferentiation) diff --git a/test/backend/FiniteDiff.jl b/test/backend/FiniteDiff.jl new file mode 100644 index 00000000..d7a10b4c --- /dev/null +++ b/test/backend/FiniteDiff.jl @@ -0,0 +1,5 @@ +using ADNLPModels, FiniteDiff + +include("utils.jl") + +test_adbackend(:FiniteDiff) diff --git a/test/backend/FiniteDifferences.jl b/test/backend/FiniteDifferences.jl new file mode 100644 index 00000000..6e72b01a --- /dev/null +++ b/test/backend/FiniteDifferences.jl @@ -0,0 +1,5 @@ +using ADNLPModels, FiniteDifferences + +include("utils.jl") + +test_adbackend(:FiniteDifferences) diff --git a/test/backend/ForwardDiff.jl b/test/backend/ForwardDiff.jl new file mode 100644 index 00000000..4aeb65dc --- /dev/null +++ b/test/backend/ForwardDiff.jl @@ -0,0 +1,5 @@ +using ADNLPModels, ForwardDiff + +include("utils.jl") + +test_adbackend(:ForwardDiff) diff --git a/test/backend/Mooncake.jl b/test/backend/Mooncake.jl new file mode 100644 index 00000000..99f7ebcf --- /dev/null +++ b/test/backend/Mooncake.jl @@ -0,0 +1,5 @@ +using ADNLPModels, Mooncake + +include("utils.jl") + +test_adbackend(:Mooncake) diff --git a/test/backend/PolyesterForwardDiff.jl b/test/backend/PolyesterForwardDiff.jl new file mode 100644 index 00000000..addf2f72 --- /dev/null +++ b/test/backend/PolyesterForwardDiff.jl @@ -0,0 +1,5 @@ +using ADNLPModels, PolyesterForwardDiff + +include("utils.jl") + +test_adbackend(:PolyesterForwardDiff) diff --git a/test/backend/ReverseDiff.jl b/test/backend/ReverseDiff.jl new file mode 100644 index 00000000..467d1034 --- /dev/null +++ b/test/backend/ReverseDiff.jl @@ -0,0 +1,5 @@ +using ADNLPModels, ReverseDiff + +include("utils.jl") + +test_adbackend(:ReverseDiff) diff --git a/test/backend/Symbolics.jl b/test/backend/Symbolics.jl new file mode 100644 index 00000000..b641a7b2 --- /dev/null +++ b/test/backend/Symbolics.jl @@ -0,0 +1,5 @@ +using ADNLPModels, Symbolics + +include("utils.jl") + +test_adbackend(:Symbolics) diff --git a/test/backend/Tracker.jl b/test/backend/Tracker.jl new file mode 100644 index 00000000..972a9df5 --- /dev/null +++ b/test/backend/Tracker.jl @@ -0,0 +1,5 @@ +using ADNLPModels, Tracker + +include("utils.jl") + +test_adbackend(:Tracker) diff --git a/test/backend/Zygote.jl b/test/backend/Zygote.jl new file mode 100644 index 00000000..16a64edb --- /dev/null +++ b/test/backend/Zygote.jl @@ -0,0 +1,5 @@ +using ADNLPModels, Zygote + +include("utils.jl") + +test_adbackend(:Zygote) diff --git a/test/backend/utils.jl b/test/backend/utils.jl new file mode 100644 index 00000000..e58ee03a --- /dev/null +++ b/test/backend/utils.jl @@ -0,0 +1,12 @@ +using OptimizationProblems + +ADNLPModels.EmptyADbackend(args...; kwargs...) 
= ADNLPModels.EmptyADbackend() + +function test_adbackend(package::Symbol) + names = OptimizationProblems.meta[!, :name] + for pb in names + @info pb + nlp = OptimizationProblems.ADNLPProblems.eval(Meta.parse(pb))(backend=package) + grad(nlp, nlp.meta.x0) + end +end diff --git a/test/gpu.jl b/test/gpu/nvidia.jl similarity index 95% rename from test/gpu.jl rename to test/gpu/nvidia.jl index 6ac6dac1..27753edb 100644 --- a/test/gpu.jl +++ b/test/gpu/nvidia.jl @@ -2,10 +2,10 @@ using CUDA, LinearAlgebra, SparseArrays, Test using ADNLPModels, NLPModels, NLPModelsTest for problem in NLPModelsTest.nlp_problems ∪ ["GENROSE"] - include("nlp/problems/$(lowercase(problem)).jl") + include("../nlp/problems/$(lowercase(problem)).jl") end for problem in NLPModelsTest.nls_problems - include("nls/problems/$(lowercase(problem)).jl") + include("../nls/problems/$(lowercase(problem)).jl") end @test CUDA.functional() diff --git a/test/runtests.jl b/test/runtests.jl index eee1cd85..fa634344 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,8 +1,9 @@ -using CUDA, LinearAlgebra, SparseArrays, Test +using LinearAlgebra, SparseArrays, Test using SparseMatrixColorings using ADNLPModels, ManualNLPModels, NLPModels, NLPModelsModifiers, NLPModelsTest using ADNLPModels: gradient, gradient!, jacobian, hessian, Jprod!, Jtprod!, directional_second_derivative, Hvprod! +import DifferentiationInterface: MissingBackendError @testset "Test sparsity pattern of Jacobian and Hessian" begin f(x) = sum(x .^ 2) @@ -40,25 +41,6 @@ for problem in NLPModelsTest.nls_problems include("nls/problems/$(lowercase(problem)).jl") end -# Additional backends used for tests -push!( - ADNLPModels.predefined_backend, - :zygote_backend => Dict( - :gradient_backend => ADNLPModels.ZygoteADGradient, - :jprod_backend => ADNLPModels.ZygoteADJprod, - :jtprod_backend => ADNLPModels.ZygoteADJtprod, - :hprod_backend => ADNLPModels.ForwardDiffADHvprod, - :jacobian_backend => ADNLPModels.ZygoteADJacobian, - :hessian_backend => ADNLPModels.ZygoteADHessian, - :ghjvprod_backend => ADNLPModels.ForwardDiffADGHjvprod, - :jprod_residual_backend => ADNLPModels.ZygoteADJprod, - :jtprod_residual_backend => ADNLPModels.ZygoteADJtprod, - :hprod_residual_backend => ADNLPModels.ForwardDiffADHvprod, - :jacobian_residual_backend => ADNLPModels.ZygoteADJacobian, - :hessian_residual_backend => ADNLPModels.ZygoteADHessian, - ), -) - ReverseDiffAD(nvar, f) = ADNLPModels.ADModelBackend( nvar, f, @@ -96,53 +78,14 @@ function test_getter_setter(nlp) @test typeof(get_adbackend(nlp).hessian_backend) <: ADNLPModels.ReverseDiffADHessian end -ZygoteAD() = ADNLPModels.ADModelBackend( - ADNLPModels.ZygoteADGradient(), - ADNLPModels.GenericForwardDiffADHvprod(), - ADNLPModels.ZygoteADJprod(), - ADNLPModels.ZygoteADJtprod(), - ADNLPModels.ZygoteADJacobian(0), - ADNLPModels.ZygoteADHessian(0), - ADNLPModels.ForwardDiffADGHjvprod(), - ADNLPModels.EmptyADbackend(), - ADNLPModels.EmptyADbackend(), - ADNLPModels.EmptyADbackend(), - ADNLPModels.EmptyADbackend(), - ADNLPModels.EmptyADbackend(), -) - -function test_autodiff_backend_error() - @testset "Error without loading package - $backend" for backend in [:ZygoteAD] - adbackend = eval(backend)() - @test_throws ArgumentError gradient(adbackend.gradient_backend, sum, [1.0]) - @test_throws ArgumentError gradient!(adbackend.gradient_backend, [1.0], sum, [1.0]) - @test_throws ArgumentError jacobian(adbackend.jacobian_backend, identity, [1.0]) - @test_throws ArgumentError hessian(adbackend.hessian_backend, sum, [1.0]) - @test_throws 
ArgumentError Jprod!( - adbackend.jprod_backend, - [1.0], - [1.0], - identity, - [1.0], - Val(:c), - ) - @test_throws ArgumentError Jtprod!( - adbackend.jtprod_backend, - [1.0], - [1.0], - identity, - [1.0], - Val(:c), - ) - end +@testset "Error without loading package - $package" for package in + [:Enzyme, :Zygote, :Mooncake, :Diffractor, :Tracker, :Symbolics, :ChainRules, + :FastDifferentiation, :FiniteDiff, :FiniteDifferences, :PolyesterForwardDiff] + adbackend = ADNLPModels.predefined_backend[package] + @test_throws MissingBackendError gradient(adbackend[:gradient_backend](1, x -> sum(x)), sum, [1.0]) + @test_throws MissingBackendError gradient!(adbackend[:gradient_backend](1, x -> sum(x)), [1.0], sum, [1.0]) end -# Test the argument error without loading the packages -test_autodiff_backend_error() - -# Automatically loads the code for Zygote with Requires -import Zygote - include("nlp/basic.jl") include("nls/basic.jl") include("nlp/nlpmodelstest.jl") diff --git a/test/script_OP.jl b/test/script_OP.jl index 3b8cd908..37f7c66b 100644 --- a/test/script_OP.jl +++ b/test/script_OP.jl @@ -1,8 +1,5 @@ # script that tests ADNLPModels over OptimizationProblems.jl problems -# optional deps -# using Enzyme - # AD deps using ForwardDiff, ReverseDiff @@ -55,79 +52,3 @@ for pb in names continue end end - -#= -ADNLPModels.EmptyADbackend(args...; kwargs...) = ADNLPModels.EmptyADbackend() - -names = OptimizationProblems.meta[!, :name] -list_excluded_enzyme = [ - "brybnd", - "clplatea", - "clplateb", - "clplatec", - "curly", - "curly10", - "curly20", - "curly30", - "elec", - "fminsrf2", - "hs101", - "hs117", - "hs119", - "hs86", - "integreq", - "ncb20", - "ncb20b", - "palmer1c", - "palmer1d", - "palmer2c", - "palmer3c", - "palmer4c", - "palmer5c", - "palmer5d", - "palmer6c", - "palmer7c", - "palmer8c", - "sbrybnd", - "tetra", - "tetra_duct12", - "tetra_duct15", - "tetra_duct20", - "tetra_foam5", - "tetra_gear", - "tetra_hook", - "threepk", - "triangle", - "triangle_deer", - "triangle_pacman", - "triangle_turtle", - "watson", -] -for pb in names - @info pb - (pb in list_excluded_enzyme) && continue - nlp = eval(Meta.parse(pb))( - gradient_backend = ADNLPModels.EnzymeADGradient, - jacobian_backend = ADNLPModels.EmptyADbackend, - hessian_backend = ADNLPModels.EmptyADbackend, - ) - grad(nlp, get_x0(nlp)) -end -=# - -#= -ERROR: Duplicated Returns not yet handled -Stacktrace: - [1] autodiff - @.julia\packages\Enzyme\DIkTv\src\Enzyme.jl:209 [inlined] - [2] autodiff(mode::EnzymeCore.ReverseMode, f::OptimizationProblems.ADNLPProblems.var"#f#254"{OptimizationProblems.ADNLPProblems.var"#f#250#255"}, args::Duplicated{Vector{Float64}}) - @ Enzyme.julia\packages\Enzyme\DIkTv\src\Enzyme.jl:248 - [3] gradient!(#unused#::ADNLPModels.EnzymeADGradient, g::Vector{Float64}, f::Function, x::Vector{Float64}) - @ ADNLPModelsDocuments\cvs\ADNLPModels.jl\src\enzyme.jl:17 - [4] grad!(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64}, g::Vector{Float64}) - @ ADNLPModelsDocuments\cvs\ADNLPModels.jl\src\nlp.jl:542 - [5] grad(nlp::ADNLPModel{Float64, Vector{Float64}, Vector{Int64}}, x::Vector{Float64}) - @ NLPModels.julia\packages\NLPModels\XBcWL\src\nlp\api.jl:31 - [6] top-level scope - @ .\REPL[7]:5 -=#
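For context on the `src/di.jl` additions above, the prepare-then-evaluate pattern of DifferentiationInterface.jl is the only API the new backends rely on. A minimal standalone sketch, using the ForwardDiff backend that already ships with ADNLPModels (the calls mirror those generated by the `@eval` loops in `src/di.jl`; the function `f` and point `x` are illustrative):

```julia
using ForwardDiff
import DifferentiationInterface
using ADTypes: AutoForwardDiff

f(x) = sum(abs2, x)
x = rand(3)

backend = AutoForwardDiff()
# One-time preparation: caches, tapes, or symbolic analysis, depending on the backend.
prep = DifferentiationInterface.prepare_gradient(f, backend, x)

g = similar(x)
# In-place evaluation reusing the preparation, as in gradient!(b, g, f, x) of src/di.jl.
DifferentiationInterface.gradient!(f, g, prep, backend, x)
```

If the AD package behind the chosen `Auto*` backend is not loaded, these calls throw `DifferentiationInterface.MissingBackendError`, which is what the new test set in `test/runtests.jl` asserts for every backend family.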