Add a wrapper for Optimization.jl
avik-pal committed Mar 26, 2024
1 parent b7ba71d commit 6acf77a
Showing 10 changed files with 202 additions and 16 deletions.
22 changes: 15 additions & 7 deletions Project.toml
@@ -1,7 +1,7 @@
name = "NonlinearSolve"
uuid = "8913a72c-1f9b-4ce2-8d82-65094dcecaec"
authors = ["SciML"]
version = "3.8.3"
version = "3.9.0"

[deps]
ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
@@ -35,8 +35,9 @@ FastLevenbergMarquardt = "7a0df574-e128-4d35-8cbd-3d84502bf7ce"
FixedPointAcceleration = "817d07cb-a79a-5c30-9a31-890123675176"
LeastSquaresOptim = "0fc2ff8b-aaa3-5acd-a817-1944a5e08891"
MINPACK = "4854310b-de5a-5eb6-a2a5-c1dee2bd17f9"
NLsolve = "2774e3e8-f4cf-5e23-947b-6d7e65073b56"
NLSolvers = "337daf1e-9722-11e9-073e-8b9effe078ba"
NLsolve = "2774e3e8-f4cf-5e23-947b-6d7e65073b56"
Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
SIAMFANLEquations = "084e46ad-d928-497d-ad5e-07fa361a48c4"
SpeedMapping = "f1835b91-879b-4a3f-a438-e4baacf14412"
Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
@@ -48,8 +49,9 @@ NonlinearSolveFastLevenbergMarquardtExt = "FastLevenbergMarquardt"
NonlinearSolveFixedPointAccelerationExt = "FixedPointAcceleration"
NonlinearSolveLeastSquaresOptimExt = "LeastSquaresOptim"
NonlinearSolveMINPACKExt = "MINPACK"
NonlinearSolveNLsolveExt = "NLsolve"
NonlinearSolveNLSolversExt = "NLSolvers"
NonlinearSolveNLsolveExt = "NLsolve"
NonlinearSolveOptimizationExt = "Optimization"
NonlinearSolveSIAMFANLEquationsExt = "SIAMFANLEquations"
NonlinearSolveSpeedMappingExt = "SpeedMapping"
NonlinearSolveSymbolicsExt = "Symbolics"
@@ -61,8 +63,8 @@ Aqua = "0.8"
ArrayInterface = "7.7"
BandedMatrices = "1.4"
BenchmarkTools = "1.4"
ConcreteStructs = "0.2.3"
CUDA = "5.1"
ConcreteStructs = "0.2.3"
DiffEqBase = "6.146.0"
Enzyme = "0.11.15"
FastBroadcast = "0.2.8"
@@ -71,17 +73,20 @@ FastLevenbergMarquardt = "0.1"
FiniteDiff = "2.21"
FixedPointAcceleration = "0.3"
ForwardDiff = "0.10.36"
Ipopt = "1.6"
LazyArrays = "1.8.2"
LeastSquaresOptim = "0.8.5"
LineSearches = "7.2"
LinearAlgebra = "1.10"
LinearSolve = "2.21"
MINPACK = "1.2"
MaybeInplace = "0.1.1"
NLsolve = "4.5"
NLSolvers = "0.5"
NLsolve = "4.5"
NaNMath = "1"
NonlinearProblemLibrary = "0.1.2"
Optimization = "3.24"
OptimizationMOI = "0.4"
OrdinaryDiffEq = "6.63"
Pkg = "1.10"
PrecompileTools = "1.2"
@@ -118,14 +123,17 @@ Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
FastLevenbergMarquardt = "7a0df574-e128-4d35-8cbd-3d84502bf7ce"
FixedPointAcceleration = "817d07cb-a79a-5c30-9a31-890123675176"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9"
LeastSquaresOptim = "0fc2ff8b-aaa3-5acd-a817-1944a5e08891"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
LinearSolve = "7ed4a6bd-45f5-4d41-b270-4a48e9bafcae"
MINPACK = "4854310b-de5a-5eb6-a2a5-c1dee2bd17f9"
NLsolve = "2774e3e8-f4cf-5e23-947b-6d7e65073b56"
NLSolvers = "337daf1e-9722-11e9-073e-8b9effe078ba"
NLsolve = "2774e3e8-f4cf-5e23-947b-6d7e65073b56"
NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
NonlinearProblemLibrary = "b7050fa9-e91f-4b37-bcee-a89a063da141"
Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
OptimizationMOI = "fd9f6733-72f4-499f-8506-86b2bdd0dea1"
OrdinaryDiffEq = "1dea7af3-3e70-54e6-95c3-0bf5283fa5ed"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
@@ -143,4 +151,4 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[targets]
test = ["Aqua", "Enzyme", "BenchmarkTools", "SafeTestsets", "Pkg", "Test", "ForwardDiff", "StaticArrays", "Symbolics", "LinearSolve", "Random", "LinearAlgebra", "Zygote", "SparseDiffTools", "NonlinearProblemLibrary", "LeastSquaresOptim", "FastLevenbergMarquardt", "NaNMath", "BandedMatrices", "DiffEqBase", "StableRNGs", "MINPACK", "NLsolve", "OrdinaryDiffEq", "SpeedMapping", "FixedPointAcceleration", "SIAMFANLEquations", "Sundials", "ReTestItems", "Reexport", "CUDA", "NLSolvers"]
test = ["Aqua", "BandedMatrices", "BenchmarkTools", "CUDA", "DiffEqBase", "Enzyme", "FastLevenbergMarquardt", "FixedPointAcceleration", "ForwardDiff", "Ipopt", "LeastSquaresOptim", "LinearAlgebra", "LinearSolve", "MINPACK", "NLSolvers", "NLsolve", "NaNMath", "NonlinearProblemLibrary", "Optimization", "OptimizationMOI", "OrdinaryDiffEq", "Pkg", "Random", "ReTestItems", "Reexport", "SIAMFANLEquations", "SafeTestsets", "SparseDiffTools", "SpeedMapping", "StableRNGs", "StaticArrays", "Sundials", "Symbolics", "Test", "Zygote"]
3 changes: 2 additions & 1 deletion docs/pages.jl
@@ -17,7 +17,8 @@ pages = ["index.md",
"native/globalization.md", "native/diagnostics.md"],
"Wrapped Solver APIs" => Any[
"api/fastlevenbergmarquardt.md", "api/fixedpointacceleration.md",
"api/leastsquaresoptim.md", "api/minpack.md", "api/nlsolve.md", "api/nlsolvers.md",
"api/leastsquaresoptim.md", "api/minpack.md",
"api/nlsolve.md", "api/nlsolvers.md", "api/optimizationjl.md",
"api/siamfanlequations.md", "api/speedmapping.md", "api/sundials.md"],
"Development Documentation" => [
"devdocs/internal_interfaces.md", "devdocs/linear_solve.md",
17 changes: 17 additions & 0 deletions docs/src/api/optimizationjl.md
@@ -0,0 +1,17 @@
# Optimization.jl

This is an extension for importing solvers from Optimization.jl into the SciML Nonlinear
Problem interface. Note that these solvers are not included by default, and thus one needs
to install Optimization.jl before using these solvers:

```julia
using Pkg
Pkg.add("Optimization")
using Optimization, NonlinearSolve
```
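As a quick sketch of the intended usage (the residual function, initial guess, and
tolerances below are illustrative assumptions; Ipopt is driven through OptimizationMOI,
mirroring the package tests):

```julia
using NonlinearSolve, Optimization, OptimizationMOI, Ipopt

# Hypothetical out-of-place residual; the root satisfies u[1] = 2, u[2] = 4
f(u, p) = [u[1] - p[1], u[2] - u[1]^2]
prob = NonlinearLeastSquaresProblem(f, zeros(2), [2.0])

# Wrap any Optimization.jl-compatible solver; the AD backend defaults to `SciMLBase.NoAD()`
alg = OptimizationJL(Ipopt.Optimizer(), AutoForwardDiff())
sol = solve(prob, alg; maxiters = 10000, abstol = 1e-8)
```

Note that, per the current test suite, Ipopt does not yet converge on all of the wrapped
test problems, so the retcode and residual of the returned solution should be checked.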

## Solver API

```@docs
OptimizationJL
```
7 changes: 5 additions & 2 deletions docs/src/solvers/nonlinear_least_squares_solvers.md
@@ -29,7 +29,7 @@ fails it falls back to more robust algorithms ([`LevenbergMarquardt`](@ref),
### SimpleNonlinearSolve.jl

These methods are included with NonlinearSolve.jl by default, though SimpleNonlinearSolve.jl
can be used directly to reduce dependencies and improve load times.
SimpleNonlinearSolve.jl's methods excel at small problems and problems defined with static
arrays.

@@ -81,5 +81,8 @@ Submethod choices for this algorithm include:

### Optimization.jl

`NonlinearLeastSquaresProblem`s can be converted into an `OptimizationProblem` and used
with any solver of [Optimization.jl](https://github.com/SciML/Optimization.jl).

Alternatively, [`OptimizationJL`](@ref) can be used directly. The only benefit of this is
that the solver returns [`NonlinearSolution`](@ref) instead of `OptimizationSolution`.
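As a sketch of the manual conversion route (the residual and solver choice here are
illustrative; Ipopt is used through OptimizationMOI since both are exercised in the package
tests), one simply minimizes the residual norm:

```julia
using Optimization, OptimizationMOI, Ipopt, LinearAlgebra

# Hypothetical residual for a small nonlinear least squares problem
resid(θ, p) = [θ[1] - 1.0, θ[2] - θ[1]^2]

# Minimize ‖resid(θ)‖₂ as an ordinary optimization problem
obj(θ, p) = norm(resid(θ, p), 2)
opt_fn = OptimizationFunction(obj, Optimization.AutoForwardDiff())
opt_prob = OptimizationProblem(opt_fn, zeros(2))
opt_sol = solve(opt_prob, Ipopt.Optimizer(); maxiters = 10000)
```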
13 changes: 11 additions & 2 deletions docs/src/solvers/nonlinear_system_solvers.md
Expand Up @@ -4,7 +4,7 @@
solve(prob::NonlinearProblem, alg; kwargs...)
```

Solves for ``f(u) = 0`` in the problem defined by `prob` using the algorithm `alg`. If no
Solves for `f(u) = 0` in the problem defined by `prob` using the algorithm `alg`. If no
algorithm is given, a default algorithm will be chosen.

## Recommended Methods
@@ -22,7 +22,7 @@ fail to converge. Additionally, [`DynamicSS`](@ref) can be a good choice for hig
if the root corresponds to a stable equilibrium.

As a balance, [`NewtonRaphson`](@ref) is a good choice for most problems that aren't too
difficult yet need high performance, and [`TrustRegion`](@ref) is a bit less performant but
more stable. If the problem is well-conditioned, [`Klement`](@ref) or [`Broyden`](@ref) may
be faster, but highly dependent on the eigenvalues of the Jacobian being sufficiently small.

Expand Down Expand Up @@ -177,3 +177,12 @@ This is a wrapper package for importing solvers from NLSolvers.jl into the SciML
[NLSolvers.jl](https://github.com/JuliaNLSolvers/NLSolvers.jl)

For a list of possible solvers see the [NLSolvers.jl documentation](https://julianlsolvers.github.io/NLSolvers.jl/)

### Optimization.jl

This is a wrapper package for importing solvers from Optimization.jl into the SciML
Nonlinear Problem interface. These exist mostly for benchmarking purposes and shouldn't
be used by most users.

- [`OptimizationJL()`](@ref): A wrapper for
[Optimization.jl](https://github.com/SciML/Optimization.jl)
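A hedged sketch of how this is meant to be used (the system below is a made-up example;
as noted above, only solvers supporting equality constraints, such as Ipopt through
OptimizationMOI, apply to nonlinear systems):

```julia
using NonlinearSolve, Optimization, OptimizationMOI, Ipopt

# Hypothetical 2×2 system; for p = 2 the roots are u = [1, 1] and u = [-1, -1]
f(u, p) = [u[1]^2 + u[2]^2 - p[1], u[1] - u[2]]
prob = NonlinearProblem(f, [0.5, 0.5], [2.0])

sol = solve(prob, OptimizationJL(Ipopt.Optimizer(), AutoForwardDiff()); abstol = 1e-8)
```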
86 changes: 86 additions & 0 deletions ext/NonlinearSolveOptimizationExt.jl
@@ -0,0 +1,86 @@
module NonlinearSolveOptimizationExt

using FastClosures, LinearAlgebra, NonlinearSolve, Optimization

function SciMLBase.__solve(
prob::NonlinearProblem, alg::OptimizationJL, args...; abstol = nothing,
maxiters = 1000, termination_condition = nothing, kwargs...)
NonlinearSolve.__test_termination_condition(termination_condition, :OptimizationJL)

prob.u0 isa Number &&
throw(ArgumentError("`OptimizationJL` doesn't support scalar `u0`"))

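    # Recast rootfinding as optimization: minimize ‖f(u)‖₂, with f(u) = 0 imposed as an
    # equality constraint further below.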
_objective_function = if SciMLBase.isinplace(prob)
@closure (u, p) -> begin
du = similar(u)
prob.f(du, u, p)
return norm(du, 2)
end
else
@closure (u, p) -> norm(prob.f(u, p), 2)
end

cons = if SciMLBase.isinplace(prob)
prob.f
else
@closure (du, u, p) -> copyto!(du, prob.f(u, p))
end

if alg.autodiff === nothing || alg.autodiff isa SciMLBase.NoAD
opt_func = OptimizationFunction(_objective_function; cons)
else
opt_func = OptimizationFunction(_objective_function, alg.autodiff; cons)
end
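    # Zero lower/upper constraint bounds force the wrapped residual `cons` to vanish,
    # i.e. they encode the equality constraint f(u) = 0.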
bounds = similar(prob.u0)
fill!(bounds, 0)
opt_prob = OptimizationProblem(
opt_func, prob.u0, prob.p; lcons = bounds, ucons = bounds)
sol = solve(opt_prob, alg.solver, args...; abstol, maxiters, kwargs...)

fu = zero(prob.u0)
cons(fu, sol.u, prob.p)

stats = SciMLBase.NLStats(sol.stats.fevals, sol.stats.gevals, -1, -1, -1)

return SciMLBase.build_solution(
prob, alg, sol.u, fu; retcode = sol.retcode, original = sol, stats)
end

function SciMLBase.__solve(prob::NonlinearLeastSquaresProblem, alg::OptimizationJL, args...;
abstol = nothing, maxiters = 1000, termination_condition = nothing, kwargs...)
NonlinearSolve.__test_termination_condition(termination_condition, :OptimizationJL)

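    # For least squares the objective alone suffices: minimize the 2-norm of the residual;
    # no constraints are needed.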
_objective_function = if SciMLBase.isinplace(prob)
@closure (θ, p) -> begin
resid = prob.f.resid_prototype === nothing ? similar(θ) :
similar(prob.f.resid_prototype, eltype(θ))
prob.f(resid, θ, p)
return norm(resid, 2)
end
else
@closure (θ, p) -> norm(prob.f(θ, p), 2)
end

if alg.autodiff === nothing || alg.autodiff isa SciMLBase.NoAD
opt_func = OptimizationFunction(_objective_function)
else
opt_func = OptimizationFunction(_objective_function, alg.autodiff)
end
opt_prob = OptimizationProblem(opt_func, prob.u0, prob.p)
sol = solve(opt_prob, alg.solver, args...; abstol, maxiters, kwargs...)

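    # Re-evaluate the residual at the optimizer's solution so the NonlinearSolution carries it.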
if SciMLBase.isinplace(prob)
resid = prob.f.resid_prototype === nothing ? similar(prob.u0) :
prob.f.resid_prototype
prob.f(resid, sol.u, prob.p)
else
resid = prob.f(sol.u, prob.p)
end

stats = SciMLBase.NLStats(sol.stats.fevals, sol.stats.gevals, -1, -1, -1)

return SciMLBase.build_solution(
prob, alg, sol.u, resid; retcode = sol.retcode, original = sol, stats)
end

end
2 changes: 1 addition & 1 deletion src/NonlinearSolve.jl
@@ -148,7 +148,7 @@ export NonlinearSolvePolyAlgorithm, RobustMultiNewton, FastShortcutNonlinearPoly

# Extension Algorithms
export LeastSquaresOptimJL, FastLevenbergMarquardtJL, CMINPACK, NLsolveJL, NLSolversJL,
FixedPointAccelerationJL, SpeedMappingJL, SIAMFANLEquationsJL
FixedPointAccelerationJL, SpeedMappingJL, SIAMFANLEquationsJL, OptimizationJL

# Advanced Algorithms -- Without Bells and Whistles
export GeneralizedFirstOrderAlgorithm, ApproximateJacobianSolveAlgorithm, GeneralizedDFSane
37 changes: 37 additions & 0 deletions src/algorithms/extension_algs.jl
@@ -484,3 +484,40 @@ function SIAMFANLEquationsJL(; method = :newton, delta = 1e-3, linsolve = nothin
end
return SIAMFANLEquationsJL(method, delta, linsolve, m, beta, autodiff)
end

"""
OptimizationJL(solver, autodiff)
Wrapper over [Optimization.jl](https://docs.sciml.ai/Optimization/stable/) to solve
Nonlinear Equations and Nonlinear Least Squares Problems.
!!! danger "Using OptimizationJL for Nonlinear Systems"
This is a absolutely terrible idea. We construct the objective function as the L2-norm
of the residual function and impose an equality constraint. This is very inefficient
and exists to convince people from HackerNews that this is a horrible idea.
### Arguments
- `solver`: The solver to use from Optimization.jl. In general for NLLS, all of the
solvers will work. However, for nonlinear systems, only the solvers that support
equality constraints will work.
- `autodiff`: Automatic Differentiation Backend that Optimization.jl should use. See
https://docs.sciml.ai/Optimization/stable/API/ad/ for more details. Defaults to
`SciMLBase.NoAD()`.
!!! note
This algorithm is only available if `Optimization.jl` is installed.
"""
struct OptimizationJL{S, AD} <: AbstractNonlinearSolveExtensionAlgorithm
solver::S
autodiff::AD

function OptimizationJL(solver, autodiff = SciMLBase.NoAD())
if Base.get_extension(@__MODULE__, :NonlinearSolveOptimizationExt) === nothing
error("OptimizationJL requires Optimization.jl to be loaded")
end
return new{typeof(solver), typeof(autodiff)}(solver, autodiff)
end
end
18 changes: 18 additions & 0 deletions test/wrappers/nlls_tests.jl
@@ -46,6 +46,24 @@
end
end

@testitem "Optimization.jl" setup=[WrapperNLLSSetup] begin
import Optimization, OptimizationMOI, Ipopt

prob_oop = NonlinearLeastSquaresProblem{false}(loss_function, θ_init, x)
prob_iip = NonlinearLeastSquaresProblem(
NonlinearFunction(loss_function; resid_prototype = zero(y_target)), θ_init, x)

nlls_problems = [prob_oop, prob_iip]

solver = OptimizationJL(Ipopt.Optimizer(), AutoForwardDiff())

for prob in nlls_problems
sol = solve(prob, solver; maxiters = 10000, abstol = 1e-8)
# Ipopt fails currently
@test sol isa SciMLBase.NonlinearSolution
end
end

@testitem "FastLevenbergMarquardt.jl + CMINPACK: Jacobian Provided" setup=[WrapperNLLSSetup] begin
function jac!(J, θ, p)
resid = zeros(length(p))
13 changes: 10 additions & 3 deletions test/wrappers/rootfind_tests.jl
@@ -2,8 +2,9 @@
using Reexport
@reexport using LinearAlgebra
import NLSolvers, NLsolve, SIAMFANLEquations, MINPACK
import Optimization, OptimizationMOI, Ipopt

export NLSolvers
export NLSolvers, Ipopt
end

@testitem "Steady State Problems" setup=[WrapperRootfindImports] begin
@@ -48,7 +49,8 @@

for alg in [
NLSolversJL(NLSolvers.LineSearch(NLSolvers.Newton(), NLSolvers.Backtracking())),
NLsolveJL(), CMINPACK(), SIAMFANLEquationsJL()]
NLsolveJL(), CMINPACK(), SIAMFANLEquationsJL(),
OptimizationJL(Ipopt.Optimizer(), AutoForwardDiff())]
local sol
sol = solve(prob_iip, alg)
@test SciMLBase.successful_retcode(sol.retcode)
@@ -61,7 +63,8 @@
prob_oop = NonlinearProblem{false}(f_oop, u0)
for alg in [
NLSolversJL(NLSolvers.LineSearch(NLSolvers.Newton(), NLSolvers.Backtracking())),
NLsolveJL(), CMINPACK(), SIAMFANLEquationsJL()]
NLsolveJL(), CMINPACK(), SIAMFANLEquationsJL(),
OptimizationJL(Ipopt.Optimizer(), AutoForwardDiff())]
local sol
sol = solve(prob_oop, alg)
@test SciMLBase.successful_retcode(sol.retcode)
@@ -128,4 +131,8 @@
@test maximum(abs, sol.resid) < 1e-6
sol = solve(ProbN, SIAMFANLEquationsJL(; method = :pseudotransient); abstol = 1e-8)
@test maximum(abs, sol.resid) < 1e-6
sol = solve(ProbN, OptimizationJL(Ipopt.Optimizer(), AutoForwardDiff()); abstol = 1e-8)
@test maximum(abs, sol.resid) < 1e-6
sol = solve(ProbN, OptimizationJL(Ipopt.Optimizer(), AutoFiniteDiff()); abstol = 1e-8)
@test maximum(abs, sol.resid) < 1e-6
end
