Update #711: checking if OptimizationFunction is used for derivative-based optimizers (#715)

Merged 33 commits on Apr 7, 2024
Changes shown from 23 of the 33 commits.

Commits
01f4479
Update OptimizationOptimJL.jl
ParasPuneetSingh Mar 5, 2024
404f4f3
Merge pull request #1 from ParasPuneetSingh/ParasPuneetSingh-patch-1
ParasPuneetSingh Mar 5, 2024
b2c044d
Update OptimizationOptimJL.jl
ParasPuneetSingh Mar 9, 2024
ef4e4a5
Update OptimizationBBO.jl
ParasPuneetSingh Mar 9, 2024
ed73535
Update OptimizationCMAEvolutionStrategy.jl
ParasPuneetSingh Mar 9, 2024
134d7fc
Update OptimizationEvolutionary.jl
ParasPuneetSingh Mar 9, 2024
702e095
Update OptimizationFlux.jl
ParasPuneetSingh Mar 9, 2024
6e2afe3
Update OptimizationGCMAES.jl
ParasPuneetSingh Mar 9, 2024
d1c0111
Update OptimizationOptimJL.jl
ParasPuneetSingh Mar 12, 2024
1ba53fb
Update OptimizationNLopt.jl
ParasPuneetSingh Mar 12, 2024
1767142
Update OptimizationOptimisers.jl
ParasPuneetSingh Mar 12, 2024
da5955b
Update OptimizationPRIMA.jl
ParasPuneetSingh Mar 13, 2024
64e16ec
Update OptimizationPolyalgorithms.jl
ParasPuneetSingh Mar 13, 2024
3ae278f
Update OptimizationSpeedMapping.jl
ParasPuneetSingh Mar 13, 2024
4887714
Update OptimizationBBO.jl
ParasPuneetSingh Mar 13, 2024
a1348e3
Update OptimizationNLopt.jl
ParasPuneetSingh Mar 13, 2024
0b6f600
Update OptimizationOptimJL.jl
ParasPuneetSingh Mar 13, 2024
6ea74cf
Update OptimizationOptimJL.jl
ParasPuneetSingh Mar 13, 2024
5132aea
Update OptimizationOptimJL.jl
ParasPuneetSingh Mar 13, 2024
785c218
Update OptimizationMOI.jl
ParasPuneetSingh Mar 13, 2024
8f3e381
Update OptimizationOptimJL.jl
ParasPuneetSingh Mar 26, 2024
a5652d7
Update OptimizationOptimJL.jl
ParasPuneetSingh Mar 26, 2024
6379379
Update OptimizationOptimJL.jl
ParasPuneetSingh Mar 26, 2024
6fb808b
Update lib/OptimizationOptimJL/src/OptimizationOptimJL.jl
Vaibhavdixit02 Apr 1, 2024
ea6d522
Update OptimizationNLopt.jl
ParasPuneetSingh Apr 3, 2024
73cd120
Update OptimizationOptimJL.jl
ParasPuneetSingh Apr 3, 2024
bae6741
Update OptimizationOptimJL.jl
ParasPuneetSingh Apr 3, 2024
b9854f5
Update OptimizationOptimJL.jl
ParasPuneetSingh Apr 3, 2024
15a5fa0
Update OptimizationPolyalgorithms.jl
ParasPuneetSingh Apr 4, 2024
0857a62
Update Project.toml
ParasPuneetSingh Apr 4, 2024
bb82850
Update Project.toml
ParasPuneetSingh Apr 5, 2024
2d6b7c4
Merge branch 'master' into master
Vaibhavdixit02 Apr 7, 2024
e0610cd
Update Project.toml
Vaibhavdixit02 Apr 7, 2024
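For context, a minimal sketch of the behavior these trait additions are meant to enable, written against the public Optimization.jl API. This is hypothetical usage, not part of the diff; the package names, the AutoForwardDiff backend, and the Rosenbrock example are assumptions, while the error text is the one added in OptimizationOptimJL.jl below.

using Optimization, OptimizationOptimJL, ForwardDiff

rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
u0 = zeros(2)
p = [1.0, 100.0]

# A bare objective carries no gradient, so a derivative-based optimizer such as
# BFGS should now error and point the user at OptimizationFunction.
prob_plain = OptimizationProblem(rosenbrock, u0, p)
# solve(prob_plain, BFGS())  # expected to error: "Use OptimizationFunction to pass the derivatives ..."

# With an AD backend the gradient is generated automatically and BFGS runs.
optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
prob = OptimizationProblem(optf, u0, p)
sol = solve(prob, BFGS())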
2 changes: 2 additions & 0 deletions lib/OptimizationBBO/src/OptimizationBBO.jl
@@ -10,6 +10,8 @@ SciMLBase.requiresbounds(::BBO) = true
SciMLBase.allowsbounds(::BBO) = true
SciMLBase.supports_opt_cache_interface(opt::BBO) = true



for j in string.(BlackBoxOptim.SingleObjectiveMethodNames)
eval(Meta.parse("Base.@kwdef struct BBO_" * j * " <: BBO method=:" * j * " end"))
eval(Meta.parse("export BBO_" * j))
lib/OptimizationCMAEvolutionStrategy/src/OptimizationCMAEvolutionStrategy.jl
@@ -10,6 +10,10 @@ struct CMAEvolutionStrategyOpt end

SciMLBase.allowsbounds(::CMAEvolutionStrategyOpt) = true
SciMLBase.supports_opt_cache_interface(opt::CMAEvolutionStrategyOpt) = true
SciMLBase.requiresgradient(::CMAEvolutionStrategyOpt) = false
SciMLBase.requireshessian(::CMAEvolutionStrategyOpt) = false
SciMLBase.requiresconsjac(::CMAEvolutionStrategyOpt) = false
SciMLBase.requiresconshess(::CMAEvolutionStrategyOpt) = false

function __map_optimizer_args(prob::OptimizationCache, opt::CMAEvolutionStrategyOpt;
callback = nothing,
4 changes: 4 additions & 0 deletions lib/OptimizationEvolutionary/src/OptimizationEvolutionary.jl
@@ -7,6 +7,10 @@ using Optimization.SciMLBase
SciMLBase.allowsbounds(opt::Evolutionary.AbstractOptimizer) = true
SciMLBase.allowsconstraints(opt::Evolutionary.AbstractOptimizer) = true
SciMLBase.supports_opt_cache_interface(opt::Evolutionary.AbstractOptimizer) = true
SciMLBase.requiresgradient(opt::Evolutionary.AbstractOptimizer) = false
SciMLBase.requireshessian(opt::Evolutionary.AbstractOptimizer) = false
SciMLBase.requiresconsjac(opt::Evolutionary.AbstractOptimizer) = false
SciMLBase.requiresconshess(opt::Evolutionary.AbstractOptimizer) = false

decompose_trace(trace::Evolutionary.OptimizationTrace) = last(trace)
decompose_trace(trace::Evolutionary.OptimizationTraceRecord) = trace
4 changes: 4 additions & 0 deletions lib/OptimizationFlux/src/OptimizationFlux.jl
@@ -5,6 +5,10 @@ using Reexport, Printf, ProgressLogging
using Optimization.SciMLBase

SciMLBase.supports_opt_cache_interface(opt::Flux.Optimise.AbstractOptimiser) = true
SciMLBase.requiresgradient(opt::Flux.Optimise.AbstractOptimiser) = true
SciMLBase.requireshessian(opt::Flux.Optimise.AbstractOptimiser) = false
SciMLBase.requiresconsjac(opt::Flux.Optimise.AbstractOptimiser) = false
SciMLBase.requiresconshess(opt::Flux.Optimise.AbstractOptimiser) = false

function SciMLBase.__init(prob::SciMLBase.OptimizationProblem,
opt::Flux.Optimise.AbstractOptimiser,
5 changes: 5 additions & 0 deletions lib/OptimizationGCMAES/src/OptimizationGCMAES.jl
@@ -12,6 +12,11 @@ SciMLBase.requiresbounds(::GCMAESOpt) = true
SciMLBase.allowsbounds(::GCMAESOpt) = true
SciMLBase.allowscallback(::GCMAESOpt) = false
SciMLBase.supports_opt_cache_interface(opt::GCMAESOpt) = true
SciMLBase.requiresgradient(::GCMAESOpt) = true
SciMLBase.requireshessian(::GCMAESOpt) = false
SciMLBase.requiresconsjac(::GCMAESOpt) = false
SciMLBase.requiresconshess(::GCMAESOpt) = false


function __map_optimizer_args(cache::OptimizationCache, opt::GCMAESOpt;
callback = nothing,
5 changes: 5 additions & 0 deletions lib/OptimizationMOI/src/OptimizationMOI.jl
@@ -16,6 +16,11 @@ const MOI = MathOptInterface

const DenseOrSparse{T} = Union{Matrix{T}, SparseMatrixCSC{T}}

SciMLBase.requiresgradient(opt::Union{MOI.AbstractOptimizer,MOI.OptimizerWithAttributes}) = true
SciMLBase.requireshessian(opt::Union{MOI.AbstractOptimizer,MOI.OptimizerWithAttributes}) = true
SciMLBase.requiresconsjac(opt::Union{MOI.AbstractOptimizer,MOI.OptimizerWithAttributes}) = true
SciMLBase.requiresconshess(opt::Union{MOI.AbstractOptimizer,MOI.OptimizerWithAttributes}) = true

function SciMLBase.allowsbounds(opt::Union{MOI.AbstractOptimizer,
MOI.OptimizerWithAttributes})
true
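The MOI traits above are all set to true because MOI-backed interior-point solvers generally consume gradients, Hessians, and constraint Jacobians/Hessians. A hedged usage sketch follows; Ipopt and AutoForwardDiff are illustrative assumptions, not part of this diff.

using Optimization, OptimizationMOI, Ipopt, ForwardDiff

rosen(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
# AutoForwardDiff supplies the gradient and Hessian that the MOI traits declare as required.
optf = OptimizationFunction(rosen, Optimization.AutoForwardDiff())
prob = OptimizationProblem(optf, zeros(2), [1.0, 100.0])
sol = solve(prob, Ipopt.Optimizer())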
33 changes: 33 additions & 0 deletions lib/OptimizationNLopt/src/OptimizationNLopt.jl
@@ -9,6 +9,39 @@ using Optimization.SciMLBase
SciMLBase.allowsbounds(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true
SciMLBase.supports_opt_cache_interface(opt::Union{NLopt.Algorithm, NLopt.Opt}) = true

function SciMLBase.requiresgradient(opt::NLopt.Algorithm) # second letter of the algorithm name: 'D' = derivative-based, 'N' = derivative-free; see https://github.com/JuliaOpt/NLopt.jl/blob/master/src/NLopt.jl#L18C7-L18C16
    str_opt = string(opt)
    if str_opt[2] == 'D'
        return true
    else
        return false
    end
end

function SciMLBase.requireshessian(opt::NLopt.Algorithm) # https://github.com/JuliaOpt/NLopt.jl/blob/master/src/NLopt.jl#L18C7-L18C16
    str_opt = string(opt)
    if str_opt[2] == 'D' && str_opt[4] == 'N'
        return true
    else
        return false
    end
end

function SciMLBase.requiresconsjac(opt::NLopt.Algorithm) # https://github.com/JuliaOpt/NLopt.jl/blob/master/src/NLopt.jl#L18C7-L18C16
    str_opt = string(opt)
    if str_opt[3] == 'O' || str_opt[3] == 'I' || str_opt[5] == 'G'
        return true
    else
        return false
    end
end

function __map_optimizer_args!(cache::OptimizationCache, opt::NLopt.Opt;
callback = nothing,
maxiters::Union{Number, Nothing} = nothing,
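A quick illustration of the naming convention the string indexing above relies on: NLopt algorithm names start with G/L for global/local, followed by D/N for derivative-based/derivative-free. This is a hypothetical REPL snippet, assuming NLopt.jl's algorithm enum names.

using NLopt

string(NLopt.LD_LBFGS)       # "LD_LBFGS": second character is 'D', so a gradient is required
string(NLopt.LN_NELDERMEAD)  # "LN_NELDERMEAD": second character is 'N', derivative-free
string(NLopt.GN_DIRECT)      # "GN_DIRECT": global, derivative-free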
9 changes: 8 additions & 1 deletion lib/OptimizationOptimJL/src/OptimizationOptimJL.jl
@@ -14,6 +14,13 @@ SciMLBase.requiresbounds(opt::Optim.SAMIN) = true
SciMLBase.supports_opt_cache_interface(opt::Optim.AbstractOptimizer) = true
SciMLBase.supports_opt_cache_interface(opt::Union{Optim.Fminbox, Optim.SAMIN}) = true
SciMLBase.supports_opt_cache_interface(opt::Optim.ConstrainedOptimizer) = true
SciMLBase.requiresgradient(opt::Optim.AbstractOptimizer) = !(opt isa Optim.ZerothOrderOptimizer)
Member review comment: Looks good, but this wrapper will need more of the traits set to true (Newton, NewtonTrustRegion, and IPNewton).

Member review comment: And not all of them follow the abstract type; especially check the SAMIN and Fminbox algorithms.

Vaibhavdixit02 marked this conversation as resolved.
SciMLBase.requiresgradient(::IPNewton) = true
SciMLBase.requireshessian(::IPNewton) = true
SciMLBase.requiresconsjac(::IPNewton) = true
SciMLBase.requireshessian(opt::Optim.NewtonTrustRegion) = true
SciMLBase.requireshessian(opt::Optim.Newton) = true
SciMLBase.requiresgradient(opt::Optim.Fminbox) = true

function __map_optimizer_args(cache::OptimizationCache,
opt::Union{Optim.AbstractOptimizer, Optim.Fminbox,
@@ -128,7 +135,7 @@ function SciMLBase.__solve(cache::OptimizationCache{
local x, cur, state

cur, state = iterate(cache.data)

!(cache.opt isa Optim.ZerothOrderOptimizer) && cache.f.grad === nothing &&
error("Use OptimizationFunction to pass the derivatives or automatically generate them with one of the autodiff backends")

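The error message added here points at OptimizationFunction; besides an AD backend, the derivatives can also be passed by hand. A minimal sketch, assuming OptimizationFunction's grad keyword interface; the example function and names are hypothetical, not part of this diff.

using Optimization, OptimizationOptimJL

rosen(u, p) = (1.0 - u[1])^2 + 100.0 * (u[2] - u[1]^2)^2
function rosen_grad!(G, u, p)
    # analytic gradient of the 2-D Rosenbrock function above
    G[1] = -2.0 * (1.0 - u[1]) - 400.0 * u[1] * (u[2] - u[1]^2)
    G[2] = 200.0 * (u[2] - u[1]^2)
end

optf = OptimizationFunction(rosen; grad = rosen_grad!)
prob = OptimizationProblem(optf, zeros(2))
sol = solve(prob, BFGS())   # satisfies requiresgradient without an autodiff backend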
1 change: 1 addition & 0 deletions lib/OptimizationOptimisers/src/OptimizationOptimisers.jl
@@ -5,6 +5,7 @@ using Reexport, Printf, ProgressLogging
using Optimization.SciMLBase

SciMLBase.supports_opt_cache_interface(opt::AbstractRule) = true
SciMLBase.requiresgradient(opt::AbstractRule) = true
include("sophia.jl")

function SciMLBase.__init(prob::SciMLBase.OptimizationProblem, opt::AbstractRule,
3 changes: 3 additions & 0 deletions lib/OptimizationPRIMA/src/OptimizationPRIMA.jl
@@ -15,6 +15,9 @@ SciMLBase.supports_opt_cache_interface(::PRIMASolvers) = true
SciMLBase.allowsconstraints(::Union{LINCOA, COBYLA}) = true
SciMLBase.allowsbounds(opt::Union{BOBYQA, LINCOA, COBYLA}) = true
SciMLBase.requiresconstraints(opt::COBYLA) = true
SciMLBase.requiresgradient(opt::Union{BOBYQA, LINCOA, COBYLA}) = true
SciMLBase.requiresconsjac(opt::Union{LINCOA, COBYLA}) = true


function Optimization.OptimizationCache(prob::SciMLBase.OptimizationProblem,
opt::PRIMASolvers, data;
lib/OptimizationPolyalgorithms/src/OptimizationPolyalgorithms.jl
@@ -4,6 +4,8 @@ using Reexport
@reexport using Optimization
using Optimization.SciMLBase, OptimizationOptimJL, OptimizationOptimisers

SciMLBase.requiresgradient(opt::PolyOpt) = true

struct PolyOpt end
Member review comment: PolyOpt is defined after the trait call; switch around the order.
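A sketch of the reordering the comment asks for (definition before the trait method; same two lines, just swapped):

struct PolyOpt end

SciMLBase.requiresgradient(opt::PolyOpt) = true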


function SciMLBase.__solve(prob::OptimizationProblem,
lib/OptimizationSpeedMapping/src/OptimizationSpeedMapping.jl
@@ -11,6 +11,7 @@ struct SpeedMappingOpt end
SciMLBase.allowsbounds(::SpeedMappingOpt) = true
SciMLBase.allowscallback(::SpeedMappingOpt) = false
SciMLBase.supports_opt_cache_interface(opt::SpeedMappingOpt) = true
SciMLBase.requiresgradient(opt::SpeedMappingOpt) = true

function __map_optimizer_args(cache::OptimizationCache, opt::SpeedMappingOpt;
callback = nothing,