
Merge branch 'master' into Vaibhavdixit02-patch-4
Vaibhavdixit02 committed Dec 27, 2023
2 parents 7198ab9 + f800cf4 commit 43abb68
Showing 37 changed files with 669 additions and 656 deletions.
53 changes: 53 additions & 0 deletions .github/workflows/Downgrade.yml
@@ -0,0 +1,53 @@
name: Downgrade
on:
  pull_request:
    branches:
      - master
    paths-ignore:
      - 'docs/**'
  push:
    branches:
      - master
    paths-ignore:
      - 'docs/**'
jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        version: ['1']
        group:
          - Core
          - OptimizationBBO
          - OptimizationCMAEvolutionStrategy
          - OptimizationEvolutionary
          - OptimizationFlux
          - OptimizationGCMAES
          - OptimizationMetaheuristics
          - OptimizationMOI
          - OptimizationMultistartOptimization
          - OptimizationNLopt
          #- OptimizationNonconvex
          - OptimizationNOMAD
          - OptimizationOptimJL
          - OptimizationOptimisers
          - OptimizationPRIMA
          - OptimizationQuadDIRECT
          - OptimizationSpeedMapping
          - OptimizationPolyalgorithms
    steps:
      - uses: actions/checkout@v4
      - uses: julia-actions/setup-julia@v1
        with:
          version: ${{ matrix.version }}
      - uses: cjdoris/julia-downgrade-compat-action@v1
        # if: ${{ matrix.version == '1.6' }}
        with:
          skip: Pkg,TOML
      - uses: julia-actions/julia-buildpkg@v1
      - if: ${{ matrix.group == 'OptimizationQuadDIRECT' }}
        run: julia --project -e 'using Pkg; Pkg.Registry.add(RegistrySpec(url = "https://github.com/HolyLab/HolyLabRegistry.git")); Pkg.add("QuadDIRECT")'
      - uses: julia-actions/julia-runtest@v1
        env:
          GROUP: ${{ matrix.group }}
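The `GROUP` environment variable set in the final step is what drives the per-sub-package test matrix above. A minimal sketch of how a test harness can dispatch on it — the file layout and names here are assumptions for illustration, not necessarily what this repository's `test/runtests.jl` actually does:

```julia
# Hypothetical dispatch on the GROUP variable exported by the workflow above.
# Paths and file names are illustrative assumptions.
using Pkg

const GROUP = get(ENV, "GROUP", "Core")  # "Core" when run locally outside CI

if GROUP == "Core"
    include("core_tests.jl")             # assumed name for the core test file
else
    # Each matrix entry names a sub-package under lib/; develop it, then test it.
    Pkg.develop(PackageSpec(path = joinpath(dirname(@__DIR__), "lib", GROUP)))
    Pkg.test(GROUP)
end
```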
42 changes: 20 additions & 22 deletions Project.toml
@@ -14,7 +14,6 @@ Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
ProgressLogging = "33c8b6b6-d38a-422a-b730-caa89a2f386c"
Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
Requires = "ae029012-a4dd-5104-9daa-d747884805df"
SciMLBase = "0bca4576-84f4-4d90-8ffe-ffa030f20462"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
SymbolicIndexingInterface = "2efcf032-c050-4f8e-a9bb-153293bab1f5"
@@ -42,31 +41,30 @@ OptimizationTrackerExt = "Tracker"
OptimizationZygoteExt = "Zygote"

[compat]
ADTypes = "0.1.5, 0.2"
ArrayInterface = "6, 7"
ConsoleProgressMonitor = "0.1"
DocStringExtensions = "0.8, 0.9"
Enzyme = "0.11.2"
FiniteDiff = "2"
ForwardDiff = "0.10"
LinearAlgebra = "1"
Logging = "1"
LoggingExtras = "0.4, 0.5, 1"
ModelingToolkit = "8"
ADTypes = "0.2.5"
ArrayInterface = "7.6"
ConsoleProgressMonitor = "0.1.1"
DocStringExtensions = "0.9"
Enzyme = "0.11.9"
FiniteDiff = "2.12"
ForwardDiff = "0.10.26"
LinearAlgebra = "1.9"
Logging = "1.9"
LoggingExtras = "0.4"
ModelingToolkit = "8.74"
Pkg = "1"
Printf = "1"
Printf = "1.9"
ProgressLogging = "0.1"
Reexport = "0.2, 1.0"
Requires = "1.0"
ReverseDiff = "1"
SciMLBase = "1.79.0, 2"
SparseArrays = "1"
SparseDiffTools = "2"
Reexport = "1.2"
ReverseDiff = "1.14"
SciMLBase = "2.11"
SparseArrays = "1.9"
SparseDiffTools = "2.14"
SymbolicIndexingInterface = "0.3"
Symbolics = "5"
Symbolics = "5.12"
TerminalLoggers = "0.1"
Tracker = "0.2"
Zygote = "0.6"
Tracker = "0.2.29"
Zygote = "0.6.67"
julia = "1.9"

[extras]
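The compat entries above are tightened from broad ranges (e.g. `ADTypes = "0.1.5, 0.2"`) to specific minimum versions, which gives the new Downgrade workflow meaningful lower bounds to resolve against. A rough local equivalent for checking a single lower bound — the `Pkg.compat` pinning here is only an illustration, not what the CI action actually does:

```julia
# Illustrative only: resolve and test against one declared lower bound.
using Pkg

Pkg.activate(".")                 # the Optimization.jl project
Pkg.compat("ADTypes", "=0.2.5")   # pin to the minimum version from [compat]
Pkg.resolve()
Pkg.test()
```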
2 changes: 1 addition & 1 deletion README.md
@@ -4,7 +4,7 @@
[![Global Docs](https://img.shields.io/badge/docs-SciML-blue.svg)](https://docs.sciml.ai/Optimization/stable/)

[![codecov](https://codecov.io/gh/SciML/Optimization.jl/branch/master/graph/badge.svg)](https://codecov.io/gh/SciML/Optimization.jl)
[![Build Status](https://github.com/SciML/Optimization.jl/workflows/CI/badge.svg)](https://github.com/SciML/Optimization.jl/actions?query=workflow%3ACI)
[![Build Status](https://github.com/SciML/Optimization.jl/workflows/CI/badge.svg)](https://github.com/SciML/Optimization.jl/actions/workflows/CI.yml?query=branch%3Amaster++)

[![ColPrac: Contributor's Guide on Collaborative Practices for Community Packages](https://img.shields.io/badge/ColPrac-Contributor%27s%20Guide-blueviolet)](https://github.com/SciML/ColPrac)
[![SciML Code Style](https://img.shields.io/static/v1?label=code%20style&message=SciML&color=9558b2&labelColor=389826)](https://github.com/SciML/SciMLStyle)
1 change: 1 addition & 0 deletions docs/src/API/optimization_function.md
@@ -15,6 +15,7 @@ The choices for the auto-AD fill-ins with quick descriptions are:
- `AutoFiniteDiff()`: Finite differencing, not optimal but always applicable
- `AutoModelingToolkit()`: The fastest choice for large scalar optimizations
- `AutoEnzyme()`: Highly performant AD choice for type stable and optimized code

## Automatic Differentiation Choice API

The following sections describe the Auto-AD choices in detail.
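For context on the `adtype` choices documented in the hunk above, a minimal usage sketch; the Rosenbrock objective and BFGS solver are illustrative and not part of this diff:

```julia
using Optimization, OptimizationOptimJL
using ForwardDiff   # loading the AD package activates the matching extension

rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
u0 = zeros(2)
p = [1.0, 100.0]

# The second positional argument selects the AD backend that fills in
# gradients and Hessians; swap in AutoFiniteDiff(), AutoZygote(), etc.
optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
prob = OptimizationProblem(optf, u0, p)
sol = solve(prob, BFGS())
```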
3 changes: 2 additions & 1 deletion docs/src/optimization_packages/polyopt.md
@@ -1,6 +1,6 @@
# OptimizationPolyalgorithms.jl

OptimizationPolyalgorithms.jl is a package for collecting polyalgorithms formed by fusing popular optimization solvers of different characteristics.
OptimizationPolyalgorithms.jl is a package for collecting polyalgorithms formed by fusing popular optimization solvers of different characteristics.

## Installation: OptimizationPolyalgorithms

@@ -10,6 +10,7 @@ To use this package, install the OptimizationPolyalgorithms package:
import Pkg;
Pkg.add("OptimizationPolyalgorithms");
```

## Algorithms

Right now we support the following polyalgorithms.
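The polyalgorithm list itself is cut off in this view. As a hedged usage sketch, assuming `PolyOpt()` is among the supported polyalgorithms:

```julia
using Optimization, OptimizationPolyalgorithms
using ForwardDiff   # needed so the AutoForwardDiff extension is available

rosenbrock(u, p) = (p[1] - u[1])^2 + p[2] * (u[2] - u[1]^2)^2
optf = OptimizationFunction(rosenbrock, Optimization.AutoForwardDiff())
prob = OptimizationProblem(optf, zeros(2), [1.0, 100.0])

# PolyOpt is assumed here to be one of the supported polyalgorithms.
sol = solve(prob, PolyOpt(), maxiters = 1000)
```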
10 changes: 5 additions & 5 deletions ext/OptimizationEnzymeExt.jl
@@ -16,8 +16,8 @@ isdefined(Base, :get_extension) ? (using Enzyme) : (using ..Enzyme)
end

function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
adtype::AutoEnzyme, p,
num_cons = 0)
adtype::AutoEnzyme, p,
num_cons = 0)
if f.grad === nothing
grad = let
function (res, θ, args...)
@@ -154,9 +154,9 @@ function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
end

function Optimization.instantiate_function(f::OptimizationFunction{true},
cache::Optimization.ReInitCache,
adtype::AutoEnzyme,
num_cons = 0)
cache::Optimization.ReInitCache,
adtype::AutoEnzyme,
num_cons = 0)
p = cache.p

if f.grad === nothing
4 changes: 2 additions & 2 deletions ext/OptimizationFiniteDiffExt.jl
@@ -9,7 +9,7 @@ isdefined(Base, :get_extension) ? (using FiniteDiff) : (using ..FiniteDiff)
const FD = FiniteDiff

function Optimization.instantiate_function(f, x, adtype::AutoFiniteDiff, p,
num_cons = 0)
num_cons = 0)
_f = (θ, args...) -> first(f.f(θ, p, args...))
updatecache = (cache, x) -> (cache.xmm .= x; cache.xmp .= x; cache.xpm .= x; cache.xpp .= x; return cache)

@@ -117,7 +117,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoFiniteDiff, p,
end

function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
adtype::AutoFiniteDiff, num_cons = 0)
adtype::AutoFiniteDiff, num_cons = 0)
_f = (θ, args...) -> first(f.f(θ, cache.p, args...))
updatecache = (cache, x) -> (cache.xmm .= x; cache.xmp .= x; cache.xpm .= x; cache.xpp .= x; return cache)

10 changes: 5 additions & 5 deletions ext/OptimizationForwardDiffExt.jl
@@ -14,8 +14,8 @@ function default_chunk_size(len)
end

function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
adtype::AutoForwardDiff{_chunksize}, p,
num_cons = 0) where {_chunksize}
adtype::AutoForwardDiff{_chunksize}, p,
num_cons = 0) where {_chunksize}
chunksize = _chunksize === nothing ? default_chunk_size(length(x)) : _chunksize

_f = (θ, args...) -> first(f.f(θ, p, args...))
@@ -90,9 +90,9 @@ function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
end

function Optimization.instantiate_function(f::OptimizationFunction{true},
cache::Optimization.ReInitCache,
adtype::AutoForwardDiff{_chunksize},
num_cons = 0) where {_chunksize}
cache::Optimization.ReInitCache,
adtype::AutoForwardDiff{_chunksize},
num_cons = 0) where {_chunksize}
chunksize = _chunksize === nothing ? default_chunk_size(length(cache.u0)) : _chunksize

_f = (θ, args...) -> first(f.f(θ, cache.p, args...))
4 changes: 2 additions & 2 deletions ext/OptimizationMTKExt.jl
@@ -7,7 +7,7 @@ import Optimization.ADTypes: AutoModelingToolkit
isdefined(Base, :get_extension) ? (using ModelingToolkit) : (using ..ModelingToolkit)

function Optimization.instantiate_function(f, x, adtype::AutoModelingToolkit, p,
num_cons = 0)
num_cons = 0)
p = isnothing(p) ? SciMLBase.NullParameters() : p

sys = ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, x, p;
@@ -52,7 +52,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoModelingToolkit, p,
end

function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
adtype::AutoModelingToolkit, num_cons = 0)
adtype::AutoModelingToolkit, num_cons = 0)
p = isnothing(cache.p) ? SciMLBase.NullParameters() : cache.p

sys = ModelingToolkit.modelingtoolkitize(OptimizationProblem(f, cache.u0, cache.p;
6 changes: 3 additions & 3 deletions ext/OptimizationReverseDiffExt.jl
@@ -18,8 +18,8 @@ function default_chunk_size(len)
end

function Optimization.instantiate_function(f, x, adtype::AutoReverseDiff,
p = SciMLBase.NullParameters(),
num_cons = 0)
p = SciMLBase.NullParameters(),
num_cons = 0)
_f = (θ, args...) -> first(f.f(θ, p, args...))

chunksize = default_chunk_size(length(x))
@@ -151,7 +151,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoReverseDiff,
end

function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
adtype::AutoReverseDiff, num_cons = 0)
adtype::AutoReverseDiff, num_cons = 0)
_f = (θ, args...) -> first(f.f(θ, cache.p, args...))

chunksize = default_chunk_size(length(cache.u0))
20 changes: 10 additions & 10 deletions ext/OptimizationSparseDiffExt.jl
@@ -20,8 +20,8 @@ function default_chunk_size(len)
end

function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
adtype::AutoSparseForwardDiff{_chunksize}, p,
num_cons = 0) where {_chunksize}
adtype::AutoSparseForwardDiff{_chunksize}, p,
num_cons = 0) where {_chunksize}
if maximum(getfield.(methods(f.f), :nargs)) > 3
error("$(string(adtype)) with SparseDiffTools does not support functions with more than 2 arguments")
end
@@ -127,9 +127,9 @@ function Optimization.instantiate_function(f::OptimizationFunction{true}, x,
end

function Optimization.instantiate_function(f::OptimizationFunction{true},
cache::Optimization.ReInitCache,
adtype::AutoSparseForwardDiff{_chunksize},
num_cons = 0) where {_chunksize}
cache::Optimization.ReInitCache,
adtype::AutoSparseForwardDiff{_chunksize},
num_cons = 0) where {_chunksize}
if maximum(getfield.(methods(f.f), :nargs)) > 3
error("$(string(adtype)) with SparseDiffTools does not support functions with more than 2 arguments")
end
@@ -237,7 +237,7 @@ end
const FD = FiniteDiff

function Optimization.instantiate_function(f, x, adtype::AutoSparseFiniteDiff, p,
num_cons = 0)
num_cons = 0)
if maximum(getfield.(methods(f.f), :nargs)) > 3
error("$(string(adtype)) with SparseDiffTools does not support functions with more than 2 arguments")
end
@@ -362,7 +362,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoSparseFiniteDiff, p
end

function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
adtype::AutoSparseFiniteDiff, num_cons = 0)
adtype::AutoSparseFiniteDiff, num_cons = 0)
if maximum(getfield.(methods(f.f), :nargs)) > 3
error("$(string(adtype)) with SparseDiffTools does not support functions with more than 2 arguments")
end
@@ -493,8 +493,8 @@ end
struct OptimizationSparseReverseTag end

function Optimization.instantiate_function(f, x, adtype::AutoSparseReverseDiff,
p = SciMLBase.NullParameters(),
num_cons = 0)
p = SciMLBase.NullParameters(),
num_cons = 0)
_f = (θ, args...) -> first(f.f(θ, p, args...))

chunksize = default_chunk_size(length(x))
@@ -668,7 +668,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoSparseReverseDiff,
end

function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
adtype::AutoSparseReverseDiff, num_cons = 0)
adtype::AutoSparseReverseDiff, num_cons = 0)
_f = (θ, args...) -> first(f.f(θ, cache.p, args...))

chunksize = default_chunk_size(length(cache.u0))
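The guards repeated throughout this file reject objectives that take more than two arguments (`θ` and `p`) when a sparse backend is used with SparseDiffTools. A sketch of a compliant setup — the objective and solver are illustrative assumptions:

```julia
using Optimization, OptimizationOptimJL
using SparseDiffTools, ForwardDiff   # activates the sparse AD extension

# The objective takes exactly (θ, p); extra positional arguments would hit the
# "more than 2 arguments" error shown in the hunks above.
quadratic(θ, p) = sum(abs2, θ .- p)

optf = OptimizationFunction(quadratic, Optimization.AutoSparseForwardDiff())
prob = OptimizationProblem(optf, zeros(10), collect(1.0:10.0))
sol = solve(prob, Newton())   # Newton exercises the sparse Hessian path
```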
4 changes: 2 additions & 2 deletions ext/OptimizationTrackerExt.jl
@@ -6,7 +6,7 @@ import Optimization.ADTypes: AutoTracker
isdefined(Base, :get_extension) ? (using Tracker) : (using ..Tracker)

function Optimization.instantiate_function(f, x, adtype::AutoTracker, p,
num_cons = 0)
num_cons = 0)
num_cons != 0 && error("AutoTracker does not currently support constraints")
_f = (θ, args...) -> first(f.f(θ, p, args...))

@@ -37,7 +37,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoTracker, p,
end

function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
adtype::AutoTracker, num_cons = 0)
adtype::AutoTracker, num_cons = 0)
num_cons != 0 && error("AutoTracker does not currently support constraints")
_f = (θ, args...) -> first(f.f(θ, cache.p, args...))

4 changes: 2 additions & 2 deletions ext/OptimizationZygoteExt.jl
@@ -7,7 +7,7 @@ isdefined(Base, :get_extension) ? (using Zygote, Zygote.ForwardDiff) :
(using ..Zygote, ..Zygote.ForwardDiff)

function Optimization.instantiate_function(f, x, adtype::AutoZygote, p,
num_cons = 0)
num_cons = 0)
_f = (θ, args...) -> f(θ, p, args...)[1]
if f.grad === nothing
grad = function (res, θ, args...)
@@ -84,7 +84,7 @@ function Optimization.instantiate_function(f, x, adtype::AutoZygote, p,
end

function Optimization.instantiate_function(f, cache::Optimization.ReInitCache,
adtype::AutoZygote, num_cons = 0)
adtype::AutoZygote, num_cons = 0)
_f = (θ, args...) -> f(θ, cache.p, args...)[1]
if f.grad === nothing
grad = function (res, θ, args...)
Diffs for the remaining changed files are not shown here.
