From 18c3b60f6e876739a375c601aaeabd7956125a28 Mon Sep 17 00:00:00 2001 From: Sungho Shin Date: Sun, 30 Jul 2023 16:15:47 -0500 Subject: [PATCH] documentation page added --- .github/workflows/docs.yml | 24 ++++ README.md | 2 +- docs/make.jl | 20 +-- docs/src/algorithms.md | 0 docs/src/core.md | 4 +- docs/src/guide.jl | 41 +++--- docs/src/guide.md | 288 ++++++------------------------------- docs/src/models.md | 4 - docs/src/moi.md | 4 - docs/src/special.md | 4 - docs/src/tutorial.jl | 2 - docs/src/tutorial.md | 10 -- 12 files changed, 99 insertions(+), 304 deletions(-) create mode 100644 .github/workflows/docs.yml delete mode 100644 docs/src/algorithms.md delete mode 100644 docs/src/models.md delete mode 100644 docs/src/moi.md delete mode 100644 docs/src/special.md delete mode 100644 docs/src/tutorial.jl delete mode 100644 docs/src/tutorial.md diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000..a8bb17e --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,24 @@ +name: Documentation + +on: + push: + branches: + - master # update to match your development branch (master, main, dev, trunk, ...) + tags: '*' + pull_request: + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: julia-actions/setup-julia@latest + with: + version: '1.9' + - name: Install dependencies + run: julia --project=docs/ docs/install.jl + - name: Build and deploy + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # If authenticating with GitHub Actions token + DOCUMENTER_KEY: ${{ secrets.DOCUMENTER_KEY }} # If authenticating with SSH deploy key + run: julia --project=docs/ docs/make.jl diff --git a/README.md b/README.md index 7ef847e..7fbe6c9 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ | **License** | **Documentation** | **Build Status** | **Coverage** | **Citation** | |:-----------------:|:----------------:|:----------------:|:----------------:|:----------------:| -| [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) | [![doc](https://img.shields.io/badge/docs-dev-blue.svg)](https://sshin23.github.io/SIMDiff.jl/dev) | [![build](https://github.com/sshin23/SIMDiff.jl/actions/workflows/test.yml/badge.svg)](https://github.com/sshin23/SIMDiff.jl/actions/workflows/test.yml) | [![codecov](https://codecov.io/gh/sshin23/SIMDiff.jl/branch/main/graph/badge.svg?token=8ViJWBWnZt)](https://codecov.io/gh/sshin23/SIMDiff.jl) | +| [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) | [![doc](https://img.shields.io/badge/docs-dev-blue.svg)](https://github.com/sshin23/SIMDiff.jl/dev) | [![build](https://github.com/sshin23/SIMDiff.jl/actions/workflows/test.yml/badge.svg)](https://github.com/sshin23/SIMDiff.jl/actions/workflows/test.yml) | [![codecov](https://codecov.io/gh/sshin23/SIMDiff.jl/branch/main/graph/badge.svg?token=8ViJWBWnZt)](https://codecov.io/gh/sshin23/SIMDiff.jl) | ## Introduction SIMDiff.jl employs what we call **SIMD abstraction for nonlinear programs** (NLPs), which allows for the **preservation of the parallelizable structure** within the model equations, facilitating **efficient, parallel derivative evaluations** on the **GPU**. 
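The workflow above installs the documentation dependencies by running `docs/install.jl`, a file that is not part of this patch. A minimal sketch of what such a script typically looks like is given below; its exact contents are an assumption. Since the workflow already passes `--project=docs/`, the script only needs to develop the local package into the docs environment and instantiate the remaining dependencies.

````julia
# Hypothetical sketch of docs/install.jl (the file is referenced by the workflow
# above but not included in this patch). The docs environment is already active
# via `--project=docs/`, so we only develop the local package and install the
# remaining documentation dependencies.
using Pkg
Pkg.develop(PackageSpec(path = pwd()))  # use the checked-out SIMDiff.jl sources
Pkg.instantiate()                       # install Documenter, Literate, and the other docs deps
````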
diff --git a/docs/make.jl b/docs/make.jl index 1f8aac8..f28c8ee 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -1,20 +1,14 @@ -using Documenter, MadDiff, Literate +using Documenter, SIMDiff, Literate const _PAGES = [ "Introduction" => "index.md", "Quick Start"=>"guide.md", - "How it Works" => "tutorial.md", - "API Manual" => [ - "MadDiffCore" => "core.md", - "MadDiffSpecialFunctions" => "special.md", - "MadDiffModels" => "models.md", - "MadDiffMOI" => "moi.md", - ] + "API Manual" => "core.md", ] const _JL_FILENAMES = [ "guide.jl", - "tutorial.jl" + # "tutorial.jl" ] for jl_filename in _JL_FILENAMES @@ -30,15 +24,15 @@ end makedocs( - sitename = "MadDiff", + sitename = "SIMDiff.jl", authors = "Sungho Shin", format = Documenter.LaTeX(platform="docker"), pages = _PAGES ) makedocs( - sitename = "MadDiff", - modules = [MadDiff], + sitename = "SIMDiff.jl", + modules = [SIMDiff], authors = "Sungho Shin", format = Documenter.HTML( prettyurls = get(ENV, "CI", nothing) == "true", @@ -51,6 +45,6 @@ makedocs( deploydocs( - repo = "github.com/sshin23/MadDiff.jl.git" + repo = "github.com/sshin23/SIMDiff.jl.git" ) diff --git a/docs/src/algorithms.md b/docs/src/algorithms.md deleted file mode 100644 index e69de29..0000000 diff --git a/docs/src/core.md b/docs/src/core.md index 6911b5e..c96bd1e 100644 --- a/docs/src/core.md +++ b/docs/src/core.md @@ -1,4 +1,4 @@ -# MadDiffCore +# SIMDiff ```@autodocs -Modules = [MadDiffCore] +Modules = [SIMDiff] ``` diff --git a/docs/src/guide.jl b/docs/src/guide.jl index d6a7f08..2cc8a89 100644 --- a/docs/src/guide.jl +++ b/docs/src/guide.jl @@ -1,5 +1,5 @@ # # Getting Started -# SIMDiff provides a built-in API for creating nonlinear prgogramming models and allows solving the created models using NLP solvers (in particular, those that are interfaced with `NLPModels`, such as [NLPModelsIpopt](https://github.com/JuliaSmoothOptimizers/NLPModelsIpopt.jl). We now use `SIMDiff`'s bulit-in API to model the following nonlinear program: +# SIMDiff creates nonlinear programming models that can be solved with NLP solvers (in particular, those interfaced with `NLPModels`, such as [NLPModelsIpopt](https://github.com/JuliaSmoothOptimizers/NLPModelsIpopt.jl)). We now use `SIMDiff` to model the following nonlinear program: # ```math # \begin{aligned} # \min_{\{x_i\}_{i=0}^N} &\sum_{i=2}^N 100(x_{i-1}^2-x_i)^2+(x_{i-1}-1)^2\\ # \text{s.t.} & 3x_{i+1}^3+2x_{i+2}-5+\sin(x_{i+1}-x_{i+2})\sin(x_{i+1}+x_{i+2})+4x_{i+1}-x_i e^{x_i-x_{i+1}}-3 = 0 # \end{aligned} # ``` # We model the problem with: +using SIMDiff + +# We set N = 10000 -# First, we create a `SIMDiffModel`. -m = SIMDiffModel() +# First, we create a `SIMDiff.Core`. +c = SIMDiff.Core() # The variables can be created as follows: -x = [variable(m; start = mod(i,2)==1 ? -1.2 : 1.) for i=1:N]; + +x = SIMDiff.variable( + c, N; + start = (mod(i,2)==1 ? -1.2 : 1. for i=1:N) ) # The objective can be set as follows: -objective(m, sum(100(x[i-1]^2-x[i])^2+(x[i-1]-1)^2 for i=2:N)); +SIMDiff.objective(c, 100*(x[i-1]^2-x[i])^2+(x[i-1]-1)^2 for i in 2:N) # The constraints can be set as follows: -for i=1:N-2 - constraint(m, 3x[i+1]^3+2*x[i+2]-5+sin(x[i+1]-x[i+2])sin(x[i+1]+x[i+2])+4x[i+1]-x[i]exp(x[i]-x[i+1])-3 == 0); -end +SIMDiff.constraint( + c, + 3x[i+1]^3+2*x[i+2]-5+sin(x[i+1]-x[i+2])sin(x[i+1]+x[i+2])+4x[i+1]-x[i]exp(x[i]-x[i+1])-3 for i in 1:N-2) -# The important last step is instantiating the model. This step must be taken before calling optimizers. -instantiate!(m) +# Finally, we create an NLPModel.
+m = SIMDiff.Model(c) # To solve the problem with `Ipopt`, using NLPModelsIpopt sol = ipopt(m); # The solution `sol` contains the field `sol.solution` holding the optimized parameters. - -# ### SIMDiff as an AD backend of JuMP -# SIMDiff can be used as an automatic differentiation backend of JuMP. The problem above can be modeled in `JuMP` and solved with `Ipopt` along with `SIMDiff` - -using JuMP, Ipopt - -m = JuMP.Model(Ipopt.Optimizer) - -@variable(m, x[i=1:N], start=mod(i,2)==1 ? -1.2 : 1.) -@NLobjective(m, Min, sum(100(x[i-1]^2-x[i])^2+(x[i-1]-1)^2 for i=2:N)) -@NLconstraint(m, [i=1:N-2], 3x[i+1]^3+2*x[i+2]-5+sin(x[i+1]-x[i+2])sin(x[i+1]+x[i+2])+4x[i+1]-x[i]exp(x[i]-x[i+1])-3 == 0) - -optimize!(m; differentiation_backend = SIMDiffAD()) diff --git a/docs/src/guide.md b/docs/src/guide.md index 1f6bfd7..f230bbc 100644 --- a/docs/src/guide.md +++ b/docs/src/guide.md @@ -3,211 +3,83 @@ EditURL = "/src/guide.jl" ``` # Getting Started -## Automatic Differentiation -`MadDiff` provides a flexible user-interface for evaluating first/second-order derivatives of nonlinear expressions. In the following example, using `MadDiff`, we will create a function, gradient, and Hessian evaluator of the following function: +SIMDiff creates nonlinear programming models that can be solved with NLP solvers (in particular, those interfaced with `NLPModels`, such as [NLPModelsIpopt](https://github.com/JuliaSmoothOptimizers/NLPModelsIpopt.jl)). We now use `SIMDiff` to model the following nonlinear program: ```math -f(x) = x_1^2 + e^{(x_2^{p_1})/2} + \log(x_2x_3+p_2), +\begin{aligned} +\min_{\{x_i\}_{i=0}^N} &\sum_{i=2}^N 100(x_{i-1}^2-x_i)^2+(x_{i-1}-1)^2\\ +\text{s.t.} & 3x_{i+1}^3+2x_{i+2}-5+\sin(x_{i+1}-x_{i+2})\sin(x_{i+1}+x_{i+2})+4x_{i+1}-x_i e^{x_i-x_{i+1}}-3 = 0 +\end{aligned} ``` -where ``x`` is the variable vector, and ``p`` is the parameter vector. - -We first import `MadDiff`. - -````julia -using MadDiff -```` - -First, we create a `Source` of `Variable`'s. - -````julia -x = Variable() -```` - -```` -x -```` - -The `Base.getindex!` function is extended so that `x[i]` for any `i` creates an expression for ``x_i``. For example, - -````julia -x[2] -```` - -```` -x[2] -```` - -We can do a similar thing for `Parameter`'s. - -````julia -p = Parameter() -p[1] -```` - -```` -p[1] -```` - -Now, we create the nonlienar expression expression. - -````julia -expr = x[1]^2 + exp(x[2]^p[1])/2 + log(x[2]*x[3]+p[2]) -```` - -```` -x[1]^2 + exp(x[2]^p[1])/2 + log(x[2]*x[3] + p[2]) -```` - -The function evaluator of the above expression can be created by using `MadDiff.function_evaluator` as follows: - -````julia -f = Evaluator(expr) -```` - -```` -Evaluator: -x[1]^2 + exp(x[2]^p[1])/2 + log(x[2]*x[3] + p[2]) -```` - -Now for a given variable and parameter values, the function can be evaluated as follows. - -````julia -x0 = [0.,0.5,1.5] -p0 = [2,0.5] -f(x0,p0) -```` - -```` -0.8651562596580804 -```` - -The gradient evaluator can be created as follows: - -````julia -y0 = similar(x0) -g = GradientEvaluator(expr) -g(y0,x0,p0) -y0 -```` - -```` -3-element Vector{Float64}: - 0.0 - 1.8420127083438709 - 0.4 -```` - -The Hessian evaluator can be created as follows: - -````julia -z0 = zeros(3,3) -h = HessianEvaluator(expr) -h(z0,x0,p0) -z0 -```` - -```` -3×3 Matrix{Float64}: - 2.0 0.0 0.0 - 0.0 0.486038 0.0 - 0.0 0.32 -0.16 -```` - -Note that only lower-triangular entries are evaluated.
- -The evaluator can be constructed in a sparse format: +We model the problem with: ````julia -sh = SparseHessianEvaluator(expr); +using SIMDiff ```` -The sparse coordinates are: +We set ````julia -sh.sparsity +N = 10000 ```` ```` -4-element Vector{Tuple{Int64, Int64}}: - (1, 1) - (2, 2) - (3, 2) - (3, 3) +10000 ```` -The sparse Hessian can be evaluated as follows: +First, we create a `SIMDiff.Core`. ````julia -z1 = zeros(length(sh.sparsity)) -sh(z1,x0,p0) -z1 +c = SIMDiff.Core() ```` ```` -4-element Vector{Float64}: - 2.0 - 0.4860381250316117 - 0.31999999999999995 - -0.16000000000000003 +SIMDiff.Core{Float64, Vector{Float64}, Nothing}(SIMDiff.ObjectiveNull(), SIMDiff.ConstraintNull(), 0, 0, 0, 0, 0, 0, 0, 0, Float64[], Float64[], Float64[], Float64[], Float64[], Float64[], nothing) ```` -## Nonlinear Programming -### Built-in API -MadDiff provides a built-in API for creating nonlinear prgogramming models and allows solving the created models using NLP solvers (in particular, those that are interfaced with `NLPModels`, such as [NLPModelsIpopt](https://github.com/JuliaSmoothOptimizers/NLPModelsIpopt.jl) and [MadNLP](https://github.com/MadNLP/MadNLP.jl)). We now use `MadDiff`'s bulit-in API to model the following nonlinear program: -```math -\begin{aligned} -\min_{\{x_i\}_{i=0}^N} &\sum_{i=2}^N 100(x_{i-1}^2-x_i)^2+(x_{i-1}-1)^2\\ -\text{s.t.} & 3x_{i+1}^3+2x_{i+2}-5+\sin(x_{i+1}-x_{i+2})\sin(x_{i+1}+x_{i+2})+4x_{i+1}-x_i e^{x_i-x_{i+1}}-3 = 0 -\end{aligned} -``` -We model the problem with: +The variables can be created as follows: ````julia -N = 10000 +x = SIMDiff.variable( + c, N; + start = (mod(i,2)==1 ? -1.2 : 1. for i=1:N) +) ```` ```` -10000 +SIMDiff.Variable{Tuple{Int64}, Int64}((10000,), 0) ```` -First, we create a `MadDiffModel`. +The objective can be set as follows: ````julia -m = MadDiffModel() +SIMDiff.objective(c, 100*(x[i-1]^2-x[i])^2+(x[i-1]-1)^2 for i in 2:N) ```` ```` -MadDiffModel{Float64} (not instantiated). 
- +SIMDiff.Objective{SIMDiff.ObjectiveNull, SIMDiff.Func{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}}, Int64}}, SIMDiff.Node2{typeof(^), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}, Int64}}, SIMDiff.Compressor{Tuple{Int64, Int64, Int64}}, SIMDiff.Compressor{NTuple{4, Int64}}}, UnitRange{Int64}}(SIMDiff.ObjectiveNull(), SIMDiff.Func{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}}, Int64}}, SIMDiff.Node2{typeof(^), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}, Int64}}, SIMDiff.Compressor{Tuple{Int64, Int64, Int64}}, SIMDiff.Compressor{NTuple{4, Int64}}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}}, Int64}}, SIMDiff.Node2{typeof(^), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}, Int64}}(SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}}, Int64}}(100, SIMDiff.Node2{typeof(^), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}}, Int64}(SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}}(SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}(SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}(SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}(SIMDiff.Par(), 1), 0)), 2), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}(SIMDiff.Par(), 0))), 2)), SIMDiff.Node2{typeof(^), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}, Int64}(SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}(SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}(SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}(SIMDiff.Par(), 1), 0)), 1), 2)), SIMDiff.Compressor{Tuple{Int64, Int64, Int64}}((1, 2, 1)), SIMDiff.Compressor{NTuple{4, Int64}}((1, 2, 3, 1)), 0, 0, 0, 2, 3), 2:10000) 
```` -The variables can be created as follows: +The constraints can be set as follows: ````julia -x = [variable(m; start = mod(i,2)==1 ? -1.2 : 1.) for i=1:N]; +SIMDiff.constraint( + c, + 3x[i+1]^3+2*x[i+2]-5+sin(x[i+1]-x[i+2])sin(x[i+1]+x[i+2])+4x[i+1]-x[i]exp(x[i]-x[i+1])-3 + for i in 1:N-2) ```` -The objective can be set as follows: - -````julia -objective(m, sum(100(x[i-1]^2-x[i])^2+(x[i-1]-1)^2 for i=2:N)); ```` - -The constraints can be set as follows: - -````julia -for i=1:N-2 - constraint(m, 3x[i+1]^3+2*x[i+2]-5+sin(x[i+1]-x[i+2])sin(x[i+1]+x[i+2])+4x[i+1]-x[i]exp(x[i]-x[i+1])-3 == 0); -end +SIMDiff.Constraint{SIMDiff.ConstraintNull, SIMDiff.Func{SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, Int64}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, Int64}, SIMDiff.Node2{typeof(*), SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(+), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node2{typeof(*), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Node1{typeof(exp), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}}, Int64}, SIMDiff.Compressor{NTuple{10, Int64}}, SIMDiff.Compressor{NTuple{17, Int64}}}, UnitRange{Int64}}(SIMDiff.ConstraintNull(), SIMDiff.Func{SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, Int64}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, Int64}, SIMDiff.Node2{typeof(*), SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(+), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node2{typeof(*), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Node1{typeof(exp), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}}, Int64}, 
SIMDiff.Compressor{NTuple{10, Int64}}, SIMDiff.Compressor{NTuple{17, Int64}}}(SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, Int64}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, Int64}, SIMDiff.Node2{typeof(*), SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(+), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node2{typeof(*), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Node1{typeof(exp), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}}, Int64}(SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, Int64}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, Int64}, SIMDiff.Node2{typeof(*), SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(+), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node2{typeof(*), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Node1{typeof(exp), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, Int64}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, Int64}, SIMDiff.Node2{typeof(*), SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(+), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), 
SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, Int64}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, Int64}, SIMDiff.Node2{typeof(*), SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(+), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}}(SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, Int64}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, Int64}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, Int64}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}(SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, Int64}}(3, SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, Int64}(SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}(SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}(SIMDiff.Par(), 1), 0)), 3)), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}(2, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}(SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}(SIMDiff.Par(), 2), 0)))), 5), SIMDiff.Node2{typeof(*), SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(+), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}(SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}(SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, 
SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}(SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}(SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}(SIMDiff.Par(), 1), 0)), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}(SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}(SIMDiff.Par(), 2), 0)))), SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(+), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}(SIMDiff.Node2{typeof(+), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}(SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}(SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}(SIMDiff.Par(), 1), 0)), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}(SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}(SIMDiff.Par(), 2), 0)))))), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}(4, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}(SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}(SIMDiff.Par(), 1), 0)))), SIMDiff.Node2{typeof(*), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Node1{typeof(exp), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}(SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}(SIMDiff.Par(), 0)), SIMDiff.Node1{typeof(exp), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}(SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}(SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}(SIMDiff.Par(), 0)), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}(SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}(SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}(SIMDiff.Par(), 1), 0)))))), 3), SIMDiff.Compressor{NTuple{10, Int64}}((1, 2, 1, 2, 1, 2, 1, 3, 3, 1)), SIMDiff.Compressor{NTuple{17, Int64}}((1, 1, 2, 3, 1, 2, 3, 1, 3, 4, 2, 5, 5, 1, 6, 5, 6)), 0, 0, 29997, 3, 6), 1:9998) ```` -The important last step is instantiating the model. This step must be taken before calling optimizers. +Finally, we create an NLPModel. ````julia -instantiate!(m) +m = SIMDiff.Model(c) ```` ```` -MadDiffModel{Float64} (instantiated). 
+SIMDiff.Model{Float64, Vector{Float64}, Nothing, SIMDiff.Objective{SIMDiff.ObjectiveNull, SIMDiff.Func{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}}, Int64}}, SIMDiff.Node2{typeof(^), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Par, Int64}, Int64}}, Int64}, Int64}}, SIMDiff.Compressor{Tuple{Int64, Int64, Int64}}, SIMDiff.Compressor{NTuple{4, Int64}}}, UnitRange{Int64}}, SIMDiff.Constraint{SIMDiff.ConstraintNull, SIMDiff.Func{SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(-), SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(*), Int64, SIMDiff.Node2{typeof(^), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, Int64}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, Int64}, SIMDiff.Node2{typeof(*), SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node1{typeof(sin), SIMDiff.Node2{typeof(+), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}}, SIMDiff.Node2{typeof(*), Int64, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}, SIMDiff.Node2{typeof(*), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Node1{typeof(exp), SIMDiff.Node2{typeof(-), SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}}, SIMDiff.Var{SIMDiff.Node2{typeof(+), SIMDiff.Node2{typeof(+), SIMDiff.Par, Int64}, Int64}}}}}}, Int64}, SIMDiff.Compressor{NTuple{10, Int64}}, SIMDiff.Compressor{NTuple{17, Int64}}}, UnitRange{Int64}}} Problem name: Generic All variables: ████████████████████ 10000 All constraints: ████████████████████ 9998 free: ████████████████████ 10000 free: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 @@ -216,101 +88,33 @@ MadDiffModel{Float64} (instantiated). 
low/upp: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 low/upp: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 fixed: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 fixed: ████████████████████ 9998 infeas: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 infeas: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 - nnzh: ( 99.96% sparsity) 19999 linear: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 + nnzh: ( 99.82% sparsity) 89985 linear: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 nonlinear: ████████████████████ 9998 nnzj: ( 99.97% sparsity) 29994 - Counters: - obj: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 grad: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 cons: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 - cons_lin: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 cons_nln: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 jcon: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 - jgrad: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 jac: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 jac_lin: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 - jac_nln: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 jprod: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 jprod_lin: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 - jprod_nln: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 jtprod: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 jtprod_lin: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 - jtprod_nln: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 hess: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 hprod: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 - jhess: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 jhprod: ⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅⋅ 0 - +SIMDiff.Counters(0, 0, 0, 0, 0, 0.0, 0.0, 0.0, 0.0, 0.0) ```` To solve the problem with `Ipopt`, ````julia using NLPModelsIpopt -ipopt(m); +sol = ipopt(m); ```` -```` -This is Ipopt version 3.13.4, running with linear solver mumps. -NOTE: Other linear solvers might be more efficient (see Ipopt documentation). - -Number of nonzeros in equality constraint Jacobian...: 29994 -Number of nonzeros in inequality constraint Jacobian.: 0 -Number of nonzeros in Lagrangian Hessian.............: 19999 - -Total number of variables............................: 10000 - variables with only lower bounds: 0 - variables with lower and upper bounds: 0 - variables with only upper bounds: 0 -Total number of equality constraints.................: 9998 -Total number of inequality constraints...............: 0 - inequality constraints with only lower bounds: 0 - inequality constraints with lower and upper bounds: 0 - inequality constraints with only upper bounds: 0 - -iter objective inf_pr inf_du lg(mu) ||d|| lg(rg) alpha_du alpha_pr ls - 0 2.5405160e+06 2.48e+01 2.73e+01 -1.0 0.00e+00 - 0.00e+00 0.00e+00 0 - 1 1.3512419e+06 1.49e+01 8.27e+01 -1.0 2.20e+00 - 1.00e+00 1.00e+00f 1 - 2 1.5156131e+05 4.28e+00 1.36e+02 -1.0 1.43e+00 - 1.00e+00 1.00e+00f 1 - 3 6.6755024e+01 3.09e-01 2.18e+01 -1.0 5.63e-01 - 1.00e+00 1.00e+00f 1 - 4 6.2338933e+00 1.73e-02 8.47e-01 -1.0 2.10e-01 - 1.00e+00 1.00e+00h 1 - 5 6.2324586e+00 1.15e-05 8.16e-04 -1.7 3.35e-03 - 1.00e+00 1.00e+00h 1 - 6 6.2324586e+00 8.36e-12 7.97e-10 -5.7 2.00e-06 - 1.00e+00 1.00e+00h 1 - -Number of Iterations....: 6 - - (scaled) (unscaled) -Objective...............: 7.8692659500479645e-01 6.2324586324379885e+00 -Dual infeasibility......: 7.9743417331632266e-10 6.3156786526652763e-09 -Constraint violation....: 8.3555384833289281e-12 8.3555384833289281e-12 -Complementarity.........: 0.0000000000000000e+00 0.0000000000000000e+00 -Overall NLP error.......: 7.9743417331632266e-10 6.3156786526652763e-09 - - -Number of objective function evaluations = 7 -Number of objective gradient evaluations = 7 -Number of equality constraint evaluations = 7 -Number of inequality constraint evaluations = 0 -Number of equality constraint Jacobian evaluations = 7 -Number of inequality constraint Jacobian evaluations = 0 -Number of Lagrangian Hessian evaluations = 6 -Total CPU secs in IPOPT (w/o function evaluations) = 0.158 -Total CPU secs in NLP function evaluations = 0.026 - -EXIT: Optimal Solution Found. 
- ```` -### MadDiff as a AD backend of JuMP -MadDiff can be used as an automatic differentiation backend of JuMP. The problem above can be modeled in `JuMP` and solved with `Ipopt` along with `MadDiff` +****************************************************************************** +This program contains Ipopt, a library for large-scale nonlinear optimization. + Ipopt is released as open source code under the Eclipse Public License (EPL). + For more information visit https://github.com/coin-or/Ipopt +****************************************************************************** -````julia -using JuMP, Ipopt - -m = JuMP.Model(Ipopt.Optimizer) - -@variable(m, x[i=1:N], start=mod(i,2)==1 ? -1.2 : 1.) -@NLobjective(m, Min, sum(100(x[i-1]^2-x[i])^2+(x[i-1]-1)^2 for i=2:N)) -@NLconstraint(m, [i=1:N-2], 3x[i+1]^3+2*x[i+2]-5+sin(x[i+1]-x[i+2])sin(x[i+1]+x[i+2])+4x[i+1]-x[i]exp(x[i]-x[i+1])-3 == 0) - -optimize!(m; differentiation_backend = MadDiffAD()) -```` - -```` -This is Ipopt version 3.13.4, running with linear solver mumps. -NOTE: Other linear solvers might be more efficient (see Ipopt documentation). +This is Ipopt version 3.13.3, running with linear solver ma27. Number of nonzeros in equality constraint Jacobian...: 29994 Number of nonzeros in inequality constraint Jacobian.: 0 -Number of nonzeros in Lagrangian Hessian.............: 19999 +Number of nonzeros in Lagrangian Hessian.............: 89985 Total number of variables............................: 10000 variables with only lower bounds: 0 @@ -335,10 +139,10 @@ Number of Iterations....: 6 (scaled) (unscaled) Objective...............: 7.8692659500479645e-01 6.2324586324379885e+00 -Dual infeasibility......: 7.9743417331632266e-10 6.3156786526652763e-09 +Dual infeasibility......: 7.9743417311426394e-10 6.3156786510649713e-09 Constraint violation....: 8.3555384833289281e-12 8.3555384833289281e-12 Complementarity.........: 0.0000000000000000e+00 0.0000000000000000e+00 -Overall NLP error.......: 7.9743417331632266e-10 6.3156786526652763e-09 +Overall NLP error.......: 7.9743417311426394e-10 6.3156786510649713e-09 Number of objective function evaluations = 7 @@ -348,13 +152,15 @@ Number of inequality constraint evaluations = 0 Number of equality constraint Jacobian evaluations = 7 Number of inequality constraint Jacobian evaluations = 0 Number of Lagrangian Hessian evaluations = 6 -Total CPU secs in IPOPT (w/o function evaluations) = 0.157 -Total CPU secs in NLP function evaluations = 0.028 +Total CPU secs in IPOPT (w/o function evaluations) = 0.611 +Total CPU secs in NLP function evaluations = 0.494 EXIT: Optimal Solution Found. ```` +The solution `sol` contains the field `sol.solution` holding the optimized parameters. 
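Beyond `sol.solution`, the statistics object returned by `ipopt` typically exposes a few other useful fields. The snippet below is a sketch, assuming the standard execution-statistics fields of `NLPModels`-compatible solvers; only `sol.solution` is established by the text above, and the other field names are assumptions.

````julia
# Quick inspection of the result returned by ipopt(m).
# Field names other than `solution` are assumed from the usual
# NLPModels solver statistics and may differ.
println(sol.status)         # termination status, e.g. :first_order
println(sol.objective)      # final objective value
println(sol.solution[1:5])  # first few entries of the optimized variables
````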
+ --- *This page was generated using [Literate.jl](https://github.com/fredrikekre/Literate.jl).* diff --git a/docs/src/models.md b/docs/src/models.md deleted file mode 100644 index d85622a..0000000 --- a/docs/src/models.md +++ /dev/null @@ -1,4 +0,0 @@ -# MadDiffModels -```@autodocs -Modules = [MadDiffModels] -``` diff --git a/docs/src/moi.md b/docs/src/moi.md deleted file mode 100644 index 82af820..0000000 --- a/docs/src/moi.md +++ /dev/null @@ -1,4 +0,0 @@ -# MadDiffMOI -```@autodocs -Modules = [MadDiffMOI] -``` diff --git a/docs/src/special.md b/docs/src/special.md deleted file mode 100644 index 85bc363..0000000 --- a/docs/src/special.md +++ /dev/null @@ -1,4 +0,0 @@ -# MadDiffMOI -```@autodocs -Modules = [MadDiffSpecialFunctions] -``` diff --git a/docs/src/tutorial.jl b/docs/src/tutorial.jl deleted file mode 100644 index b3727f1..0000000 --- a/docs/src/tutorial.jl +++ /dev/null @@ -1,2 +0,0 @@ -# # How it Works - diff --git a/docs/src/tutorial.md b/docs/src/tutorial.md deleted file mode 100644 index 70032ea..0000000 --- a/docs/src/tutorial.md +++ /dev/null @@ -1,10 +0,0 @@ -```@meta -EditURL = "/src/tutorial.jl" -``` - -# How it Works - ---- - -*This page was generated using [Literate.jl](https://github.com/fredrikekre/Literate.jl).* -