-
Notifications
You must be signed in to change notification settings - Fork 4
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
11 changed files
with
365 additions
and
95 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,16 @@ | ||
# Daily CompatHelper run: opens PRs that bump [compat] bounds for dependencies.
name: CompatHelper
on:
  schedule:
    - cron: '00 00 * * *'   # once a day at 00:00 UTC
  workflow_dispatch:        # allow manual triggering from the Actions tab
jobs:
  CompatHelper:
    runs-on: ubuntu-latest
    steps:
      - name: Pkg.add("CompatHelper")
        run: julia -e 'using Pkg; Pkg.add("CompatHelper")'
      - name: CompatHelper.main()
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          COMPATHELPER_PRIV: ${{ secrets.COMPATHELPER_PRIV }} # optional
        run: julia -e 'using CompatHelper; CompatHelper.main()'
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,15 @@ | ||
# Tag releases automatically after the package is registered in General.
name: TagBot
on:
  issue_comment:           # JuliaTagBot comments on the registration issue
    types:
      - created
  workflow_dispatch:       # allow manual runs
jobs:
  TagBot:
    # Run only for manual dispatches or for comments made by the JuliaTagBot user.
    if: github.event_name == 'workflow_dispatch' || github.actor == 'JuliaTagBot'
    runs-on: ubuntu-latest
    steps:
      - uses: JuliaRegistries/TagBot@v1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          ssh: ${{ secrets.DOCUMENTER_KEY }}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
# CI: build and run the test suite on Linux/macOS/Windows, then upload coverage.
name: build

on: [push, pull_request]

jobs:
  test:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        julia-version: ['1.9']
        julia-arch: [x64]
        os: [ubuntu-latest, macos-latest, windows-latest]

    steps:
      # checkout@v2 runs on the deprecated Node 12 runtime; v4 is the
      # maintained release and a drop-in replacement here.
      - uses: actions/checkout@v4
      - uses: julia-actions/setup-julia@latest
        with:
          version: ${{ matrix.julia-version }}
      - uses: julia-actions/julia-buildpkg@latest
      - uses: julia-actions/julia-runtest@latest
      - uses: julia-actions/julia-processcoverage@v1
      # codecov-action@v1 relied on the retired bash uploader and no longer
      # uploads; v4 takes `files` (plural) plus the repository upload token.
      - uses: codecov/codecov-action@v4
        with:
          files: lcov.info
          token: ${{ secrets.CODECOV_TOKEN }}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1 +1,17 @@ | ||
# SIMDiff.jl | ||
*An implementation of SIMD abstraction for nonlinear programs and automatic differentiation.* | ||
|
||
| **License** | **Documentation** | **Build Status** | **Coverage** | **Citation** | | ||
|:-----------------:|:----------------:|:----------------:|:----------------:|:----------------:| | ||
| [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) | [![doc](https://img.shields.io/badge/docs-dev-blue.svg)](https://sshin23.github.io/SIMDiff.jl/dev) | [![build](https://github.com/sshin23/SIMDiff.jl/actions/workflows/test.yml/badge.svg)](https://github.com/sshin23/SIMDiff.jl/actions/workflows/test.yml) | [![codecov](https://codecov.io/gh/sshin23/SIMDiff.jl/branch/main/graph/badge.svg?token=8ViJWBWnZt)](https://codecov.io/gh/sshin23/SIMDiff.jl) | | ||
|
||
## Introduction | ||
SIMDiff.jl employs what we call **SIMD abstraction for nonlinear programs** (NLPs), which allows for the **preservation of the parallelizable structure** within the model equations, facilitating **efficient, parallel derivative evaluations** on the **GPU**. | ||
|
||
SIMDiff.jl is different from other algebraic modeling tools, such as JuMP or AMPL, in the following ways: | ||
- **Modeling Interface**: SIMDiff.jl requires users to specify the model equations always in the form of `Iterable`s. This allows SIMDiff.jl to preserve the SIMD-compatible structure in the model equations. | ||
- **Performance**: SIMDiff.jl compiles (via Julia's compiler) derivative evaluation codes that are specific to each computation pattern, based on reverse-mode automatic differentiation. This makes the speed of derivative evaluation (even on the CPU) significantly faster than other existing tools. | ||
- **Portability**: SIMDiff.jl can evaluate derivatives on GPU accelerators. The code is currently only tested for NVIDIA GPUs, but GPU code is implemented mostly based on the portable programming paradigm, KernelAbstractions.jl. In the future, we are interested in supporting Intel, AMD, and Apple GPUs. | ||
|
||
## Bug reports and support | ||
Please report issues and feature requests via the [GitHub issue tracker](https://github.com/sshin23/SIMDiff.jl/issues). |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,70 @@ | ||
module NLPTest

using SIMDiff, Test, ADNLPModels, NLPModels, NLPModelsIpopt, KernelAbstractions, CUDA

# (name, size) pairs: each entry names a model family defined in luksan.jl
# and the problem dimension to instantiate it with.
const NLP_TEST_ARGUMENTS = [
    ("luksan_vlcek", 3),
    ("luksan_vlcek", 20),
]

# Backends to exercise: `nothing` (plain arrays) and the KernelAbstractions
# CPU backend; the CUDA backend is appended below when a device is available.
const BACKENDS = Any[
    nothing,
    CPU(),
]

if CUDA.has_cuda()
    push!(BACKENDS, CUDABackend())
end

include("utils.jl")
include("luksan.jl")

"""
    test_nlp(simdiff_model, adnlp_model, backend, args)

Build the same problem with the SIMDiff constructor and the ADNLPModels
constructor, solve both with Ipopt, and check that the solver status,
primal solution, and all multiplier vectors agree.
"""
function test_nlp(simdiff_model, adnlp_model, backend, args)
    model_sd = WrapperNLPModel(simdiff_model(backend, args...))
    model_ad = WrapperNLPModel(adnlp_model(backend, args...))

    sol_sd = ipopt(model_sd; print_level = 0)
    sol_ad = ipopt(model_ad; print_level = 0)

    @test sol_sd.status == sol_ad.status

    # Compare the primal solution and every multiplier vector.
    for field in (:solution, :multipliers, :multipliers_L, :multipliers_U)
        @test getfield(sol_sd, field) ≈ getfield(sol_ad, field) atol=1e-6
    end
end

"""
    runtests()

Run `test_nlp` for every problem in `NLP_TEST_ARGUMENTS` on every backend
in `BACKENDS`. Constructors are looked up by name, e.g.
`luksan_vlcek_simdiff_model` / `luksan_vlcek_adnlp_model`.
"""
function runtests()
    @testset "NLP tests" begin
        for (name, args) in NLP_TEST_ARGUMENTS, backend in BACKENDS
            build_sd = getfield(@__MODULE__, Symbol(name, "_simdiff_model"))
            build_ad = getfield(@__MODULE__, Symbol(name, "_adnlp_model"))

            @testset "$name $args $backend" begin
                test_nlp(build_sd, build_ad, backend, args)
            end
        end
    end
end

end # NLPTest
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,38 @@ | ||
# i-th term of the Luksan–Vlcek chained Rosenbrock objective:
# 100 (x[i-1]^2 - x[i])^2 + (x[i-1] - 1)^2.
function luksan_vlcek_obj(x, i)
    residual = x[i-1]^2 - x[i]
    shift = x[i-1] - 1
    return 100 * residual^2 + shift^2
end
|
||
# i-th equality-constraint residual of the Luksan–Vlcek problem.
# BUG FIX: the original used implicit juxtaposition `sin(...)sin(...)` and
# `x[i]exp(...)`; Julia only allows juxtaposed multiplication with a numeric
# literal coefficient, so the `*` operators must be explicit.
function luksan_vlcek_con(x, i)
    return 3 * x[i+1]^3 + 2 * x[i+2] - 5 +
           sin(x[i+1] - x[i+2]) * sin(x[i+1] + x[i+2]) +
           4 * x[i+1] - x[i] * exp(x[i] - x[i+1]) - 3
end
|
||
# Standard Luksan–Vlcek starting point: -1.2 at odd indices, 1.0 at even.
function luksan_vlcek_x0(i)
    if isodd(i)
        return -1.2
    else
        return 1.0
    end
end
|
||
# Reference implementation built with ADNLPModels. `backend` is accepted
# only to match the signature of the SIMDiff constructor; it is not used
# here (ADNLPModels builds on the CPU).
function luksan_vlcek_adnlp_model(backend, N)
    objective = x -> sum(luksan_vlcek_obj(x, i) for i = 2:N)
    constraints = x -> [luksan_vlcek_con(x, i) for i = 1:N-2]

    return ADNLPModel(
        objective,
        [luksan_vlcek_x0(i) for i = 1:N],   # start point
        fill(-Inf, N),                      # no lower variable bounds
        fill(Inf, N),                       # no upper variable bounds
        constraints,
        zeros(N - 2),                       # equality constraints: lcon = ucon = 0
        zeros(N - 2),
    )
end
|
||
# SIMDiff implementation of the same problem. Objective and constraints are
# handed to SIMDiff as generators so the repeated (SIMD-compatible)
# structure of the terms is preserved.
function luksan_vlcek_simdiff_model(backend, N)
    core = SIMDiff.Core(backend)

    x = SIMDiff.variable(
        core, N;
        start = (luksan_vlcek_x0(i) for i = 1:N),
    )
    SIMDiff.constraint(core, luksan_vlcek_con(x, i) for i = 1:N-2)
    SIMDiff.objective(core, luksan_vlcek_obj(x, i) for i = 2:N)

    return SIMDiff.Model(core)
end
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,167 @@ | ||
# Wraps an NLP model whose data lives in arrays of type `VT` (possibly GPU
# arrays — TODO confirm against the backends used in nlp.jl) and exposes it
# through a plain Float64 / Vector{Float64} NLPModels interface.
struct WrapperNLPModel{
    T, VT, VI,
    I <: NLPModels.AbstractNLPModel{T,VT}
} <: NLPModels.AbstractNLPModel{Float64,Vector{Float64}}

    # The wrapped model; all evaluations are delegated to it.
    inner::I

    # Staging buffers in the inner model's array type for the primal point
    # and the constraint multipliers.
    x_buffer::VT
    y_buffer::VT

    # Staging buffers for constraint values and objective gradient.
    cons_buffer::VT
    grad_buffer::VT

    # Jacobian values and sparsity pattern (COO row/column indices).
    jac_buffer::VT
    jac_I_buffer::VI
    jac_J_buffer::VI

    # Hessian values and sparsity pattern (COO row/column indices).
    hess_buffer::VT
    hess_I_buffer::VI
    hess_J_buffer::VI

    # CPU-side metadata describing the problem to NLPModels consumers.
    meta::NLPModels.AbstractNLPModelMeta{Float64,Vector{Float64}}
end
|
||
"""
    WrapperNLPModel(m)

Wrap the NLP model `m` in a `WrapperNLPModel`: copy its start values and
bounds into host `Vector{Float64}`s for the metadata, and allocate staging
buffers with the same array type as `m`'s own vectors for evaluations.
"""
function WrapperNLPModel(m)
    # Problem dimensions taken from the wrapped model.
    nvar = get_nvar(m)
    ncon = get_ncon(m)
    nnzj = get_nnzj(m)
    nnzh = get_nnzh(m)

    # Host copies of variable data: start point and bounds.
    x0 = Vector{Float64}(undef, nvar)
    lvar = Vector{Float64}(undef, nvar)
    uvar = Vector{Float64}(undef, nvar)
    copyto!(x0, m.meta.x0)
    copyto!(lvar, m.meta.lvar)
    copyto!(uvar, m.meta.uvar)

    # Host copies of constraint data: initial multipliers and bounds.
    y0 = Vector{Float64}(undef, ncon)
    lcon = Vector{Float64}(undef, ncon)
    ucon = Vector{Float64}(undef, ncon)
    copyto!(y0, m.meta.y0)
    copyto!(lcon, m.meta.lcon)
    copyto!(ucon, m.meta.ucon)

    # Scratch buffers sharing the array type of the inner model's x0, so
    # evaluation happens where the inner model's data lives.
    prototype = get_x0(m)
    x_buffer = similar(prototype, nvar)
    y_buffer = similar(prototype, ncon)
    cons_buffer = similar(prototype, ncon)
    grad_buffer = similar(prototype, nvar)
    jac_buffer = similar(prototype, nnzj)
    jac_I_buffer = similar(prototype, Int, nnzj)
    jac_J_buffer = similar(prototype, Int, nnzj)
    hess_buffer = similar(prototype, nnzh)
    hess_I_buffer = similar(prototype, Int, nnzh)
    hess_J_buffer = similar(prototype, Int, nnzh)

    meta = NLPModelMeta(
        nvar,
        x0 = x0,
        lvar = lvar,
        uvar = uvar,
        ncon = ncon,
        y0 = y0,
        lcon = lcon,
        ucon = ucon,
        nnzj = nnzj,
        nnzh = nnzh,
        minimize = m.meta.minimize,
    )

    return WrapperNLPModel(
        m,
        x_buffer, y_buffer,
        cons_buffer, grad_buffer,
        jac_buffer, jac_I_buffer, jac_J_buffer,
        hess_buffer, hess_I_buffer, hess_J_buffer,
        meta,
    )
end
|
||
# Jacobian sparsity pattern: query the inner model into buffers of its own
# array type, then transfer into the caller's (host) vectors.
function NLPModels.jac_structure!(
    m::M,
    rows::V,
    cols::V
) where {M <: WrapperNLPModel, V <: AbstractVector}
    jac_structure!(m.inner, m.jac_I_buffer, m.jac_J_buffer)
    copyto!(rows, m.jac_I_buffer)
    return copyto!(cols, m.jac_J_buffer)
end
|
||
# Hessian sparsity pattern: query the inner model into buffers of its own
# array type, then transfer into the caller's (host) vectors.
function NLPModels.hess_structure!(
    m::M,
    rows::V,
    cols::V
) where {M <: WrapperNLPModel, V <: AbstractVector}
    hess_structure!(m.inner, m.hess_I_buffer, m.hess_J_buffer)
    copyto!(rows, m.hess_I_buffer)
    return copyto!(cols, m.hess_J_buffer)
end
|
||
# Objective value: stage x in the inner model's array type, then delegate.
function NLPModels.obj(
    m::M,
    x::V
) where {M <: WrapperNLPModel, V <: AbstractVector}
    copyto!(m.x_buffer, x)
    return NLPModels.obj(m.inner, m.x_buffer)
end
# Constraint values: evaluate on staged buffers, then copy back into `g`.
function NLPModels.cons!(
    m::M,
    x::V,
    g::V
) where {M <: WrapperNLPModel, V <: AbstractVector}
    copyto!(m.x_buffer, x)
    NLPModels.cons!(m.inner, m.x_buffer, m.cons_buffer)
    copyto!(g, m.cons_buffer)
    return nothing
end
# Objective gradient: evaluate on staged buffers, then copy back into `f`.
function NLPModels.grad!(
    m::M,
    x::V,
    f::V
) where {M <: WrapperNLPModel, V <: AbstractVector}
    copyto!(m.x_buffer, x)
    NLPModels.grad!(m.inner, m.x_buffer, m.grad_buffer)
    copyto!(f, m.grad_buffer)
    return nothing
end
# Jacobian nonzeros (COO values): evaluate on staged buffers, copy into `jac`.
function NLPModels.jac_coord!(
    m::M,
    x::V,
    jac::V
) where {M <: WrapperNLPModel, V <: AbstractVector}
    copyto!(m.x_buffer, x)
    NLPModels.jac_coord!(m.inner, m.x_buffer, m.jac_buffer)
    copyto!(jac, m.jac_buffer)
    return nothing
end
# Lagrangian Hessian nonzeros (COO values): stage x and y in the inner
# model's array type, delegate, and copy the values back into `hess`.
function NLPModels.hess_coord!(
    m::M,
    x::V,
    y::V,
    hess::V;
    obj_weight = one(eltype(x))
) where {M <: WrapperNLPModel, V <: AbstractVector}
    copyto!(m.x_buffer, x)
    # BUG FIX: the original copied `y` into `m.x_buffer`, clobbering the
    # primal point just staged above and leaving `m.y_buffer` uninitialized
    # when it is passed to the inner hess_coord! below.
    copyto!(m.y_buffer, y)
    NLPModels.hess_coord!(
        m.inner, m.x_buffer, m.y_buffer, m.hess_buffer;
        obj_weight = obj_weight
    )
    copyto!(hess, m.hess_buffer)
    return
end
Oops, something went wrong.