benchmark added
sshin23 committed Aug 10, 2023
1 parent 8411f35 commit be884c2
Showing 15 changed files with 1,375 additions and 114 deletions.
22 changes: 21 additions & 1 deletion .gitignore
@@ -21,5 +21,25 @@ docs/site/
 # It records a fixed state of all packages used by the project. As such, it should not be
 # committed for packages, but should be committed for applications that require a static
 # environment.
-Manifest.toml
+/Manifest.toml
 docs/Manifest.toml
+
+# data files
+pglib*.m
+
+# ampl nl files
+*.nl
+
+## Core latex/pdflatex auxiliary files:
+*.aux
+*.lof
+*.log
+*.lot
+*.fls
+*.out
+*.toc
+*.fmt
+*.fot
+*.cb
+*.cb2
+.*.lb
3 changes: 3 additions & 0 deletions ExaModelsExamples/Project.toml
@@ -7,10 +7,13 @@ version = "0.1.0"
 AmplNLReader = "77dd3d4c-cb1d-5e09-9340-85030ff7ba66"
 BenchmarkTools = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
 CPUTime = "a9c8d775-2e2e-55fc-8582-045d282d599e"
+Downloads = "f43a241f-c20a-4ad4-852c-f6b1247861c6"
 ExaModels = "1037b233-b668-4ce9-9b63-f9f681f55dd2"
 JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
 MadNLP = "2621e9c9-9eb4-46b1-8089-e8c72242dfb6"
+NLPModelsIpopt = "f4238b75-b362-5c4c-b852-0801c9a21d71"
 NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e"
 PowerModels = "c36e90e8-916a-50a6-bd94-075b64ef4655"
+Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7"
 PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0"
 SnoopPrecompile = "66db9d55-30c0-4569-8b51-7e840670fc0c"
99 changes: 20 additions & 79 deletions ExaModelsExamples/src/ExaModelsExamples.jl
@@ -7,100 +7,41 @@ import PyCall: @py_str
 import MadNLP
 import AmplNLReader
 import CPUTime: @CPUtime
-
-silence()
+import SnoopPrecompile
+import NLPModelsIpopt: ipopt
+import Downloads
+import Printf: @printf
 
 include("opf.jl")
 include("luksanvlcek.jl")
 include("distillation.jl")
 include("quadrotor.jl")
+include("extras.jl")
 
-function project!(l,x,u; marg = 1e-4)
-    map!(x,l,x,u) do l,x,u
-        max(l+marg, min(u-marg,x) )
-    end
+const NAMES = filter(names(ExaModelsExamples; all=true)) do x
+    str = string(x)
+    endswith(str, "model") && !startswith(str, "#")
 end
 
-function compile_callbacks(m)
-    nvar = m.meta.nvar
-    ncon = m.meta.ncon
-    nnzj = m.meta.nnzj
-    nnzh = m.meta.nnzh
-
-    x = similar(m.meta.x0, nvar)
-    y = similar(m.meta.x0, ncon)
-    c = similar(m.meta.x0, ncon)
-    g = similar(m.meta.x0, nvar)
-    jac = similar(m.meta.x0, nnzj)
-    hess = similar(m.meta.x0, nnzh)
-    jrows = similar(m.meta.x0, Int, nnzj)
-    jcols = similar(m.meta.x0, Int, nnzj)
-    hrows = similar(m.meta.x0, Int, nnzh)
-    hcols = similar(m.meta.x0, Int, nnzh)
+export ipopt # rexport
 
-    project!(m.meta.lvar, x, m.meta.uvar)
-
-    # println("Objective evaluation")
-    tobj = @elapsed for t=1:100
-        NLPModels.obj(m,x)
-    end
-    # println("Constraints evaluation")
-    tcon = @elapsed for t=1:100
-        NLPModels.cons!(m,x,c)
-    end
-    # println("Gradient evaluation")
-    tgrad = @elapsed for t=1:100
-        NLPModels.grad!(m,x,g)
-    end
-    # println("Jacobian evaluation")
-    tjac = @elapsed for t=1:100
-        NLPModels.jac_coord!(m,x,jac)
-    end
-    # println("Hessian evaluation")
-    thess = @elapsed for t=1:100
-        NLPModels.hess_coord!(m,x,y,hess)
-    end
-    # println("Jacobina sparsity evaluation")
-    tjacs = @elapsed for t=1:100
-        NLPModels.jac_structure!(m,jrows,jcols)
-    end
-    # println("Hessian sparsity evaluation")
-    thesss = @elapsed for t=1:100
-        NLPModels.hess_structure!(m,hrows,hcols)
-    end
-
-    return (
-        tobj = tobj,
-        tcon = tcon,
-        tgrad = tgrad,
-        tjac = tjac,
-        thess = thess,
-        tjacs = tjacs,
-        thesss = thesss,
-    )
+for name in NAMES
+    @eval export $name
 end
 
-function parse_log(file)
-    open(file) do f
-        t1=nothing
-        t2=nothing
-        while !eof(f)
-            s = readline(f)
-            if occursin("Total CPU secs in NLP function evaluations", s)
-                t1 = parse(Float64,split(s, "=")[2])
-            elseif occursin("Total CPU secs in IPOPT (w/o function evaluations)", s)
-                t2 = parse(Float64,split(s, "=")[2])
-            end
-        end
-        return t1,t2
-    end
+function __init__()
+    silence()
 end
 
-for name in filter(names(ExaModelsExamples; all=true)) do x
-        endswith(string(x), "model")
-    end
-    @eval export $name
+function __compile__()
+    for name in NAMES
+        @eval begin
+            m = $name()
+            ipopt(m; print_level=0)
+        end
+    end
 end
 
+# SnoopPrecompile.@precompile_all_calls _compile()
+
 end # module ExaModelsExamples
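
With this change the module builds its export list automatically: every `*_model` constructor picked up by `NAMES` is exported, and `ipopt` is reexported from NLPModelsIpopt. A minimal usage sketch, mirroring the `__compile__` loop above (the constructor name and the `print_level` keyword come from this commit; everything else is assumed):

    using ExaModelsExamples

    # build one of the exported example models (default problem size)
    m = distillation_column_model()

    # solve it with the reexported NLPModelsIpopt entry point, as __compile__ does
    result = ipopt(m; print_level = 0)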
18 changes: 12 additions & 6 deletions ExaModelsExamples/src/distillation.jl
@@ -1,4 +1,4 @@
-function distillation_column_model(T, backend = nothing)
+function distillation_column_model(T = 3, backend = nothing)
 
     NT = 30
     FT = 17
@@ -60,10 +60,12 @@ function distillation_column_model(T, backend = nothing)
     ExaModels.constraint(c, L2[t]- u[t] * D - F for t in 0:T)
     ExaModels.constraint(c, yA[t,i] * (1-xA[t,i]) - alpha * xA[t,i] * (1-yA[t,i]) for (t,i) in itr2)
 
-    return ExaModels.ExaModel(c)
+    return ADBenchmarkModel(
+        ExaModels.ExaModel(c)
+    )
 end
 
-function jump_distillation_column_model(T)
+function jump_distillation_column_model(T = 3)
 
     NT = 30
     FT = 17
@@ -126,10 +128,12 @@ function jump_distillation_column_model(T)
         yA[t,i] * (1-xA[t,i]) - alpha * xA[t,i] * (1-yA[t,i]) == 0
     )
 
-    return MathOptNLPModel(m)
+    return ADBenchmarkModel(
+        MathOptNLPModel(m)
+    )
 end
 
-function ampl_distillation_column_model(T)
+function ampl_distillation_column_model(T = 3)
     nlfile = tempname()* ".nl"
 
     py"""
@@ -241,5 +245,7 @@ function ampl_distillation_column_model(T)
     m.write($nlfile)
     """
 
-    return AmplNLReader.AmplModel(nlfile)
+    return ADBenchmarkModel(
+        AmplNLReader.AmplModel(nlfile)
+    )
 end
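
All three distillation constructors (ExaModels, JuMP, and AMPL via PyCall) now take a default horizon `T = 3` and wrap their model in `ADBenchmarkModel`, so the backends can be built and timed the same way. A rough sketch of how they might be compared, assuming `ADBenchmarkModel` (defined in `extras.jl`, not shown in this diff) is accepted by `ipopt` the way `__compile__` assumes:

    for build in (distillation_column_model,
                  jump_distillation_column_model,
                  ampl_distillation_column_model)
        m = build(3)                      # same default horizon T = 3
        @time ipopt(m; print_level = 0)   # crude timing of one solve per backend
    end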