diff --git a/ExaModelsExamples/Project.toml b/ExaModelsExamples/Project.toml index 81ed8cf..dca6f10 100644 --- a/ExaModelsExamples/Project.toml +++ b/ExaModelsExamples/Project.toml @@ -4,8 +4,13 @@ authors = ["Sungho Shin "] version = "0.1.0" [deps] -PowerModels = "c36e90e8-916a-50a6-bd94-075b64ef4655" +AmplNLReader = "77dd3d4c-cb1d-5e09-9340-85030ff7ba66" +BenchmarkTools = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf" +CPUTime = "a9c8d775-2e2e-55fc-8582-045d282d599e" ExaModels = "1037b233-b668-4ce9-9b63-f9f681f55dd2" -Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7" JuMP = "4076af6c-e467-56ae-b986-b466b2749572" -NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e" \ No newline at end of file +MadNLP = "2621e9c9-9eb4-46b1-8089-e8c72242dfb6" +NLPModelsJuMP = "792afdf1-32c1-5681-94e0-d7bf7a5df49e" +PowerModels = "c36e90e8-916a-50a6-bd94-075b64ef4655" +Printf = "de0858da-6303-5e67-8744-51eddeeeb8d7" +PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0" diff --git a/ExaModelsExamples/luksan.nl b/ExaModelsExamples/luksan.nl new file mode 100644 index 0000000..71cc93d --- /dev/null +++ b/ExaModelsExamples/luksan.nl @@ -0,0 +1,5235 @@ +g3 1 1 0 # problem unknown + 100 98 1 0 98 # vars, constraints, objectives, ranges, eqns + 98 1 0 0 0 0 # nonlinear constrs, objs; ccons: lin, nonlin, nd, nzlb + 0 0 # network constraints: nonlinear, linear + 100 100 100 # nonlinear vars in constraints, objectives, both + 0 0 0 1 # linear network variables; functions; arith, flags + 0 0 0 0 0 # discrete variables: binary, integer, nonlinear (b,c,o) + 294 100 # nonzeros in Jacobian, obj. gradient + 0 0 # max name lengths: constraints, variables + 0 0 0 0 0 # common exprs: b,c,o,c1,o1 +C0 +o54 +3 +o2 +n3 +o5 +v1 +n3 +o2 +o41 +o0 +v1 +o2 +n-1 +v2 +o41 +o0 +v1 +v2 +o16 +o2 +v0 +o44 +o0 +v0 +o2 +n-1 +v1 +C1 +o54 +3 +o2 +n3 +o5 +v2 +n3 +o2 +o41 +o0 +v2 +o2 +n-1 +v3 +o41 +o0 +v2 +v3 +o16 +o2 +v1 +o44 +o0 +v1 +o2 +n-1 +v2 +C2 +o54 +3 +o2 +n3 +o5 +v3 +n3 +o2 +o41 +o0 +v3 +o2 +n-1 +v4 +o41 +o0 +v3 +v4 +o16 +o2 +v2 +o44 +o0 +v2 +o2 +n-1 +v3 +C3 +o54 +3 +o2 +n3 +o5 +v4 +n3 +o2 +o41 +o0 +v4 +o2 +n-1 +v5 +o41 +o0 +v4 +v5 +o16 +o2 +v3 +o44 +o0 +v3 +o2 +n-1 +v4 +C4 +o54 +3 +o2 +n3 +o5 +v5 +n3 +o2 +o41 +o0 +v5 +o2 +n-1 +v6 +o41 +o0 +v5 +v6 +o16 +o2 +v4 +o44 +o0 +v4 +o2 +n-1 +v5 +C5 +o54 +3 +o2 +n3 +o5 +v6 +n3 +o2 +o41 +o0 +v6 +o2 +n-1 +v7 +o41 +o0 +v6 +v7 +o16 +o2 +v5 +o44 +o0 +v5 +o2 +n-1 +v6 +C6 +o54 +3 +o2 +n3 +o5 +v7 +n3 +o2 +o41 +o0 +v7 +o2 +n-1 +v8 +o41 +o0 +v7 +v8 +o16 +o2 +v6 +o44 +o0 +v6 +o2 +n-1 +v7 +C7 +o54 +3 +o2 +n3 +o5 +v8 +n3 +o2 +o41 +o0 +v8 +o2 +n-1 +v9 +o41 +o0 +v8 +v9 +o16 +o2 +v7 +o44 +o0 +v7 +o2 +n-1 +v8 +C8 +o54 +3 +o2 +n3 +o5 +v9 +n3 +o2 +o41 +o0 +v9 +o2 +n-1 +v10 +o41 +o0 +v9 +v10 +o16 +o2 +v8 +o44 +o0 +v8 +o2 +n-1 +v9 +C9 +o54 +3 +o2 +n3 +o5 +v10 +n3 +o2 +o41 +o0 +v10 +o2 +n-1 +v11 +o41 +o0 +v10 +v11 +o16 +o2 +v9 +o44 +o0 +v9 +o2 +n-1 +v10 +C10 +o54 +3 +o2 +n3 +o5 +v11 +n3 +o2 +o41 +o0 +v11 +o2 +n-1 +v12 +o41 +o0 +v11 +v12 +o16 +o2 +v10 +o44 +o0 +v10 +o2 +n-1 +v11 +C11 +o54 +3 +o2 +n3 +o5 +v12 +n3 +o2 +o41 +o0 +v12 +o2 +n-1 +v13 +o41 +o0 +v12 +v13 +o16 +o2 +v11 +o44 +o0 +v11 +o2 +n-1 +v12 +C12 +o54 +3 +o2 +n3 +o5 +v13 +n3 +o2 +o41 +o0 +v13 +o2 +n-1 +v14 +o41 +o0 +v13 +v14 +o16 +o2 +v12 +o44 +o0 +v12 +o2 +n-1 +v13 +C13 +o54 +3 +o2 +n3 +o5 +v14 +n3 +o2 +o41 +o0 +v14 +o2 +n-1 +v15 +o41 +o0 +v14 +v15 +o16 +o2 +v13 +o44 +o0 +v13 +o2 +n-1 +v14 +C14 +o54 +3 +o2 +n3 +o5 +v15 +n3 +o2 +o41 +o0 +v15 +o2 +n-1 +v16 +o41 +o0 +v15 +v16 +o16 +o2 +v14 +o44 +o0 +v14 +o2 +n-1 +v15 +C15 +o54 +3 +o2 +n3 +o5 +v16 +n3 +o2 +o41 +o0 +v16 +o2 
+n-1 +v17 +o41 +o0 +v16 +v17 +o16 +o2 +v15 +o44 +o0 +v15 +o2 +n-1 +v16 +C16 +o54 +3 +o2 +n3 +o5 +v17 +n3 +o2 +o41 +o0 +v17 +o2 +n-1 +v18 +o41 +o0 +v17 +v18 +o16 +o2 +v16 +o44 +o0 +v16 +o2 +n-1 +v17 +C17 +o54 +3 +o2 +n3 +o5 +v18 +n3 +o2 +o41 +o0 +v18 +o2 +n-1 +v19 +o41 +o0 +v18 +v19 +o16 +o2 +v17 +o44 +o0 +v17 +o2 +n-1 +v18 +C18 +o54 +3 +o2 +n3 +o5 +v19 +n3 +o2 +o41 +o0 +v19 +o2 +n-1 +v20 +o41 +o0 +v19 +v20 +o16 +o2 +v18 +o44 +o0 +v18 +o2 +n-1 +v19 +C19 +o54 +3 +o2 +n3 +o5 +v20 +n3 +o2 +o41 +o0 +v20 +o2 +n-1 +v21 +o41 +o0 +v20 +v21 +o16 +o2 +v19 +o44 +o0 +v19 +o2 +n-1 +v20 +C20 +o54 +3 +o2 +n3 +o5 +v21 +n3 +o2 +o41 +o0 +v21 +o2 +n-1 +v22 +o41 +o0 +v21 +v22 +o16 +o2 +v20 +o44 +o0 +v20 +o2 +n-1 +v21 +C21 +o54 +3 +o2 +n3 +o5 +v22 +n3 +o2 +o41 +o0 +v22 +o2 +n-1 +v23 +o41 +o0 +v22 +v23 +o16 +o2 +v21 +o44 +o0 +v21 +o2 +n-1 +v22 +C22 +o54 +3 +o2 +n3 +o5 +v23 +n3 +o2 +o41 +o0 +v23 +o2 +n-1 +v24 +o41 +o0 +v23 +v24 +o16 +o2 +v22 +o44 +o0 +v22 +o2 +n-1 +v23 +C23 +o54 +3 +o2 +n3 +o5 +v24 +n3 +o2 +o41 +o0 +v24 +o2 +n-1 +v25 +o41 +o0 +v24 +v25 +o16 +o2 +v23 +o44 +o0 +v23 +o2 +n-1 +v24 +C24 +o54 +3 +o2 +n3 +o5 +v25 +n3 +o2 +o41 +o0 +v25 +o2 +n-1 +v26 +o41 +o0 +v25 +v26 +o16 +o2 +v24 +o44 +o0 +v24 +o2 +n-1 +v25 +C25 +o54 +3 +o2 +n3 +o5 +v26 +n3 +o2 +o41 +o0 +v26 +o2 +n-1 +v27 +o41 +o0 +v26 +v27 +o16 +o2 +v25 +o44 +o0 +v25 +o2 +n-1 +v26 +C26 +o54 +3 +o2 +n3 +o5 +v27 +n3 +o2 +o41 +o0 +v27 +o2 +n-1 +v28 +o41 +o0 +v27 +v28 +o16 +o2 +v26 +o44 +o0 +v26 +o2 +n-1 +v27 +C27 +o54 +3 +o2 +n3 +o5 +v28 +n3 +o2 +o41 +o0 +v28 +o2 +n-1 +v29 +o41 +o0 +v28 +v29 +o16 +o2 +v27 +o44 +o0 +v27 +o2 +n-1 +v28 +C28 +o54 +3 +o2 +n3 +o5 +v29 +n3 +o2 +o41 +o0 +v29 +o2 +n-1 +v30 +o41 +o0 +v29 +v30 +o16 +o2 +v28 +o44 +o0 +v28 +o2 +n-1 +v29 +C29 +o54 +3 +o2 +n3 +o5 +v30 +n3 +o2 +o41 +o0 +v30 +o2 +n-1 +v31 +o41 +o0 +v30 +v31 +o16 +o2 +v29 +o44 +o0 +v29 +o2 +n-1 +v30 +C30 +o54 +3 +o2 +n3 +o5 +v31 +n3 +o2 +o41 +o0 +v31 +o2 +n-1 +v32 +o41 +o0 +v31 +v32 +o16 +o2 +v30 +o44 +o0 +v30 +o2 +n-1 +v31 +C31 +o54 +3 +o2 +n3 +o5 +v32 +n3 +o2 +o41 +o0 +v32 +o2 +n-1 +v33 +o41 +o0 +v32 +v33 +o16 +o2 +v31 +o44 +o0 +v31 +o2 +n-1 +v32 +C32 +o54 +3 +o2 +n3 +o5 +v33 +n3 +o2 +o41 +o0 +v33 +o2 +n-1 +v34 +o41 +o0 +v33 +v34 +o16 +o2 +v32 +o44 +o0 +v32 +o2 +n-1 +v33 +C33 +o54 +3 +o2 +n3 +o5 +v34 +n3 +o2 +o41 +o0 +v34 +o2 +n-1 +v35 +o41 +o0 +v34 +v35 +o16 +o2 +v33 +o44 +o0 +v33 +o2 +n-1 +v34 +C34 +o54 +3 +o2 +n3 +o5 +v35 +n3 +o2 +o41 +o0 +v35 +o2 +n-1 +v36 +o41 +o0 +v35 +v36 +o16 +o2 +v34 +o44 +o0 +v34 +o2 +n-1 +v35 +C35 +o54 +3 +o2 +n3 +o5 +v36 +n3 +o2 +o41 +o0 +v36 +o2 +n-1 +v37 +o41 +o0 +v36 +v37 +o16 +o2 +v35 +o44 +o0 +v35 +o2 +n-1 +v36 +C36 +o54 +3 +o2 +n3 +o5 +v37 +n3 +o2 +o41 +o0 +v37 +o2 +n-1 +v38 +o41 +o0 +v37 +v38 +o16 +o2 +v36 +o44 +o0 +v36 +o2 +n-1 +v37 +C37 +o54 +3 +o2 +n3 +o5 +v38 +n3 +o2 +o41 +o0 +v38 +o2 +n-1 +v39 +o41 +o0 +v38 +v39 +o16 +o2 +v37 +o44 +o0 +v37 +o2 +n-1 +v38 +C38 +o54 +3 +o2 +n3 +o5 +v39 +n3 +o2 +o41 +o0 +v39 +o2 +n-1 +v40 +o41 +o0 +v39 +v40 +o16 +o2 +v38 +o44 +o0 +v38 +o2 +n-1 +v39 +C39 +o54 +3 +o2 +n3 +o5 +v40 +n3 +o2 +o41 +o0 +v40 +o2 +n-1 +v41 +o41 +o0 +v40 +v41 +o16 +o2 +v39 +o44 +o0 +v39 +o2 +n-1 +v40 +C40 +o54 +3 +o2 +n3 +o5 +v41 +n3 +o2 +o41 +o0 +v41 +o2 +n-1 +v42 +o41 +o0 +v41 +v42 +o16 +o2 +v40 +o44 +o0 +v40 +o2 +n-1 +v41 +C41 +o54 +3 +o2 +n3 +o5 +v42 +n3 +o2 +o41 +o0 +v42 +o2 +n-1 +v43 +o41 +o0 +v42 +v43 +o16 +o2 +v41 +o44 +o0 +v41 +o2 +n-1 +v42 +C42 +o54 +3 +o2 +n3 +o5 +v43 +n3 +o2 +o41 +o0 +v43 +o2 +n-1 +v44 +o41 +o0 +v43 +v44 +o16 +o2 +v42 +o44 +o0 +v42 +o2 +n-1 +v43 +C43 +o54 +3 +o2 +n3 +o5 +v44 +n3 +o2 +o41 +o0 +v44 
+o2 +n-1 +v45 +o41 +o0 +v44 +v45 +o16 +o2 +v43 +o44 +o0 +v43 +o2 +n-1 +v44 +C44 +o54 +3 +o2 +n3 +o5 +v45 +n3 +o2 +o41 +o0 +v45 +o2 +n-1 +v46 +o41 +o0 +v45 +v46 +o16 +o2 +v44 +o44 +o0 +v44 +o2 +n-1 +v45 +C45 +o54 +3 +o2 +n3 +o5 +v46 +n3 +o2 +o41 +o0 +v46 +o2 +n-1 +v47 +o41 +o0 +v46 +v47 +o16 +o2 +v45 +o44 +o0 +v45 +o2 +n-1 +v46 +C46 +o54 +3 +o2 +n3 +o5 +v47 +n3 +o2 +o41 +o0 +v47 +o2 +n-1 +v48 +o41 +o0 +v47 +v48 +o16 +o2 +v46 +o44 +o0 +v46 +o2 +n-1 +v47 +C47 +o54 +3 +o2 +n3 +o5 +v48 +n3 +o2 +o41 +o0 +v48 +o2 +n-1 +v49 +o41 +o0 +v48 +v49 +o16 +o2 +v47 +o44 +o0 +v47 +o2 +n-1 +v48 +C48 +o54 +3 +o2 +n3 +o5 +v49 +n3 +o2 +o41 +o0 +v49 +o2 +n-1 +v50 +o41 +o0 +v49 +v50 +o16 +o2 +v48 +o44 +o0 +v48 +o2 +n-1 +v49 +C49 +o54 +3 +o2 +n3 +o5 +v50 +n3 +o2 +o41 +o0 +v50 +o2 +n-1 +v51 +o41 +o0 +v50 +v51 +o16 +o2 +v49 +o44 +o0 +v49 +o2 +n-1 +v50 +C50 +o54 +3 +o2 +n3 +o5 +v51 +n3 +o2 +o41 +o0 +v51 +o2 +n-1 +v52 +o41 +o0 +v51 +v52 +o16 +o2 +v50 +o44 +o0 +v50 +o2 +n-1 +v51 +C51 +o54 +3 +o2 +n3 +o5 +v52 +n3 +o2 +o41 +o0 +v52 +o2 +n-1 +v53 +o41 +o0 +v52 +v53 +o16 +o2 +v51 +o44 +o0 +v51 +o2 +n-1 +v52 +C52 +o54 +3 +o2 +n3 +o5 +v53 +n3 +o2 +o41 +o0 +v53 +o2 +n-1 +v54 +o41 +o0 +v53 +v54 +o16 +o2 +v52 +o44 +o0 +v52 +o2 +n-1 +v53 +C53 +o54 +3 +o2 +n3 +o5 +v54 +n3 +o2 +o41 +o0 +v54 +o2 +n-1 +v55 +o41 +o0 +v54 +v55 +o16 +o2 +v53 +o44 +o0 +v53 +o2 +n-1 +v54 +C54 +o54 +3 +o2 +n3 +o5 +v55 +n3 +o2 +o41 +o0 +v55 +o2 +n-1 +v56 +o41 +o0 +v55 +v56 +o16 +o2 +v54 +o44 +o0 +v54 +o2 +n-1 +v55 +C55 +o54 +3 +o2 +n3 +o5 +v56 +n3 +o2 +o41 +o0 +v56 +o2 +n-1 +v57 +o41 +o0 +v56 +v57 +o16 +o2 +v55 +o44 +o0 +v55 +o2 +n-1 +v56 +C56 +o54 +3 +o2 +n3 +o5 +v57 +n3 +o2 +o41 +o0 +v57 +o2 +n-1 +v58 +o41 +o0 +v57 +v58 +o16 +o2 +v56 +o44 +o0 +v56 +o2 +n-1 +v57 +C57 +o54 +3 +o2 +n3 +o5 +v58 +n3 +o2 +o41 +o0 +v58 +o2 +n-1 +v59 +o41 +o0 +v58 +v59 +o16 +o2 +v57 +o44 +o0 +v57 +o2 +n-1 +v58 +C58 +o54 +3 +o2 +n3 +o5 +v59 +n3 +o2 +o41 +o0 +v59 +o2 +n-1 +v60 +o41 +o0 +v59 +v60 +o16 +o2 +v58 +o44 +o0 +v58 +o2 +n-1 +v59 +C59 +o54 +3 +o2 +n3 +o5 +v60 +n3 +o2 +o41 +o0 +v60 +o2 +n-1 +v61 +o41 +o0 +v60 +v61 +o16 +o2 +v59 +o44 +o0 +v59 +o2 +n-1 +v60 +C60 +o54 +3 +o2 +n3 +o5 +v61 +n3 +o2 +o41 +o0 +v61 +o2 +n-1 +v62 +o41 +o0 +v61 +v62 +o16 +o2 +v60 +o44 +o0 +v60 +o2 +n-1 +v61 +C61 +o54 +3 +o2 +n3 +o5 +v62 +n3 +o2 +o41 +o0 +v62 +o2 +n-1 +v63 +o41 +o0 +v62 +v63 +o16 +o2 +v61 +o44 +o0 +v61 +o2 +n-1 +v62 +C62 +o54 +3 +o2 +n3 +o5 +v63 +n3 +o2 +o41 +o0 +v63 +o2 +n-1 +v64 +o41 +o0 +v63 +v64 +o16 +o2 +v62 +o44 +o0 +v62 +o2 +n-1 +v63 +C63 +o54 +3 +o2 +n3 +o5 +v64 +n3 +o2 +o41 +o0 +v64 +o2 +n-1 +v65 +o41 +o0 +v64 +v65 +o16 +o2 +v63 +o44 +o0 +v63 +o2 +n-1 +v64 +C64 +o54 +3 +o2 +n3 +o5 +v65 +n3 +o2 +o41 +o0 +v65 +o2 +n-1 +v66 +o41 +o0 +v65 +v66 +o16 +o2 +v64 +o44 +o0 +v64 +o2 +n-1 +v65 +C65 +o54 +3 +o2 +n3 +o5 +v66 +n3 +o2 +o41 +o0 +v66 +o2 +n-1 +v67 +o41 +o0 +v66 +v67 +o16 +o2 +v65 +o44 +o0 +v65 +o2 +n-1 +v66 +C66 +o54 +3 +o2 +n3 +o5 +v67 +n3 +o2 +o41 +o0 +v67 +o2 +n-1 +v68 +o41 +o0 +v67 +v68 +o16 +o2 +v66 +o44 +o0 +v66 +o2 +n-1 +v67 +C67 +o54 +3 +o2 +n3 +o5 +v68 +n3 +o2 +o41 +o0 +v68 +o2 +n-1 +v69 +o41 +o0 +v68 +v69 +o16 +o2 +v67 +o44 +o0 +v67 +o2 +n-1 +v68 +C68 +o54 +3 +o2 +n3 +o5 +v69 +n3 +o2 +o41 +o0 +v69 +o2 +n-1 +v70 +o41 +o0 +v69 +v70 +o16 +o2 +v68 +o44 +o0 +v68 +o2 +n-1 +v69 +C69 +o54 +3 +o2 +n3 +o5 +v70 +n3 +o2 +o41 +o0 +v70 +o2 +n-1 +v71 +o41 +o0 +v70 +v71 +o16 +o2 +v69 +o44 +o0 +v69 +o2 +n-1 +v70 +C70 +o54 +3 +o2 +n3 +o5 +v71 +n3 +o2 +o41 +o0 +v71 +o2 +n-1 +v72 +o41 +o0 +v71 +v72 +o16 +o2 +v70 +o44 +o0 +v70 +o2 +n-1 +v71 +C71 +o54 +3 +o2 +n3 +o5 +v72 +n3 +o2 +o41 +o0 
+v72 +o2 +n-1 +v73 +o41 +o0 +v72 +v73 +o16 +o2 +v71 +o44 +o0 +v71 +o2 +n-1 +v72 +C72 +o54 +3 +o2 +n3 +o5 +v73 +n3 +o2 +o41 +o0 +v73 +o2 +n-1 +v74 +o41 +o0 +v73 +v74 +o16 +o2 +v72 +o44 +o0 +v72 +o2 +n-1 +v73 +C73 +o54 +3 +o2 +n3 +o5 +v74 +n3 +o2 +o41 +o0 +v74 +o2 +n-1 +v75 +o41 +o0 +v74 +v75 +o16 +o2 +v73 +o44 +o0 +v73 +o2 +n-1 +v74 +C74 +o54 +3 +o2 +n3 +o5 +v75 +n3 +o2 +o41 +o0 +v75 +o2 +n-1 +v76 +o41 +o0 +v75 +v76 +o16 +o2 +v74 +o44 +o0 +v74 +o2 +n-1 +v75 +C75 +o54 +3 +o2 +n3 +o5 +v76 +n3 +o2 +o41 +o0 +v76 +o2 +n-1 +v77 +o41 +o0 +v76 +v77 +o16 +o2 +v75 +o44 +o0 +v75 +o2 +n-1 +v76 +C76 +o54 +3 +o2 +n3 +o5 +v77 +n3 +o2 +o41 +o0 +v77 +o2 +n-1 +v78 +o41 +o0 +v77 +v78 +o16 +o2 +v76 +o44 +o0 +v76 +o2 +n-1 +v77 +C77 +o54 +3 +o2 +n3 +o5 +v78 +n3 +o2 +o41 +o0 +v78 +o2 +n-1 +v79 +o41 +o0 +v78 +v79 +o16 +o2 +v77 +o44 +o0 +v77 +o2 +n-1 +v78 +C78 +o54 +3 +o2 +n3 +o5 +v79 +n3 +o2 +o41 +o0 +v79 +o2 +n-1 +v80 +o41 +o0 +v79 +v80 +o16 +o2 +v78 +o44 +o0 +v78 +o2 +n-1 +v79 +C79 +o54 +3 +o2 +n3 +o5 +v80 +n3 +o2 +o41 +o0 +v80 +o2 +n-1 +v81 +o41 +o0 +v80 +v81 +o16 +o2 +v79 +o44 +o0 +v79 +o2 +n-1 +v80 +C80 +o54 +3 +o2 +n3 +o5 +v81 +n3 +o2 +o41 +o0 +v81 +o2 +n-1 +v82 +o41 +o0 +v81 +v82 +o16 +o2 +v80 +o44 +o0 +v80 +o2 +n-1 +v81 +C81 +o54 +3 +o2 +n3 +o5 +v82 +n3 +o2 +o41 +o0 +v82 +o2 +n-1 +v83 +o41 +o0 +v82 +v83 +o16 +o2 +v81 +o44 +o0 +v81 +o2 +n-1 +v82 +C82 +o54 +3 +o2 +n3 +o5 +v83 +n3 +o2 +o41 +o0 +v83 +o2 +n-1 +v84 +o41 +o0 +v83 +v84 +o16 +o2 +v82 +o44 +o0 +v82 +o2 +n-1 +v83 +C83 +o54 +3 +o2 +n3 +o5 +v84 +n3 +o2 +o41 +o0 +v84 +o2 +n-1 +v85 +o41 +o0 +v84 +v85 +o16 +o2 +v83 +o44 +o0 +v83 +o2 +n-1 +v84 +C84 +o54 +3 +o2 +n3 +o5 +v85 +n3 +o2 +o41 +o0 +v85 +o2 +n-1 +v86 +o41 +o0 +v85 +v86 +o16 +o2 +v84 +o44 +o0 +v84 +o2 +n-1 +v85 +C85 +o54 +3 +o2 +n3 +o5 +v86 +n3 +o2 +o41 +o0 +v86 +o2 +n-1 +v87 +o41 +o0 +v86 +v87 +o16 +o2 +v85 +o44 +o0 +v85 +o2 +n-1 +v86 +C86 +o54 +3 +o2 +n3 +o5 +v87 +n3 +o2 +o41 +o0 +v87 +o2 +n-1 +v88 +o41 +o0 +v87 +v88 +o16 +o2 +v86 +o44 +o0 +v86 +o2 +n-1 +v87 +C87 +o54 +3 +o2 +n3 +o5 +v88 +n3 +o2 +o41 +o0 +v88 +o2 +n-1 +v89 +o41 +o0 +v88 +v89 +o16 +o2 +v87 +o44 +o0 +v87 +o2 +n-1 +v88 +C88 +o54 +3 +o2 +n3 +o5 +v89 +n3 +o2 +o41 +o0 +v89 +o2 +n-1 +v90 +o41 +o0 +v89 +v90 +o16 +o2 +v88 +o44 +o0 +v88 +o2 +n-1 +v89 +C89 +o54 +3 +o2 +n3 +o5 +v90 +n3 +o2 +o41 +o0 +v90 +o2 +n-1 +v91 +o41 +o0 +v90 +v91 +o16 +o2 +v89 +o44 +o0 +v89 +o2 +n-1 +v90 +C90 +o54 +3 +o2 +n3 +o5 +v91 +n3 +o2 +o41 +o0 +v91 +o2 +n-1 +v92 +o41 +o0 +v91 +v92 +o16 +o2 +v90 +o44 +o0 +v90 +o2 +n-1 +v91 +C91 +o54 +3 +o2 +n3 +o5 +v92 +n3 +o2 +o41 +o0 +v92 +o2 +n-1 +v93 +o41 +o0 +v92 +v93 +o16 +o2 +v91 +o44 +o0 +v91 +o2 +n-1 +v92 +C92 +o54 +3 +o2 +n3 +o5 +v93 +n3 +o2 +o41 +o0 +v93 +o2 +n-1 +v94 +o41 +o0 +v93 +v94 +o16 +o2 +v92 +o44 +o0 +v92 +o2 +n-1 +v93 +C93 +o54 +3 +o2 +n3 +o5 +v94 +n3 +o2 +o41 +o0 +v94 +o2 +n-1 +v95 +o41 +o0 +v94 +v95 +o16 +o2 +v93 +o44 +o0 +v93 +o2 +n-1 +v94 +C94 +o54 +3 +o2 +n3 +o5 +v95 +n3 +o2 +o41 +o0 +v95 +o2 +n-1 +v96 +o41 +o0 +v95 +v96 +o16 +o2 +v94 +o44 +o0 +v94 +o2 +n-1 +v95 +C95 +o54 +3 +o2 +n3 +o5 +v96 +n3 +o2 +o41 +o0 +v96 +o2 +n-1 +v97 +o41 +o0 +v96 +v97 +o16 +o2 +v95 +o44 +o0 +v95 +o2 +n-1 +v96 +C96 +o54 +3 +o2 +n3 +o5 +v97 +n3 +o2 +o41 +o0 +v97 +o2 +n-1 +v98 +o41 +o0 +v97 +v98 +o16 +o2 +v96 +o44 +o0 +v96 +o2 +n-1 +v97 +C97 +o54 +3 +o2 +n3 +o5 +v98 +n3 +o2 +o41 +o0 +v98 +o2 +n-1 +v99 +o41 +o0 +v98 +v99 +o16 +o2 +v97 +o44 +o0 +v97 +o2 +n-1 +v98 +O0 0 +o54 +198 +o2 +n100 +o5 +o0 +o2 +n-1 +v1 +o5 +v0 +n2 +n2 +o5 +o0 +v0 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v2 +o5 +v1 +n2 +n2 +o5 +o0 +v1 +n-1 +n2 +o2 +n100 +o5 +o0 
+o2 +n-1 +v3 +o5 +v2 +n2 +n2 +o5 +o0 +v2 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v4 +o5 +v3 +n2 +n2 +o5 +o0 +v3 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v5 +o5 +v4 +n2 +n2 +o5 +o0 +v4 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v6 +o5 +v5 +n2 +n2 +o5 +o0 +v5 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v7 +o5 +v6 +n2 +n2 +o5 +o0 +v6 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v8 +o5 +v7 +n2 +n2 +o5 +o0 +v7 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v9 +o5 +v8 +n2 +n2 +o5 +o0 +v8 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v10 +o5 +v9 +n2 +n2 +o5 +o0 +v9 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v11 +o5 +v10 +n2 +n2 +o5 +o0 +v10 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v12 +o5 +v11 +n2 +n2 +o5 +o0 +v11 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v13 +o5 +v12 +n2 +n2 +o5 +o0 +v12 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v14 +o5 +v13 +n2 +n2 +o5 +o0 +v13 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v15 +o5 +v14 +n2 +n2 +o5 +o0 +v14 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v16 +o5 +v15 +n2 +n2 +o5 +o0 +v15 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v17 +o5 +v16 +n2 +n2 +o5 +o0 +v16 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v18 +o5 +v17 +n2 +n2 +o5 +o0 +v17 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v19 +o5 +v18 +n2 +n2 +o5 +o0 +v18 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v20 +o5 +v19 +n2 +n2 +o5 +o0 +v19 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v21 +o5 +v20 +n2 +n2 +o5 +o0 +v20 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v22 +o5 +v21 +n2 +n2 +o5 +o0 +v21 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v23 +o5 +v22 +n2 +n2 +o5 +o0 +v22 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v24 +o5 +v23 +n2 +n2 +o5 +o0 +v23 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v25 +o5 +v24 +n2 +n2 +o5 +o0 +v24 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v26 +o5 +v25 +n2 +n2 +o5 +o0 +v25 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v27 +o5 +v26 +n2 +n2 +o5 +o0 +v26 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v28 +o5 +v27 +n2 +n2 +o5 +o0 +v27 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v29 +o5 +v28 +n2 +n2 +o5 +o0 +v28 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v30 +o5 +v29 +n2 +n2 +o5 +o0 +v29 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v31 +o5 +v30 +n2 +n2 +o5 +o0 +v30 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v32 +o5 +v31 +n2 +n2 +o5 +o0 +v31 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v33 +o5 +v32 +n2 +n2 +o5 +o0 +v32 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v34 +o5 +v33 +n2 +n2 +o5 +o0 +v33 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v35 +o5 +v34 +n2 +n2 +o5 +o0 +v34 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v36 +o5 +v35 +n2 +n2 +o5 +o0 +v35 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v37 +o5 +v36 +n2 +n2 +o5 +o0 +v36 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v38 +o5 +v37 +n2 +n2 +o5 +o0 +v37 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v39 +o5 +v38 +n2 +n2 +o5 +o0 +v38 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v40 +o5 +v39 +n2 +n2 +o5 +o0 +v39 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v41 +o5 +v40 +n2 +n2 +o5 +o0 +v40 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v42 +o5 +v41 +n2 +n2 +o5 +o0 +v41 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v43 +o5 +v42 +n2 +n2 +o5 +o0 +v42 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v44 +o5 +v43 +n2 +n2 +o5 +o0 +v43 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v45 +o5 +v44 +n2 +n2 +o5 +o0 +v44 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v46 +o5 +v45 +n2 +n2 +o5 +o0 +v45 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v47 +o5 +v46 +n2 +n2 +o5 +o0 +v46 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v48 +o5 +v47 +n2 +n2 +o5 +o0 +v47 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v49 +o5 +v48 +n2 +n2 +o5 +o0 +v48 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v50 +o5 +v49 +n2 +n2 +o5 +o0 +v49 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v51 +o5 +v50 +n2 +n2 +o5 +o0 +v50 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v52 +o5 +v51 +n2 +n2 +o5 +o0 +v51 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v53 +o5 +v52 +n2 
+n2 +o5 +o0 +v52 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v54 +o5 +v53 +n2 +n2 +o5 +o0 +v53 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v55 +o5 +v54 +n2 +n2 +o5 +o0 +v54 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v56 +o5 +v55 +n2 +n2 +o5 +o0 +v55 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v57 +o5 +v56 +n2 +n2 +o5 +o0 +v56 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v58 +o5 +v57 +n2 +n2 +o5 +o0 +v57 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v59 +o5 +v58 +n2 +n2 +o5 +o0 +v58 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v60 +o5 +v59 +n2 +n2 +o5 +o0 +v59 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v61 +o5 +v60 +n2 +n2 +o5 +o0 +v60 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v62 +o5 +v61 +n2 +n2 +o5 +o0 +v61 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v63 +o5 +v62 +n2 +n2 +o5 +o0 +v62 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v64 +o5 +v63 +n2 +n2 +o5 +o0 +v63 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v65 +o5 +v64 +n2 +n2 +o5 +o0 +v64 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v66 +o5 +v65 +n2 +n2 +o5 +o0 +v65 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v67 +o5 +v66 +n2 +n2 +o5 +o0 +v66 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v68 +o5 +v67 +n2 +n2 +o5 +o0 +v67 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v69 +o5 +v68 +n2 +n2 +o5 +o0 +v68 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v70 +o5 +v69 +n2 +n2 +o5 +o0 +v69 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v71 +o5 +v70 +n2 +n2 +o5 +o0 +v70 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v72 +o5 +v71 +n2 +n2 +o5 +o0 +v71 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v73 +o5 +v72 +n2 +n2 +o5 +o0 +v72 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v74 +o5 +v73 +n2 +n2 +o5 +o0 +v73 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v75 +o5 +v74 +n2 +n2 +o5 +o0 +v74 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v76 +o5 +v75 +n2 +n2 +o5 +o0 +v75 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v77 +o5 +v76 +n2 +n2 +o5 +o0 +v76 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v78 +o5 +v77 +n2 +n2 +o5 +o0 +v77 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v79 +o5 +v78 +n2 +n2 +o5 +o0 +v78 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v80 +o5 +v79 +n2 +n2 +o5 +o0 +v79 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v81 +o5 +v80 +n2 +n2 +o5 +o0 +v80 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v82 +o5 +v81 +n2 +n2 +o5 +o0 +v81 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v83 +o5 +v82 +n2 +n2 +o5 +o0 +v82 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v84 +o5 +v83 +n2 +n2 +o5 +o0 +v83 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v85 +o5 +v84 +n2 +n2 +o5 +o0 +v84 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v86 +o5 +v85 +n2 +n2 +o5 +o0 +v85 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v87 +o5 +v86 +n2 +n2 +o5 +o0 +v86 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v88 +o5 +v87 +n2 +n2 +o5 +o0 +v87 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v89 +o5 +v88 +n2 +n2 +o5 +o0 +v88 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v90 +o5 +v89 +n2 +n2 +o5 +o0 +v89 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v91 +o5 +v90 +n2 +n2 +o5 +o0 +v90 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v92 +o5 +v91 +n2 +n2 +o5 +o0 +v91 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v93 +o5 +v92 +n2 +n2 +o5 +o0 +v92 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v94 +o5 +v93 +n2 +n2 +o5 +o0 +v93 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v95 +o5 +v94 +n2 +n2 +o5 +o0 +v94 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v96 +o5 +v95 +n2 +n2 +o5 +o0 +v95 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v97 +o5 +v96 +n2 +n2 +o5 +o0 +v96 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v98 +o5 +v97 +n2 +n2 +o5 +o0 +v97 +n-1 +n2 +o2 +n100 +o5 +o0 +o2 +n-1 +v99 +o5 +v98 +n2 +n2 +o5 +o0 +v98 +n-1 +n2 +x100 +0 -1.2 +1 1.0 +2 -1.2 +3 1.0 +4 -1.2 +5 1.0 +6 -1.2 +7 1.0 +8 -1.2 +9 1.0 +10 -1.2 +11 1.0 +12 -1.2 +13 1.0 +14 -1.2 +15 1.0 +16 -1.2 +17 1.0 +18 -1.2 +19 1.0 +20 -1.2 +21 1.0 +22 -1.2 +23 1.0 +24 -1.2 +25 1.0 +26 -1.2 +27 1.0 +28 -1.2 +29 1.0 +30 -1.2 
+31 1.0 +32 -1.2 +33 1.0 +34 -1.2 +35 1.0 +36 -1.2 +37 1.0 +38 -1.2 +39 1.0 +40 -1.2 +41 1.0 +42 -1.2 +43 1.0 +44 -1.2 +45 1.0 +46 -1.2 +47 1.0 +48 -1.2 +49 1.0 +50 -1.2 +51 1.0 +52 -1.2 +53 1.0 +54 -1.2 +55 1.0 +56 -1.2 +57 1.0 +58 -1.2 +59 1.0 +60 -1.2 +61 1.0 +62 -1.2 +63 1.0 +64 -1.2 +65 1.0 +66 -1.2 +67 1.0 +68 -1.2 +69 1.0 +70 -1.2 +71 1.0 +72 -1.2 +73 1.0 +74 -1.2 +75 1.0 +76 -1.2 +77 1.0 +78 -1.2 +79 1.0 +80 -1.2 +81 1.0 +82 -1.2 +83 1.0 +84 -1.2 +85 1.0 +86 -1.2 +87 1.0 +88 -1.2 +89 1.0 +90 -1.2 +91 1.0 +92 -1.2 +93 1.0 +94 -1.2 +95 1.0 +96 -1.2 +97 1.0 +98 -1.2 +99 1.0 +r +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +4 8 +b +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +3 +k99 +1 +3 +6 +9 +12 +15 +18 +21 +24 +27 +30 +33 +36 +39 +42 +45 +48 +51 +54 +57 +60 +63 +66 +69 +72 +75 +78 +81 +84 +87 +90 +93 +96 +99 +102 +105 +108 +111 +114 +117 +120 +123 +126 +129 +132 +135 +138 +141 +144 +147 +150 +153 +156 +159 +162 +165 +168 +171 +174 +177 +180 +183 +186 +189 +192 +195 +198 +201 +204 +207 +210 +213 +216 +219 +222 +225 +228 +231 +234 +237 +240 +243 +246 +249 +252 +255 +258 +261 +264 +267 +270 +273 +276 +279 +282 +285 +288 +291 +293 +J0 3 +0 0 +1 4 +2 2 +J1 3 +1 0 +2 4 +3 2 +J2 3 +2 0 +3 4 +4 2 +J3 3 +3 0 +4 4 +5 2 +J4 3 +4 0 +5 4 +6 2 +J5 3 +5 0 +6 4 +7 2 +J6 3 +6 0 +7 4 +8 2 +J7 3 +7 0 +8 4 +9 2 +J8 3 +8 0 +9 4 +10 2 +J9 3 +9 0 +10 4 +11 2 +J10 3 +10 0 +11 4 +12 2 +J11 3 +11 0 +12 4 +13 2 +J12 3 +12 0 +13 4 +14 2 +J13 3 +13 0 +14 4 +15 2 +J14 3 +14 0 +15 4 +16 2 +J15 3 +15 0 +16 4 +17 2 +J16 3 +16 0 +17 4 +18 2 +J17 3 +17 0 +18 4 +19 2 +J18 3 +18 0 +19 4 +20 2 +J19 3 +19 0 +20 4 +21 2 +J20 3 +20 0 +21 4 +22 2 +J21 3 +21 0 +22 4 +23 2 +J22 3 +22 0 +23 4 +24 2 +J23 3 +23 0 +24 4 +25 2 +J24 3 +24 0 +25 4 +26 2 +J25 3 +25 0 +26 4 +27 2 +J26 3 +26 0 +27 4 +28 2 +J27 3 +27 0 +28 4 +29 2 +J28 3 +28 0 +29 4 +30 2 +J29 3 +29 0 +30 4 +31 2 +J30 3 +30 0 +31 4 +32 2 +J31 3 +31 0 +32 4 +33 2 +J32 3 +32 0 +33 4 +34 2 +J33 3 +33 0 +34 4 +35 2 +J34 3 +34 0 +35 4 +36 2 +J35 3 +35 0 +36 4 +37 2 +J36 3 +36 0 +37 4 +38 2 +J37 3 +37 0 +38 4 +39 2 +J38 3 +38 0 +39 4 +40 2 +J39 3 +39 0 +40 4 +41 2 +J40 3 +40 0 +41 4 +42 2 +J41 3 +41 0 +42 4 +43 2 +J42 3 +42 0 +43 4 +44 2 +J43 3 +43 0 +44 4 +45 2 +J44 3 +44 0 +45 4 +46 2 +J45 3 +45 0 +46 4 +47 2 +J46 3 +46 0 +47 4 +48 2 +J47 3 +47 0 +48 4 +49 2 +J48 3 +48 0 +49 4 +50 2 +J49 3 +49 0 +50 4 +51 2 +J50 3 +50 0 +51 4 +52 2 +J51 3 +51 0 +52 4 +53 2 +J52 3 +52 0 +53 4 +54 2 +J53 3 +53 0 +54 4 +55 2 +J54 3 +54 0 +55 4 +56 2 +J55 3 +55 0 +56 4 +57 2 +J56 3 +56 0 +57 4 +58 2 +J57 3 +57 0 +58 4 +59 2 +J58 3 +58 0 +59 4 +60 2 +J59 3 +59 0 +60 4 +61 2 +J60 3 +60 0 +61 4 +62 2 +J61 3 +61 0 +62 4 +63 2 +J62 3 +62 0 +63 4 +64 2 +J63 3 +63 0 +64 4 +65 2 +J64 3 +64 0 +65 4 +66 2 +J65 3 +65 0 +66 4 +67 2 +J66 3 +66 0 +67 4 +68 2 +J67 3 +67 0 +68 4 +69 2 +J68 3 +68 0 +69 4 +70 2 +J69 3 +69 0 +70 4 +71 
2 +J70 3 +70 0 +71 4 +72 2 +J71 3 +71 0 +72 4 +73 2 +J72 3 +72 0 +73 4 +74 2 +J73 3 +73 0 +74 4 +75 2 +J74 3 +74 0 +75 4 +76 2 +J75 3 +75 0 +76 4 +77 2 +J76 3 +76 0 +77 4 +78 2 +J77 3 +77 0 +78 4 +79 2 +J78 3 +78 0 +79 4 +80 2 +J79 3 +79 0 +80 4 +81 2 +J80 3 +80 0 +81 4 +82 2 +J81 3 +81 0 +82 4 +83 2 +J82 3 +82 0 +83 4 +84 2 +J83 3 +83 0 +84 4 +85 2 +J84 3 +84 0 +85 4 +86 2 +J85 3 +85 0 +86 4 +87 2 +J86 3 +86 0 +87 4 +88 2 +J87 3 +87 0 +88 4 +89 2 +J88 3 +88 0 +89 4 +90 2 +J89 3 +89 0 +90 4 +91 2 +J90 3 +90 0 +91 4 +92 2 +J91 3 +91 0 +92 4 +93 2 +J92 3 +92 0 +93 4 +94 2 +J93 3 +93 0 +94 4 +95 2 +J94 3 +94 0 +95 4 +96 2 +J95 3 +95 0 +96 4 +97 2 +J96 3 +96 0 +97 4 +98 2 +J97 3 +97 0 +98 4 +99 2 +G0 100 +0 0 +1 0 +2 0 +3 0 +4 0 +5 0 +6 0 +7 0 +8 0 +9 0 +10 0 +11 0 +12 0 +13 0 +14 0 +15 0 +16 0 +17 0 +18 0 +19 0 +20 0 +21 0 +22 0 +23 0 +24 0 +25 0 +26 0 +27 0 +28 0 +29 0 +30 0 +31 0 +32 0 +33 0 +34 0 +35 0 +36 0 +37 0 +38 0 +39 0 +40 0 +41 0 +42 0 +43 0 +44 0 +45 0 +46 0 +47 0 +48 0 +49 0 +50 0 +51 0 +52 0 +53 0 +54 0 +55 0 +56 0 +57 0 +58 0 +59 0 +60 0 +61 0 +62 0 +63 0 +64 0 +65 0 +66 0 +67 0 +68 0 +69 0 +70 0 +71 0 +72 0 +73 0 +74 0 +75 0 +76 0 +77 0 +78 0 +79 0 +80 0 +81 0 +82 0 +83 0 +84 0 +85 0 +86 0 +87 0 +88 0 +89 0 +90 0 +91 0 +92 0 +93 0 +94 0 +95 0 +96 0 +97 0 +98 0 +99 0 diff --git a/ExaModelsExamples/src/ExaModelsExamples.jl b/ExaModelsExamples/src/ExaModelsExamples.jl index 655b821..7ff72da 100644 --- a/ExaModelsExamples/src/ExaModelsExamples.jl +++ b/ExaModelsExamples/src/ExaModelsExamples.jl @@ -3,12 +3,23 @@ module ExaModelsExamples import ExaModels: ExaModels, NLPModels import JuMP, NLPModelsJuMP import PowerModels: PowerModels, silence +import PyCall: @py_str +import MadNLP +import AmplNLReader +import CPUTime: @CPUtime + +silence() include("opf.jl") include("luksanvlcek.jl") include("distillation.jl") include("quadrotor.jl") +function project!(l,x,u; marg = 1e-4) + map!(x,l,x,u) do l,x,u + max(l+marg, min(u-marg,x) ) + end +end function compile_callbacks(m) nvar = m.meta.nvar @@ -27,22 +38,46 @@ function compile_callbacks(m) hrows = similar(m.meta.x0, Int, nnzh) hcols = similar(m.meta.x0, Int, nnzh) - println("Objective evaluation") - @time NLPModels.obj(m,x) - println("Constraints evaluation") - @time NLPModels.cons!(m,x,c) - println("Gradient evaluation") - @time NLPModels.grad!(m,x,g) - println("Jacobian evaluation") - @time NLPModels.jac_coord!(m,x,jac) - println("Hessian evaluation") - @time NLPModels.hess_coord!(m,x,y,hess) - println("Jacobina sparsity evaluation") - @time NLPModels.jac_structure!(m,jrows,jcols) - println("Hessian sparsity evaluation") - @time NLPModels.hess_structure!(m,hrows,hcols) + project!(m.meta.lvar, x, m.meta.uvar) + + # println("Objective evaluation") + tobj = @elapsed for t=1:100 + NLPModels.obj(m,x) + end + # println("Constraints evaluation") + tcon = @elapsed for t=1:100 + NLPModels.cons!(m,x,c) + end + # println("Gradient evaluation") + tgrad = @elapsed for t=1:100 + NLPModels.grad!(m,x,g) + end + # println("Jacobian evaluation") + tjac = @elapsed for t=1:100 + NLPModels.jac_coord!(m,x,jac) + end + # println("Hessian evaluation") + thess = @elapsed for t=1:100 + NLPModels.hess_coord!(m,x,y,hess) + end + # println("Jacobina sparsity evaluation") + tjacs = @elapsed for t=1:100 + NLPModels.jac_structure!(m,jrows,jcols) + end + # println("Hessian sparsity evaluation") + thesss = @elapsed for t=1:100 + NLPModels.hess_structure!(m,hrows,hcols) + end - return + return ( + tobj = tobj, + tcon = tcon, + tgrad = tgrad, + tjac = tjac, + thess = 
thess, + tjacs = tjacs, + thesss = thesss, + ) end diff --git a/ExaModelsExamples/src/distillation.jl b/ExaModelsExamples/src/distillation.jl index 8f3d7e3..8a48a3b 100644 --- a/ExaModelsExamples/src/distillation.jl +++ b/ExaModelsExamples/src/distillation.jl @@ -126,5 +126,120 @@ function jump_distillation_column_model(T) yA[t,i] * (1-xA[t,i]) - alpha * xA[t,i] * (1-yA[t,i]) == 0 ) - return m + return MathOptNLPModel(m) +end + +function ampl_distillation_column_model(T) + nlfile = tempname()* ".nl" + + py""" + from pyomo.environ import * + + # Create a ConcreteModel + m = ConcreteModel() + + # Constants + T = $T + NT = 30 + FT = 17 + Ac = 0.5 + At = 0.25 + Ar = 1.0 + D = 0.2 + F = 0.4 + ybar = .8958 + ubar = 2.0 + alpha= 1.6 + dt = 10/T + xAf = 0.5 + xA0s = {i: 0.5 for i in range(NT+2)} + + # Define the decision variables + m.xA = Var(range(T+1), range(NT+2), initialize=0.5) + m.yA = Var(range(T+1), range(NT+2), initialize=0.5) + m.u = Var(range(T+1), initialize=1.0) + m.V = Var(range(T+1), initialize=1.0) + m.L2 = Var(range(T+1), initialize=1.0) + + # Define the objective function + m.obj = Objective( + expr=sum((m.yA[t, 1] - ybar)**2 for t in range(T+1)) + + sum((m.u[t] - ubar)**2 for t in range(T+1)), + sense=minimize + ) + + # Define the constraints + m.constr1 = ConstraintList() + for i in range(NT+2): + m.constr1.add(expr=m.xA[0, i] - xA0s[i] == 0) + + m.constr2 = ConstraintList() + for t in range(1, T+1): + m.constr2.add( + expr=(m.xA[t, 0] - m.xA[t-1, 0]) / dt - (1/Ac) * (m.yA[t, 1] - m.xA[t, 0]) == 0 + ) + + m.constr3 = ConstraintList() + for t in range(1, T+1): + for i in range(1, FT): + m.constr3.add( + expr=( + (m.xA[t, i] - m.xA[t-1, i]) / dt - + (1/At) * ( + m.u[t] * D * (m.yA[t, i-1] - m.xA[t, i]) - m.V[t] * (m.yA[t, i] - m.yA[t, i+1]) + ) == 0 + ) + ) + + m.constr4 = ConstraintList() + for t in range(1, T+1): + m.constr4.add( + expr=( + (m.xA[t, FT] - m.xA[t-1, FT]) / dt - + (1/At) * ( + F * xAf + m.u[t] * D * m.xA[t, FT-1] - m.L2[t] * m.xA[t, FT] + - m.V[t] * (m.yA[t, FT] - m.yA[t, FT+1]) + ) == 0 + ) + ) + + m.constr5 = ConstraintList() + for t in range(1, T+1): + for i in range(FT+1, NT+1): + m.constr5.add( + expr=( + (m.xA[t, i] - m.xA[t-1, i]) / dt - + (1/At) * ( + m.L2[t] * (m.yA[t, i-1] - m.xA[t, i]) - m.V[t] * (m.yA[t, i] - m.yA[t, i+1]) + ) == 0 + ) + ) + + m.constr6 = ConstraintList() + for t in range(1, T+1): + m.constr6.add( + expr=( + (m.xA[t, NT+1] - m.xA[t-1, NT+1]) / dt - + (1/Ar) * ( + m.L2[t] * m.xA[t, NT] - (F - D) * m.xA[t, NT+1] - m.V[t] * m.yA[t, NT+1] + ) == 0 + ) + ) + + m.constr7 = ConstraintList() + for t in range(T+1): + m.constr7.add(expr=m.V[t] - m.u[t] * D - D == 0) + + m.constr8 = ConstraintList() + for t in range(T+1): + m.constr8.add(expr=m.L2[t] - m.u[t] * D - F == 0) + + m.constr9 = ConstraintList() + for t in range(T+1): + for i in range(NT+2): + m.constr9.add(expr=m.yA[t, i] * (1 - m.xA[t, i]) - alpha * m.xA[t, i] * (1 - m.yA[t, i]) == 0) + m.write($nlfile) + """ + + return AmplNLReader.AmplModel(nlfile) end diff --git a/ExaModelsExamples/src/luksanvlcek.jl b/ExaModelsExamples/src/luksanvlcek.jl index e37bb13..9baeee8 100644 --- a/ExaModelsExamples/src/luksanvlcek.jl +++ b/ExaModelsExamples/src/luksanvlcek.jl @@ -13,10 +13,48 @@ function luksan_vlcek_model(N, backend = nothing) return ExaModels.ExaModel(c) end +function MathOptNLPModel(jm) + JuMP.set_optimizer(jm, MadNLP.Optimizer) + JuMP.set_optimizer_attribute(jm, "max_iter", 0) + JuMP.set_optimizer_attribute(jm, "print_level", MadNLP.ERROR) + JuMP.optimize!(jm) + return 
jm.moi_backend.optimizer.model.nlp +end + function jump_luksan_vlcek_model(N) jm=JuMP.Model() + JuMP.@variable(jm,x[i=1:N], start= mod(i,2)==1 ? -1.2 : 1.) JuMP.@NLconstraint(jm,[i=1:N-2], 3x[i+1]^3+2x[i+2]-5+sin(x[i+1]-x[i+2])sin(x[i+1]+x[i+2])+4x[i+1]-x[i]exp(x[i]-x[i+1])-3==0.) JuMP.@NLobjective(jm,Min,sum(100(x[i-1]^2-x[i])^2+(x[i-1]-1)^2 for i=2:N)) - return jm + + return MathOptNLPModel(jm) +end + + +function ampl_luksan_vlcek_model(N) + nlfile = tempname()* ".nl" + py""" + N = $N + + import pyomo.environ as pyo + + model = pyo.ConcreteModel() + + model.x = pyo.Var(range(1,N+1), initialize=lambda model, i: -1.2 if i % 2 == 1 else 1.0) + + def luksan_constraint_rule(model, i): + return (3*model.x[i+1]**3 + 2*model.x[i+2] - 5 + + pyo.sin(model.x[i+1]-model.x[i+2])*pyo.sin(model.x[i+1]+model.x[i+2]) + + 4*model.x[i+1] - model.x[i]*pyo.exp(model.x[i]-model.x[i+1]) - 3 == 0) + def luksan_objective_rule(model): + return sum(100*(model.x[i-1]**2-model.x[i])**2+(model.x[i-1]-1)**2 for i in range(2,N+1)) + + + model.constraint = pyo.Constraint(range(1,N-1), rule=luksan_constraint_rule) + model.objective = pyo.Objective(rule=luksan_objective_rule, sense=pyo.minimize) + model.write($nlfile) + """ + + return AmplNLReader.AmplModel(nlfile) end diff --git a/ExaModelsExamples/src/opf.jl b/ExaModelsExamples/src/opf.jl index 466e6e3..c6129b1 100644 --- a/ExaModelsExamples/src/opf.jl +++ b/ExaModelsExamples/src/opf.jl @@ -16,14 +16,33 @@ # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-function jump_ac_power_model(file_name) - +function get_power_data_ref(file_name) data = PowerModels.parse_file(file_name) PowerModels.standardize_cost_terms!(data, order=2) PowerModels.calc_thermal_limits!(data) - ref = PowerModels.build_ref(data)[:it][:pm][:nw][0] + return PowerModels.build_ref(data)[:it][:pm][:nw][0] +end +function ampl_data(filename) + data = parse_ac_power_data(filename) + + bus_gens = [Int[] for i=1:length(data.bus)] + bus_arcs = [Int[] for i=1:length(data.bus)] + + for g in data.gen + push!(bus_gens[g.bus], g.i) + end + + for a in data.arc + push!(bus_arcs[a.bus], a.i) + end + + return data, bus_gens, bus_arcs +end +function jump_ac_power_model(file_name) + + ref = get_power_data_ref(file_name) model = JuMP.Model() #JuMP.set_optimizer_attribute(model, "print_level", 0) @@ -101,7 +120,7 @@ function jump_ac_power_model(file_name) JuMP.@constraint(model, p_to^2 + q_to^2 <= branch["rate_a"]^2) end - return model + return MathOptNLPModel(model) end convert_data(data::N, backend) where {names, N <: NamedTuple{names}} = NamedTuple{names}(ExaModels.convert_array(d,backend) for d in data) @@ -354,3 +373,133 @@ function ac_power_model( end + +function ampl_ac_power_model(filename) + nlfile = tempname()* ".nl" + + py""" + import pyomo.environ as pyo + import numpy as np + import math + import julia + from julia import ExaModelsExamples + + ExaModelsExamples.silence() + + data, bus_gens, bus_arcs = ExaModelsExamples.ampl_data($filename) + + nbus = len(data.bus) + ngen = len(data.gen) + narc = len(data.arc) + + m = pyo.ConcreteModel() + + m.va = pyo.Var(range(nbus)) + + m.vm = pyo.Var( + range(nbus), + initialize = np.ones(nbus), + bounds = lambda m,i: (data.vmin[i], data.vmax[i]) + ) + + m.pg = pyo.Var( + range(ngen), + bounds = lambda m,i: (data.pmin[i], data.pmax[i]) + ) + + m.qg = pyo.Var( + range(ngen), + bounds = lambda m,i: (data.qmin[i], data.qmax[i]) + ) + + m.p = pyo.Var( + range(narc), + bounds = lambda m,i: (-data.rate_a[i], data.rate_a[i]) + ) + + m.q = pyo.Var( + range(narc), + bounds = lambda m,i: (-data.rate_a[i], data.rate_a[i]) + ) + + m.obj = pyo.Objective( + expr = sum(g.cost1 * m.pg[g.i-1]**2 + g.cost2 * m.pg[g.i-1] + g.cost3 for g in data.gen), + sense=pyo.minimize + ) + + m.c1 = pyo.ConstraintList() + m.c2 = pyo.ConstraintList() + m.c3 = pyo.ConstraintList() + m.c4 = pyo.ConstraintList() + m.c5 = pyo.ConstraintList() + m.c6 = pyo.ConstraintList() + m.c7 = pyo.ConstraintList() + m.c8 = pyo.ConstraintList() + m.c9 = pyo.ConstraintList() + m.c10= pyo.ConstraintList() + + for i in data.ref_buses: + m.c1.add(expr=m.va[i-1] == 0) + + for (b, amin, amax) in zip(data.branch, data.angmin, data.angmax): + m.c2.add( + expr = + m.p[b.f_idx-1] + - b.c5*m.vm[b.f_bus-1]**2 + - b.c3*(m.vm[b.f_bus-1]*m.vm[b.t_bus-1]*pyo.cos(m.va[b.f_bus-1]-m.va[b.t_bus-1])) + - b.c4*(m.vm[b.f_bus-1]*m.vm[b.t_bus-1]*pyo.sin(m.va[b.f_bus-1]-m.va[b.t_bus-1])) + == 0 + ) + m.c3.add( + expr = + m.q[b.f_idx-1] + + b.c6*m.vm[b.f_bus-1]**2 + + b.c4*(m.vm[b.f_bus-1]*m.vm[b.t_bus-1]*pyo.cos(m.va[b.f_bus-1]-m.va[b.t_bus-1])) + - b.c3*(m.vm[b.f_bus-1]*m.vm[b.t_bus-1]*pyo.sin(m.va[b.f_bus-1]-m.va[b.t_bus-1])) + == 0 + ) + m.c4.add( + m.p[b.t_idx-1] + - b.c7*m.vm[b.t_bus-1]**2 + - b.c1*(m.vm[b.t_bus-1]*m.vm[b.f_bus-1]*pyo.cos(m.va[b.t_bus-1]-m.va[b.f_bus-1])) + - b.c2*(m.vm[b.t_bus-1]*m.vm[b.f_bus-1]*pyo.sin(m.va[b.t_bus-1]-m.va[b.f_bus-1])) + == 0 + ) + m.c5.add( + m.q[b.t_idx-1] + + b.c8*m.vm[b.t_bus-1]**2 + + b.c2*(m.vm[b.t_bus-1]*m.vm[b.f_bus-1]*pyo.cos(m.va[b.t_bus-1]-m.va[b.f_bus-1])) + - 
b.c1*(m.vm[b.t_bus-1]*m.vm[b.f_bus-1]*pyo.sin(m.va[b.t_bus-1]-m.va[b.f_bus-1])) + == 0 + ) + m.c6.add( + (amin, m.va[b.f_bus-1] - m.va[b.t_bus-1], amax) + ) + m.c7.add( + (None, m.p[b.f_idx-1]**2 + m.q[b.f_idx-1]**2 - b.rate_a_sq, 0) + ) + m.c8.add( + (None, m.p[b.t_idx-1]**2 + m.q[b.t_idx-1]**2 - b.rate_a_sq, 0) + ) + + for (b,g,a) in zip(data.bus, bus_gens, bus_arcs): + m.c9.add( + b.pd + + sum(m.p[j-1] for j in a) + - sum(m.pg[j-1] for j in g) + + b.gs * m.vm[b.i-1]**2 + == 0 + ) + m.c10.add( + b.qd + + sum(m.q[j-1] for j in a) + - sum(m.qg[j-1] for j in g) + - b.bs * m.vm[b.i-1]**2 + == 0 + ) + + m.write($nlfile) + """ + + return AmplNLReader.AmplModel(nlfile) +end diff --git a/ExaModelsExamples/src/quadrotor.jl b/ExaModelsExamples/src/quadrotor.jl index 780e13e..dea9cce 100644 --- a/ExaModelsExamples/src/quadrotor.jl +++ b/ExaModelsExamples/src/quadrotor.jl @@ -62,5 +62,132 @@ function jump_quadrotor_model(N) JuMP.@NLconstraint(m,[i=1:N], x[i+1,9] == x[i,9] + (u[i,2]*cos(x[i,7])*tan(x[i,8])+u[i,3]*sin(x[i,7])*tan(x[i,8])+u[i,4])*dt) JuMP.@objective(m,Min, .5*sum(Q[j]*(x[i,j]-d(i,j,N))^2 for i=1:N for j=1:n) + .5*sum(R[j]*(u[i,j]^2) for i=1:N for j=1:p) + .5*sum(Qf[j]*(x[N+1,j]-d(N+1,j,N))^2 for j=1:n)) - return m + return MathOptNLPModel(m) +end + +function ampl_quadrotor_model(N) + nlfile = tempname()* ".nl" + py""" + N = $N + + import math + import pyomo.environ as pyo + + # Constants + n = 9 + p = 4 + nd = 9 + x0 = [0, 0, 0, 0, 0, 0, 0, 0, 0] + dt = 0.01 + Q = [1, 0, 1, 0, 1, 0, 1, 1, 1] + Qf = [x / dt for x in Q] + R = [0.1, 0.1, 0.1, 0.1] + + def d(i, j, N): + if j == 1: + return 1 * math.sin(2 * math.pi / N * i) + elif j == 3: + return 2 * math.sin(4 * math.pi / N * i) + elif j == 5: + return 2 * i / N + else: + return 0 + + + m = pyo.ConcreteModel() + + # Define the decision variables + m.x = pyo.Var(range(1, N+2), range(1, n+1), initialize=0) + m.u = pyo.Var(range(1, N+1), range(1, p+1), initialize=0) + + # Define the constraints + m.constr1 = pyo.ConstraintList() + for i in range(1, n+1): + m.constr1.add(expr=m.x[1, i] == x0[i-1]) + + m.constr2 = pyo.ConstraintList() + for i in range(1, N+1): + m.constr2.add(expr=m.x[i+1, 1] == m.x[i, 1] + m.x[i, 2] * dt) + + m.constr3 = pyo.ConstraintList() + for i in range(1, N+1): + m.constr3.add( + expr=( + m.x[i+1, 2] == m.x[i, 2] + + (m.u[i, 1] * pyo.cos(m.x[i, 7]) * pyo.sin(m.x[i, 8]) * pyo.cos(m.x[i, 9]) + + m.u[i, 1] * pyo.sin(m.x[i, 7]) * pyo.sin(m.x[i, 9])) * dt + ) + ) + + m.constr9 = pyo.ConstraintList() + for i in range(1, N+1): + m.constr9.add(expr=m.x[i+1, 3] == m.x[i, 3] + m.x[i, 4] * dt) + + m.constr4 = pyo.ConstraintList() + for i in range(1, N+1): + m.constr4.add( + expr=( + m.x[i+1, 4] == m.x[i, 4] + + (m.u[i, 1] * pyo.cos(m.x[i, 7]) * pyo.sin(m.x[i, 8]) * pyo.sin(m.x[i, 9]) - + m.u[i, 1] * pyo.sin(m.x[i, 7]) * pyo.cos(m.x[i, 9])) * dt + ) + ) + + m.constr10 = pyo.ConstraintList() + for i in range(1, N+1): + m.constr10.add(expr=m.x[i+1, 5] == m.x[i, 5] + m.x[i, 6] * dt) + + m.constr5 = pyo.ConstraintList() + for i in range(1, N+1): + m.constr5.add( + expr=( + m.x[i+1, 6] == m.x[i, 6] + + (m.u[i, 1] * pyo.cos(m.x[i, 7]) * pyo.cos(m.x[i, 8]) - 9.8) * dt + ) + ) + + m.constr6 = pyo.ConstraintList() + for i in range(1, N+1): + m.constr6.add( + expr=( + m.x[i+1, 7] == m.x[i, 7] + + (m.u[i, 2] * pyo.cos(m.x[i, 7]) / pyo.cos(m.x[i, 8]) + + m.u[i, 3] * pyo.sin(m.x[i, 7]) / pyo.cos(m.x[i, 8])) * dt + ) + ) + + m.constr7 = pyo.ConstraintList() + for i in range(1, N+1): + m.constr7.add( + expr=( + m.x[i+1, 8] == m.x[i, 8] + + 
(-m.u[i, 2] * pyo.sin(m.x[i, 7]) + m.u[i, 3] * pyo.cos(m.x[i, 7])) * dt + ) + ) + + m.constr8 = pyo.ConstraintList() + for i in range(1, N+1): + m.constr8.add( + expr=( + m.x[i+1, 9] == m.x[i, 9] + + (m.u[i, 2] * pyo.cos(m.x[i, 7]) * pyo.tan(m.x[i, 8]) + + m.u[i, 3] * pyo.sin(m.x[i, 7]) * pyo.tan(m.x[i, 8]) + m.u[i, 4]) * dt + ) + ) + + m.obj = pyo.Objective( + expr=( + 0.5 * sum(Q[j-1] * (m.x[i, j] - d(i, j, N))**2 for i in range(1, N+1) for j in range(1, n+1)) + + 0.5 * sum(R[j-1] * (m.u[i, j])**2 for i in range(1, N+1) for j in range(1, p+1)) + + 0.5 * sum(Qf[j-1] * (m.x[N+1, j] - d(N+1, j, N))**2 for j in range(1, n+1)) + ), + sense=pyo.minimize + ) + + + m.write($nlfile) + """ + + return AmplNLReader.AmplModel(nlfile) + end diff --git a/Project.toml b/Project.toml index 2483acb..e40909b 100644 --- a/Project.toml +++ b/Project.toml @@ -21,7 +21,7 @@ ExaModelsSpecialFunctions = "SpecialFunctions" [compat] julia = "1.9" -NLPModels = "0.20" +NLPModels = "0.18, 0.19, 0.20" SolverCore = "0.3" CUDA = "4" KernelAbstractions = "0.9" diff --git a/docs/make.jl b/docs/make.jl index 39f22bd..69c9960 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -1,7 +1,8 @@ using Documenter, ExaModels, Literate const _PAGES = [ - "Introduction" => "index.md", + "Introduction" =>"index.md", + "SIMD Abstraction"=>"simd.md", "Quick Start"=>"guide.md", "API Manual" => "core.md", ] @@ -24,7 +25,7 @@ end makedocs( - sitename = "ExaModels.jl", + sitename = "ExaModels", authors = "Sungho Shin", format = Documenter.LaTeX(platform="docker"), pages = _PAGES @@ -35,7 +36,7 @@ makedocs( modules = [ExaModels], authors = "Sungho Shin", format = Documenter.HTML( - prettyurls = get(ENV, "CI", nothing) == "true", + prettyurls = true, sidebar_sitename = true, collapselevel = 1, ), diff --git a/docs/src/guide.jl b/docs/src/guide.jl index 9b1247a..3512c88 100644 --- a/docs/src/guide.jl +++ b/docs/src/guide.jl @@ -10,7 +10,7 @@ using ExaModels # We set -N = 10000 +N = 10000; # First, we create a `ExaModels.Core`. c = ExaCore() diff --git a/docs/src/index.md b/docs/src/index.md index 8440f80..72a4a89 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -2,11 +2,18 @@ Welcome to the documentation of [ExaModels.jl](https://github.com/sshin23/ExaModels.jl) -!!! warning - This documentation page is under construction. - -# What is ExaModels? -ExaModels.jl implements SIMD abstraction of nonlinear programs and the automatic differentiation of its functions. ExaModels.jl expresses the functions in the form of iterables over statically typed data. This allows highly efficient derivative computations based on reverse-mode automatic differentiation. - -# Bug reports and support -Please report issues and feature requests via the [Github issue tracker](https://github.com/sshin23/ExaModels.jl/issues). +!!! note + This documentation is also available in PDF format: [ExaModels.pdf](ExaModels.pdf). + +## What is ExaModels.jl? +ExaModels.jl is an [algebraic modeling](https://en.wikipedia.org/wiki/Algebraic_modeling_language) and [automatic differentiation](https://en.wikipedia.org/wiki/Automatic_differentiation) tool in [Julia Language](https://julialang.org/), specialized for [SIMD](https://en.wikipedia.org/wiki/Single_instruction,_multiple_data) abstraction of [nonlinear programs](https://en.wikipedia.org/wiki/Nonlinear_programming). 
ExaModels.jl employs what we call [SIMD](https://en.wikipedia.org/wiki/Single_instruction,_multiple_data) abstraction for [nonlinear programs](https://en.wikipedia.org/wiki/Nonlinear_programming) (NLPs), which allows for the preservation of the parallelizable structure within the model equations, facilitating efficient, parallel [reverse-mode automatic differentiation](https://en.wikipedia.org/wiki/Automatic_differentiation) on the [GPU](https://en.wikipedia.org/wiki/Graphics_processing_unit) accelerators. More details about SIMD abstraction can be found [here](/simd). + +## Differences from other tools +ExaModels.jl is different from other algebraic modeling tools, such as [JuMP](https://github.com/jump-dev/JuMP.jl) or [AMPL](https://ampl.com/), in the following ways: +- **Modeling Interface**: ExaModels.jl enforces users to specify the model equations always in the form of `Generator`. This allows ExaModels.jl to preserve the SIMD-compatible structure in the model equations. +- **Performance**: ExaModels.jl compiles (via Julia's compiler) derivative evaluation codes that are specific to each computation pattern, based on reverse-mode automatic differentiation. This makes the speed of derivative evaluation (even on the CPU) significantly faster than other existing tools. +- **Portability**: ExaModels.jl can evaluate derivatives on GPU accelerators. The code is currently only tested for NVIDIA GPUs, but GPU code is implemented mostly based on the portable programming paradigm, [KernelAbstractions.jl](https://github.com/JuliaGPU/KernelAbstractions.jl). In the future, we are interested in supporting Intel, AMD, and Apple GPUs. + +# Supporting ExaModels.jl +- Please report issues and feature requests via the [GitHub issue tracker](https://github.com/sshin/ExaModels.jl/issues). +- Questions are welcome at [GitHub discussion forum](https://github.com/sshin23/ExaModels.jl/discussions). diff --git a/docs/src/simd.md b/docs/src/simd.md new file mode 100644 index 0000000..6b75c3f --- /dev/null +++ b/docs/src/simd.md @@ -0,0 +1,51 @@ +# SIMD Abstraction + +In this page, we explain what SIMD abstraction of nonlinear program is, and why it can be beneficial for scalable optimization of large-scale optimization problems. More discussion can be found in our [paper](https://arxiv.org/abs/2307.16830). + +## What is SIMD abstraction? +The mathematical statement of the problem formulation is as follows. +```math +\begin{aligned} + \min_{x^\flat\leq x \leq x^\sharp} + & \sum_{l\in[L]}\sum_{i\in [I_l]} f^{(l)}(x; p^{(l)}_i)\\ + \text{s.t.}\; &\left[g^{(m)}(x; q_j)\right]_{j\in [J_m]} +\sum_{n\in [N_m]}\sum_{k\in [K_n]}h^{(n)}(x; s^{(n)}_{k}) =0,\quad \forall m\in[M] +\end{aligned} +``` +where $f^{(\ell)}(\cdot,\cdot)$, $g^{(m)}(\cdot,\cdot)$, and +$h^{(n)}(\cdot,\cdot)$ are twice differentiable functions with respect +to the first argument, whereas $\{\{p^{(k)}_i\}_{i\in [N_k]}\}_{k\in[K]}$, +$\{\{q^{(k)}_{i}\}_{i\in [M_l]}\}_{m\in[M]}$, and +$\{\{\{s^{(n)}_{k}\}_{k\in[K_n]}\}_{n\in[N_m]}\}_{m\in[M]}$ are +problem data, which can either be discrete or continuous. +It is also assumed +that our functions $f^{(l)}(\cdot,\cdot)$, $g^{(m)}(\cdot,\cdot)$, and +$h^{(n)}(\cdot,\cdot)$ can be expressed with computational +graphs of moderate length. + +## Why SIMD abstraction? +Many physics-based models, such as AC OPF, have a highly repetitive +structure. 
One manifestation of this is that the mathematical +statement of the model is concise, even if the practical model may contain +millions of variables and constraints. This is possible because the model repeats +a small number of expressions over index and data sets. For example, +it suffices to use 15 computational patterns to fully specify the +AC OPF model. These patterns arise from (1) generation cost, (2) reference +bus voltage angle constraint, (3-6) active and reactive power flow (from and to), +(7) voltage angle difference constraint, (8-9) apparent +power flow limits (from and to), (10-11) power balance equations, +(12-13) generators' contributions to the power balance equations, and +(14-15) in/out flow contributions to the power balance +equations. However, such repetitive structure is not well exploited by +standard NLP modeling paradigms. In fact, without the SIMD +abstraction, it is difficult for an AD package to detect the +parallelizable structure within the model, as doing so would require a full +inspection of the computational graph over all expressions. By +preserving the repetitive structure in the model, this structure becomes +directly available to the AD implementation. + +Using the multiple dispatch feature of Julia, ExaModels.jl generates +highly efficient derivative computation code, specifically compiled +for each computational pattern in the model. These derivative evaluation codes can be run over data stored in various GPU array formats, +and are implemented via array and kernel programming in the Julia language. In +turn, ExaModels.jl can efficiently evaluate first- and +second-order derivatives on GPU accelerators.
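As a companion to the `simd.md` discussion above, the sketch below illustrates the generator-based interface that the new documentation refers to. The body of `luksan_vlcek_model` is elided from the `luksanvlcek.jl` hunk in this diff, so this is only a sketch assuming the ExaModels API names (`ExaCore`, `variable`, `constraint`, `objective`, `ExaModel`) referenced in `docs/src/guide.jl`; the constraint and objective expressions mirror those of `jump_luksan_vlcek_model` above, and the function name `luksan_vlcek_sketch` is hypothetical.

```julia
using ExaModels

# Sketch of a generator-based ExaModels formulation of the Luksan-Vlcek problem.
# Each `constraint`/`objective` call receives a single Julia generator, so the
# model records one computational pattern per call (the SIMD abstraction).
function luksan_vlcek_sketch(N)
    c = ExaCore()
    # variables with the same alternating start values as the JuMP/AMPL variants
    x = variable(c, N; start = (mod(i, 2) == 1 ? -1.2 : 1.0 for i = 1:N))
    # one constraint pattern repeated over i = 1, ..., N-2
    constraint(
        c,
        3 * x[i+1]^3 + 2 * x[i+2] - 5 +
        sin(x[i+1] - x[i+2]) * sin(x[i+1] + x[i+2]) +
        4 * x[i+1] - x[i] * exp(x[i] - x[i+1]) - 3 for i = 1:N-2
    )
    # one objective pattern repeated over i = 2, ..., N
    objective(c, 100 * (x[i-1]^2 - x[i])^2 + (x[i-1] - 1)^2 for i = 2:N)
    return ExaModel(c)
end
```

A model built this way can be passed to the `compile_callbacks` helper modified in this diff, which now returns a named tuple of timings (`tobj`, `tcon`, `tgrad`, `tjac`, `thess`, `tjacs`, `thesss`) instead of printing them, so the ExaModels, JuMP (`MathOptNLPModel`), and AMPL (`AmplNLReader.AmplModel`) variants added here can be benchmarked side by side.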