diff --git a/include/parpeamici/hierarchicalOptimization.h b/include/parpeamici/hierarchicalOptimization.h
index c9868cd1..6620d93a 100644
--- a/include/parpeamici/hierarchicalOptimization.h
+++ b/include/parpeamici/hierarchicalOptimization.h
@@ -7,9 +7,7 @@
 #include
-#include
 #include
-#include
 namespace parpe {
diff --git a/include/parpeamici/multiConditionProblem.h b/include/parpeamici/multiConditionProblem.h
index 101437bf..7ce634ba 100644
--- a/include/parpeamici/multiConditionProblem.h
+++ b/include/parpeamici/multiConditionProblem.h
@@ -15,7 +15,6 @@
 #include
 #include
-#include
 /** @file multiConditionProblem.h
  * Interfaces between AMICI model and parPE optimization problem */
diff --git a/include/parpecommon/hdf5Misc.h b/include/parpecommon/hdf5Misc.h
index 5247aa9b..5a5d6fec 100644
--- a/include/parpecommon/hdf5Misc.h
+++ b/include/parpecommon/hdf5Misc.h
@@ -9,7 +9,6 @@
 #include
 #include
 #include
-#include
 namespace parpe {
diff --git a/include/parpecommon/misc.h b/include/parpecommon/misc.h
index c092eccd..b797c2da 100644
--- a/include/parpecommon/misc.h
+++ b/include/parpecommon/misc.h
@@ -4,10 +4,7 @@
 #include
 #include
-#include
-#include
 #include
-#include
 #include
 #include
diff --git a/include/parpecommon/model.h b/include/parpecommon/model.h
index 121bbbe1..53c9ca98 100644
--- a/include/parpecommon/model.h
+++ b/include/parpecommon/model.h
@@ -4,7 +4,6 @@
 #include
 #include
-#include
 namespace parpe {
diff --git a/include/parpeoptimization/localOptimizationCeres.h b/include/parpeoptimization/localOptimizationCeres.h
index 6a30a007..2e841dac 100644
--- a/include/parpeoptimization/localOptimizationCeres.h
+++ b/include/parpeoptimization/localOptimizationCeres.h
@@ -3,7 +3,6 @@
 #include
-#include
 namespace parpe {
diff --git a/include/parpeoptimization/localOptimizationIpoptTNLP.h b/include/parpeoptimization/localOptimizationIpoptTNLP.h
index e0f319b8..3ef15f19 100644
--- a/include/parpeoptimization/localOptimizationIpoptTNLP.h
+++ b/include/parpeoptimization/localOptimizationIpoptTNLP.h
@@ -11,8 +11,6 @@
 #include
 #endif
-#include
-#include
 #include
 #include
diff --git a/include/parpeoptimization/multiStartOptimization.h b/include/parpeoptimization/multiStartOptimization.h
index eba5d81d..f5c68ef9 100644
--- a/include/parpeoptimization/multiStartOptimization.h
+++ b/include/parpeoptimization/multiStartOptimization.h
@@ -3,7 +3,6 @@
 #include
-#include
 #include
 namespace parpe {
diff --git a/src/parpeamici/amiciMisc.cpp b/src/parpeamici/amiciMisc.cpp
index 3a0d7929..0869c296 100644
--- a/src/parpeamici/amiciMisc.cpp
+++ b/src/parpeamici/amiciMisc.cpp
@@ -5,12 +5,11 @@
 #include
 #include
+#include
+
 namespace parpe {
-using amici::ExpData;
-using amici::Model;
 using amici::ReturnData;
-using amici::Solver;
 std::unique_ptr<ReturnData> run_amici_simulation(
     amici::Solver &solver,
diff --git a/src/parpecommon/misc.cpp b/src/parpecommon/misc.cpp
index 7d4f9a69..c09ea370 100644
--- a/src/parpecommon/misc.cpp
+++ b/src/parpecommon/misc.cpp
@@ -1,13 +1,9 @@
 #include
 #include
-#include
-#include
 #include
 #include
-#include
 #include
-#include
 #include
 #include
 #include
diff --git a/src/parpeoptimization/localOptimizationIpoptTNLP.cpp b/src/parpeoptimization/localOptimizationIpoptTNLP.cpp
index ba8ea941..a16d0d83 100644
--- a/src/parpeoptimization/localOptimizationIpoptTNLP.cpp
+++ b/src/parpeoptimization/localOptimizationIpoptTNLP.cpp
@@ -4,8 +4,6 @@
 #include
 #include
-#include
-#include
 //#include
 namespace parpe {
@@ -202,7 +200,7 @@ LocalOptimizationIpoptTNLP::finalize_solution(
     // obj_value 0.0
    // along with the respective flag. This does not make too
    // much sense. Set to NAN.
    if(status == INVALID_NUMBER_DETECTED && obj_value == 0.0) {
-        obj_value = NAN;
+        obj_value = std::numeric_limits<double>::quiet_NaN();
    }
    reporter.finished(obj_value, gsl::span(x, n), status);
diff --git a/src/parpeoptimization/optimizationProblem.cpp b/src/parpeoptimization/optimizationProblem.cpp
index d138dcb7..dae0d9e5 100644
--- a/src/parpeoptimization/optimizationProblem.cpp
+++ b/src/parpeoptimization/optimizationProblem.cpp
@@ -404,7 +404,7 @@ void OptimizationReporter::finished(double optimalCost,
         // the optimal parameters from the optimizer. since we don't know them, rather set to nan
         if (logger_)
             logger_->logmessage(loglevel::info, "cachedCost != optimalCost && parameters.empty()");
-        cached_parameters_.assign(cached_parameters_.size(), NAN);
+        cached_parameters_.assign(cached_parameters_.size(), std::numeric_limits<double>::quiet_NaN());
         cached_cost_ = optimalCost;
     }
     // else: our cached parameters were better. use those
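
For context on the NAN replacements above, a minimal, self-contained sketch (not taken from the parPE sources; the variable names below are illustrative only): std::numeric_limits<double>::quiet_NaN() comes from <limits> and is a double, whereas the NAN macro from <math.h>/<cmath> is a float-typed C macro.

// Illustrative only: mirrors the substitution pattern used in the diff above.
#include <cmath>    // std::isnan
#include <cstdio>
#include <limits>   // std::numeric_limits<double>::quiet_NaN()
#include <vector>

int main() {
    // Scalar case, analogous to setting obj_value in finalize_solution():
    double obj_value = std::numeric_limits<double>::quiet_NaN();

    // Vector case, analogous to cached_parameters_.assign(...):
    std::vector<double> parameters(3);
    parameters.assign(parameters.size(),
                      std::numeric_limits<double>::quiet_NaN());

    std::printf("obj_value is NaN: %d, parameters[0] is NaN: %d\n",
                std::isnan(obj_value), std::isnan(parameters[0]));
    return 0;
}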