Merge pull request #998 from Libensemble/feature/aposmm_ibcdfo
A first ibcdfo test
jmlarson1 authored Dec 1, 2023
2 parents bafbd56 + 04da3b1 commit 8022a2c
Showing 9 changed files with 289 additions and 52 deletions.
10 changes: 9 additions & 1 deletion .github/workflows/basic.yml
@@ -85,12 +85,20 @@ jobs:
pip install -r install/testing_requirements.txt
pip install -r install/misc_feature_requirements.txt
git clone --recurse-submodules -b refactor/pounders_API https://github.com/POptUS/IBCDFO.git
pushd IBCDFO/minq/py/minq5/
export PYTHONPATH="$PYTHONPATH:$(pwd)"
echo "PYTHONPATH=$PYTHONPATH" >> $GITHUB_ENV
popd
pushd IBCDFO/ibcdfo_pypkg/
pip install -e .
popd
- name: Install mpi4py and MPI from conda
run: |
conda install mpi4py ${{ matrix.mpi-version }}
- name: Install generator dependencies
if: matrix.os != 'windows-latest' && steps.cache.outputs.cache-hit != 'true'
run: |
python -m pip install --upgrade pip
pip install mpmath
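The added steps clone IBCDFO, put minq5 on PYTHONPATH via $GITHUB_ENV, and pip-install ibcdfo_pypkg in editable mode. A minimal sanity check of that setup (a sketch; assumes the steps above ran earlier in the same job):

```python
# Sketch: verify the IBCDFO install performed by the workflow steps above.
import os

import ibcdfo  # editable install from IBCDFO/ibcdfo_pypkg
from ibcdfo import pounders  # noqa: F401  # the solver APOSMM calls

# minq5 is exposed via PYTHONPATH (written to $GITHUB_ENV), not pip:
paths = os.environ.get("PYTHONPATH", "").split(os.pathsep)
assert any("minq5" in p for p in paths), "minq5 not on PYTHONPATH"
```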
14 changes: 11 additions & 3 deletions .github/workflows/extra.yml
@@ -95,7 +95,6 @@ jobs:
pip install mpi4py
- name: Install generator dependencies
-if: steps.cache.outputs.cache-hit != 'true'
run: |
python -m pip install --upgrade pip
conda env update --file install/gen_deps_environment.yml
@@ -131,7 +130,7 @@
cd ..
- name: Install generator dependencies for Ubuntu tests
-if: matrix.os == 'ubuntu-latest' && steps.cache.outputs.cache-hit != 'true' && matrix.python-version != '3.12'
+if: matrix.os == 'ubuntu-latest' && matrix.python-version != '3.12'
run: |
sudo apt-get install bc
@@ -153,7 +152,7 @@
- name: Copy heffte exe on cache-hit
-if: matrix.os == 'ubuntu-latest' && steps.cache.outputs.cache-hit != 'false' && matrix.python-version != '3.12'
+if: matrix.os == 'ubuntu-latest' && matrix.python-version != '3.12'
run: |
cd /home/runner/work/libensemble/libensemble
cp ./heffte/build/benchmarks/speed3d_c2c ./libensemble/tests/regression_tests/
@@ -169,6 +168,15 @@
sed -i -e "s/pyzmq>=22.1.0,<23.0.0/pyzmq>=23.0.0,<24.0.0/" ./balsam/setup.cfg
cd balsam; pip install -e .; cd ..
git clone --recurse-submodules -b refactor/pounders_API https://github.com/POptUS/IBCDFO.git
pushd IBCDFO/minq/py/minq5/
export PYTHONPATH="$PYTHONPATH:$(pwd)"
echo "PYTHONPATH=$PYTHONPATH" >> $GITHUB_ENV
popd
pushd IBCDFO/ibcdfo_pypkg/
pip install -e .
popd
- uses: actions/cache/save@v3
name: Save dependencies to cache
if: matrix.os == 'ubuntu-latest'
88 changes: 76 additions & 12 deletions libensemble/gen_funcs/aposmm_localopt_support.py
@@ -7,6 +7,7 @@
"run_local_nlopt",
"run_local_tao",
"run_local_dfols",
"run_local_ibcdfo_pounders",
"run_local_scipy_opt",
"run_external_localopt",
]
@@ -19,15 +20,20 @@
import libensemble.gen_funcs
from libensemble.message_numbers import EVAL_GEN_TAG, STOP_TAG # Only used to simulate receiving from manager

-optimizer_list = ["petsc", "nlopt", "dfols", "scipy", "external"]

+class APOSMMException(Exception):
+    """Raised for any exception in APOSMM"""


+optimizer_list = ["petsc", "nlopt", "dfols", "scipy", "ibcdfo", "external"]
optimizers = libensemble.gen_funcs.rc.aposmm_optimizers

if optimizers is not None:
if not isinstance(optimizers, list):
optimizers = [optimizers]
unrec = set(optimizers) - set(optimizer_list)
if unrec:
print(f"APOSMM Warning: unrecognized optimizers {unrec}")
raise APOSMMException(f"APOSMM Error: unrecognized optimizers {unrec}")
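For context, a sketch of how a caller opts into these backends (the rc attribute is read above at import time; the list contents here are illustrative):

```python
# Sketch: choose which optimizer backends the support module imports.
# Must be set before importing aposmm_localopt_support.
import libensemble.gen_funcs

libensemble.gen_funcs.rc.aposmm_optimizers = ["ibcdfo"]  # e.g., ["nlopt", "dfols"]
```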

# Preferable to import globally in most cases
if "petsc" in optimizers:
@@ -36,16 +42,14 @@
import nlopt # noqa: F401
if "dfols" in optimizers:
import dfols # noqa: F401
if "ibcdfo" in optimizers:
from ibcdfo import pounders # noqa: F401
if "scipy" in optimizers:
from scipy import optimize as sp_opt # noqa: F401
if "external" in optimizers:
pass


-class APOSMMException(Exception):
-    """Raised for any exception in APOSMM"""


class ConvergedMsg(object):
"""
Message communicated when a local optimization is converged.
@@ -117,8 +121,12 @@ def __init__(self, user_specs, x0, f0, grad0=None):
run_local_opt = run_local_scipy_opt
elif user_specs["localopt_method"] in ["dfols"]:
run_local_opt = run_local_dfols
elif user_specs["localopt_method"] in ["ibcdfo_pounders"]:
run_local_opt = run_local_ibcdfo_pounders
elif user_specs["localopt_method"] in ["external_localopt"]:
run_local_opt = run_external_localopt
else:
raise APOSMMException(f"APOSMM Error: unrecognized method {user_specs['localopt_method']}")

self.parent_can_read.clear()
self.process = Process(
@@ -155,12 +163,7 @@ def iterate(self, data):
elif "fvec" in data.dtype.names:
self.comm_queue.put((data["x_on_cube"], data["fvec"]))
else:
-self.comm_queue.put(
-    (
-        data["x_on_cube"],
-        data["f"],
-    )
-)
+self.comm_queue.put((data["x_on_cube"], data["f"]))

self.child_can_read.set()
self.parent_can_read.wait()
@@ -414,6 +417,67 @@ def run_local_dfols(user_specs, comm_queue, x0, f0, child_can_read, parent_can_read):
finish_queue(x_opt, opt_flag, comm_queue, parent_can_read, user_specs)


def run_local_ibcdfo_pounders(user_specs, comm_queue, x0, f0, child_can_read, parent_can_read):
"""
Runs an IBCDFO local optimization run starting at ``x0``, governed by the
parameters in ``user_specs``.

Although IBCDFO methods can receive previous evaluations, few other methods
support that, so APOSMM assumes the first point will be re-evaluated (but
not sent back to the manager).
"""
n = len(x0)
# Define bound constraints (lower <= x <= upper)
lb = np.zeros(n)
ub = np.ones(n)

# Set random seed (for reproducibility)
np.random.seed(0)

dist_to_bound = min(min(ub - x0), min(x0 - lb))
assert dist_to_bound > np.finfo(np.float64).eps, "The distance to the boundary is too small"

run_max_eval = user_specs.get("run_max_eval", 100 * (n + 1))
g_tol = 1e-8
delta_0 = 0.5 * dist_to_bound
m = len(f0)

if "hfun" in user_specs:
Options = {"hfun": user_specs["hfun"], "combinemodels": user_specs["combinemodels"]}
else:
Options = None

[X, F, hF, flag, xkin] = pounders.pounders(
lambda x: scipy_dfols_callback_fun(x, comm_queue, child_can_read, parent_can_read, user_specs),
x0,
n,
run_max_eval,
g_tol,
delta_0,
m,
lb,
ub,
Options=Options,
)

assert flag >= 0, "IBCDFO errored"

x_opt = X[xkin]

if flag == 0:
opt_flag = 1
else:
print(
"[APOSMM] The IBCDFO run started from " + str(x0) + " stopped with an exit "
"flag of " + str(flag) + ". No point from this run will be "
"ruled as a minimum! APOSMM may start a new run from some point "
"in this run."
)
opt_flag = 0

finish_queue(x_opt, opt_flag, comm_queue, parent_can_read, user_specs)
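For reference, a sketch of the ``user_specs`` entries this new function reads (key names from the code above; values illustrative):

```python
# Sketch: gen_specs["user"] entries consumed by run_local_ibcdfo_pounders.
user_specs = {
    "localopt_method": "ibcdfo_pounders",
    "run_max_eval": 200,         # optional; defaults to 100 * (n + 1)
    # "hfun": my_hfun,           # optional aggregation h(F), forwarded via Options
    # "combinemodels": my_comb,  # paired with "hfun", also forwarded via Options
}
```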


def run_local_tao(user_specs, comm_queue, x0, f0, child_can_read, parent_can_read):
"""
Runs a PETSc/TAO local optimization run starting at ``x0``, governed by the
71 changes: 41 additions & 30 deletions libensemble/gen_funcs/persistent_aposmm.py
@@ -87,6 +87,12 @@ def aposmm(H, persis_info, gen_specs, libE_info):
points must satisfy
- ``'rk_const' [float]``: Multiplier in front of the r_k value
- ``'max_active_runs' [int]``: Bound on number of runs APOSMM is advancing
- ``'stop_after_k_minima' [int]``: Tell APOSMM to stop after this many
  local minima have been identified by local optimization runs.
- ``'stop_after_k_runs' [int]``: Tell APOSMM to stop after this many runs
  have ended. (The number of minima found may be less than the number of
  ended runs if, for example, a run ends due to an evaluation constraint
  rather than a convergence criterion; usage sketch below.)
If the rules in ``decide_where_to_start_localopt`` produce more than
``'max_active_runs'`` in some iteration, then existing runs are prioritized.
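A usage sketch for the two new stopping options (key names as documented above; values illustrative):

```python
# Sketch: stop APOSMM after 4 identified minima or 10 ended runs,
# whichever comes first.
gen_specs["user"]["stop_after_k_minima"] = 4
gen_specs["user"]["stop_after_k_runs"] = 10
```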
@@ -130,24 +136,30 @@ def aposmm(H, persis_info, gen_specs, libE_info):
unless opt_flag is 1)
opt_flag: 1 if the run ended with an optimal point (x_opt) or
0 if it ended because e.g., maxiters/maxevals were reached
-num_samples_needed: Number of additional uniformly drawn samples needed
+num_samples: Number of additional uniformly drawn samples needed
Description of persistent variables used to maintain the state of APOSMM
persis_info['total_runs']: Running count of started/completed localopt runs
persis_info['run_order']: Sequence of indices of points in unfinished runs
persis_info['old_runs']: Sequence of indices of points in finished runs
"""

try:
user_specs = gen_specs["user"]
ps = PersistentSupport(libE_info, EVAL_GEN_TAG)
n, n_s, rk_const, ld, mu, nu, comm, local_H = initialize_APOSMM(H, user_specs, libE_info)
-local_opters, sim_id_to_child_inds, run_order, run_pts, total_runs, fields_to_pass = initialize_children(
-    user_specs
-)
+(
+    local_opters,
+    sim_id_to_child_inds,
+    run_order,
+    run_pts,
+    total_runs,
+    ended_runs,
+    fields_to_pass,
+) = initialize_children(user_specs)

if user_specs["initial_sample_size"] != 0:
# Send our initial sample. We don't need to check that n_s is large enough:
# the alloc_func only returns when the initial sample has function values.
@@ -177,8 +189,12 @@ def aposmm(H, persis_info, gen_specs, libE_info):
persis_info["run_order"] = run_order
break

if np.sum(local_H["local_min"]) >= user_specs.get("stop_after_this_many_minima", np.inf):
if np.sum(local_H["local_min"]) >= user_specs.get("stop_after_k_minima", np.inf) or len(
ended_runs
) >= user_specs.get("stop_after_k_runs", np.inf):
# This break happens here so the manager can be informed about the last minima.
clean_up_and_stop(local_opters)
persis_info["run_order"] = run_order
break

n_s, n_r = update_local_H_after_receiving(local_H, n, n_s, user_specs, Work, calc_in, fields_to_pass)
@@ -194,6 +210,7 @@ def aposmm(H, persis_info, gen_specs, libE_info):
opt_ind = update_history_optimal(x_opt, opt_flag, local_H, run_order[child_idx])
new_opt_inds_to_send_mgr.append(opt_ind)
local_opters.pop(child_idx)
ended_runs.append(child_idx)
else:
add_to_local_H(local_H, x_new, user_specs, local_flag=1, on_cube=True)
new_inds_to_send_mgr.append(len(local_H) - 1)
@@ -221,9 +238,7 @@

local_opters[total_runs] = local_opter

-x_new = local_opter.iterate(
-    local_H[ind][fields_to_pass]
-)  # Assuming the second point can't be ruled optimal
+x_new = local_opter.iterate(local_H[ind][fields_to_pass])  # Assuming the second x won't be optimal

add_to_local_H(local_H, x_new, user_specs, local_flag=1, on_cube=True)
new_inds_to_send_mgr.append(len(local_H) - 1)
@@ -239,18 +254,16 @@
total_runs += 1

if first_pass:
-num_samples_needed = persis_info["nworkers"] - 1 - len(new_inds_to_send_mgr)
+num_samples = persis_info["nworkers"] - 1 - len(new_inds_to_send_mgr)
first_pass = False
else:
-num_samples_needed = n_r - len(new_inds_to_send_mgr)
+num_samples = n_r - len(new_inds_to_send_mgr)

-if num_samples_needed > 0:
+if num_samples > 0:
persis_info = add_k_sample_points_to_local_H(
-    num_samples_needed, user_specs, persis_info, n, comm, local_H, sim_id_to_child_inds
+    num_samples, user_specs, persis_info, n, comm, local_H, sim_id_to_child_inds
)
-new_inds_to_send_mgr = new_inds_to_send_mgr + list(
-    range(len(local_H) - num_samples_needed, len(local_H))
-)
+new_inds_to_send_mgr = new_inds_to_send_mgr + list(range(len(local_H) - num_samples, len(local_H)))

if not user_specs.get("standalone"):
ps.send(local_H[new_inds_to_send_mgr + new_opt_inds_to_send_mgr][[i[0] for i in gen_specs["out"]]])
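A small sketch of the top-up arithmetic above (all values illustrative): on the first pass APOSMM fills every worker; afterwards it replaces only what was just returned.

```python
# Sketch of the sample top-up counts in the loop above.
nworkers = 5
pending = 2                                  # len(new_inds_to_send_mgr)
n_r = 3                                      # points received this iteration
first_pass_samples = nworkers - 1 - pending  # 2: one task per worker
later_samples = n_r - pending                # 1: replace returned work
```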
@@ -589,7 +602,10 @@ def decide_where_to_start_localopt(H, n, n_s, rk_const, ld=0, mu=0, nu=0):
def calc_rk(n, n_s, rk_const, lhs_divisions=0):
"""Calculate the critical distance r_k"""
if lhs_divisions == 0:
-r_k = rk_const * (log(n_s) / n_s) ** (1 / n)
+if n_s == 1:
+    r_k = 1e8
+else:
+    r_k = rk_const * (log(n_s) / n_s) ** (1 / n)
else:
k = np.floor(n_s / lhs_divisions).astype(int)
if k <= 1: # to prevent r_k=0
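The added branch guards the n_s = 1 case, where log(n_s) = 0 would give r_k = 0; a quick check of the formula as implemented (a sketch):

```python
# Sketch: r_k = rk_const * (log(n_s) / n_s) ** (1 / n), guarded at n_s == 1.
from math import log


def rk(n, n_s, rk_const):
    return 1e8 if n_s == 1 else rk_const * (log(n_s) / n_s) ** (1 / n)


assert rk(2, 1, 0.5) == 1e8  # guard value instead of 0
assert rk(2, 10, 0.5) > 0    # ordinary case
```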
@@ -668,11 +684,7 @@ def initialize_APOSMM(H, user_specs, libE_info):
"ind_of_better_s",
]
if any([i in H.dtype.names for i in over_written_fields]):
-print(
-    "\n[APOSMM] persistent_aposmm ignores any given values in these fields: "
-    + str(over_written_fields)
-    + "\n"
-)
+print("\n[APOSMM] Ignoring given values in these fields: " + str(over_written_fields) + "\n")

initialize_dists_and_inds(local_H, len(H))

@@ -681,11 +693,11 @@ def initialize_APOSMM(H, user_specs, libE_info):

n_s = np.sum(~local_H["local_pt"])

-assert (
-    n_s > 0 or user_specs["initial_sample_size"] > 0
-), "APOSMM requires a positive initial_sample_size, or some existing points in order to determine where to start local optimization runs."
+msg = "APOSMM requires a positive initial_sample_size, or some existing points in order to determine where to start local optimization runs."
+assert n_s > 0 or user_specs["initial_sample_size"] > 0, msg

if "sample_points" in user_specs:
assert user_specs["sample_points"].ndim == 2, "Must have 2 dimensions for sample points"
assert isinstance(user_specs["sample_points"], np.ndarray)

return n, n_s, rk_c, ld, mu, nu, comm, local_H
Expand All @@ -696,10 +708,9 @@ def initialize_children(user_specs):
local_opters = {}
sim_id_to_child_inds = {}
run_order = {}
-run_pts = (
-    {}
-)  # This can differ from 'x_on_cube' if, for example, user_specs['periodic'] is True and run points are off the cube.
+run_pts = {}  # These can differ from 'x_on_cube' (e.g., if user_specs['periodic']=1 and runs leave unit cube)
total_runs = 0
ended_runs = []
if user_specs["localopt_method"] in ["LD_MMA", "blmvm", "scipy_BFGS"]:
fields_to_pass = ["x_on_cube", "f", "grad"]
elif user_specs["localopt_method"] in [
@@ -714,12 +725,12 @@
"nm",
]:
fields_to_pass = ["x_on_cube", "f"]
elif user_specs["localopt_method"] in ["pounders", "dfols"]:
elif user_specs["localopt_method"] in ["pounders", "ibcdfo_pounders", "dfols"]:
fields_to_pass = ["x_on_cube", "fvec"]
else:
raise NotImplementedError(f"Unknown local optimization method {user_specs['localopt_method']}.")

-return local_opters, sim_id_to_child_inds, run_order, run_pts, total_runs, fields_to_pass
+return local_opters, sim_id_to_child_inds, run_order, run_pts, total_runs, ended_runs, fields_to_pass


def add_k_sample_points_to_local_H(k, user_specs, persis_info, n, comm, local_H, sim_id_to_child_inds):
5 changes: 4 additions & 1 deletion libensemble/sim_funcs/chwirut1.py
@@ -287,7 +287,10 @@ def chwirut_eval(H, _, sim_specs):

else:
O["fvec"][i] = EvaluateFunction(x)
O["f"][i] = sim_specs["user"]["combine_component_func"](O["fvec"][i])
if "combine_component_func" in sim_specs["user"]:
O["f"][i] = sim_specs["user"]["combine_component_func"](O["fvec"][i])
else:
O["f"][i] = np.sum(O["fvec"][i] ** 2)

return O
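With the fallback above, chwirut's objective defaults to the sum of squared residuals when no combine_component_func is supplied; a minimal check (a sketch):

```python
# Sketch: default component combination when no combine_component_func is given.
import numpy as np

fvec = np.array([0.5, -1.0, 2.0])
f = np.sum(fvec**2)  # 0.25 + 1.0 + 4.0 = 5.25, the sum-of-squares objective
```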

