Update iopt version (#150)
* Add starting point

* Add categorical parameters, treat discrete and categorical as discrete

* Fix discrete setting

* Add n_jobs

* Fix for 0.2.22

* Fix eps

* Fix initial point

* Add multi objective

* Rebase

* Minor
YamLyubov authored Dec 8, 2023
1 parent 46b53a4 commit 085ca3d
Showing 5 changed files with 167 additions and 77 deletions.
docs/source/api/tuning.rst (0 additions, 5 deletions)
@@ -52,11 +52,6 @@ You can tune all parameters of graph nodes simultaneously using ``SimultaneousTu
.. note::
``IOptTuner`` implements a deterministic algorithm.

For now ``IOptTuner`` can not be constrained by time, so constrain execution by number of iterations.

Also ``IOptTuner`` can optimise only `continuous` and `discrete` parameters but not `categorical` ones.
`Categorical` parameters will be ignored while tuning.

``IOptTuner`` is implemented using `IOpt library`_. See the `documentation`_ (in Russian) to learn more about
the optimisation algorithm.

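The deleted note covered two limitations that this commit lifts: the constructor diff further down adds a ``timeout`` argument, and categorical parameters are now mapped onto iOpt's discrete variables instead of being ignored. A minimal sketch of the new call, assuming the signature shown in the diff; ``obj_eval``, ``search_space`` and ``graph`` are placeholders for real objects (see the example file added below):

from datetime import timedelta

from golem.core.tuning.iopt_tuner import IOptTuner

# obj_eval, search_space and graph are placeholders; see
# examples/tuning_example.py added in this commit for a full setup.
tuner = IOptTuner(obj_eval, search_space,
                  iterations=100,
                  timeout=timedelta(minutes=2),  # time budget, newly supported
                  n_jobs=-1)
tuned_graph = tuner.tune(graph)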
examples/tuning_example.py (72 additions, 0 deletions)
@@ -0,0 +1,72 @@
from datetime import timedelta

from golem.core.optimisers.graph import OptNode, OptGraph
from golem.core.optimisers.objective import ObjectiveEvaluate, Objective
from golem.core.tuning.iopt_tuner import IOptTuner
from golem.core.tuning.search_space import SearchSpace
from test.unit.utils import ParamsSumMetric


def opt_graph_with_params():
node_a = OptNode('a')
node_b = OptNode({'name': 'b', 'params': {'b2': 0.7, 'b3': 2}})
node_c = OptNode('c', nodes_from=[node_a])
node_d = OptNode('d', nodes_from=[node_b])
node_final = OptNode('e', nodes_from=[node_c, node_d])
graph = OptGraph(node_final)
return graph


def get_search_space():
params_per_operation = {
'a': {
'a1': {
'sampling-scope': [2, 7],
'type': 'discrete'
},
'a2': {
'sampling-scope': [1e-3, 1],
'type': 'continuous'
},
'a3': {
'sampling-scope': [['A', 'B', 'C']],
'type': 'categorical'
}
},
'b': {
'b1': {
'sampling-scope': [["first", "second", "third"]],
'type': 'categorical'
},
'b2': {
'sampling-scope': [0.04, 1.0],
'type': 'continuous'
},
},
'e': {
'e1': {
'sampling-scope': [0.05, 1.0],
'type': 'continuous'
},
'e2': {
'sampling-scope': [0.05, 1.0],
'type': 'continuous'
}
},
'k': {
'k': {
'sampling-scope': [1e-2, 10.0],
'type': 'continuous'
}
}}
return SearchSpace(params_per_operation)


if __name__ == '__main__':
search_space = get_search_space()
graph = opt_graph_with_params()
# search for parameters that will maximize their sum
obj_eval = ObjectiveEvaluate(Objective({'sum_metric': ParamsSumMetric.get_value}))

tuner = IOptTuner(obj_eval, search_space, iterations=10, n_jobs=-1)
tuned_graph = tuner.tune(graph)
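
Since this commit also adds multi-objective support (see the ``_tune`` changes in iopt_tuner.py below), the example admits a multi-objective variant. A sketch, assuming ``Objective``'s ``is_multi_objective`` flag as exercised in the updated tests, with a hypothetical second metric; with several objectives, ``tune`` returns a list of Pareto-optimal graphs rather than a single graph:

# Sketch: multi-objective variant. The second metric is hypothetical,
# included only to yield a non-trivial Pareto front.
multi_objective = Objective({'sum_metric': ParamsSumMetric.get_value,
                             'neg_sum_metric': lambda g: -ParamsSumMetric.get_value(g)},
                            is_multi_objective=True)
multi_obj_eval = ObjectiveEvaluate(multi_objective)
multi_tuner = IOptTuner(multi_obj_eval, search_space, iterations=10, n_jobs=-1)
# Returns a list of tuned graphs, one per Pareto-optimal trial
pareto_graphs = multi_tuner.tune(graph)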
golem/core/tuning/iopt_tuner.py (88 additions, 70 deletions)
@@ -1,18 +1,22 @@
from copy import deepcopy
from dataclasses import dataclass, field
from datetime import timedelta
from typing import List, Dict, Generic, Tuple, Any, Optional

import numpy as np
from iOpt.method.listener import ConsoleFullOutputListener
from iOpt.output_system.listeners.console_outputers import ConsoleOutputListener
from iOpt.problem import Problem
from iOpt.solver import Solver
from iOpt.solver_parametrs import SolverParameters
from iOpt.trial import Point, FunctionValue

from golem.core.adapter import BaseOptimizationAdapter
from golem.core.optimisers.genetic.evaluation import determine_n_jobs
from golem.core.optimisers.graph import OptGraph
from golem.core.optimisers.objective import ObjectiveEvaluate
from golem.core.tuning.search_space import SearchSpace, get_node_operation_parameter_label
from golem.core.tuning.search_space import SearchSpace, get_node_operation_parameter_label, convert_parameters
from golem.core.tuning.tuner_interface import BaseTuner, DomainGraphForTune
from golem.utilities.data_structures import ensure_wrapped_in_sequence


@dataclass
@@ -36,60 +40,50 @@ def from_parameters_dicts(float_parameters_dict: Optional[Dict[str, List]] = Non
upper_bounds_of_float_parameters = [bounds[1] for bounds in float_parameters_dict.values()]
discrete_parameters_vals = [values_set for values_set in discrete_parameters_dict.values()]

# TODO: Remove - for now IOpt handles only float variables, so we treat discrete parameters as float ones
float_parameters_names.extend(discrete_parameters_names)
lower_bounds_of_discrete_parameters = [bounds[0] for bounds in discrete_parameters_dict.values()]
upper_bounds_of_discrete_parameters = [bounds[1] for bounds in discrete_parameters_dict.values()]
lower_bounds_of_float_parameters.extend(lower_bounds_of_discrete_parameters)
upper_bounds_of_float_parameters.extend(upper_bounds_of_discrete_parameters)

return IOptProblemParameters(float_parameters_names, discrete_parameters_names,
return IOptProblemParameters(float_parameters_names,
discrete_parameters_names,
lower_bounds_of_float_parameters,
upper_bounds_of_float_parameters, discrete_parameters_vals)
upper_bounds_of_float_parameters,
discrete_parameters_vals)


class GolemProblem(Problem, Generic[DomainGraphForTune]):
def __init__(self, graph: DomainGraphForTune,
objective_evaluate: ObjectiveEvaluate,
problem_parameters: IOptProblemParameters):
problem_parameters: IOptProblemParameters,
objectives_number: int = 1):
super().__init__()
self.objective_evaluate = objective_evaluate
self.graph = graph

self.numberOfObjectives = 1
self.numberOfConstraints = 0
self.number_of_objectives = objectives_number
self.number_of_constraints = 0

self.discreteVariableNames = problem_parameters.discrete_parameters_names
self.discreteVariableValues = problem_parameters.discrete_parameters_vals
self.numberOfDiscreteVariables = len(self.discreteVariableNames)
self.discrete_variable_names = problem_parameters.discrete_parameters_names
self.discrete_variable_values = problem_parameters.discrete_parameters_vals
self.number_of_discrete_variables = len(self.discrete_variable_names)

self.floatVariableNames = problem_parameters.float_parameters_names
self.lowerBoundOfFloatVariables = problem_parameters.lower_bounds_of_float_parameters
self.upperBoundOfFloatVariables = problem_parameters.upper_bounds_of_float_parameters
self.numberOfFloatVariables = len(self.floatVariableNames)
self.float_variable_names = problem_parameters.float_parameters_names
self.lower_bound_of_float_variables = problem_parameters.lower_bounds_of_float_parameters
self.upper_bound_of_float_variables = problem_parameters.upper_bounds_of_float_parameters
self.number_of_float_variables = len(self.float_variable_names)

self._default_metric_value = np.inf

def Calculate(self, point: Point, functionValue: FunctionValue) -> FunctionValue:
def calculate(self, point: Point, function_value: FunctionValue) -> FunctionValue:
new_parameters = self.get_parameters_dict_from_iopt_point(point)
BaseTuner.set_arg_graph(self.graph, new_parameters)
graph_fitness = self.objective_evaluate(self.graph)
metric_value = graph_fitness.value if graph_fitness.valid else self._default_metric_value
functionValue.value = metric_value
return functionValue
function_value.value = metric_value
return function_value

def get_parameters_dict_from_iopt_point(self, point: Point) -> Dict[str, Any]:
"""Constructs a dict with all hyperparameters """
float_parameters = dict(zip(self.floatVariableNames, point.floatVariables)) \
if point.floatVariables is not None else {}
discrete_parameters = dict(zip(self.discreteVariableNames, point.discreteVariables)) \
if point.discreteVariables is not None else {}

# TODO: Remove workaround - for now IOpt handles only float variables, so discrete parameters
# are optimized as continuous and we need to round them
for parameter_name in float_parameters:
if parameter_name in self.discreteVariableNames:
float_parameters[parameter_name] = round(float_parameters[parameter_name])
float_parameters = dict(zip(self.float_variable_names, point.float_variables)) \
if point.float_variables is not None else {}
discrete_parameters = dict(zip(self.discrete_variable_names, point.discrete_variables)) \
if point.discrete_variables is not None else {}

parameters_dict = {**float_parameters, **discrete_parameters}
return parameters_dict
@@ -122,8 +116,9 @@ def __init__(self, objective_evaluate: ObjectiveEvaluate,
search_space: SearchSpace,
adapter: Optional[BaseOptimizationAdapter] = None,
iterations: int = 100,
timeout: timedelta = timedelta(minutes=5),
n_jobs: int = -1,
eps: float = 0.01,
eps: float = 0.001,
r: float = 2.0,
evolvent_density: int = 10,
eps_r: float = 0.001,
@@ -133,42 +128,57 @@ def __init__(self, objective_evaluate: ObjectiveEvaluate,
search_space,
adapter,
iterations=iterations,
timeout=timeout,
n_jobs=n_jobs,
deviation=deviation, **kwargs)
self.n_jobs = determine_n_jobs(self.n_jobs)
self.solver_parameters = SolverParameters(r=np.double(r),
eps=np.double(eps),
itersLimit=iterations,
evolventDensity=evolvent_density,
epsR=np.double(eps_r),
refineSolution=refine_solution)
iters_limit=iterations,
evolvent_density=evolvent_density,
eps_r=np.double(eps_r),
refine_solution=refine_solution,
number_of_parallel_points=self.n_jobs,
timeout=round(timeout.total_seconds()/60) if self.timeout else -1)

def _tune(self, graph: DomainGraphForTune, show_progress: bool = True) -> DomainGraphForTune:
problem_parameters, initial_parameters = self._get_parameters_for_tune(graph)

has_parameters_to_optimize = (len(problem_parameters.discrete_parameters_names) > 0 or
len(problem_parameters.float_parameters_names) > 0)
if self._check_if_tuning_possible(graph, has_parameters_to_optimize):
self.objectives_number = len(ensure_wrapped_in_sequence(self.init_metric))
is_multi_objective = self.objectives_number > 1

if self._check_if_tuning_possible(graph, has_parameters_to_optimize, supports_multi_objective=True):
if initial_parameters:
initial_point = Point(**initial_parameters)
self.solver_parameters.startPoint = initial_point
self.solver_parameters.start_point = initial_point

problem = GolemProblem(graph, self.objective_evaluate, problem_parameters)
problem = GolemProblem(graph, self.objective_evaluate, problem_parameters, self.objectives_number)
solver = Solver(problem, parameters=self.solver_parameters)

if show_progress:
console_output = ConsoleFullOutputListener(mode='full')
solver.AddListener(console_output)

solution = solver.Solve()
best_point = solution.bestTrials[0].point
best_parameters = problem.get_parameters_dict_from_iopt_point(best_point)
final_graph = self.set_arg_graph(graph, best_parameters)

self.was_tuned = True
console_output = ConsoleOutputListener(mode='full')
solver.add_listener(console_output)

solver.solve()
solution = solver.get_results()
if not is_multi_objective:
best_point = solution.best_trials[0].point
best_parameters = problem.get_parameters_dict_from_iopt_point(best_point)
tuned_graphs = self.set_arg_graph(graph, best_parameters)
self.was_tuned = True
else:
tuned_graphs = []
for best_trial in solution.best_trials:
best_parameters = problem.get_parameters_dict_from_iopt_point(best_trial.point)
tuned_graph = self.set_arg_graph(deepcopy(graph), best_parameters)
tuned_graphs.append(tuned_graph)
self.was_tuned = True
else:
final_graph = graph
tuned_graphs = graph

return final_graph
return tuned_graphs

def _get_parameters_for_tune(self, graph: OptGraph) -> Tuple[IOptProblemParameters, dict]:
""" Method for defining the search space
@@ -182,26 +192,28 @@ def _get_parameters_for_tune(self, graph: OptGraph) -> Tuple[IOptProblemParamete
"""
float_parameters_dict = {}
discrete_parameters_dict = {}
initial_parameters = {'floatVariables': [], 'discreteVariables': []}
has_init_parameters = any(len(node.parameters) > 0 for node in graph.nodes)
initial_parameters = {'float_variables': [], 'discrete_variables': []} if has_init_parameters else None
for node_id, node in enumerate(graph.nodes):
operation_name = node.name

# Assign unique prefix for each model hyperparameter
# label - number of node in the graph
float_node_parameters, discrete_node_parameters = get_node_parameters_for_iopt(self.search_space,
node_id,
operation_name)

# Set initial parameters for search
for parameter, bounds in float_node_parameters.items():
# If parameter is not set use parameter minimum possible value
initaial_value = node.parameters.get(parameter) or bounds[0]
initial_parameters['floatVariables'].append(initaial_value)

for parameter, bounds in discrete_node_parameters.items():
# If parameter is not set use parameter minimum possible value
initaial_value = node.parameters.get(parameter) or bounds[0]
initial_parameters['discreteVariables'].append(initaial_value)
float_node_parameters, discrete_node_parameters = get_node_parameters_for_iopt(
self.search_space,
node_id,
operation_name)
if has_init_parameters:
# Set initial parameters for search
for parameter, bounds in convert_parameters(float_node_parameters).items():
# If parameter is not set use parameter minimum possible value
initial_value = node.parameters.get(parameter) or bounds[0]
initial_parameters['float_variables'].append(initial_value)

for parameter, values in convert_parameters(discrete_node_parameters).items():
# If parameter is not set use the last value
initial_value = node.parameters.get(parameter) or values[-1]
initial_parameters['discrete_variables'].append(initial_value)

float_parameters_dict.update(float_node_parameters)
discrete_parameters_dict.update(discrete_node_parameters)
@@ -230,16 +242,22 @@ def get_node_parameters_for_iopt(search_space: SearchSpace, node_id: int, operat

discrete_parameters_dict = {}
float_parameters_dict = {}
categorical_parameters_dict = {}

for parameter_name, parameter_properties in parameters_dict.items():
node_op_parameter_name = get_node_operation_parameter_label(node_id, operation_name, parameter_name)

parameter_type = parameter_properties.get('type')
if parameter_type == 'discrete':
discrete_parameters_dict.update({node_op_parameter_name: parameter_properties
.get('sampling-scope')})
discrete_parameters_dict.update({node_op_parameter_name: list(range(*parameter_properties
.get('sampling-scope')))})
elif parameter_type == 'continuous':
float_parameters_dict.update({node_op_parameter_name: parameter_properties
.get('sampling-scope')})
elif parameter_type == 'categorical':
categorical_parameters_dict.update({node_op_parameter_name: parameter_properties
.get('sampling-scope')[0]})

# IOpt does not distinguish between discrete and categorical parameters
discrete_parameters_dict = {**discrete_parameters_dict, **categorical_parameters_dict}
return float_parameters_dict, discrete_parameters_dict
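
For concreteness, a sketch of what the updated ``get_node_parameters_for_iopt`` yields for operation 'a' of the example search space above. The parameter labels come from ``get_node_operation_parameter_label``; their exact format is shown here illustratively, not verbatim:

float_params, discrete_params = get_node_parameters_for_iopt(search_space, 0, 'a')
# 'continuous' scopes pass through as [lower, upper] bounds:
#   float_params    ~ {'0 || a | a2': [0.001, 1]}
# 'discrete' scopes expand to explicit values via range(*scope), and
# 'categorical' value sets are merged into the same discrete dict:
#   discrete_params ~ {'0 || a | a1': [2, 3, 4, 5, 6],
#                      '0 || a | a3': ['A', 'B', 'C']}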
requirements.txt (1 addition, 1 deletion)
@@ -24,7 +24,7 @@ psutil>=5.9.2

# Optimisation
hyperopt>=0.2.7
iOpt==0.1.6
iOpt==0.2.22
optuna>=3.2.0

# Tests
test/unit/tuning/test_tuning.py (6 additions, 1 deletion)
@@ -35,6 +35,11 @@ def search_space():
'hyperopt-dist': hp.loguniform,
'sampling-scope': [1e-3, 1],
'type': 'continuous'
},
'a3': {
'hyperopt-dist': hp.choice,
'sampling-scope': [['A', 'B', 'C']],
'type': 'categorical'
}
},
'b': {
@@ -109,7 +114,7 @@ def test_node_tuning(search_space, graph):
assert tuner.init_metric >= tuner.obtained_metric


@pytest.mark.parametrize('tuner_cls', [OptunaTuner])
@pytest.mark.parametrize('tuner_cls', [OptunaTuner, IOptTuner])
@pytest.mark.parametrize('init_graph, adapter, obj_eval',
[(mock_graph_with_params(), MockAdapter(),
MockObjectiveEvaluate(Objective({'sum_metric': ParamsSumMetric.get_value,