Add categorical parameters, treat discrete and categorical as discrete
YamLyubov committed Aug 23, 2023
1 parent afd0dc4 commit eba4897
Showing 2 changed files with 28 additions and 26 deletions.
49 changes: 23 additions & 26 deletions golem/core/tuning/iopt_tuner.py
@@ -1,4 +1,5 @@
 from dataclasses import dataclass, field
+from random import choice
 from typing import List, Dict, Generic, Tuple, Any, Optional
 
 import numpy as np
@@ -36,16 +37,11 @@ def from_parameters_dicts(float_parameters_dict: Optional[Dict[str, List]] = Non
         upper_bounds_of_float_parameters = [bounds[1] for bounds in float_parameters_dict.values()]
         discrete_parameters_vals = [values_set for values_set in discrete_parameters_dict.values()]
 
-        # TODO: Remove - for now IOpt handles only float variables, so we treat discrete parameters as float ones
-        float_parameters_names.extend(discrete_parameters_names)
-        lower_bounds_of_discrete_parameters = [bounds[0] for bounds in discrete_parameters_dict.values()]
-        upper_bounds_of_discrete_parameters = [bounds[1] for bounds in discrete_parameters_dict.values()]
-        lower_bounds_of_float_parameters.extend(lower_bounds_of_discrete_parameters)
-        upper_bounds_of_float_parameters.extend(upper_bounds_of_discrete_parameters)
-
-        return IOptProblemParameters(float_parameters_names, discrete_parameters_names,
+        return IOptProblemParameters(float_parameters_names,
+                                     discrete_parameters_names,
                                      lower_bounds_of_float_parameters,
-                                     upper_bounds_of_float_parameters, discrete_parameters_vals)
+                                     upper_bounds_of_float_parameters,
+                                     discrete_parameters_vals)
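
For reference, a minimal sketch of the dictionary shapes this factory method consumes; the parameter labels and scopes below are invented for illustration, not taken from the repository:

```python
# Hypothetical inputs to IOptProblemParameters.from_parameters_dicts
# (labels are made up; real ones come from get_node_operation_parameter_label)
float_parameters_dict = {'0 || ridge | alpha': [0.01, 10.0]}      # name -> [lower, upper]
discrete_parameters_dict = {'1 || knn | n_neighbors': [1, 2, 3]}  # name -> list of values

# Bounds are split out exactly as in the method body above:
lower_bounds = [bounds[0] for bounds in float_parameters_dict.values()]  # [0.01]
upper_bounds = [bounds[1] for bounds in float_parameters_dict.values()]  # [10.0]
discrete_values = list(discrete_parameters_dict.values())                # [[1, 2, 3]]
```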


class GolemProblem(Problem, Generic[DomainGraphForTune]):
@@ -85,12 +81,6 @@ def get_parameters_dict_from_iopt_point(self, point: Point) -> Dict[str, Any]:
         discrete_parameters = dict(zip(self.discreteVariableNames, point.discreteVariables)) \
             if point.discreteVariables is not None else {}
 
-        # TODO: Remove workaround - for now IOpt handles only float variables, so discrete parameters
-        # are optimized as continuous and we need to round them
-        for parameter_name in float_parameters:
-            if parameter_name in self.discreteVariableNames:
-                float_parameters[parameter_name] = round(float_parameters[parameter_name])
-
         parameters_dict = {**float_parameters, **discrete_parameters}
         return parameters_dict
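
The mapping back from an iOpt point to named parameters is a plain zip-and-merge. A self-contained sketch with a stand-in for iOpt's `Point` (attribute names follow the method above; the labels and values are invented):

```python
class StubPoint:
    """Minimal stand-in for iOpt's Point, for illustration only."""
    floatVariables = [0.5]
    discreteVariables = ['B']

floatVariableNames = ['0 || ridge | alpha']
discreteVariableNames = ['0 || ridge | kernel']

point = StubPoint()
float_parameters = dict(zip(floatVariableNames, point.floatVariables))
discrete_parameters = dict(zip(discreteVariableNames, point.discreteVariables)) \
    if point.discreteVariables is not None else {}

print({**float_parameters, **discrete_parameters})
# {'0 || ridge | alpha': 0.5, '0 || ridge | kernel': 'B'}
```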

@@ -195,20 +185,21 @@ def _get_parameters_for_tune(self, graph: OptGraph) -> Tuple[IOptProblemParamete
 
             # Assign a unique prefix to each model hyperparameter:
             # label is the node's number in the graph
-            float_node_parameters, discrete_node_parameters = get_node_parameters_for_iopt(self.search_space,
-                                                                                           node_id,
-                                                                                           operation_name)
+            float_node_parameters, discrete_node_parameters = get_node_parameters_for_iopt(
+                self.search_space,
+                node_id,
+                operation_name)
 
             # Set initial parameters for the search
             for parameter, bounds in float_node_parameters.items():
                 # If the parameter is not set, use its minimum possible value
-                initaial_value = node.parameters.get(parameter) or bounds[0]
-                initial_parameters['floatVariables'].append(initaial_value)
+                initial_value = node.parameters.get(parameter) or bounds[0]
+                initial_parameters['floatVariables'].append(initial_value)
 
-            for parameter, bounds in discrete_node_parameters.items():
-                # If parameter is not set use parameter minimum possible value
-                initaial_value = node.parameters.get(parameter) or bounds[0]
-                initial_parameters['discreteVariables'].append(initaial_value)
+            for parameter, values in discrete_node_parameters.items():
+                # If the parameter is not set, use a random admissible value
+                initial_value = node.parameters.get(parameter) or choice(values)
+                initial_parameters['discreteVariables'].append(initial_value)
 
             float_parameters_dict.update(float_node_parameters)
             discrete_parameters_dict.update(discrete_node_parameters)
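
One caveat about the `stored or default` fallback used in both loops above: `or` tests truthiness, so a legitimately stored `0` or `False` is replaced by the default. A small illustration of the difference against an explicit `None` check (names are invented):

```python
from random import choice

values = [2, 4, 8]
node_parameters = {'n_layers': 0}  # 0 is a deliberately stored value

# Truthiness-based fallback, as in the tuner code: 0 is treated as "unset"
initial_value = node_parameters.get('n_layers') or choice(values)

# An explicit None check keeps falsy stored values (shown for comparison only)
stored = node_parameters.get('n_layers')
initial_value_strict = stored if stored is not None else choice(values)
```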
@@ -237,16 +228,22 @@ def get_node_parameters_for_iopt(search_space: SearchSpace, node_id: int, operat
 
     discrete_parameters_dict = {}
     float_parameters_dict = {}
+    categorical_parameters_dict = {}
 
     for parameter_name, parameter_properties in parameters_dict.items():
         node_op_parameter_name = get_node_operation_parameter_label(node_id, operation_name, parameter_name)
 
         parameter_type = parameter_properties.get('type')
         if parameter_type == 'discrete':
-            discrete_parameters_dict.update({node_op_parameter_name: parameter_properties
-                                            .get('sampling-scope')})
+            discrete_parameters_dict.update({node_op_parameter_name: list(range(*parameter_properties
+                                            .get('sampling-scope')))})
         elif parameter_type == 'continuous':
             float_parameters_dict.update({node_op_parameter_name: parameter_properties
                                          .get('sampling-scope')})
+        elif parameter_type == 'categorical':
+            categorical_parameters_dict.update({node_op_parameter_name: parameter_properties
+                                               .get('sampling-scope')[0]})
 
+    # IOpt does not distinguish between discrete and categorical parameters
+    discrete_parameters_dict = {**discrete_parameters_dict, **categorical_parameters_dict}
     return float_parameters_dict, discrete_parameters_dict
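
Taken together, the dispatch sorts each parameter by its declared type and then folds categorical parameters into the discrete dict. A standalone sketch with an invented parameters dict in the same format as the test fixture below:

```python
# Invented search-space fragment; format follows the test fixture
parameters_dict = {
    'alpha':       {'type': 'continuous',  'sampling-scope': [1e-3, 1]},
    'n_neighbors': {'type': 'discrete',    'sampling-scope': [1, 10]},
    'kernel':      {'type': 'categorical', 'sampling-scope': [['linear', 'rbf']]},
}

float_params, discrete_params, categorical_params = {}, {}, {}
for name, props in parameters_dict.items():
    kind = props.get('type')
    if kind == 'discrete':
        discrete_params[name] = list(range(*props['sampling-scope']))  # [1, 2, ..., 9]
    elif kind == 'continuous':
        float_params[name] = props['sampling-scope']                   # [lower, upper]
    elif kind == 'categorical':
        categorical_params[name] = props['sampling-scope'][0]          # ['linear', 'rbf']

# Fold categorical into discrete, since IOpt treats them identically
discrete_params = {**discrete_params, **categorical_params}
```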
5 changes: 5 additions & 0 deletions test/unit/tuning/test_tuning.py
@@ -35,6 +35,11 @@ def search_space():
             'hyperopt-dist': hp.loguniform,
             'sampling-scope': [1e-3, 1],
             'type': 'continuous'
+        },
+        'a3': {
+            'hyperopt-dist': hp.choice,
+            'sampling-scope': [['A', 'B', 'C']],
+            'type': 'categorical'
         }
     },
     'b': {
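
The nested `sampling-scope` of the new `'a3'` entry matches `hp.choice`, which takes the whole list of options as a single argument; presumably the scope list is unpacked into the distribution's positional arguments (an assumption about GOLEM's convention, not confirmed by this diff):

```python
from hyperopt import hp

# Scopes unpacked into the distribution calls (assumed convention):
dist_continuous = hp.loguniform('a2', *[1e-3, 1])       # -> low=1e-3, high=1
dist_categorical = hp.choice('a3', *[['A', 'B', 'C']])  # -> options=['A', 'B', 'C']
```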
