
Factor out the functionality of best config tracking into a separate abstract Optimizer class (microsoft#709)
motus authored Mar 13, 2024
1 parent e881987 commit 808bce3
Showing 5 changed files with 58 additions and 38 deletions.
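In effect, the commit reshapes the optimizer class hierarchy as sketched below (a summary drawn from the diffs that follow, not code from the commit itself):

# Before: each concrete optimizer duplicated its own best-score bookkeeping.
#   Optimizer -> GridSearchOptimizer
#   Optimizer -> MockOptimizer
#   Optimizer -> MlosCoreOptimizer
#
# After: the bookkeeping lives in one abstract base class.
#   Optimizer -> TrackBestOptimizer -> GridSearchOptimizer
#   Optimizer -> TrackBestOptimizer -> MockOptimizer
#   Optimizer -> MlosCoreOptimizer   (still defines its own get_best_observation)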
2 changes: 2 additions & 0 deletions mlos_bench/mlos_bench/optimizers/base_optimizer.py
@@ -275,6 +275,7 @@ def register(self, tunables: TunableGroups, status: Status,
score: Optional[Union[float, Dict[str, float]]] = None) -> Optional[float]:
"""
Register the observation for the given configuration.
+        Base class implementation logs the observation and increments the iteration count.
Parameters
----------
@@ -294,6 +295,7 @@ def register(self, tunables: TunableGroups, status: Status,
"""
_LOG.info("Iteration %d :: Register: %s = %s score: %s",
self._iter, tunables, status, score)
+        self._iter += 1
if status.is_succeeded() == (score is None): # XOR
raise ValueError("Status and score must be consistent.")
return self._get_score(status, score)
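The `status.is_succeeded() == (score is None)` comparison above is an XOR-style consistency check: a succeeded trial must come with a score, and a failed one must not. A minimal standalone sketch of that invariant (function name hypothetical):

from typing import Optional

def check_consistency(succeeded: bool, score: Optional[float]) -> None:
    # succeeded == (score is None) is True exactly when one of
    # "succeeded" / "has a score" holds without the other -- the inconsistent case.
    if succeeded == (score is None):
        raise ValueError("Status and score must be consistent.")

check_consistency(True, 0.5)    # OK: success with a score
check_consistency(False, None)  # OK: failure without a score
# check_consistency(True, None) would raise ValueError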
18 changes: 3 additions & 15 deletions mlos_bench/mlos_bench/optimizers/grid_search_optimizer.py
@@ -17,14 +17,14 @@
from mlos_bench.environments.status import Status
from mlos_bench.tunables.tunable import TunableValue
from mlos_bench.tunables.tunable_groups import TunableGroups
-from mlos_bench.optimizers.base_optimizer import Optimizer
+from mlos_bench.optimizers.track_best_optimizer import TrackBestOptimizer
from mlos_bench.optimizers.convert_configspace import configspace_data_to_tunable_values
from mlos_bench.services.base_service import Service

_LOG = logging.getLogger(__name__)


-class GridSearchOptimizer(Optimizer):
+class GridSearchOptimizer(TrackBestOptimizer):
"""
Grid search optimizer.
"""
@@ -41,7 +41,7 @@ def __init__(self,

# Track the grid as a set of tuples of tunable values and reconstruct the
# dicts as necessary.
-        # Note: this is not the most effecient way to do this, but avoids
+        # Note: this is not the most efficient way to do this, but avoids
# introducing a new data structure for hashable dicts.
# See https://github.com/microsoft/MLOS/pull/690 for further discussion.

@@ -160,12 +160,6 @@ def suggest(self) -> TunableGroups:
def register(self, tunables: TunableGroups, status: Status,
score: Optional[Union[float, dict]] = None) -> Optional[float]:
registered_score = super().register(tunables, status, score)
-        if status.is_succeeded() and (
-            self._best_score is None or (registered_score is not None and registered_score < self._best_score)
-        ):
-            self._best_score = registered_score
-            self._best_config = tunables.copy()
-        self._iter += 1
try:
config = dict(ConfigSpace.Configuration(self.config_space, values=tunables.get_param_values()))
self._suggested_configs.remove(tuple(config.values()))
@@ -180,9 +174,3 @@ def not_converged(self) -> bool:
len(self._pending_configs), list(self._pending_configs.keys()))
return False
return bool(self._pending_configs)

-    def get_best_observation(self) -> Union[Tuple[float, TunableGroups], Tuple[None, None]]:
-        if self._best_score is None:
-            return (None, None)
-        assert self._best_config is not None
-        return (self._best_score * self._opt_sign, self._best_config)
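The "tuples of tunable values" note in the __init__ hunk above works by using a value tuple (in a fixed key order) as a hashable stand-in for a config dict; a minimal sketch of that round-trip (key and value names hypothetical):

# Dicts are not hashable, so each grid point is stored as a tuple of its
# values and the dict is rebuilt from the keys when needed.
keys = ["vm_size", "idle"]
config = {"vm_size": "Standard_B2s", "idle": "halt"}

def to_key(config: dict) -> tuple:
    return tuple(config[k] for k in keys)

def from_key(key: tuple) -> dict:
    return dict(zip(keys, key))

pending = {to_key(config)}          # hashable set of pending grid points
assert from_key(to_key(config)) == config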
1 change: 0 additions & 1 deletion mlos_bench/mlos_bench/optimizers/mlos_core_optimizer.py
@@ -169,7 +169,6 @@ def register(self, tunables: TunableGroups, status:
df_config = self._to_df([tunables.get_param_values()])
_LOG.debug("Score: %s Dataframe:\n%s", score, df_config)
self._opt.register(df_config, pd.Series([score], dtype=float))
-        self._iter += 1
return score

def get_best_observation(self) -> Union[Tuple[float, TunableGroups], Tuple[None, None]]:
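For context, the surviving lines of register() hand the config to mlos_core as a one-row DataFrame paired with a one-element float Series; a sketch of those shapes (values hypothetical):

import pandas as pd

params = {"vm_size": "Standard_B2s", "idle": "halt"}  # like tunables.get_param_values()
df_config = pd.DataFrame([params])       # one row per registered configuration
scores = pd.Series([0.42], dtype=float)  # one score per row, as in the diff
print(df_config)
print(scores)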
25 changes: 3 additions & 22 deletions mlos_bench/mlos_bench/optimizers/mock_optimizer.py
@@ -9,19 +9,19 @@
import random
import logging

-from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+from typing import Callable, Dict, Optional, Sequence

from mlos_bench.environments.status import Status
from mlos_bench.tunables.tunable import Tunable, TunableValue
from mlos_bench.tunables.tunable_groups import TunableGroups

-from mlos_bench.optimizers.base_optimizer import Optimizer
+from mlos_bench.optimizers.track_best_optimizer import TrackBestOptimizer
from mlos_bench.services.base_service import Service

_LOG = logging.getLogger(__name__)


-class MockOptimizer(Optimizer):
+class MockOptimizer(TrackBestOptimizer):
"""
Mock optimizer to test the Environment API.
"""
@@ -38,8 +38,6 @@ def __init__(self,
"float": lambda tunable: rnd.uniform(*tunable.range),
"int": lambda tunable: rnd.randint(*tunable.range),
}
-        self._best_config: Optional[TunableGroups] = None
-        self._best_score: Optional[float] = None

def bulk_register(self, configs: Sequence[dict], scores: Sequence[Optional[float]],
status: Optional[Sequence[Status]] = None, is_warm_up: bool = False) -> bool:
@@ -71,20 +69,3 @@ def suggest(self) -> TunableGroups:
tunable.value = self._random[tunable.type](tunable)
_LOG.info("Iteration %d :: Suggest: %s", self._iter, tunables)
return tunables

-    def register(self, tunables: TunableGroups, status: Status,
-                 score: Optional[Union[float, dict]] = None) -> Optional[float]:
-        registered_score = super().register(tunables, status, score)
-        if status.is_succeeded() and (
-            self._best_score is None or (registered_score is not None and registered_score < self._best_score)
-        ):
-            self._best_score = registered_score
-            self._best_config = tunables.copy()
-        self._iter += 1
-        return registered_score
-
-    def get_best_observation(self) -> Union[Tuple[float, TunableGroups], Tuple[None, None]]:
-        if self._best_score is None:
-            return (None, None)
-        assert self._best_config is not None
-        return (self._best_score * self._opt_sign, self._best_config)
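The `_random` table shown in the __init__ hunk above dispatches on the tunable's type string; a self-contained sketch of the same pattern (here the lambdas take the range directly instead of a Tunable object):

import random

rnd = random.Random(42)  # seeded for reproducibility
samplers = {
    "float": lambda rng: rnd.uniform(*rng),
    "int": lambda rng: rnd.randint(*rng),
}
print(samplers["int"]((1, 100)))       # random int in [1, 100]
print(samplers["float"]((0.0, 1.0)))   # random float in [0.0, 1.0]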
50 changes: 50 additions & 0 deletions mlos_bench/mlos_bench/optimizers/track_best_optimizer.py
@@ -0,0 +1,50 @@
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""
TrackBestOptimizer base class for mlos_bench.
"""

import logging
from abc import ABCMeta
from typing import Optional, Tuple, Union

from mlos_bench.environments.status import Status
from mlos_bench.tunables.tunable_groups import TunableGroups

from mlos_bench.optimizers.base_optimizer import Optimizer
from mlos_bench.services.base_service import Service

_LOG = logging.getLogger(__name__)


class TrackBestOptimizer(Optimizer, metaclass=ABCMeta):
"""
Base Optimizer class that keeps track of the best score and configuration.
"""

def __init__(self,
tunables: TunableGroups,
config: dict,
global_config: Optional[dict] = None,
service: Optional[Service] = None):
super().__init__(tunables, config, global_config, service)
self._best_config: Optional[TunableGroups] = None
self._best_score: Optional[float] = None

def register(self, tunables: TunableGroups, status: Status,
score: Optional[Union[float, dict]] = None) -> Optional[float]:
registered_score = super().register(tunables, status, score)
if status.is_succeeded() and (
self._best_score is None or (registered_score is not None and registered_score < self._best_score)
):
self._best_score = registered_score
self._best_config = tunables.copy()
return registered_score

def get_best_observation(self) -> Union[Tuple[float, TunableGroups], Tuple[None, None]]:
if self._best_score is None:
return (None, None)
assert self._best_config is not None
return (self._best_score * self._opt_sign, self._best_config)
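A dependency-free sketch of how the new base class behaves (class and method names here are mine, not the commit's; the real get_best_observation() also multiplies by self._opt_sign, presumably to restore the original sign when the optimization direction is maximization):

from typing import Optional, Tuple

class TrackBest:
    """Distilled version of the best-score tracking above (minimization)."""

    def __init__(self) -> None:
        self._best_score: Optional[float] = None
        self._best_config: Optional[dict] = None

    def register(self, config: dict, succeeded: bool,
                 score: Optional[float]) -> Optional[float]:
        if succeeded and (self._best_score is None
                          or (score is not None and score < self._best_score)):
            self._best_score = score
            self._best_config = dict(config)
        return score

    def get_best(self) -> Tuple[Optional[float], Optional[dict]]:
        return (self._best_score, self._best_config)

t = TrackBest()
t.register({"x": 1}, True, 0.9)
t.register({"x": 2}, True, 0.4)
t.register({"x": 3}, False, None)   # failed runs never become the best
assert t.get_best() == (0.4, {"x": 2})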
