Commit be46d67: Formatting

hadarshavit committed Dec 4, 2024 (1 parent: 1b651e5)

Showing 3 changed files with 91 additions and 74 deletions.
55 changes: 34 additions & 21 deletions smac/acquisition/maximizer/differential_evolution.py
@@ -4,7 +4,9 @@
from ConfigSpace import Configuration, ConfigurationSpace
from scipy.optimize._differentialevolution import DifferentialEvolutionSolver

-from smac.acquisition.function.abstract_acquisition_function import AbstractAcquisitionFunction
+from smac.acquisition.function.abstract_acquisition_function import (
+    AbstractAcquisitionFunction,
+)
from smac.acquisition.maximizer import AbstractAcquisitionMaximizer
from smac.utils.configspace import transform_continuous_designs

@@ -26,7 +28,7 @@ class DifferentialEvolution(AbstractAcquisitionMaximizer):
    [1] Storn, R and Price, K, Differential Evolution - a Simple and Efficient Heuristic for Global
    Optimization over Continuous Spaces, Journal of Global Optimization, 1997, 11, 341 - 359.

    Parameters
    ----------
    configspace : ConfigurationSpace
    acquisition_function : AbstractAcquisitionFunction
@@ -44,16 +46,19 @@ class DifferentialEvolution(AbstractAcquisitionMaximizer):
        The recombination constant.
    seed : int, defaults to 0
    """
-    def __init__(self,
-                 configspace: ConfigurationSpace,
-                 acquisition_function: AbstractAcquisitionFunction | None = None,
-                 max_iter: int = 1000,
-                 challengers: int = 50000,
-                 strategy: str = "best1bin",
-                 polish: bool = True,
-                 mutation: tuple[float, float] = (0.5, 1.0),
-                 recombination: float =0.7,
-                 seed: int = 0):
+
+    def __init__(
+        self,
+        configspace: ConfigurationSpace,
+        acquisition_function: AbstractAcquisitionFunction | None = None,
+        max_iter: int = 1000,
+        challengers: int = 50000,
+        strategy: str = "best1bin",
+        polish: bool = True,
+        mutation: tuple[float, float] = (0.5, 1.0),
+        recombination: float = 0.7,
+        seed: int = 0,
+    ):
        super().__init__(configspace, acquisition_function, challengers, seed)
        # raise NotImplementedError("DifferentialEvolution is not yet implemented.")
        self.max_iter = max_iter
@@ -74,12 +79,18 @@ def _maximize(
        def func(x: np.ndarray) -> np.ndarray:
            assert self._acquisition_function is not None
            if len(x.shape) == 1:
-                return -self._acquisition_function([transform_continuous_designs(
-                    design=np.expand_dims(x, axis=0), origin="Differential Evolution", configspace=self._configspace
-                )[0]])
-            return -self._acquisition_function(transform_continuous_designs(
-                design=x.T, origin="Differential Evolution", configspace=self._configspace
-            ))
+                return -self._acquisition_function(
+                    [
+                        transform_continuous_designs(
+                            design=np.expand_dims(x, axis=0),
+                            origin="Differential Evolution",
+                            configspace=self._configspace,
+                        )[0]
+                    ]
+                )
+            return -self._acquisition_function(
+                transform_continuous_designs(design=x.T, origin="Differential Evolution", configspace=self._configspace)
+            )

        ds = DifferentialEvolutionSolver(
            func,
@@ -97,14 +108,16 @@ def func(x: np.ndarray) -> np.ndarray:
            disp=False,
            init="latinhypercube",
            atol=0,
-            vectorized=True
+            vectorized=True,
        )

        _ = ds.solve()
        for pop, val in zip(ds.population, ds.population_energies):
            rc = transform_continuous_designs(
-                design=np.expand_dims(pop, axis=0), origin="Acquisition Function Maximizer: Differential Evolution", configspace=self._configspace
-            )[0]
+                design=np.expand_dims(pop, axis=0),
+                origin="Acquisition Function Maximizer: Differential Evolution",
+                configspace=self._configspace,
+            )[0]
            configs.append((-val, rc))

        configs.sort(key=lambda t: t[0])
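The func wrapper above evaluates the whole population in one call (vectorized=True) and negates the acquisition values, since SciPy's differential evolution minimizes while SMAC wants the acquisition maximum. Below is a minimal, self-contained sketch of that pattern using the public scipy.optimize.differential_evolution (the commit drives the internal DifferentialEvolutionSolver directly with equivalent arguments); the quadratic acquisition function is a made-up stand-in:

```python
import numpy as np
from scipy.optimize import differential_evolution


def acquisition(batch: np.ndarray) -> np.ndarray:
    """Toy stand-in for an acquisition function: one score per row."""
    return -np.sum((batch - 0.3) ** 2, axis=1)


def func(x: np.ndarray):
    # With vectorized=True, SciPy passes x with shape (n_params, pop_size);
    # transpose to rows and negate, because the solver minimizes.
    if len(x.shape) == 1:
        # The polishing step sends a single 1-D point.
        return -float(acquisition(x[np.newaxis, :])[0])
    return -acquisition(x.T)


result = differential_evolution(
    func,
    bounds=[(0.0, 1.0)] * 3,  # configurations encoded in the unit hypercube
    strategy="best1bin",
    maxiter=1000,
    mutation=(0.5, 1.0),
    recombination=0.7,
    seed=0,
    polish=True,
    init="latinhypercube",
    vectorized=True,
    updating="deferred",  # required when vectorized=True
)
print(result.x, -result.fun)  # maximizer and maximal acquisition value
```

Negating the stored energies again (the -val in the loop above) recovers the acquisition values for ranking the challengers.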
109 changes: 56 additions & 53 deletions smac/utils/configspace.py
@@ -11,16 +11,19 @@
    BetaIntegerHyperparameter,
    CategoricalHyperparameter,
    Constant,
+    IntegerHyperparameter,
    NormalFloatHyperparameter,
    NormalIntegerHyperparameter,
+    NumericalHyperparameter,
    OrdinalHyperparameter,
    UniformFloatHyperparameter,
    UniformIntegerHyperparameter,
-    IntegerHyperparameter,
-    NumericalHyperparameter,
)
-from ConfigSpace.util import ForbiddenValueError, deactivate_inactive_hyperparameters
-from ConfigSpace.util import get_one_exchange_neighbourhood
+from ConfigSpace.util import (
+    ForbiddenValueError,
+    deactivate_inactive_hyperparameters,
+    get_one_exchange_neighbourhood,
+)

__copyright__ = "Copyright 2022, automl.org"
__license__ = "3-clause BSD"
@@ -186,58 +189,58 @@ def print_config_changes(


def transform_continuous_designs(
    design: np.ndarray, origin: str, configspace: ConfigurationSpace
) -> list[Configuration]:
    """Transforms the continuous designs into a discrete list of configurations.

    Parameters
    ----------
    design : np.ndarray
        Array of hyperparameters originating from the initial design strategy.
    origin : str
        Label for a configuration where it originated from.
    configspace : ConfigurationSpace

    Returns
    -------
    configs : list[Configuration]
        Continuous transformed configs.
    """
    params = configspace.get_hyperparameters()
    for idx, param in enumerate(params):
        if isinstance(param, IntegerHyperparameter):
            design[:, idx] = param._inverse_transform(param._transform(design[:, idx]))
        elif isinstance(param, NumericalHyperparameter):
            continue
        elif isinstance(param, Constant):
            design_ = np.zeros(np.array(design.shape) + np.array((0, 1)))
            design_[:, :idx] = design[:, :idx]
            design_[:, idx + 1 :] = design[:, idx:]
            design = design_
        elif isinstance(param, CategoricalHyperparameter):
            v_design = design[:, idx]
            v_design[v_design == 1] = 1 - 10**-10
            design[:, idx] = np.array(v_design * len(param.choices), dtype=int)
        elif isinstance(param, OrdinalHyperparameter):
            v_design = design[:, idx]
            v_design[v_design == 1] = 1 - 10**-10
            design[:, idx] = np.array(v_design * len(param.sequence), dtype=int)
        else:
            raise ValueError("Hyperparameter not supported when transforming a continuous design.")

    configs = []
    for vector in design:
        try:
            conf = deactivate_inactive_hyperparameters(
                configuration=None, configuration_space=configspace, vector=vector
            )
        except ForbiddenValueError:
            continue

        conf.origin = origin
        configs.append(conf)

    return configs


# def check_subspace_points(
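As an aside, the categorical and ordinal branches above map a unit-interval value to a choice index by scaling by the number of choices and truncating; an exact 1.0 is first nudged down so it lands in the last bin rather than producing an out-of-range index. A tiny illustration with made-up numbers (the three-choice hyperparameter is hypothetical):

```python
import numpy as np

choices = ("adam", "sgd", "rmsprop")  # hypothetical 3-choice hyperparameter
v_design = np.array([0.0, 0.34, 0.99, 1.0])
v_design[v_design == 1] = 1 - 10**-10  # keep an exact 1.0 inside the last bin
indices = np.array(v_design * len(choices), dtype=int)
print(indices)  # [0 1 2 2] -> adam, sgd, rmsprop, rmsprop
```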
1 change: 1 addition & 0 deletions tests/test_acquisition/test_maximizers.py
@@ -351,6 +351,7 @@ def test_sorted_random_search_categorical(configspace_categorical, acquisition_f

    values = rs._maximize(start_points, 1, _sorted=True)


# --------------------------------------------------------------
# TestLocalAndRandomSearch
# --------------------------------------------------------------
