Update copyright to 2025, update institution name #1195

Closed
9 changes: 9 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,12 @@
# 2.2.0

## Features
- Add example to specify total budget (fidelity units) instead of n_trials for multi-fidelity/Hyperband (#1121)

## Dependencies
- Update numpy NaN (#1122) and restrict numpy version
- Upgrade to ConfigSpace 1.x.x (#1124)

# 2.1.0

## Improvements
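The first changelog entry refers to a new example (#1121) that derives ``n_trials`` from a total fidelity budget instead of fixing the trial count directly. That example is not part of this diff; the snippet below is only a rough, self-contained sketch of the underlying arithmetic with made-up numbers, using the standard Hyperband bracket formulation — SMAC's own accounting (and the helper added in #1121) may differ in detail.

```python
import math

# Illustrative settings (not taken from the PR); the fidelity could be epochs, subset size, ...
min_budget, max_budget, eta = 5, 100, 3
total_budget = 10_000  # total fidelity units we are willing to spend overall

# Standard Hyperband bracket structure.
s_max = int(math.log(max_budget / min_budget, eta))

trials_per_iteration = 0   # target-function evaluations in one full Hyperband iteration
units_per_iteration = 0.0  # fidelity units consumed by one full Hyperband iteration
for s in range(s_max, -1, -1):
    n = math.ceil((s_max + 1) / (s + 1) * eta**s)  # initial configs in bracket s
    r = max_budget * eta ** (-s)                   # initial budget in bracket s
    for i in range(s + 1):
        n_i = math.floor(n * eta ** (-i))
        r_i = r * eta**i
        trials_per_iteration += n_i
        units_per_iteration += n_i * r_i

iterations = max(1, int(total_budget // units_per_iteration))
n_trials = iterations * trials_per_iteration
print(f"~{units_per_iteration:.0f} units per iteration -> n_trials={n_trials}")
# n_trials can then be passed to the Scenario used with the MultiFidelityFacade / Hyperband.
```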
2 changes: 1 addition & 1 deletion CITATION.cff
@@ -9,7 +9,7 @@ date-released: "2016-08-17"
url: "https://automl.github.io/SMAC3/master/index.html"
repository-code: "https://github.com/automl/SMAC3"

version: "2.1.0"
version: "2.2.0"

type: "software"
keywords:
2 changes: 1 addition & 1 deletion Makefile
@@ -5,7 +5,7 @@ SHELL := /bin/bash

NAME := SMAC3
PACKAGE_NAME := smac
VERSION := 2.1.0
VERSION := 2.2.0

DIR := "${CURDIR}"
SOURCE_DIR := ${PACKAGE_NAME}
2 changes: 1 addition & 1 deletion README.md
@@ -144,4 +144,4 @@ If you use SMAC in one of your research projects, please cite our
}
```

Copyright (C) 2016-2022 [AutoML Group](http://www.automl.org).
Copyright (C) 2016-2025 Leibniz University Hanover, Institute of AI.
4 changes: 3 additions & 1 deletion benchmark/src/benchmark.py
@@ -17,6 +17,8 @@
from collections import defaultdict
from pathlib import Path

from smac.utils.numpyencoder import NumpyEncoder

import pandas as pd
from src.tasks import TASKS # noqa: E402
from src.utils.exceptions import NotSupportedError # noqa: E402
@@ -79,7 +81,7 @@ def _save_data(self) -> None:
"""Saves the internal data to the file."""
print("Saving data...")
with open(str(RAW_FILENAME), "w") as f:
json.dump(self._data, f, indent=4)
json.dump(self._data, f, indent=4, cls=NumpyEncoder)

def _fill_keys(self) -> None:
"""Fill data with keys based on computer name, tasks, and selected version."""
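``cls=NumpyEncoder`` is needed above because ``json.dump`` cannot serialize numpy scalars and arrays on its own. SMAC ships the real encoder in ``smac.utils.numpyencoder``; the stand-alone sketch below only illustrates the kind of conversion such an encoder performs and is not the actual implementation.

```python
import json

import numpy as np


class NumpyJSONEncoder(json.JSONEncoder):
    """Convert numpy scalars and arrays to plain Python types for json.dump."""

    def default(self, obj):
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.bool_):
            return bool(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return super().default(obj)  # fall back to the default TypeError for unknown types


data = {"n_trials": np.int64(100), "loss": np.float32(0.25), "seeds": np.arange(3)}
print(json.dumps(data, indent=4, cls=NumpyJSONEncoder))  # would raise TypeError without the encoder
```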
2 changes: 1 addition & 1 deletion benchmark/src/models/ac_branin.py
@@ -20,7 +20,7 @@ def configspace(self) -> ConfigurationSpace:
x2 = Float("x2", (0, 15), default=7.5)

# Add hyperparameters and conditions to our configspace
cs.add_hyperparameters([x2])
cs.add([x2])

return cs

2 changes: 1 addition & 1 deletion benchmark/src/models/branin.py
@@ -20,7 +20,7 @@ def configspace(self) -> ConfigurationSpace:
x2 = Float("x2", (0, 15), default=0)

# Add hyperparameters and conditions to our configspace
cs.add_hyperparameters([x1, x2])
cs.add([x1, x2])

return cs

2 changes: 1 addition & 1 deletion benchmark/src/models/himmelblau.py
@@ -19,7 +19,7 @@ def configspace(self) -> ConfigurationSpace:
y = Float("y", (-5, 5))

# Add hyperparameters and conditions to our configspace
cs.add_hyperparameters([x, y])
cs.add([x, y])

return cs

4 changes: 2 additions & 2 deletions benchmark/src/models/mlp.py
@@ -33,7 +33,7 @@ def configspace(self) -> ConfigurationSpace:
learning_rate_init = Float("learning_rate_init", (0.0001, 1.0), default=0.001, log=True)

# Add all hyperparameters at once:
cs.add_hyperparameters([n_layer, n_neurons, activation, solver, batch_size, learning_rate, learning_rate_init])
cs.add([n_layer, n_neurons, activation, solver, batch_size, learning_rate, learning_rate_init])

# Adding conditions to restrict the hyperparameter space...
# ... since learning rate is used when solver is 'sgd'.
@@ -44,7 +44,7 @@ def configspace(self) -> ConfigurationSpace:
use_batch_size = InCondition(child=batch_size, parent=solver, values=["sgd", "adam"])

# We can also add multiple conditions on hyperparameters at once:
cs.add_conditions([use_lr, use_batch_size, use_lr_init])
cs.add([use_lr, use_batch_size, use_lr_init])

return cs

4 changes: 2 additions & 2 deletions benchmark/src/models/svm.py
@@ -34,8 +34,8 @@ def configspace(self) -> ConfigurationSpace:
use_gamma_value = InCondition(child=gamma_value, parent=gamma, values=["value"])

# Add hyperparameters and conditions to our configspace
cs.add_hyperparameters([kernel, C, shrinking, degree, coef, gamma, gamma_value])
cs.add_conditions([use_degree, use_coef, use_gamma, use_gamma_value])
cs.add([kernel, C, shrinking, degree, coef, gamma, gamma_value])
cs.add([use_degree, use_coef, use_gamma, use_gamma_value])

return cs

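The repeated ``cs.add_hyperparameters(...)`` / ``cs.add_conditions(...)`` → ``cs.add(...)`` replacements in this PR follow from the ConfigSpace 1.x upgrade (#1124), where a single ``ConfigurationSpace.add`` method accepts hyperparameters and conditions alike. A minimal sketch of the pattern, with illustrative values rather than the benchmark's actual search space:

```python
from ConfigSpace import Categorical, ConfigurationSpace, InCondition, Integer

cs = ConfigurationSpace(seed=0)

kernel = Categorical("kernel", ["linear", "rbf", "poly"], default="rbf")
degree = Integer("degree", (1, 5), default=3)
use_degree = InCondition(child=degree, parent=kernel, values=["poly"])  # degree only matters for poly

# ConfigSpace 1.x: hyperparameters and conditions go through the same `add` method
# (previously `add_hyperparameters` and `add_conditions` were separate calls).
cs.add([kernel, degree])
cs.add([use_degree])
print(cs)
```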
2 changes: 1 addition & 1 deletion docs/3_getting_started.rst
@@ -27,7 +27,7 @@ ranges and default values.
"species": ["mouse", "cat", "dog"], # Categorical
})

Please see the documentation of `ConfigSpace <https://automl.github.io/ConfigSpace/main/>`_ for more details.
Please see the documentation of `ConfigSpace <https://automl.github.io/ConfigSpace/latest/>`_ for more details.


Target Function
33 changes: 0 additions & 33 deletions docs/advanced_usage/9_parallelism.rst
@@ -21,39 +21,6 @@ SMAC supports multiple workers natively via Dask. Just specify ``n_workers`` in
When using multiple workers, SMAC is not reproducible anymore.


.. warning ::

You cannot use resource limitation (pynisher, via the `scenario` arguments `trial_walltime_limit` and `trial_memory_limit`).
This is because pynisher works by running your function inside of a subprocess.
Once in the subprocess, the resources will be limited for that process before running your function.
This does not work together with pickling - which is required by dask to schedule jobs on the cluster, even on a local one.


.. warning ::

Start/run SMAC inside ``if __name__ == "__main__"`` in your script otherwise Dask is not able to correctly
spawn jobs and probably this runtime error will be raised:

.. code-block ::

RuntimeError:
An attempt has been made to start a new process before the
current process has finished its bootstrapping phase.

This probably means that you are not using fork to start your
child processes and you have forgotten to use the proper idiom
in the main module:

if __name__ == '__main__':
freeze_support()
...

The "freeze_support()" line can be omitted if the program
is not going to be frozen to produce an executable.




Running on a Cluster
--------------------
You can also pass a custom dask client, e.g. to run on a slurm cluster.
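The two warnings removed above concerned pynisher-based resource limits and the ``if __name__ == "__main__"`` guard under Dask. The guard itself remains good practice whenever ``n_workers > 1``; the sketch below shows the local-parallelism pattern described by the remaining docs text, with an illustrative target function rather than anything taken from the docs page.

```python
from ConfigSpace import Configuration, ConfigurationSpace, Float

from smac import HyperparameterOptimizationFacade, Scenario


def quadratic(config: Configuration, seed: int = 0) -> float:
    return config["x"] ** 2


if __name__ == "__main__":  # required so Dask can spawn its worker processes cleanly
    cs = ConfigurationSpace(seed=0)
    cs.add([Float("x", (-5, 5), default=-5)])

    # n_workers > 1 lets SMAC evaluate several configurations in parallel via Dask.
    scenario = Scenario(cs, n_trials=100, n_workers=4)
    smac = HyperparameterOptimizationFacade(scenario, quadratic)
    incumbent = smac.optimize()
    print(incumbent)
```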
2 changes: 2 additions & 0 deletions docs/conf.py
@@ -12,6 +12,8 @@
"version": version,
"versions": {
f"v{version}": "#",
"v2.2.0": "https://automl.github.io/SMAC3/v2.2.0/",
"v2.1.0": "https://automl.github.io/SMAC3/v2.1.0/",
"v2.0.1": "https://automl.github.io/SMAC3/v2.0.1/",
"v2.0.0": "https://automl.github.io/SMAC3/v2.0.0/",
"v2.0.0b1": "https://automl.github.io/SMAC3/v2.0.0b1/",
4 changes: 2 additions & 2 deletions examples/1_basics/1_quadratic_function.py
@@ -17,7 +17,7 @@
from smac import HyperparameterOptimizationFacade as HPOFacade
from smac import RunHistory, Scenario

__copyright__ = "Copyright 2021, AutoML.org Freiburg-Hannover"
__copyright__ = "Copyright 2025, Leibniz University Hanover, Institute of AI"
__license__ = "3-clause BSD"


@@ -26,7 +26,7 @@ class QuadraticFunction:
def configspace(self) -> ConfigurationSpace:
cs = ConfigurationSpace(seed=0)
x = Float("x", (-5, 5), default=-5)
cs.add_hyperparameters([x])
cs.add([x])

return cs

6 changes: 3 additions & 3 deletions examples/1_basics/2_svm_cv.py
@@ -16,7 +16,7 @@

from smac import HyperparameterOptimizationFacade, Scenario

__copyright__ = "Copyright 2021, AutoML.org Freiburg-Hannover"
__copyright__ = "Copyright 2025, Leibniz University Hanover, Institute of AI"
__license__ = "3-clause BSD"


@@ -46,8 +46,8 @@ def configspace(self) -> ConfigurationSpace:
use_gamma_value = InCondition(child=gamma_value, parent=gamma, values=["value"])

# Add hyperparameters and conditions to our configspace
cs.add_hyperparameters([kernel, C, shrinking, degree, coef, gamma, gamma_value])
cs.add_conditions([use_degree, use_coef, use_gamma, use_gamma_value])
cs.add([kernel, C, shrinking, degree, coef, gamma, gamma_value])
cs.add([use_degree, use_coef, use_gamma, use_gamma_value])

return cs

4 changes: 2 additions & 2 deletions examples/1_basics/3_ask_and_tell.py
@@ -10,7 +10,7 @@
from smac import HyperparameterOptimizationFacade, Scenario
from smac.runhistory.dataclasses import TrialValue

__copyright__ = "Copyright 2021, AutoML.org Freiburg-Hannover"
__copyright__ = "Copyright 2025, Leibniz University Hanover, Institute of AI"
__license__ = "3-clause BSD"


@@ -20,7 +20,7 @@ def configspace(self) -> ConfigurationSpace:
cs = ConfigurationSpace(seed=0)
x0 = Float("x0", (-5, 10), default=-3)
x1 = Float("x1", (-5, 10), default=-4)
cs.add_hyperparameters([x0, x1])
cs.add([x0, x1])

return cs

4 changes: 2 additions & 2 deletions examples/1_basics/4_callback.py
@@ -17,7 +17,7 @@
from smac import Scenario
from smac.runhistory import TrialInfo, TrialValue

__copyright__ = "Copyright 2021, AutoML.org Freiburg-Hannover"
__copyright__ = "Copyright 2025, Leibniz University Hanover, Institute of AI"
__license__ = "3-clause BSD"


@@ -27,7 +27,7 @@ def configspace(self) -> ConfigurationSpace:
cs = ConfigurationSpace(seed=0)
x0 = Float("x0", (-5, 10), default=-3)
x1 = Float("x1", (-5, 10), default=-4)
cs.add_hyperparameters([x0, x1])
cs.add([x0, x1])

return cs

4 changes: 2 additions & 2 deletions examples/1_basics/5_continue.py
@@ -24,7 +24,7 @@
from smac.main.smbo import SMBO
from smac.runhistory import TrialInfo, TrialValue

__copyright__ = "Copyright 2021, AutoML.org Freiburg-Hannover"
__copyright__ = "Copyright 2025, Leibniz University Hanover, Institute of AI"
__license__ = "3-clause BSD"


@@ -47,7 +47,7 @@ class QuadraticFunction:
def configspace(self) -> ConfigurationSpace:
cs = ConfigurationSpace(seed=0)
x = Float("x", (-5, 5), default=-5)
cs.add_hyperparameters([x])
cs.add([x])

return cs

8 changes: 4 additions & 4 deletions examples/1_basics/6_priors.py
@@ -30,7 +30,7 @@
from smac import HyperparameterOptimizationFacade, Scenario
from smac.acquisition.function import PriorAcquisitionFunction

__copyright__ = "Copyright 2021, AutoML.org Freiburg-Hannover"
__copyright__ = "Copyright 2025, Leibniz University Hanover, Institute of AI"
__license__ = "3-clause BSD"


@@ -95,13 +95,13 @@ def configspace(self) -> ConfigurationSpace:
"learning_rate_init",
lower=1e-5,
upper=1.0,
mu=np.log(1e-3),
sigma=np.log(10),
mu=1e-3, # will be transformed to log space later
sigma=10, # will be transformed to log space later
log=True,
)

# Add all hyperparameters at once:
cs.add_hyperparameters([n_layer, n_neurons, activation, optimizer, batch_size, learning_rate_init])
cs.add([n_layer, n_neurons, activation, optimizer, batch_size, learning_rate_init])

return cs

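The ``mu``/``sigma`` change above also stems from the ConfigSpace 1.x upgrade: for log-scaled normal priors, the parameters are now given in the original value range and the log transform is applied internally, instead of the user pre-logging them. The snippet below is an assumed equivalent using the ``Float``/``Normal`` convenience API, not code taken from the example:

```python
from ConfigSpace import ConfigurationSpace, Float, Normal

cs = ConfigurationSpace(seed=0)

# mu and sigma are specified in the original space; with log=True, ConfigSpace 1.x
# handles the transformation to log space internally.
learning_rate_init = Float(
    "learning_rate_init",
    bounds=(1e-5, 1.0),
    default=1e-3,
    distribution=Normal(mu=1e-3, sigma=10),
    log=True,
)
cs.add([learning_rate_init])
```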
13 changes: 10 additions & 3 deletions examples/1_basics/7_parallelization_cluster.py
@@ -6,6 +6,9 @@
SLURM cluster. If you do not want to use a cluster but your local machine, set dask_client
to `None` and pass `n_workers` to the `Scenario`.

Sometimes the jobs submitted by the SLURM client are cancelled as soon as they start. In that
case, try starting your job from a compute node instead.

:warning: On some clusters you cannot spawn new jobs when running a SLURMCluster inside a
job instead of on the login node. No obvious errors might be raised but it can hang silently.

@@ -31,7 +34,7 @@

from smac import BlackBoxFacade, Scenario

__copyright__ = "Copyright 2023, AutoML.org Freiburg-Hannover"
__copyright__ = "Copyright 2025, Leibniz University Hanover, Institute of AI"
__license__ = "3-clause BSD"


@@ -41,7 +44,7 @@ def configspace(self) -> ConfigurationSpace:
cs = ConfigurationSpace(seed=0)
x0 = Float("x0", (-5, 10), default=-5, log=False)
x1 = Float("x1", (0, 15), default=2, log=False)
cs.add_hyperparameters([x0, x1])
cs.add([x0, x1])

return cs

@@ -77,7 +80,7 @@ def train(self, config: Configuration, seed: int = 0) -> float:
model = Branin()

# Scenario object specifying the optimization "environment"
scenario = Scenario(model.configspace, deterministic=True, n_trials=100)
scenario = Scenario(model.configspace, deterministic=True, n_trials=100, trial_walltime_limit=100)

# Create cluster
n_workers = 4 # Use 4 workers on the cluster
@@ -97,6 +100,10 @@
walltime="00:10:00",
processes=1,
log_directory="tmp/smac_dask_slurm",
# To limit the resource consumption of each function evaluation with pynisher
# (e.g. via `trial_walltime_limit`), `nanny` must be set to False.
# Otherwise, Dask raises `daemonic processes are not allowed to have children`.
nanny=False,  # feel free to set this to True if you do not use pynisher to limit memory/time usage
)
cluster.scale(jobs=n_workers)

6 changes: 3 additions & 3 deletions examples/2_multi_fidelity/1_mlp_epochs.py
@@ -42,7 +42,7 @@
from smac.intensifier.hyperband import Hyperband
from smac.intensifier.successive_halving import SuccessiveHalving

__copyright__ = "Copyright 2021, AutoML.org Freiburg-Hannover"
__copyright__ = "Copyright 2025, Leibniz University Hanover, Institute of AI"
__license__ = "3-clause BSD"


@@ -65,7 +65,7 @@ def configspace(self) -> ConfigurationSpace:
learning_rate_init = Float("learning_rate_init", (0.0001, 1.0), default=0.001, log=True)

# Add all hyperparameters at once:
cs.add_hyperparameters([n_layer, n_neurons, activation, solver, batch_size, learning_rate, learning_rate_init])
cs.add([n_layer, n_neurons, activation, solver, batch_size, learning_rate, learning_rate_init])

# Adding conditions to restrict the hyperparameter space...
# ... since learning rate is only used when solver is 'sgd'.
@@ -76,7 +76,7 @@ def configspace(self) -> ConfigurationSpace:
use_batch_size = InCondition(child=batch_size, parent=solver, values=["sgd", "adam"])

# We can also add multiple conditions on hyperparameters at once:
cs.add_conditions([use_lr, use_batch_size, use_lr_init])
cs.add([use_lr, use_batch_size, use_lr_init])

return cs

4 changes: 2 additions & 2 deletions examples/2_multi_fidelity/2_sgd_datasets.py
@@ -28,7 +28,7 @@
from smac import MultiFidelityFacade as MFFacade
from smac import Scenario

__copyright__ = "Copyright 2021, AutoML.org Freiburg-Hannover"
__copyright__ = "Copyright 2025, Leibniz University Hanover, Institute of AI"
__license__ = "3-clause BSD"


@@ -76,7 +76,7 @@ def configspace(self) -> ConfigurationSpace:
learning_rate = Categorical("learning_rate", ["constant", "invscaling", "adaptive"], default="constant")
eta0 = Float("eta0", (0.00001, 1), default=0.1, log=True)
# Add the parameters to configuration space
cs.add_hyperparameters([alpha, l1_ratio, learning_rate, eta0])
cs.add([alpha, l1_ratio, learning_rate, eta0])

return cs
