Skip to content

Commit

Permalink
interfaces for carp-s and hebo
Browse files Browse the repository at this point in the history
  • Loading branch information
TheEimer committed Apr 18, 2024
1 parent fd99f49 commit 93fe3cb
Show file tree
Hide file tree
Showing 15 changed files with 440 additions and 17 deletions.
61 changes: 61 additions & 0 deletions examples/configs/hebo_mlp.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@

defaults:
  - _self_
  - override hydra/sweeper: HyperHEBO


learning_rate: constant
learning_rate_init: 0.001
batch_size: 200
n_neurons: 10
n_layer: 1
solver: adam
activation: tanh

seed: 42
epochs: 10 # Default number of epochs

hydra:
  sweeper:
    budget: 10
    search_space:
      hyperparameters:
        n_layer:
          type: uniform_int
          lower: 1
          upper: 5
          # Fix: was `default:` — every other hyperparameter in this file
          # uses `default_value:`; the inconsistent key would not be picked up.
          default_value: ${n_layer}
        n_neurons:
          type: uniform_int
          lower: 8
          upper: 1024
          log: true
          default_value: ${n_neurons}
        activation:
          type: categorical
          choices: [ logistic, tanh, relu ]
          default_value: ${activation}
        solver:
          type: categorical
          choices: [ lbfgs, sgd, adam ]
          default_value: ${solver}
        batch_size:
          type: uniform_int
          lower: 30
          upper: 300
          default_value: ${batch_size}
        learning_rate:
          type: categorical
          choices: [ constant, invscaling, adaptive ]
          default_value: ${learning_rate}
        learning_rate_init:
          type: uniform_float
          lower: 0.0001
          upper: 1
          default_value: ${learning_rate_init}
          log: true

  run:
    dir: ./tmp/${now:%Y-%m-%d}/${now:%H-%M-%S}
  sweep:
    dir: ./tmp/${now:%Y-%m-%d}/${now:%H-%M-%S}
1 change: 1 addition & 0 deletions examples/configs/smac_bbo_branin.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ defaults:
hydra:
sweeper:
budget: 10
budget_variable: budget
sweeper_kwargs:
optimizer_kwargs:
smac_facade:
Expand Down
95 changes: 95 additions & 0 deletions examples/configs/smac_mf_mlp_carps.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@

defaults:
  - _self_
  - override hydra/sweeper: HyperCARPS


learning_rate: constant
learning_rate_init: 0.001
batch_size: 200
n_neurons: 10
n_layer: 1
solver: adam
activation: tanh

seed: 42
epochs: 10 # Default number of epochs

hydra:
  sweeper:
    budget: 10
    budget_variable: epochs
    sweeper_kwargs:
      optimizer_kwargs:
        _target_: carps.optimizers.smac20.SMAC3Optimizer
        _partial_: true
        smac_cfg:
          smac_class: smac.facade.multi_fidelity_facade.MultiFidelityFacade
          scenario:
            seed: ${seed}
            n_trials: 10
            deterministic: true
            n_workers: 1
            output_directory: ${outdir}/${seed}/smac3_output
            min_budget: 1
            max_budget: 10
          smac_kwargs:
            dask_client: null
            intensifier:
              _target_: smac.intensifier.hyperband.Hyperband
              _partial_: true
              eta: 3
    search_space:
      hyperparameters:
        n_layer:
          type: uniform_int
          lower: 1
          upper: 5
          # Fix: was `default:` — every other hyperparameter in this file
          # uses `default_value:`; the inconsistent key would not be picked up.
          default_value: ${n_layer}
        n_neurons:
          type: uniform_int
          lower: 8
          upper: 1024
          log: true
          default_value: ${n_neurons}
        activation:
          type: categorical
          choices: [ logistic, tanh, relu ]
          default_value: ${activation}
        solver:
          type: categorical
          choices: [ lbfgs, sgd, adam ]
          default_value: ${solver}
        batch_size:
          type: uniform_int
          lower: 30
          upper: 300
          default_value: ${batch_size}
        learning_rate:
          type: categorical
          choices: [ constant, invscaling, adaptive ]
          default_value: ${learning_rate}
        learning_rate_init:
          type: uniform_float
          lower: 0.0001
          upper: 1
          default_value: ${learning_rate_init}
          log: true
      conditions:
        # batch_size / learning_rate_init are only used by the stochastic
        # solvers; learning_rate schedule only matters for plain SGD.
        - child: batch_size
          parent: solver
          type: IN
          values: [ sgd, adam ]
        - child: learning_rate
          parent: solver
          type: EQ
          value: sgd
        - child: learning_rate_init
          parent: solver
          type: IN
          values: [ sgd, adam ]

  run:
    dir: ./tmp/${now:%Y-%m-%d}/${now:%H-%M-%S}
  sweep:
    dir: ./tmp/${now:%Y-%m-%d}/${now:%H-%M-%S}
3 changes: 3 additions & 0 deletions hydra_plugins/hyper_carp_s/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
"""Hydra sweeper plugin adapting CARP-S optimizers to the Hypersweeper.

Importing this package imports the config module, which registers the
``HyperCARPS`` sweeper config with Hydra's ConfigStore.
"""

from .config import HyperCARPSConfig

__all__ = ["HyperCARPSConfig"]
31 changes: 31 additions & 0 deletions hydra_plugins/hyper_carp_s/config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
"""Config for HyperCARPS sweeper."""

from __future__ import annotations

from dataclasses import dataclass, field
from typing import Any

from hydra.core.config_store import ConfigStore


@dataclass
class HyperCARPSConfig:
    """Config for HyperCARPS sweeper.

    Hydra instantiates ``_target_`` (the generic Hypersweeper) and hands it
    ``opt_constructor`` so the sweeper builds a CARP-S-backed optimizer.
    """

    # Dotted path of the generic Hypersweeper driving the optimization loop.
    _target_: str = "hydra_plugins.hypersweeper.hypersweeper.Hypersweeper"
    # Factory that wraps a CARP-S optimizer in the Hypersweeper ask/tell API.
    opt_constructor: str = "hydra_plugins.hyper_carp_s.hyper_carp_s.make_carp_s"
    # Hyperparameter search-space definition (see the example YAML configs).
    search_space: dict | None = field(default_factory=dict)
    # Path of a previous run to resume from, or False for a fresh run.
    resume: str | bool = False
    # Total optimization budget (e.g. number of trials in the example configs).
    budget: Any | None = None
    # Config key receiving the per-trial budget (e.g. "epochs" in the examples).
    budget_variable: str | None = None
    # Config keys for checkpoint load/save paths — presumably used in
    # multi-fidelity runs; TODO confirm against the Hypersweeper core.
    loading_variable: str | None = None
    saving_variable: str | None = None
    # Extra keyword arguments forwarded to the sweeper / optimizer factory.
    sweeper_kwargs: dict | None = field(default_factory=dict)


# Register so `override hydra/sweeper: HyperCARPS` resolves to this config.
ConfigStore.instance().store(
    group="hydra/sweeper",
    name="HyperCARPS",
    node=HyperCARPSConfig,
    provider="hypersweeper",
)
20 changes: 20 additions & 0 deletions hydra_plugins/hyper_carp_s/hyper_carp_s.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
from carps.benchmarks.dummy_problem import DummyProblem
from hydra_plugins.hypersweeper import Info

class HyperCARPSAdapter:
    """Adapt a CARP-S optimizer to the Hypersweeper ask/tell interface."""

    def __init__(self, carps) -> None:
        """Store the wrapped CARP-S optimizer.

        Parameters
        ----------
        carps
            A CARP-S optimizer instance exposing ``ask()`` and ``tell()``.
        """
        self.carps = carps

    def ask(self):
        """Fetch the next trial from the CARP-S optimizer.

        Returns
        -------
        tuple
            ``(info, False)`` — the Hypersweeper ``Info`` built from the
            CARP-S trial (config, budget, no load path, seed) and a flag
            that is always False here (this adapter never signals
            termination itself).
        """
        carps_info = self.carps.ask()
        info = Info(carps_info.config, carps_info.budget, None, carps_info.seed)
        return info, False

    def tell(self, info, value):
        """Report a finished trial's result back to the optimizer.

        Bug fix: the original called ``self.smac.tell`` — no ``smac``
        attribute exists on this adapter (likely copied from the SMAC
        adapter); the wrapped optimizer is ``self.carps``.
        """
        self.carps.tell(info, value)

def make_carp_s(configspace, carps_args):
    """Construct a CARP-S optimizer and wrap it for the Hypersweeper.

    A ``DummyProblem`` carries the search space, since CARP-S optimizers
    are built against a problem instance rather than a raw config space.
    """
    dummy_problem = DummyProblem()
    # CARP-S reads the search space from the problem's private attribute.
    dummy_problem._configspace = configspace
    optimizer_factory = carps_args["optimizer"]
    return HyperCARPSAdapter(optimizer_factory(dummy_problem))
3 changes: 3 additions & 0 deletions hydra_plugins/hyper_hebo/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
"""Hydra sweeper plugin adapting HEBO to the Hypersweeper.

Importing this package imports the config module, which registers the
``HyperHEBO`` sweeper config with Hydra's ConfigStore.
"""

from .config import HyperHEBOConfig

__all__ = ["HyperHEBOConfig"]
31 changes: 31 additions & 0 deletions hydra_plugins/hyper_hebo/config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
"""Config for HyperHEBO sweeper."""

from __future__ import annotations

from dataclasses import dataclass, field
from typing import Any

from hydra.core.config_store import ConfigStore


@dataclass
class HyperHEBOConfig:
    """Config for HyperHEBO sweeper.

    Hydra instantiates ``_target_`` (the generic Hypersweeper) and hands it
    ``opt_constructor`` so the sweeper builds a HEBO-backed optimizer.
    """

    # Dotted path of the generic Hypersweeper driving the optimization loop.
    _target_: str = "hydra_plugins.hypersweeper.hypersweeper.Hypersweeper"
    # Factory that wraps a HEBO optimizer in the Hypersweeper ask/tell API.
    opt_constructor: str = "hydra_plugins.hyper_hebo.hyper_hebo.make_hebo"
    # Hyperparameter search-space definition (see the example YAML configs).
    search_space: dict | None = field(default_factory=dict)
    # Path of a previous run to resume from, or False for a fresh run.
    resume: str | bool = False
    # Total optimization budget (e.g. number of trials in the example configs).
    budget: Any | None = None
    # Config key receiving the per-trial budget (e.g. "epochs" in the examples).
    budget_variable: str | None = None
    # Config keys for checkpoint load/save paths — presumably used in
    # multi-fidelity runs; TODO confirm against the Hypersweeper core.
    loading_variable: str | None = None
    saving_variable: str | None = None
    # Extra keyword arguments forwarded to the sweeper / optimizer factory.
    sweeper_kwargs: dict | None = field(default_factory=dict)


# Register so `override hydra/sweeper: HyperHEBO` resolves to this config.
ConfigStore.instance().store(
    group="hydra/sweeper",
    name="HyperHEBO",
    node=HyperHEBOConfig,
    provider="hypersweeper",
)
Loading

0 comments on commit 93fe3cb

Please sign in to comment.