Merge pull request #150 from optimas-org/feature/fixed_parameters
Support changing the range and fixing the value of `VaryingParameter`s
MaxThevenet authored Dec 1, 2023
2 parents f0dba9c + 484a19a commit 914139c
Showing 12 changed files with 468 additions and 106 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/publish-to-pypi.yml
@@ -13,10 +13,10 @@ jobs:
       id-token: write
     steps:
       - uses: actions/checkout@v3
-      - name: Set up Python 3.8
+      - name: Set up Python 3.11
         uses: actions/setup-python@v4
         with:
-          python-version: 3.8
+          python-version: 3.11
       - name: Install pypa/build
         run: >-
           python3 -m
2 changes: 1 addition & 1 deletion .github/workflows/unix.yml
@@ -12,7 +12,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [3.8, 3.9, '3.10', 3.11]
+        python-version: [3.9, '3.10', 3.11]
 
     steps:
       - uses: actions/checkout@v2
2 changes: 1 addition & 1 deletion .readthedocs.yaml
@@ -9,7 +9,7 @@ version: 2
 build:
   os: ubuntu-20.04
   tools:
-    python: "3.8"
+    python: "3.11"
     nodejs: "16"
 
 # Build documentation in the docs/ directory with Sphinx
48 changes: 48 additions & 0 deletions optimas/core/parameter.py
@@ -59,11 +59,13 @@ def __init__(
         dtype: Optional[np.dtype] = float,
     ) -> None:
         super().__init__(name, dtype)
+        self._check_range(lower_bound, upper_bound)
         self._lower_bound = lower_bound
         self._upper_bound = upper_bound
         self._is_fidelity = is_fidelity
         self._fidelity_target_value = fidelity_target_value
         self._default_value = default_value
+        self._is_fixed = False
 
     @property
     def lower_bound(self) -> float:
@@ -90,6 +92,52 @@ def default_value(self) -> float:
"""Get the default value of the varying parameter."""
return self._default_value

@property
def is_fixed(self) -> bool:
"""Get whether the parameter is fixed to a certain value."""
return self._is_fixed

def update_range(self, lower_bound: float, upper_bound: float) -> None:
"""Update range of the parameter.
Parameters
----------
lower_bound, upper_bound : float
Lower and upper bounds of the range in which the parameter can vary.
"""
self._check_range(lower_bound, upper_bound)
self._lower_bound = lower_bound
self._upper_bound = upper_bound

def fix_value(self, value: float) -> None:
"""Fix the value of the parameter.
The value must be within the range of the parameter.
Parameters
----------
value : float
The value to which the parameter will be fixed.
"""
if value < self.lower_bound or value > self.upper_bound:
raise ValueError(
f"The value {value} is outside of the range of parameter "
f"{self.name}: [{self.lower_bound},{self.upper_bound}]"
)
self._default_value = value
self._is_fixed = True

def free_value(self) -> None:
"""Free the value of the parameter."""
self._is_fixed = False

def _check_range(self, lower_bound, upper_bound):
if upper_bound <= lower_bound:
raise ValueError(
"Inconsistent range bounds. "
f"Upper bound ({upper_bound}) < lower bound ({lower_bound})."
)


class TrialParameter(Parameter):
"""Defines a parameter that can be attached to a trial.
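Taken together, the new methods let a user reshape a parameter during an optimization: narrow or widen its range, pin it to a value, and release it again. A minimal usage sketch (assuming the usual positional name, lower_bound, upper_bound constructor of VaryingParameter; the parameter name is illustrative):

from optimas.core import VaryingParameter

# A parameter allowed to vary in [0, 10].
x0 = VaryingParameter("x0", 0.0, 10.0)

# Narrow the range; _check_range rejects upper_bound <= lower_bound.
x0.update_range(2.0, 8.0)

# Pin the parameter; the value must lie inside the current range,
# otherwise fix_value raises a ValueError.
x0.fix_value(5.0)
assert x0.is_fixed
assert x0.default_value == 5.0

# Release the parameter so it can vary again.
x0.free_value()
assert not x0.is_fixed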
11 changes: 11 additions & 0 deletions optimas/generators/ax/base.py
@@ -50,6 +50,13 @@ class AxGenerator(Generator):
         For some generators, it might be necessary to attach additional
         parameters to the trials. If so, they can be given here as a list.
         By default, ``None``.
+    allow_fixed_parameters : bool, optional
+        Whether the generator supports ``VaryingParameter``s whose value
+        has been fixed. By default, ``False``.
+    allow_updating_parameters : bool, optional
+        Whether the generator supports updating the ``VaryingParameter``s
+        (e.g., changing their range). If so, the ``_update_parameter``
+        method must be implemented. By default, ``False``.
     """

@@ -65,6 +72,8 @@ def __init__(
         model_save_period: Optional[int] = 5,
         model_history_dir: Optional[str] = "model_history",
         custom_trial_parameters: Optional[TrialParameter] = None,
+        allow_fixed_parameters: Optional[bool] = False,
+        allow_updating_parameters: Optional[bool] = False,
     ) -> None:
         super().__init__(
             varying_parameters=varying_parameters,
@@ -77,6 +86,8 @@ def __init__(
             model_save_period=model_save_period,
             model_history_dir=model_history_dir,
             custom_trial_parameters=custom_trial_parameters,
+            allow_fixed_parameters=allow_fixed_parameters,
+            allow_updating_parameters=allow_updating_parameters,
         )
         self._determine_torch_device()
 
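A generator advertises these capabilities by forwarding the two flags to this base class, exactly as AxServiceGenerator does in the next file. A hypothetical sketch of a subclass opting in (the class name and constructor signature are assumptions; only the flag names and the _update_parameter hook come from the diff):

from optimas.generators.ax.base import AxGenerator


class MyFixableAxGenerator(AxGenerator):
    """Hypothetical generator supporting fixed and updatable parameters."""

    def __init__(self, varying_parameters, objectives, **kwargs):
        super().__init__(
            varying_parameters=varying_parameters,
            objectives=objectives,
            allow_fixed_parameters=True,
            allow_updating_parameters=True,
            **kwargs,
        )

    def _update_parameter(self, parameter):
        # Required when allow_updating_parameters=True: rebuild the part
        # of the search space that corresponds to `parameter`.
        ...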
92 changes: 88 additions & 4 deletions optimas/generators/ax/service/base.py
@@ -1,17 +1,27 @@
"""Contains the definition of the base Ax generator using the service API."""

from typing import List, Optional
from typing import List, Optional, Dict
import os

import torch
from packaging import version
from ax.version import version as ax_version
from ax.service.ax_client import AxClient
from ax.core.observation import ObservationFeatures
from ax.service.utils.instantiation import (
InstantiationBase,
ObjectiveProperties,
)
from ax.modelbridge.registry import Models
from ax.modelbridge.generation_strategy import (
GenerationStep,
GenerationStrategy,
)

from optimas.utils.other import update_object
from optimas.core import Objective, Trial, VaryingParameter, Parameter
from optimas.generators.ax.base import AxGenerator
from optimas.generators.base import Generator
from .custom_ax import CustomAxClient as AxClient


class AxServiceGenerator(AxGenerator):
@@ -34,6 +44,11 @@ class AxServiceGenerator(AxGenerator):
     enforce_n_init : bool, optional
         Whether to enforce the generation of `n_init` Sobol trials, even if
         external data is supplied. By default, ``False``.
+    fit_out_of_design : bool, optional
+        Whether to fit the surrogate model taking into account evaluations
+        outside of the range of the varying parameters. This can be useful
+        if the range of a parameter has been reduced during the
+        optimization. By default, ``False``.
     use_cuda : bool, optional
         Whether to allow the generator to run on a CUDA GPU. By default
         ``False``.
@@ -62,6 +77,7 @@ def __init__(
         analyzed_parameters: Optional[List[Parameter]] = None,
         n_init: Optional[int] = 4,
         enforce_n_init: Optional[bool] = False,
+        fit_out_of_design: Optional[bool] = False,
         use_cuda: Optional[bool] = False,
         gpu_id: Optional[int] = 0,
         dedicated_resources: Optional[bool] = False,
@@ -79,15 +95,27 @@ def __init__(
             save_model=save_model,
             model_save_period=model_save_period,
             model_history_dir=model_history_dir,
+            allow_fixed_parameters=True,
+            allow_updating_parameters=True,
         )
         self._n_init = n_init
         self._enforce_n_init = enforce_n_init
+        self._fit_out_of_design = fit_out_of_design
         self._ax_client = self._create_ax_client()
+        self._fixed_features = None
 
     def _ask(self, trials: List[Trial]) -> List[Trial]:
         """Fill in the parameter values of the requested trials."""
         for trial in trials:
-            parameters, trial_id = self._ax_client.get_next_trial()
+            try:
+                parameters, trial_id = self._ax_client.get_next_trial(
+                    fixed_features=self._fixed_features
+                )
+            # Occurs when not using a CustomAxClient (i.e., when the AxClient
+            # is provided by the user via an AxClientGenerator). In that
+            # case, there is also no need to support fixed features.
+            except TypeError:
+                parameters, trial_id = self._ax_client.get_next_trial()
             trial.parameter_values = [
                 parameters.get(var.name) for var in self._varying_parameters
             ]
@@ -130,7 +158,55 @@ def _tell(self, trials: List[Trial]) -> None:
             gs.current_step.num_trials -= 1
 
     def _create_ax_client(self) -> AxClient:
-        """Create Ax client (must be implemented by subclasses)."""
+        """Create Ax client."""
+        bo_model_kwargs = {
+            "torch_dtype": torch.double,
+            "torch_device": torch.device(self.torch_device),
+            "fit_out_of_design": self._fit_out_of_design,
+        }
+        ax_client = AxClient(
+            generation_strategy=GenerationStrategy(
+                self._create_generation_steps(bo_model_kwargs)
+            ),
+            verbose_logging=False,
+        )
+        ax_client.create_experiment(
+            parameters=self._create_ax_parameters(),
+            objectives=self._create_ax_objectives(),
+        )
+        return ax_client
+
+    def _create_ax_parameters(self) -> List:
+        """Create the list of parameters to pass to Ax."""
+        parameters = []
+        fixed_parameters = {}
+        for var in self._varying_parameters:
+            parameters.append(
+                {
+                    "name": var.name,
+                    "type": "range",
+                    "bounds": [var.lower_bound, var.upper_bound],
+                    "is_fidelity": var.is_fidelity,
+                    "target_value": var.fidelity_target_value,
+                }
+            )
+            if var.is_fixed:
+                fixed_parameters[var.name] = var.default_value
+        # Store fixed parameters as fixed features.
+        self._fixed_features = ObservationFeatures(fixed_parameters)
+        return parameters
+
+    def _create_ax_objectives(self) -> Dict[str, ObjectiveProperties]:
+        """Create the dictionary of objectives to pass to Ax."""
+        objectives = {}
+        for obj in self.objectives:
+            objectives[obj.name] = ObjectiveProperties(minimize=obj.minimize)
+        return objectives
+
+    def _create_generation_steps(
+        self, bo_model_kwargs: Dict
+    ) -> List[GenerationStep]:
+        """Create generation steps (must be implemented by subclasses)."""
+        raise NotImplementedError
 
     def _save_model_to_file(self) -> None:
@@ -172,3 +248,11 @@ def _update(self, new_generator: Generator) -> None:
         super()._update(new_generator)
         update_object(original_ax_client, new_generator._ax_client)
         self._ax_client = original_ax_client
+
+    def _update_parameter(self, parameter):
+        """Update a parameter of the search space."""
+        parameters = self._create_ax_parameters()
+        new_search_space = InstantiationBase.make_search_space(parameters, None)
+        self._ax_client.experiment.search_space.update_parameter(
+            new_search_space.parameters[parameter.name]
+        )
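Both new capabilities reduce to standard Ax constructs: fixed parameters travel as an ObservationFeatures object, which the bundled CustomAxClient accepts in get_next_trial (a stock AxClient does not, hence the TypeError fallback in _ask), and range updates swap a rebuilt parameter into the live search space. A self-contained sketch of the same two patterns against plain Ax (parameter and objective names are illustrative):

from ax.core.observation import ObservationFeatures
from ax.service.ax_client import AxClient
from ax.service.utils.instantiation import (
    InstantiationBase,
    ObjectiveProperties,
)

# A toy two-parameter experiment.
ax_client = AxClient(verbose_logging=False)
ax_client.create_experiment(
    parameters=[
        {"name": "x0", "type": "range", "bounds": [0.0, 10.0]},
        {"name": "x1", "type": "range", "bounds": [-5.0, 5.0]},
    ],
    objectives={"f": ObjectiveProperties(minimize=True)},
)

# Fixing: pin x1 for upcoming suggestions; this mirrors the object that
# _create_ax_parameters stores in self._fixed_features.
fixed_features = ObservationFeatures({"x1": 1.5})

# Updating: rebuild a search space from dict-style specs and swap the
# changed parameter into the live experiment, as _update_parameter does.
new_space = InstantiationBase.make_search_space(
    [{"name": "x0", "type": "range", "bounds": [2.0, 8.0]}],
    None,  # no parameter constraints
)
ax_client.experiment.search_space.update_parameter(
    new_space.parameters["x0"]
)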