Source code for aepsych.generators.random_generator

#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.

# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

from typing import Dict, Optional, Union

import numpy as np
import torch
from aepsych.config import Config
from aepsych.generators.base import AEPsychGenerationStep, AEPsychGenerator
from aepsych.models.base import AEPsychMixin
from aepsych.utils import _process_bounds
from ax.modelbridge import Models


class RandomGenerator(AEPsychGenerator):
    """Generator that generates points randomly without an acquisition function."""

    _requires_model = False

    def __init__(
        self,
        lb: Union[np.ndarray, torch.Tensor],
        ub: Union[np.ndarray, torch.Tensor],
        dim: Optional[int] = None,
    ):
        """Initialize RandomGenerator.

        Args:
            lb (Union[np.ndarray, torch.Tensor]): Lower bounds of each parameter.
            ub (Union[np.ndarray, torch.Tensor]): Upper bounds of each parameter.
            dim (int, optional): Dimensionality of the parameter space.
                If None, it is inferred from lb and ub.
        """
        self.lb, self.ub, self.dim = _process_bounds(lb, ub, dim)
        self.bounds_ = torch.stack([self.lb, self.ub])
    def gen(
        self,
        num_points: int = 1,
        model: Optional[AEPsychMixin] = None,  # included for API compatibility.
    ) -> torch.Tensor:
        """Query next point(s) to run by randomly sampling the parameter space.

        Args:
            num_points (int): Number of points to query. Defaults to 1.

        Returns:
            torch.Tensor: Next set of point(s) to evaluate, [num_points x dim].
        """
        X = self.bounds_[0] + torch.rand((num_points, self.bounds_.shape[1])) * (
            self.bounds_[1] - self.bounds_[0]
        )
        return X
    @classmethod
    def from_config(cls, config: Config):
        classname = cls.__name__
        lb = config.gettensor(classname, "lb")
        ub = config.gettensor(classname, "ub")
        dim = config.getint(classname, "dim", fallback=None)
        return cls(lb=lb, ub=ub, dim=dim)
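
A minimal usage sketch (not part of the module source), assuming a two-parameter space; the bounds and the config string below are illustrative values, not defaults from AEPsych:

import torch

from aepsych.config import Config
from aepsych.generators.random_generator import RandomGenerator

# Direct construction: two parameters bounded by [0, 1] and [0, 10].
generator = RandomGenerator(lb=torch.tensor([0.0, 0.0]), ub=torch.tensor([1.0, 10.0]))
X = generator.gen(num_points=5)  # [5 x 2] tensor of uniform draws within the bounds

# Equivalent construction from an ini-style config section named after the class;
# dim is omitted, so it is inferred from lb and ub.
config = Config(
    config_str="""
    [RandomGenerator]
    lb = [0, 0]
    ub = [1, 10]
    """
)
generator_from_config = RandomGenerator.from_config(config)
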
class AxRandomGenerator(AEPsychGenerationStep):
    classname = "RandomGenerator"
    model = Models.UNIFORM
    @classmethod
    def get_config_options(cls, config: Config, name: str) -> Dict:
        seed = config.getint(cls.classname, "seed", fallback=None)
        deduplicate = config.getboolean(cls.classname, "deduplicate", fallback=True)
        opts = {
            "model": cls.model,
            "model_kwargs": {"seed": seed, "deduplicate": deduplicate},
        }
        opts.update(super().get_config_options(config, name))
        return opts
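
A sketch of the step-specific options assembled above, assuming a config with a [RandomGenerator] section; the seed and deduplicate values are illustrative, and the base-class options merged in by super().get_config_options(...) are not shown:

from aepsych.config import Config
from ax.modelbridge import Models

config = Config(
    config_str="""
    [RandomGenerator]
    seed = 123
    deduplicate = True
    """
)

# Mirrors the seed/deduplicate handling in AxRandomGenerator.get_config_options;
# the remaining generation-step options come from AEPsychGenerationStep.
seed = config.getint("RandomGenerator", "seed", fallback=None)
deduplicate = config.getboolean("RandomGenerator", "deduplicate", fallback=True)
opts = {
    "model": Models.UNIFORM,
    "model_kwargs": {"seed": seed, "deduplicate": deduplicate},
}
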