implement SampleAroundGenerator (#376)
Summary:
Pull Request resolved: #376

This generator samples in a window around a set of pre-defined points.

Reviewed By: tymmsc

Differential Revision: D62345993

fbshipit-source-id: 216873b74756677575ff1ddb0d1ac7fcc99f5b75
crasanders authored and facebook-github-bot committed Sep 9, 2024
1 parent cd6942e commit 5564713
Showing 3 changed files with 123 additions and 14 deletions.
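
A rough usage sketch of the generator described in the summary (not part of the commit; the bounds, points, and window below are placeholder values, and the constructor signature is the one added in manual_generator.py further down):

import torch
from aepsych.generators import SampleAroundPointsGenerator

# Placeholder example values; see the diff below for the actual signature.
gen = SampleAroundPointsGenerator(
    lb=torch.tensor([0.0, 0.0]),                    # lower bound of each parameter
    ub=torch.tensor([1.0, 1.0]),                    # upper bound of each parameter
    window=torch.tensor([0.1, 0.2]),                # sampling half-width along each dimension
    points=torch.tensor([[0.5, 0.0], [0.5, 1.0]]),  # reference points to sample around
    samples_per_point=2,                            # Sobol draws taken around each reference point
    seed=123,
)
samples = gen.gen(gen.max_asks)  # all len(points) * samples_per_point pre-generated samples
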
5 changes: 3 additions & 2 deletions aepsych/generators/__init__.py
@@ -9,7 +9,7 @@

from ..config import Config
from .epsilon_greedy_generator import EpsilonGreedyGenerator
from .manual_generator import ManualGenerator
from .manual_generator import ManualGenerator, SampleAroundPointsGenerator
from .monotonic_rejection_generator import MonotonicRejectionGenerator
from .monotonic_thompson_sampler_generator import MonotonicThompsonSamplerGenerator
from .optimize_acqf_generator import OptimizeAcqfGenerator
@@ -28,10 +28,11 @@
"SobolGenerator",
"EpsilonGreedyGenerator",
"ManualGenerator",
"SampleAroundPointsGenerator",
"PairwiseOptimizeAcqfGenerator",
"PairwiseSobolGenerator",
"IntensityAwareSemiPGenerator",
"AcqfThompsonSamplerGenerator"
"AcqfThompsonSamplerGenerator",
]

Config.register_module(sys.modules[__name__])
95 changes: 84 additions & 11 deletions aepsych/generators/manual_generator.py
@@ -6,18 +6,19 @@
# LICENSE file in the root directory of this source tree.

import warnings
from typing import Optional, Union
from typing import Optional, Union, Dict

import numpy as np
import torch
from aepsych.config import Config
from aepsych.generators.base import AEPsychGenerator
from aepsych.models.base import AEPsychMixin
from aepsych.utils import _process_bounds
from torch.quasirandom import SobolEngine


class ManualGenerator(AEPsychGenerator):
"""Generator that generates points from the Sobol Sequence."""
"""Generator that generates points from a predefined list."""

_requires_model = False

@@ -28,17 +28,20 @@ def __init__(
points: Union[np.ndarray, torch.Tensor],
dim: Optional[int] = None,
shuffle: bool = True,
seed: Optional[int] = None,
):
"""Iniatialize SobolGenerator.
"""Iniatialize ManualGenerator.
Args:
lb (Union[np.ndarray, torch.Tensor]): Lower bounds of each parameter.
ub (Union[np.ndarray, torch.Tensor]): Upper bounds of each parameter.
points (Union[np.ndarray, torch.Tensor]): The points that will be generated.
dim (int, optional): Dimensionality of the parameter space. If None, it is inferred from lb and ub.
shuffle (bool): Whether or not to shuffle the order of the points. True by default.
seed (int, optional): Random seed.
"""
self.seed = seed
self.lb, self.ub, self.dim = _process_bounds(lb, ub, dim)
if shuffle:
np.random.seed(self.seed)
np.random.shuffle(points)
self.points = torch.tensor(points)
self.max_asks = len(self.points)
@@ -65,13 +69,82 @@ def gen(
return points

@classmethod
def from_config(cls, config: Config):
classname = cls.__name__
def from_config(cls, config: Config, name: Optional[str] = None):
return cls(**cls.get_config_options(config, name))

lb = config.gettensor(classname, "lb")
ub = config.gettensor(classname, "ub")
dim = config.getint(classname, "dim", fallback=None)
points = config.getarray(classname, "points")
shuffle = config.getboolean(classname, "shuffle", fallback=True)
@classmethod
def get_config_options(cls, config: Config, name: Optional[str] = None) -> Dict:
if name is None:
name = cls.__name__

lb = config.gettensor(name, "lb")
ub = config.gettensor(name, "ub")
dim = config.getint(name, "dim", fallback=None)
points = config.getarray(name, "points")
shuffle = config.getboolean(name, "shuffle", fallback=True)
seed = config.getint(name, "seed", fallback=None)

options = {
"lb": lb,
"ub": ub,
"dim": dim,
"points": points,
"shuffle": shuffle,
"seed": seed,
}

return options


class SampleAroundPointsGenerator(ManualGenerator):
"""Generator that samples in a window around reference points in a predefined list."""

def __init__(
self,
lb: Union[np.ndarray, torch.Tensor],
ub: Union[np.ndarray, torch.Tensor],
window: Union[np.ndarray, torch.Tensor],
points: Union[np.ndarray, torch.Tensor],
samples_per_point: int,
dim: Optional[int] = None,
shuffle: bool = True,
seed: Optional[int] = None,
):
"""Iniatialize SampleAroundPointsGenerator.
Args:
lb (Union[np.ndarray, torch.Tensor]): Lower bounds of each parameter.
ub (Union[np.ndarray, torch.Tensor]): Upper bounds of each parameter.
window (Union[np.ndarray, torch.Tensor]): How far away to sample from the reference point along each dimension.
points (Union[np.ndarray, torch.Tensor]): The points that will be generated.
samples_per_point (int): How many samples around each point to take.
dim (int, optional): Dimensionality of the parameter space. If None, it is inferred from lb and ub.
shuffle (bool): Whether or not to shuffle the order of the points. True by default.
seed (int, optional): Random seed.
"""
lb, ub, dim = _process_bounds(lb, ub, dim)
points = torch.Tensor(points)
self.engine = SobolEngine(dimension=dim, scramble=True, seed=seed)
generated = []
for point in points:
p_lb = torch.max(point - window, lb)
p_ub = torch.min(point + window, ub)
grid = self.engine.draw(samples_per_point)
grid = p_lb + (p_ub - p_lb) * grid
generated.append(grid)
generated = torch.Tensor(np.vstack(generated))

super().__init__(lb, ub, generated, dim, shuffle, seed)

@classmethod
def get_config_options(cls, config: Config, name: Optional[str] = None) -> Dict:
if name is None:
name = cls.__name__

options = super().get_config_options(config)

window = config.gettensor(name, "window")
samples_per_point = config.getint(name, "samples_per_point")

options.update({"window": window, "samples_per_point": samples_per_point})

return cls(lb=lb, ub=ub, dim=dim, points=points, shuffle=shuffle)
return options
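
The core of the new generator is the per-point loop above: each reference point's box [point - window, point + window] is clamped to the global bounds, and scrambled Sobol draws from the unit cube are rescaled into that box. A standalone sketch of just that step, using placeholder values and plain PyTorch rather than any aepsych class:

import torch
from torch.quasirandom import SobolEngine

# Placeholder bounds, reference point, and window; the commit repeats this for every point.
lb, ub = torch.tensor([0.0, 0.0]), torch.tensor([1.0, 1.0])
point = torch.tensor([0.5, 1.0])
window = torch.tensor([0.1, 0.2])

p_lb = torch.max(point - window, lb)   # clamp the window's lower corner to the global bounds
p_ub = torch.min(point + window, ub)   # clamp the window's upper corner to the global bounds

engine = SobolEngine(dimension=2, scramble=True, seed=123)
grid = engine.draw(4)                  # four quasi-random draws in the unit square
samples = p_lb + (p_ub - p_lb) * grid  # rescale into the clamped window around the point
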
37 changes: 36 additions & 1 deletion tests/generators/test_manual_generator.py
@@ -10,7 +10,7 @@
import numpy as np
import numpy.testing as npt
from aepsych.config import Config
from aepsych.generators import ManualGenerator
from aepsych.generators import ManualGenerator, SampleAroundPointsGenerator


class TestManualGenerator(unittest.TestCase):
@@ -43,6 +43,7 @@ def test_manual_generator(self):
[ManualGenerator]
points = {points}
seed = 123
"""
config = Config()
config.update(config_str=config_str)
@@ -55,8 +56,42 @@ def test_manual_generator(self):
p3 = list(gen.gen()[0])
p4 = list(gen.gen()[0])

self.assertNotEqual([p1, p2, p3, p4], points) # make sure it shuffled
self.assertEqual(sorted([p1, p2, p3, p4]), points)
self.assertEqual(gen.max_asks, len(points))
self.assertEqual(gen.seed, 123)


class TestSampleAroundPointsGenerator(unittest.TestCase):
def test_sample_around_points_generator(self):
points = [[0.5, 0], [0.5, 1]]
window = [0.1, 2]
samples_per_point = 2
config_str = f"""
[common]
lb = [0, 0]
ub = [1, 1]
parnames = [par1, par2]
[SampleAroundPointsGenerator]
points = {points}
window = {window}
samples_per_point = {samples_per_point}
seed = 123
"""
config = Config()
config.update(config_str=config_str)
gen = SampleAroundPointsGenerator.from_config(config)
npt.assert_equal(gen.lb.numpy(), np.array([0, 0]))
npt.assert_equal(gen.ub.numpy(), np.array([1, 1]))
self.assertEqual(gen.max_asks, len(points * samples_per_point))
self.assertEqual(gen.seed, 123)

points = gen.gen(gen.max_asks)
for i in range(len(window)):
npt.assert_array_less(points[:, i], points[:, i] + window[i])
npt.assert_array_less(np.array([0] * len(points)), points[:, i])
npt.assert_array_less(points[:, i], np.array([1] * len(points)))


if __name__ == "__main__":
