How to use the nevergrad.instrumentation.var.Array function in nevergrad

To help you get started, we’ve selected a few nevergrad examples based on popular ways nevergrad.instrumentation.var.Array is used in public projects.

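Before the real-world snippets below, here is a minimal, self-contained sketch of the usual pattern: create an inst.var.Array variable (optionally bounded), wrap it in an inst.Instrumentation, hand that to an optimizer, and read the result back from the recommendation. The toy loss function and the "arctan" bounding transform are illustrative assumptions rather than code taken from the projects below.

import numpy as np
import nevergrad.instrumentation as inst
from nevergrad.optimization import optimizerlib

# a 2-element array variable, mapped into [0, 1] (the transform name is an assumption)
array_var = inst.var.Array(2).bounded(0, 1, transform="arctan")
instrumentation = inst.Instrumentation(x=array_var)

def loss(x: np.ndarray) -> float:
    # toy quadratic loss, minimized at x = [0.25, 0.25]
    return float(np.sum((x - 0.25) ** 2))

optimizer = optimizerlib.OnePlusOne(instrumentation, budget=100)
optimizer.instrumentation.random_state.seed(12)  # optional, for reproducibility
recommendation = optimizer.minimize(loss)
print(recommendation.kwargs["x"])  # recommended value for the "x" variable

The same Instrumentation object also exposes dimension, random_state and set_cheap_constraint_checker, which several of the snippets below rely on.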

github facebookresearch/nevergrad: nevergrad/functions/photonics/core.py

    assert not dimension % 4, f"points length should be a multiple of 4, got {dimension}"
    n = dimension // 4
    arrays: List[inst.var.Array] = []
    if name == "bragg":
        # n multiple of 2, from 16 to 80
        # domain (n=60): [2,3]^30 x [0,300]^30
        arrays.extend([inst.var.Array(n).bounded(2, 3, transform=transform) for _ in range(2)])
        arrays.extend([inst.var.Array(n).bounded(0, 300, transform=transform) for _ in range(2)])
    elif name == "chirped":
        # n multiple of 2, from 10 to 80
        # domain (n=60): [0,300]^60
        arrays = [inst.var.Array(n).bounded(0, 300, transform=transform) for _ in range(4)]
    elif name == "morpho":
        # n multiple of 4, from 16 to 60
        # domain (n=60): [0,300]^15 x [0,600]^15 x [30,600]^15 x [0,300]^15
        arrays.extend([inst.var.Array(n).bounded(0, 300, transform=transform),
                       inst.var.Array(n).bounded(0, 600, transform=transform),
                       inst.var.Array(n).bounded(30, 600, transform=transform),
                       inst.var.Array(n).bounded(0, 300, transform=transform)])
    else:
        raise NotImplementedError(f"Transform for {name} is not implemented")
    instrumentation = inst.Instrumentation(*arrays)
    assert instrumentation.dimension == dimension
    return instrumentation

github facebookresearch/nevergrad: nevergrad/optimization/test_optimizerlib.py

def test_constrained_optimization() -> None:
    instrumentation = inst.Instrumentation(x=inst.var.Array(1), y=inst.var.Scalar())
    optimizer = optlib.OnePlusOne(instrumentation, budget=100)
    optimizer.instrumentation.random_state.seed(12)
    optimizer.instrumentation.set_cheap_constraint_checker(lambda x, y: x[0] >= 1)  # type:ignore
    recom = optimizer.minimize(_square)
    np.testing.assert_array_almost_equal([recom.kwargs["x"][0], recom.kwargs["y"]], [1.005573e+00, 3.965783e-04])

github facebookresearch/nevergrad: nevergrad/functions/mlda/problems.py

def __init__(self, points: np.ndarray, num_clusters: int, rescale: bool = True) -> None:
        self.num_clusters = num_clusters
        self._points = np.array(points, copy=True)
        if rescale:
            self._points -= np.mean(self._points, axis=0, keepdims=True)
            self._points /= np.std(self._points, axis=0, keepdims=True)
        super().__init__(self._compute_distance, inst.var.Array(num_clusters, points.shape[1]))
        self._descriptors.update(num_clusters=num_clusters, rescale=rescale)

github facebookresearch/nevergrad: nevergrad/functions/stsp/core.py

def __init__(self, seed: int = 0, the_dimension: int = 500) -> None:
        instrumentation = Instrumentation(inst.var.Array(the_dimension))
        self.x = instrumentation.random_state.normal(size=the_dimension)
        self.y = instrumentation.random_state.normal(size=the_dimension)
        super().__init__(self._simulate_stsp, instrumentation)
        self._descriptors.update(seed=seed)
        self.order = np.arange(0, self.instrumentation.dimension)

github facebookresearch/nevergrad: nevergrad/functions/mlda/problems.py

def __init__(self, proximity_array: np.ndarray) -> None:
        self._proximity = proximity_array
        self._proximity_2 = self._proximity**2
        self._proximity_2[self._proximity_2 == 0] = 1  # avoid ZeroDivision (for diagonal terms, or identical points)
        super().__init__(self._compute_distance, inst.var.Array(self._proximity.shape[0], 2))

github facebookresearch/nevergrad: nevergrad/functions/mlda/problems.py

def __init__(self, x: np.ndarray, y: np.ndarray) -> None:
        assert x.ndim == 1
        assert y.ndim == 1
        self._x = x
        self._y = y
        super().__init__(self._compute_loss, inst.var.Array(10))

github facebookresearch/nevergrad: nevergrad/functions/functionlib.py

        if not isinstance(translation_factor, (float, int)):
            raise TypeError(f"Got non-float value {translation_factor}")
        if name not in corefuncs.registry:
            available = ", ".join(self.list_sorted_function_names())
            raise ValueError(f'Unknown core function "{name}". Available names are:\n-----\n{available}')
        # record necessary info and prepare transforms
        self._dimension = block_dimension * num_blocks + useless_variables
        self._func = corefuncs.registry[name]
        # special case
        info = corefuncs.registry.get_info(self._parameters["name"])
        only_index_transform = info.get("no_transform", False)
        # variable
        self.transform_var = ArtificialVariable(dimension=self._dimension, num_blocks=num_blocks, block_dimension=block_dimension,
                                                translation_factor=translation_factor, rotation=rotation, hashing=hashing,
                                                only_index_transform=only_index_transform)
        super().__init__(self.noisy_function, inst.var.Array(1 if hashing else self._dimension))
        self.instrumentation = self.instrumentation.with_name("")
        self._aggregator = {"max": np.max, "mean": np.mean, "sum": np.sum}[aggregator]
        info = corefuncs.registry.get_info(self._parameters["name"])
        # add descriptors
        self._descriptors.update(**self._parameters, useful_dimensions=block_dimension * num_blocks,
                                 discrete=any(x in name for x in ["onemax", "leadingones", "jump"]))
        # transforms are initialized at runtime to avoid slow init