How to use the nevergrad.benchmark.xpbase.registry.register function in nevergrad

To help you get started, we’ve selected a few nevergrad examples based on popular ways the library is used in public projects.

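Before the project excerpts, here is a minimal sketch of the pattern they all share: decorate a generator of Experiment objects with @registry.register and the function becomes available in the experiment registry under its own name. The import paths mirror the module layout visible in the snippets below (nevergrad.benchmark.xpbase and nevergrad.functions) and may differ between nevergrad versions; the experiment name my_sphere_sweep is made up for illustration.

from typing import Iterator, Optional

from nevergrad.functions import ArtificialFunction
from nevergrad.benchmark.xpbase import Experiment, create_seed_generator, registry


@registry.register
def my_sphere_sweep(seed: Optional[int] = None) -> Iterator[Experiment]:
    """Tiny illustrative benchmark: two optimizers on the sphere in dimension 2."""
    seedg = create_seed_generator(seed)
    function = ArtificialFunction(name="sphere", block_dimension=2)
    for budget in [10, 100]:
        for optim in ["OnePlusOne", "CMA"]:
            # duplicate() gives each Experiment its own copy of the test function
            yield Experiment(function.duplicate(), optim, budget=budget, num_workers=1, seed=next(seedg))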

github facebookresearch / nevergrad / nevergrad / benchmark / frozenexperiments.py
@registry.register
def illcond(seed: Optional[int] = None) -> Iterator[Experiment]:
    """All optimizers on ill cond problems
    """
    seedg = create_seed_generator(seed)
    for budget in [500, 1000, 2000, 4000]:
        for optim in ["SQP", "DE", "CMA", "PSO", "RotationInvariantDE", "NelderMead"]:
            for rotation in [True, False]:
                for name in ["ellipsoid", "cigar"]:
                    function = ArtificialFunction(name=name, rotation=rotation, block_dimension=100)
                    yield Experiment(function, optim, budget=budget, seed=next(seedg))
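Once illcond is registered, the registry behaves like a mapping from experiment names to maker functions, so the sweep can be inspected directly from Python; nevergrad's benchmark runner is normally launched with that same registered name (for example python -m nevergrad.benchmark illcond --seed=12 --plot, with flag names that may vary by version). A hedged sketch of the in-Python route, assuming that importing the defining module is what triggers registration:

from nevergrad.benchmark import frozenexperiments  # noqa: F401  (runs the @registry.register decorators)
from nevergrad.benchmark.xpbase import registry

maker = registry["illcond"]       # the decorated generator function above
first_xp = next(maker(seed=12))   # first Experiment of the sweep
print(first_xp)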
github facebookresearch / nevergrad / nevergrad / benchmark / experiments.py
@registry.register
def multimodal(seed: Optional[int] = None) -> Iterator[Experiment]:
    # prepare list of parameters to sweep for independent variables
    seedg = create_seed_generator(seed)
    names = ["hm", "rastrigin", "griewank", "rosenbrock", "ackley", "lunacek", "deceptivemultimodal"]
    # Keep in mind that Rosenbrock is multimodal in high dimension; see http://ieeexplore.ieee.org/document/6792472/.
    optims = ["NaiveTBPSA", "TBPSA",
              "CMA", "PSO", "DE", "MiniDE", "QrDE", "MiniQrDE", "LhsDE", "OnePlusOne", "SQP", "Cobyla", "Powell",
              "TwoPointsDE", "OnePointDE", "AlmostRotationInvariantDE", "RotationInvariantDE",
              "Portfolio", "ASCMADEthird", "ASCMADEQRthird", "ASCMA2PDEthird", "CMandAS2", "CMandAS", "CM",
              "MultiCMA", "TripleCMA", "MultiScaleCMA", "RSQP", "RCobyla", "RPowell", "SQPCMA"] + list(
        sorted(x for x, y in ng.optimizers.registry.items() if "chain" in x or "BO" in x))
    functions = [
        ArtificialFunction(name, block_dimension=bd, useless_variables=bd * uv_factor)
        for name in names
        for bd in [3, 25]
        for uv_factor in [0, 5]
github facebookresearch / nevergrad / nevergrad / benchmark / experiments.py
@registry.register
def multiobjective_example(seed: Optional[int] = None) -> Iterator[Experiment]:
    # prepare list of parameters to sweep for independent variables
    seedg = create_seed_generator(seed)
    optims = ["NaiveTBPSA", "PSO", "DE", "SQP", "LhsDE", "RandomSearch", "NGO", "CMA", "BO", "LBO", "SQP", "RSQP"]
    mofuncs: List[PackedFunctions] = []
    for name1 in ["sphere", "cigar"]:
        for name2 in ["sphere", "cigar", "hm"]:
            mofuncs += [PackedFunctions([ArtificialFunction(name1, block_dimension=7),
                                         ArtificialFunction(name2, block_dimension=7)],
                                        upper_bounds=np.array((50., 50.)))]
        for name3 in ["sphere", "ellipsoid"]:
            mofuncs += [PackedFunctions([ArtificialFunction(name1, block_dimension=6),
                                         ArtificialFunction(name3, block_dimension=6),
                                         ArtificialFunction(name2, block_dimension=6)],
                                        upper_bounds=np.array((100, 100, 1000.)))]
    # functions are not initialized and duplicated at yield time, they will be initialized in the experiment (no need to seed here)
github facebookresearch / nevergrad / nevergrad / benchmark / experiments.py
@registry.register
def yabbob(seed: Optional[int] = None, parallel: bool = False, big: bool = False, noise: bool = False, hd: bool = False) -> Iterator[Experiment]:
    """Yet Another Black-Box Optimization Benchmark.
    """
    seedg = create_seed_generator(seed)
    optims = ["NaiveTBPSA", "TBPSA", "NGO", "CMA", "PSO", "DE", "MiniDE", "QrDE", "MiniQrDE", "LhsDE", "OnePlusOne",
              "TwoPointsDE", "OnePointDE", "AlmostRotationInvariantDE", "RotationInvariantDE"]
    if not parallel:
        optims += ["SQP", "Cobyla", "Powell", "chainCMASQP"]
    #optims += [x for x, y in ng.optimizers.registry.items() if "chain" in x]
    names = ["hm", "rastrigin", "griewank", "rosenbrock", "ackley", "lunacek", "deceptivemultimodal", "bucherastrigin", "multipeak"]
    names += ["sphere", "doublelinearslope", "stepdoublelinearslope"]
    names += ["cigar", "altcigar", "ellipsoid", "altellipsoid", "stepellipsoid", "discus", "bentcigar"]
    names += ["deceptiveillcond", "deceptivemultimodal", "deceptivepath"]
    # Deceptive path is related to the sharp ridge function; there is a long path to the optimum.
    # Deceptive illcond is related to the difference of powers function; the conditioning varies as we get closer to the optimum.
    # Deceptive multimodal is related to the Weierstrass function and to the Schaffers function.
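yabbob is the only excerpt here whose maker takes extra keyword flags (parallel, big, noise, hd). Because @registry.register stores a function under its own name, a natural way to expose such variants is to register thin wrappers that forward the flags. The wrapper below is hypothetical (the name is invented for illustration) and assumes it lives in the same module as yabbob, so registry, Experiment, Iterator and Optional are already imported:

@registry.register
def yabbob_hd_example(seed: Optional[int] = None) -> Iterator[Experiment]:
    # same benchmark, but with the high-dimensional flag switched on
    yield from yabbob(seed, hd=True)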
github facebookresearch / nevergrad / nevergrad / benchmark / cec2019_experiments.py
@registry.register
def oneshotcec(seed: Optional[int] = None) -> Iterator[Experiment]:
    seedg = create_seed_generator(seed)
    names = ["sphere", "rastrigin", "cigar"]
    optims = sorted(x for x, y in optimization.registry.items() if y.one_shot and
                    not any(z in x for z in ["Large", "Small", "Stupid", "Zero"]))
    optims.append("CustomOptimizer")
    functions = [ArtificialFunction(name, block_dimension=bd, useless_variables=bd * uv_factor)
                 for name in names for bd in [3, 25] for uv_factor in [0, 5]]
    for func in functions:
        for optim in optims:
            for budget in [30, 100, 300, 1000, 3000]:
                yield Experiment(func.duplicate(), optim, budget=budget, num_workers=budget, seed=next(seedg))
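A detail worth noting in this CEC 2019 experiment: num_workers is set equal to budget, so the whole evaluation budget is dispatched as one parallel batch, which is exactly the one-shot regime selected by the y.one_shot filter above. Building a single experiment in that regime looks like the hedged sketch below (it only reuses names that appear in these excerpts):

from nevergrad.functions import ArtificialFunction
from nevergrad.benchmark.xpbase import Experiment

func = ArtificialFunction("sphere", block_dimension=3)
# budget == num_workers: all 30 candidates are asked before any loss is observed
xp = Experiment(func.duplicate(), "RandomSearch", budget=30, num_workers=30, seed=42)
# running xp would then evaluate the 30 points as a single parallel batch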
github facebookresearch / nevergrad / nevergrad / benchmark / experiments.py
@registry.register
def realworld(seed: Optional[int] = None) -> Iterator[Experiment]:
    # This experiment contains:
    # - a subset of MLDA (excluding the perceptron): 10 functions, rescaled or not.
    # - ARCoating https://arxiv.org/abs/1904.02907: 1 function.
    # - The 007 game: 1 function, noisy.
    # - PowerSystem: a power system simulation problem.
    # - STSP: a simple TSP problem.
    # MLDA stuff, except the Perceptron.
    funcs: List[Union[InstrumentedFunction, rl.agents.TorchAgentFunction]] = [
        _mlda.Clustering.from_mlda(name, num, rescale) for name, num in [("Ruspini", 5), ("German towns", 10)] for rescale in [True, False]
    ]
    funcs += [
        _mlda.SammonMapping.from_mlda("Virus", rescale=False),
        _mlda.SammonMapping.from_mlda("Virus", rescale=True),
        _mlda.SammonMapping.from_mlda("Employees"),
    ]
github facebookresearch / nevergrad / nevergrad / benchmark / frozenexperiments.py
@registry.register
def repeated_basic(seed: Optional[int] = None) -> Iterator[Experiment]:
    """Test settings
    """
    seedg = create_seed_generator(seed)
    function = ArtificialFunction(name="sphere", block_dimension=2, noise_level=1)
    optims: List[Union[str, optimizerlib.base.OptimizerFamily]] = ["OnePlusOne", optimizerlib.DifferentialEvolution()]
    for _ in range(5):
        for optim in optims:
            yield Experiment(function.duplicate(), optimizer=optim, num_workers=2, budget=4, seed=next(seedg))
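Two things make repeated_basic a convenient template: the optimizer argument accepts either a registered name ("OnePlusOne") or a configured optimizer family instance (optimizerlib.DifferentialEvolution()), and an Experiment can also be executed on its own, outside the benchmark runner. In the versions these snippets come from, run() returns a summary dictionary; treat the exact fields as version dependent. A hedged sketch:

from nevergrad.functions import ArtificialFunction
from nevergrad.benchmark.xpbase import Experiment

xp = Experiment(ArtificialFunction(name="sphere", block_dimension=2, noise_level=1),
                optimizer="OnePlusOne", budget=4, num_workers=2, seed=12)
summary = xp.run()   # summary dict with the recorded loss and settings (field names may vary)
print(summary)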
github facebookresearch / nevergrad / nevergrad / benchmark / frozenexperiments.py
@registry.register
def dim10_smallbudget(seed: Optional[int] = None) -> Iterator[Experiment]:
    # prepare list of parameters to sweep for independent variables
    seedg = create_seed_generator(seed)
    names = ["sphere"]
    optims = sorted(x for x, y in optimization.registry.items() if y.one_shot and "arg" not in x and "mal" not in x)
    functions = [ArtificialFunction(name, block_dimension=bd, num_blocks=n_blocks, useless_variables=bd * uv_factor * n_blocks)
                 for name in names for bd in [10] for uv_factor in [0] for n_blocks in [1]]
    # functions are not initialized and duplicated at yield time, they will be initialized in the experiment (no need to seed here)
    for func in functions:
        for optim in optims:
            for budget in [4, 8, 16, 32]:
                # duplicate -> each Experiment has different randomness
                yield Experiment(func.duplicate(), optim, budget=budget, num_workers=1, seed=next(seedg))
github facebookresearch / nevergrad / nevergrad / benchmark / experiments.py
@registry.register
def realworld_oneshot(seed: Optional[int] = None) -> Iterator[Experiment]:
    # This experiment contains:
    # - a subset of MLDA (excluding the perceptron): 10 functions, rescaled or not.
    # - ARCoating https://arxiv.org/abs/1904.02907: 1 function.
    # - The 007 game: 1 function, noisy.
    # - PowerSystem: a power system simulation problem.
    # - STSP: a simple TSP problem.
    # MLDA stuff, except the Perceptron.
    funcs: List[Union[InstrumentedFunction, rl.agents.TorchAgentFunction]] = [
        _mlda.Clustering.from_mlda(name, num, rescale) for name, num in [("Ruspini", 5), ("German towns", 10)] for rescale in [True, False]
    ]
    funcs += [
        _mlda.SammonMapping.from_mlda("Virus", rescale=False),
        _mlda.SammonMapping.from_mlda("Virus", rescale=True),
        _mlda.SammonMapping.from_mlda("Employees"),
    ]
github facebookresearch / nevergrad / nevergrad / benchmark / frozenexperiments.py
@registry.register
def oneshot3(seed: Optional[int] = None) -> Iterator[Experiment]:
    # General experiment comparing one-shot optimizers, excluding those with "large" or "small"
    # in the name.
    seedg = create_seed_generator(seed)
    names = ["sphere", "altcigar", "cigar", "ellipsoid", "rosenbrock", "rastrigin", "altellipsoid"]
    optims = sorted(x for x, y in optimization.registry.items() if y.one_shot and "arg" not in x and "mal" not in x)
    functions = [ArtificialFunction(name, block_dimension=bd) for name in names for bd in [4, 20]]
    # functions are not initialized and duplicated at yield time, they will be initialized in the experiment
    for func in functions:
        for optim in optims:
            for budget in [30, 60, 100]:
                # duplicate -> each Experiment has different randomness
                yield Experiment(func.duplicate(), optim, budget=budget, num_workers=1, seed=next(seedg))
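All of these makers live in modules that register themselves on import, which is how the benchmark command line (typically python -m nevergrad.benchmark <experiment_name>, with optional flags such as --seed, --repetitions and --plot; exact flags depend on the version) can find them by name. The same registry can be inspected from Python, as in this hedged sketch:

# importing the module runs the @registry.register decorators shown above
from nevergrad.benchmark import frozenexperiments  # noqa: F401
from nevergrad.benchmark.xpbase import registry

print(sorted(registry.keys()))    # expected to include 'illcond', 'oneshot3', 'repeated_basic', ...
assert "oneshot3" in registry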