# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
"""
Simple example using json instead of pickle for enhanced cross-platform compatibility.
To run this example, just run:
> bbopt ./skopt_example.py
"""
# BBopt setup:
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__, protocol="json")
if __name__ == "__main__":
bb.run()
# Set up log uniform and log normal parameters.
x0 = bb.loguniform("x0", 1, 10, guess=5)
x1 = bb.lognormvariate("x1", 0, 1, guess=1)
# Set the goal to be the sum.
y = x0 + x1
bb.minimize(y)
# Finally, we'll print out the value we used for debugging purposes.
if __name__ == "__main__":
# NOTE(review): this is the setup portion of a keras/iris hyperparameter-tuning
# example; the model-building and training code appears to be missing from this
# section of the file — confirm against the original example.
from argparse import ArgumentParser
from pprint import pprint
import numpy as np
from matplotlib import pyplot as plt
from sklearn import datasets
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD
from keras.utils import to_categorical
from keras.regularizers import l1_l2
# BBopt setup:
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__)
# Load data into X and y:
iris = datasets.load_iris()
X = iris.data
# One-hot encode the integer class labels for use as network targets.
y = to_categorical(iris.target)
# Split data into training, validation, and testing:
# 60% train / 20% validation / remainder test, taken in order
# (no shuffling is performed here — presumably the data ordering is
# acceptable for this example; verify before reusing).
train_split = int(.6*len(X))
validate_split = train_split + int(.2*len(X))
X_train, X_validate, X_test = X[:train_split], X[train_split:validate_split], X[validate_split:]
y_train, y_validate, y_test = y[:train_split], y[train_split:validate_split], y[validate_split:]
"""
Example using a mixture distribution over many different possible algorithms.
To run this example, just run:
> bbopt ./mixture_example.py
"""
# BBopt setup:
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__)
if __name__ == "__main__":
bb.run_backend("mixture", [
("random", 1),
("tree_structured_parzen_estimator", 1),
("annealing", 1),
("gaussian_process", 1),
("random_forest", 1),
("extra_trees", 1),
("gradient_boosted_regression_trees", 1),
])
# If we're not serving, store which algorithm the
# mixture backend has selected.
from bbopt.backends.mixture import MixtureBackend
if isinstance(bb.backend, MixtureBackend):
# NOTE(review): orphaned method fragment — the enclosing class (and the
# definitions of TrainData, self.args, self.test, and self.callbacks) are not
# visible in this section of the file; indentation has also been stripped.
# Confirm against the original source before use.
def __init__(self, args):
# Delegate base-class setup first.
super().__init__(args)
# Presumably args.trials_name names the BBopt data file for this run — TODO confirm.
self.bb = BlackBoxOptimizer(file=self.args.trials_name)
# Lazily load the test split only if one wasn't already provided.
if not self.test:
data = TrainData.from_both(self.args.tags_file, self.args.tags_folder, self.args.folder)
_, self.test = data.load(False, True)
from keras.callbacks import ModelCheckpoint
# Strip any ModelCheckpoint callbacks; iterate over a copy since the
# list is mutated while being scanned.
for i in list(self.callbacks):
if isinstance(i, ModelCheckpoint):
self.callbacks.remove(i)
"""
Example of using BBopt with conditional parameters that only appear
during some runs depending on the value(s) of other parameters.
To run this example, just run:
> bbopt ./conditional_hyperopt_example.py
"""
# BBopt setup:
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__)
if __name__ == "__main__":
bb.run(alg="tree_structured_parzen_estimator")
# We set the x parameter conditional on the use_high parameter.
use_high = bb.randbool("use high", guess=False)
assert isinstance(use_high, bool)
if use_high:
x = bb.randrange("x high", 5, 20)
else:
x = bb.randrange("x low", 5)
# We set x as the thing we want to optimize.
bb.maximize(x)
"""
Example of using some of the array-based parameter definition methods.
To run this example, just run:
> bbopt ./skopt_example.py
"""
import numpy as np
# BBopt setup:
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__)
if __name__ == "__main__":
bb.run()
# Generate 1 x 5 and 5 x 1 random vectors.
x0 = bb.rand("x0", 1, 5, guess=np.zeros((1, 5))) # entries uniform in [0, 1)
x1 = bb.randn("x1", 5, 1, guess=np.zeros((5, 1))) # entries standard normal
# Set the loss to be their dot product.
y = float(x0.dot(x1))
bb.minimize(y)
# Finally, we'll print out the value we used for debugging purposes.
if __name__ == "__main__":
"""
Simple example using json instead of pickle for enhanced cross-platform compatibility.
To run this example, just run:
> bbopt ./skopt_example.py
"""
# BBopt setup:
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__, protocol="json")
if __name__ == "__main__":
bb.run()
# Set up log uniform and log normal parameters.
x0 = bb.loguniform("x0", 1, 10, guess=5)
x1 = bb.lognormvariate("x1", 0, 1, guess=1)
# Set the goal to be the sum.
y = x0 + x1
bb.minimize(y)
# Finally, we'll print out the value we used for debugging purposes.
if __name__ == "__main__":
"""
Example of using BBopt with conditional parameters that only appear
during some runs depending on the value(s) of other parameters.
To run this example, just run:
> bbopt ./conditional_hyperopt_example.py
"""
# BBopt setup:
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__)
if __name__ == "__main__":
bb.run(alg="tree_structured_parzen_estimator")
# We set the x parameter conditional on the use_high parameter.
use_high = bb.randbool("use high", guess=False)
assert isinstance(use_high, bool)
if use_high:
x = bb.randrange("x high", 10, 20)
else:
x = bb.randrange("x low", 10)
# We set x as the thing we want to optimize.
bb.maximize(x)
"""
Example of using BBopt with conditional parameters that only appear
during some runs depending on the value(s) of other parameters.
To run this example, just run:
> bbopt ./conditional_hyperopt_example.py
"""
# BBopt setup:
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__)
if __name__ == "__main__":
bb.run(alg="tree_structured_parzen_estimator")
# We set the x parameter conditional on the use_high parameter.
use_high = bb.randbool("use high", guess=False)
assert isinstance(use_high, bool)
if use_high:
x = bb.randrange("x high", 5, 20)
else:
x = bb.randrange("x low", 5)
# We set x as the thing we want to optimize.
bb.maximize(x)
"""
Example of using the hyperopt backend with BBopt.
To run this example, just run:
> bbopt ./hyperopt_example.py
"""
# BBopt setup:
from bbopt import BlackBoxOptimizer
bb = BlackBoxOptimizer(file=__file__)
if __name__ == "__main__":
bb.run(alg="tree_structured_parzen_estimator")
# Let's use some parameters!
x0 = bb.randint("x0", 1, 10, guess=5)
x1 = bb.normalvariate("x1", mu=0, sigma=1)
x2 = bb.choice("x2", [-10, -1, 0, 1, 10])
# And let's set our goal!
y = x0 + x1*x2
bb.minimize(y)
# Finally, we'll print out the value we used for debugging purposes.