# Shared imports used by the snippets below (omitted from the original excerpts):
import math
import warnings
from typing import List, Optional

import torch
from torch import Tensor

# `SyntheticTestFunction` (the base class used below) and `settings` are BoTorch
# objects defined elsewhere in the original sources.

# Powell test function fragment: end of __init__ and evaluate_true.
self._optimizers = [tuple(0.0 for _ in range(self.dim))]
super().__init__(noise_std=noise_std, negate=negate)
def evaluate_true(self, X: Tensor) -> Tensor:
result = torch.zeros_like(X[..., 0])
for i in range(self.dim // 4):
i_ = i + 1
part1 = (X[..., 4 * i_ - 4] + 10.0 * X[..., 4 * i_ - 3]) ** 2
part2 = 5.0 * (X[..., 4 * i_ - 2] - X[..., 4 * i_ - 1]) ** 2
part3 = (X[..., 4 * i_ - 3] - 2.0 * X[..., 4 * i_ - 2]) ** 4
part4 = 10.0 * (X[..., 4 * i_ - 4] - X[..., 4 * i_ - 1]) ** 4
result += part1 + part2 + part3 + part4
return result
class Rastrigin(SyntheticTestFunction):
_optimal_value = 0.0
def __init__(
self, dim=2, noise_std: Optional[float] = None, negate: bool = False
) -> None:
self.dim = dim
self._bounds = [(-5.12, 5.12) for _ in range(self.dim)]
self._optimizers = [tuple(0.0 for _ in range(self.dim))]
super().__init__(noise_std=noise_std, negate=negate)
def evaluate_true(self, X: Tensor) -> Tensor:
return 10.0 * self.dim + torch.sum(
X ** 2 - 10.0 * torch.cos(2.0 * math.pi * X), dim=-1
)
# DixonPrice test function fragment: tail of __init__ and evaluate_true.
self._optimizers = [
tuple(
math.pow(2.0, -(1.0 - 2.0 ** (-(i - 1))))
for i in range(1, self.dim + 1)
)
]
super().__init__(noise_std=noise_std, negate=negate)
def evaluate_true(self, X: Tensor) -> Tensor:
d = self.dim
part1 = (X[..., 0] - 1) ** 2
i = X.new(range(2, d + 1))
part2 = torch.sum(i * (2.0 * X[..., 1:] ** 2 - X[..., :-1]) ** 2, dim=-1)
return part1 + part2
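# Worked check of the minimizer formula above (a sketch; helper name is
# illustrative): at x_i = 2^(-(1 - 2^(1 - i))) we have 2 * x_i^2 = x_{i-1},
# so every term of the Dixon-Price sum vanishes and the value is 0.
def _check_dixon_price_minimizer(d: int = 5) -> None:
    x = torch.tensor(
        [math.pow(2.0, -(1.0 - 2.0 ** (-(i - 1)))) for i in range(1, d + 1)]
    )
    i = torch.arange(2, d + 1, dtype=x.dtype)
    val = (x[0] - 1) ** 2 + torch.sum(i * (2.0 * x[1:] ** 2 - x[:-1]) ** 2)
    assert torch.abs(val) < 1e-6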
class EggHolder(SyntheticTestFunction):
r"""Eggholder test function.
Two-dimensional function (usually evaluated on `[-512, 512]^2`):
E(x) = - (x_2 + 47) * sin(R1(x)) - x_1 * sin(R2(x))
where `R1(x) = sqrt(|x_2 + x_1 / 2 + 47|)`, `R2(x) = sqrt(|x_1 - (x_2 + 47)|)`.
"""
dim = 2
_bounds = [(-512.0, 512.0), (-512.0, 512.0)]
_optimal_value = -959.6407
_optimizers = [(512.0, 404.2319)]
_check_grad_at_opt: bool = False
def evaluate_true(self, X: Tensor) -> Tensor:
x1, x2 = X[..., 0], X[..., 1]
part1 = -(x2 + 47.0) * torch.sin(torch.sqrt(torch.abs(x2 + x1 / 2.0 + 47.0)))
part2 = -(x1 * torch.sin(torch.sqrt(torch.abs(x1 - (x2 + 47.0)))))
return part1 + part2
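# Usage sketch: evaluating at the optimizer listed above should reproduce the
# stated optimal value (checked loosely, since the optimizer is rounded).
def _check_eggholder_optimum() -> None:
    f = EggHolder()
    X = torch.tensor([[512.0, 404.2319]], dtype=torch.double)
    expected = torch.tensor([-959.6407], dtype=torch.double)
    assert torch.allclose(f.evaluate_true(X), expected, atol=1e-2)

# The lines below are a fragment of the Hartmann test function (its ALPHA, A,
# and P coefficient buffers are registered in code not shown here).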
@property
def optimizers(self) -> Tensor:
if self.dim == 4:
raise NotImplementedError()
return super().optimizers
def evaluate_true(self, X: Tensor) -> Tensor:
self.to(device=X.device, dtype=X.dtype)
inner_sum = torch.sum(self.A * (X.unsqueeze(1) - 0.0001 * self.P) ** 2, dim=2)
H = -torch.sum(self.ALPHA * torch.exp(-inner_sum), dim=1)
if self.dim == 4:
H = (1.1 + H) / 0.839
return H
class HolderTable(SyntheticTestFunction):
r"""Holder Table synthetic test function.
Two-dimensional function (typically evaluated on `[-10, 10] x [-10, 10]`):
`H(x) = - | sin(x_1) * cos(x_2) * exp(| 1 - ||x|| / pi | ) |`
H has 4 global minima with `H(z_i) = -19.2085` at
z_1 = ( 8.05502, 9.66459)
z_2 = (-8.05502, -9.66459)
z_3 = (-8.05502, 9.66459)
z_4 = ( 8.05502, -9.66459)
"""
dim = 2
_bounds = [(-10.0, 10.0), (-10.0, 10.0)]
class StyblinskiTang(SyntheticTestFunction):
r"""Styblinski-Tang synthtetic test function.
d-dimensional function (usually evaluated on the hypercube `[-5, 5]^d`):
H(x) = 0.5 * sum_{i=1}^d (x_i^4 - 16 * x_i^2 + 5 * x_i)
H has a single global minimum `H(z) = -39.166166 * d` at `z = [-2.903534]^d`
"""
def __init__(
self, dim=2, noise_std: Optional[float] = None, negate: bool = False
) -> None:
self.dim = dim
self._bounds = [(-5.0, 5.0) for _ in range(self.dim)]
self._optimal_value = -39.166166 * self.dim
self._optimizers = [tuple(-2.903534 for _ in range(self.dim))]
super().__init__(noise_std=noise_std, negate=negate)
def evaluate_true(self, X: Tensor) -> Tensor:
return 0.5 * (X ** 4 - 16 * X ** 2 + 5 * X).sum(dim=-1)
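# Usage sketch: per the docstring, the minimum is -39.166166 * d at
# x = [-2.903534]^d; check it for d = 3 (loose tolerance, rounded optimizer).
def _check_styblinski_tang_optimum(d: int = 3) -> None:
    f = StyblinskiTang(dim=d)
    X = torch.full((1, d), -2.903534, dtype=torch.double)
    expected = torch.tensor([-39.166166 * d], dtype=torch.double)
    assert torch.allclose(f.evaluate_true(X), expected, atol=1e-3)

# The lines below are a fragment of the Ackley test function: the tail of its
# __init__ (bounds of +/- 32.768 and the a, b, c constants) and evaluate_true.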
self.dim = dim
self._bounds = [(-32.768, 32.768) for _ in range(self.dim)]
self._optimizers = [tuple(0.0 for _ in range(self.dim))]
super().__init__(noise_std=noise_std, negate=negate)
self.a = 20
self.b = 0.2
self.c = 2 * math.pi
def evaluate_true(self, X: Tensor) -> Tensor:
a, b, c = self.a, self.b, self.c
part1 = -a * torch.exp(-b / math.sqrt(self.dim) * torch.norm(X, dim=-1))
part2 = -torch.exp(torch.mean(torch.cos(c * X), dim=-1))
return part1 + part2 + a + math.e
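# Worked check of the formula above at the origin (a sketch): with x = 0,
# part1 = -a and part2 = -e, so part1 + part2 + a + e = 0, the known Ackley minimum.
def _check_ackley_origin(d: int = 3) -> None:
    a, b, c = 20.0, 0.2, 2 * math.pi
    X = torch.zeros(1, d)
    part1 = -a * torch.exp(-b / math.sqrt(d) * torch.norm(X, dim=-1))
    part2 = -torch.exp(torch.mean(torch.cos(c * X), dim=-1))
    assert torch.allclose(part1 + part2 + a + math.e, torch.zeros(1), atol=1e-6)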
class Beale(SyntheticTestFunction):
dim = 2
_optimal_value = 0.0
_bounds = [(-4.5, 4.5), (-4.5, 4.5)]
_optimizers = [(3.0, 0.5)]
def evaluate_true(self, X: Tensor) -> Tensor:
x1, x2 = X[..., 0], X[..., 1]
part1 = (1.5 - x1 + x1 * x2) ** 2
part2 = (2.25 - x1 + x1 * x2 ** 2) ** 2
part3 = (2.625 - x1 + x1 * x2 ** 3) ** 2
return part1 + part2 + part3
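# Usage sketch: the global minimum of Beale is 0 at (3, 0.5), where all three
# squared terms above vanish exactly.
def _check_beale_optimum() -> None:
    f = Beale()
    X = torch.tensor([[3.0, 0.5]])
    assert torch.allclose(f.evaluate_true(X), torch.zeros(1))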
class Branin(SyntheticTestFunction):
r"""Branin test function."""
# The lines below belong to the Shekel test function: the end of its __init__
# (registering the `C` buffer) and its `evaluate_true`.
],
dtype=torch.float,
)
self.register_buffer("C", C_t.transpose(-1, -2))
def evaluate_true(self, X: Tensor) -> Tensor:
self.to(device=X.device, dtype=X.dtype)
beta = self.beta / 10.0
result = -sum(
1 / (torch.sum((X - self.C[i]) ** 2, dim=-1) + beta[i])
for i in range(self.m)
)
return result
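# Usage sketch (assumes the full Shekel implementation; its __init__, which
# registers the beta and C coefficient buffers, is truncated in this excerpt):
def _check_shekel_optimum() -> None:
    f = Shekel(m=10)
    X = torch.tensor([[4.000747, 3.99951, 4.00075, 3.99951]], dtype=torch.double)
    expected = torch.tensor([-10.5363], dtype=torch.double)
    assert torch.allclose(f.evaluate_true(X), expected, atol=1e-2)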
class SixHumpCamel(SyntheticTestFunction):
dim = 2
_bounds = [(-3.0, 3.0), (-2.0, 2.0)]
_optimal_value = -1.0316
_optimizers = [(0.0898, -0.7126), (-0.0898, 0.7126)]
def evaluate_true(self, X: Tensor) -> Tensor:
x1, x2 = X[..., 0], X[..., 1]
return (
(4 - 2.1 * x1 ** 2 + x1 ** 4 / 3) * x1 ** 2
+ x1 * x2
+ (4 * x2 ** 2 - 4) * x2 ** 2
)
class Rosenbrock(SyntheticTestFunction):
def __init__(
self, dim=2, noise_std: Optional[float] = None, negate: bool = False
) -> None:
self.dim = dim
self._bounds = [(-5.0, 10.0) for _ in range(self.dim)]
self._optimizers = [tuple(1.0 for _ in range(self.dim))]
super().__init__(noise_std=noise_std, negate=negate)
def evaluate_true(self, X: Tensor) -> Tensor:
return torch.sum(
100.0 * (X[..., 1:] - X[..., :-1] ** 2) ** 2 + (X[..., :-1] - 1) ** 2,
dim=-1,
)
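# Usage sketch: the Rosenbrock value at x = (1, ..., 1) is exactly 0, since both
# the (x_{i+1} - x_i^2) and (x_i - 1) residuals vanish there.
def _check_rosenbrock_optimum(d: int = 4) -> None:
    f = Rosenbrock(dim=d)
    X = torch.ones(2, d)
    assert torch.allclose(f.evaluate_true(X), torch.zeros(2))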
class Shekel(SyntheticTestFunction):
r"""Shekel synthtetic test function.
4-dimensional function (usually evaluated on `[0, 10]^4`):
f(x) = -sum_{i=1}^10 (sum_{j=1}^4 (x_j - A_{ji})^2 + C_i)^{-1}
f has one minimizer for its global minimum at `z_1 = (4, 4, 4, 4)` with
`f(z_1) = -10.5363`.
"""
dim = 4
_bounds = [(0.0, 10.0), (0.0, 10.0), (0.0, 10.0), (0.0, 10.0)]
_optimizers = [(4.000747, 3.99951, 4.00075, 3.99951)]
def __init__(
self, m: int = 10, noise_std: Optional[float] = None, negate: bool = False
class Rosenbrock(SyntheticTestFunction):
r"""Rosenbrock synthetic test function.
d-dimensional function (usually evaluated on `[-5, 10]^d`):
f(x) = sum_{i=1}^{d-1} (100 (x_{i+1} - x_i^2)^2 + (x_i - 1)^2)
f has one minimizer for its global minimum at `z_1 = (1, 1, ..., 1)` with
`f(z_1) = 0.0`.
"""
_optimal_value = 0.0
def __init__(
self, dim=2, noise_std: Optional[float] = None, negate: bool = False
) -> None:
self.dim = dim
This
1. sets the default device to be `torch.device("cpu")`
2. ensures that no warnings are suppressed by default.
"""
device = torch.device("cpu")
def setUp(self):
warnings.resetwarnings()
settings.debug._set_state(False)
warnings.simplefilter("always", append=True)
class SyntheticTestFunctionBaseTestCase:
functions: List[SyntheticTestFunction]
def test_forward(self):
for dtype in (torch.float, torch.double):
for batch_shape in (torch.Size(), torch.Size([2])):
for f in self.functions:
f.to(device=self.device, dtype=dtype)
X = torch.rand(*batch_shape, f.dim, device=self.device, dtype=dtype)
X = f.bounds[0, :] + X * (f.bounds[1, :] - f.bounds[0, :])
res = f(X)
f(X, noise=False)
self.assertEqual(res.dtype, dtype)
self.assertEqual(res.device.type, self.device.type)
self.assertEqual(res.shape, batch_shape)
def test_optimal_value(self):
for dtype in (torch.float, torch.double):
# HolderTable class attributes and evaluate_true follow; the rest of
# test_optimal_value is not shown in this snippet.
dim = 2
_bounds = [(-10.0, 10.0), (-10.0, 10.0)]
_optimal_value = -19.2085
_optimizers = [
(8.05502, 9.66459),
(-8.05502, -9.66459),
(-8.05502, 9.66459),
(8.05502, -9.66459),
]
def evaluate_true(self, X: Tensor) -> Tensor:
term = torch.abs(1 - torch.norm(X, dim=-1) / math.pi)
return -torch.abs(torch.sin(X[..., 0]) * torch.cos(X[..., 1]) * torch.exp(term))
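# Worked check of the Holder Table expression above at one of its four listed
# optimizers (a sketch, computed directly from the evaluate_true formula):
def _check_holder_table_optimum() -> None:
    X = torch.tensor([[8.05502, 9.66459]], dtype=torch.double)
    term = torch.abs(1 - torch.norm(X, dim=-1) / math.pi)
    val = -torch.abs(torch.sin(X[..., 0]) * torch.cos(X[..., 1]) * torch.exp(term))
    expected = torch.tensor([-19.2085], dtype=torch.double)
    assert torch.allclose(val, expected, atol=1e-3)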
class Levy(SyntheticTestFunction):
r"""Levy synthetic test function.
d-dimensional function (usually evaluated on `[-10, 10]^d`):
f(x) = sin^2(pi w_1) +
sum_{i=1}^{d-1} (w_i-1)^2 (1 + 10 sin^2(pi w_i + 1)) +
(w_d - 1)^2 (1 + sin^2(2 pi w_d))
where `w_i = 1 + (x_i - 1) / 4` for all `i`.
f has one minimizer for its global minimum at `z_1 = (1, 1, ..., 1)` with
`f(z_1) = 0`.
"""
_optimal_value = 0.0
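# Worked check of the Levy formula in the docstring (a sketch written directly
# from the definition above, since the class body is truncated here): at
# x = (1, ..., 1) every w_i = 1, so all terms vanish and f = 0.
def _check_levy_at_ones(d: int = 6) -> None:
    x = torch.ones(d, dtype=torch.double)
    w = 1.0 + (x - 1.0) / 4.0
    part1 = torch.sin(math.pi * w[0]) ** 2
    part2 = torch.sum(
        (w[:-1] - 1.0) ** 2 * (1.0 + 10.0 * torch.sin(math.pi * w[:-1] + 1.0) ** 2)
    )
    part3 = (w[-1] - 1.0) ** 2 * (1.0 + torch.sin(2.0 * math.pi * w[-1]) ** 2)
    assert torch.abs(part1 + part2 + part3) < 1e-12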