Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
standard_error / abs(self.mean) / self.rtol)
def remove_unfinished(self):
    """Drop any pending, not-yet-computed points.

    This learner keeps no interpolated or pending state of its own,
    so there is nothing to discard; the method exists to satisfy the
    BaseLearner interface.
    """
    # Intentionally a no-op.
def plot(self):
    """Return a holoviews Histogram of the finished sample values.

    Values still pending (stored as ``None``) are ignored.  When no
    finished samples exist an empty Histogram is returned.  The number
    of bins grows with the square root of the sample count ``self.n``
    but never drops below 5.
    """
    finished = [value for value in self.data.values() if value is not None]
    if not finished:
        # Nothing computed yet: hand back an empty histogram.
        return hv.Histogram([[], []])
    bins = int(max(5, sqrt(self.n)))
    points = hv.Points(finished)
    return hv.operation.histogram(points, num_bins=bins, dimension=1)
class Learner1D(BaseLearner):
"""Learns and predicts a function 'f:ℝ → ℝ'.
Parameters
----------
function : callable
The function to learn. Must take a single real parameter and
return a real number.
bounds : pair of reals
The bounds of the interval on which to learn 'function'.
"""
def __init__(self, function, bounds):
    """Store the function to learn; see the class docstring.

    Parameters
    ----------
    function : callable
        The function to learn; takes a single real parameter and
        returns a real number.
    bounds : pair of reals
        The interval on which to learn ``function``.
    """
    self.function = function
    # A dict storing the loss function for each interval x_n.
    self.losses = {}
    # NOTE(review): ``bounds`` is not stored in the lines visible here;
    # initialization presumably continues elsewhere — confirm against
    # the full source.
vest = v[:, j, None] + ((p[:, :, :] - p[:, j, None, :]) *
g[:, j, None, :]).sum(axis=-1)
dev += abs(vest - v).max(axis=1)
q = p[:, :-1, :] - p[:, -1, None, :]
areas = abs(q[:, 0, 0] * q[:, 1, 1] - q[:, 0, 1] * q[:, 1, 0])
areas /= special.gamma(n_points_per_triangle)
areas = np.sqrt(areas)
vs_scale = vs[tri.vertices].ptp()
if vs_scale != 0:
dev /= vs_scale
return dev * areas
class Learner2D(BaseLearner):
"""Learns and predicts a function 'f: ℝ^2 → ℝ'.
Parameters
----------
function : callable
The function to learn. Must take a tuple of two real
parameters and return a real number.
bounds : list of 2-tuples
A list ``[(a1, b1), (a2, b2)]`` containing bounds,
one per dimension.
Attributes
----------
points_combined
Sample points so far including the unknown interpolated ones.
values_combined
add_data : bool, default: True
If True, add the chosen points to this
learner's 'data' with 'None' for the 'y'
values. Set this to False if you do not
want to modify the state of the learner.
"""
pass
def __getstate__(self):
    """Support pickling: hand back a shallow copy of the instance state."""
    state = copy(self.__dict__)
    return state
def __setstate__(self, state):
    """Support unpickling: adopt ``state`` as the instance dictionary."""
    self.__dict__ = state
class AverageLearner(BaseLearner):
    """A naive implementation of adaptive computing of averages.

    The learned function must depend on an integer input variable that
    represents the source of randomness.

    Parameters
    ----------
    function : callable
        The function whose average to estimate; takes an integer seed.
    atol : float
        Desired absolute tolerance.
    rtol : float
        Desired relative tolerance.
    """
def __init__(self, function, atol=None, rtol=None):
    """Validate the tolerances; see the class docstring for parameters.

    Raises
    ------
    ValueError
        If neither ``atol`` nor ``rtol`` is given — without at least
        one tolerance the learner has no stopping criterion.
    """
    if atol is None and rtol is None:
        # ValueError is the conventional type for an invalid argument
        # combination (was a bare Exception, which forces callers into
        # overly broad `except` clauses).  ValueError subclasses
        # Exception, so existing handlers still catch it.
        raise ValueError('At least one of `atol` and `rtol` should be set.')
return hv.Scatter(self.data)
else:
return hv.Scatter([])
def remove_unfinished(self):
    """Forget all interpolated (pending) data.

    The "combined" bookkeeping is reset to shallow copies of the
    confirmed losses and neighbors, so that later mutation of the
    combined structures cannot corrupt the confirmed ones.  ``copy``
    (rather than ``dict(...)``) preserves the container type of
    ``losses``/``neighbors``.
    """
    # Rebuild the combined views from confirmed data only.
    self.losses_combined = copy(self.losses)
    self.neighbors_combined = copy(self.neighbors)
    self.data_interp = {}
def dispatch(child_functions, arg):
    """Route a point to the child function it belongs to.

    ``arg`` is an ``(index, x)`` pair: ``x`` is evaluated with
    ``child_functions[index]`` and the result is returned.
    """
    index, point = arg
    selected = child_functions[index]
    return selected(point)
class BalancingLearner(BaseLearner):
"""Choose the optimal points from a set of learners.
Parameters
----------
learners : sequence of BaseLearner
The learners from which to choose. These must all have the same type.
Notes
-----
This learner compares the 'loss' calculated from the "child" learners.
This requires that the 'loss' from different learners *can be meaningfully
compared*. For the moment we enforce this restriction by requiring that
all learners are the same type but (depending on the internals of the
learner) it may be that the loss cannot be compared *even between learners
of the same type*. In this case the BalancingLearner will behave in an
undefined way.