# Assumed imports (ngboost-style module layout; exact paths may differ by version):
import numpy as np
from ngboost.distns import Normal
from ngboost.manifold import manifold
from ngboost.scores import CRPScore, LogScore

# CRPS manifold over Normals at p = [loc, log(scale)]; rvs = observed samples, assumed defined upstream.
metric_fn = lambda p: manifold(CRPScore, Normal)(np.array(p)[:, np.newaxis]).metric()
grad_fn = lambda p: manifold(CRPScore, Normal)(np.array(p)[:, np.newaxis]).d_score(rvs).mean(axis=0)
# Log-score (MLE) manifold over the same family; its metric is the Fisher information.
fisher_fn = lambda p: manifold(LogScore, Normal)(np.array(p)[:, np.newaxis]).metric()
grad_fn = lambda p: manifold(LogScore, Normal)(np.array(p)[:, np.newaxis]).d_score(rvs).mean(axis=0)
nll_fn = lambda p: manifold(LogScore, Normal)(np.array(p)[:, np.newaxis]).score(rvs).mean()
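A hedged usage sketch building on the definitions above: the natural gradient is the ordinary score gradient premultiplied by the inverse metric, and nll_fn gives a scalar objective to monitor. The placeholder rvs, the starting parameters, the step size, and the iteration count are all illustrative, not taken from the source.

rvs = np.random.randn(500)            # placeholder observations; any 1-D sample works
p = np.array([0.0, 0.0])              # start at loc = 0, log(scale) = 0
for _ in range(200):                  # illustrative fixed iteration budget
    # Natural-gradient step: solve the Fisher metric against the mean gradient
    # (squeeze() drops the leading batch axis of size 1).
    p = p - 0.05 * np.linalg.solve(fisher_fn(p).squeeze(), grad_fn(p).squeeze())
print(nll_fn(p))                      # mean NLL of rvs under the fitted Normal

The CRPS pair (metric_fn and the first grad_fn) can be used the same way; note that the second grad_fn definition above shadows the first.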
# Constructor of the boosting estimator. The enclosing class is not shown in the
# snippet; it is assumed here to be ngboost's NGBoost so the method parses standalone.
from ngboost.learners import default_tree_learner
from sklearn.utils import check_random_state

class NGBoost:
    def __init__(self, Dist=Normal, Score=LogScore,
                 Base=default_tree_learner, gradient='natural',
                 n_estimators=500, learning_rate=0.01, minibatch_frac=1.0,
                 verbose=True, verbose_eval=100, tol=1e-4,
                 random_state=None):
        self.Dist = Dist                        # predictive distribution class, e.g. Normal
        self.Score = Score                      # scoring rule to minimize, e.g. LogScore
        self.Base = Base                        # base learner fit at each boosting stage
        self.Manifold = manifold(Score, Dist)   # distribution equipped with the score's metric
        self.gradient = gradient                # 'natural': rescale gradients by the inverse metric
        self.n_estimators = n_estimators        # maximum number of boosting stages
        self.learning_rate = learning_rate
        self.minibatch_frac = minibatch_frac    # fraction of rows sampled per stage
        self.verbose = verbose
        self.verbose_eval = verbose_eval
        self.init_params = None                 # marginal distribution parameters, set in fit()
        self.base_models = []                   # fitted base learners, one per stage
        self.scalings = []                      # line-search scalings, one per stage
        self.tol = tol                          # numerical tolerance used during fitting
        self.random_state = check_random_state(random_state)
        self.best_val_loss_itr = None           # best validation-loss iteration, set in fit()
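A hedged end-to-end sketch for the constructor above, assuming the estimator follows ngboost's scikit-learn-style public API (fit, predict, pred_dist); the toy data are hypothetical.

X = np.random.randn(200, 5)                   # hypothetical features
Y = X[:, 0] + 0.5 * np.random.randn(200)      # hypothetical noisy targets
ngb = NGBoost(Dist=Normal, Score=LogScore, n_estimators=100)
ngb.fit(X, Y)
point_pred = ngb.predict(X)                   # point predictions (the mean for a Normal)
dist_pred = ngb.pred_dist(X)                  # full predictive distribution objects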