# Thin factory wrappers: each returns an Optimizer configured with the
# corresponding lower-case optimizer name.
def Ftrl(**kwargs):
    return Optimizer('ftrl')
    The initial number of updates.
multi_precision : bool, optional
    Flag to control the internal precision of the optimizer.
    False: results in using the same precision as the weights (default).
    True: makes an internal 32-bit copy of the weights and applies gradients
    in 32-bit precision even if the actual weights used in the model have
    lower precision. Turning this on can improve convergence and accuracy
    when training with float16.
"""
name = name.lower()
if name not in optims:
    err_str = '"%s" is not among the supported optimizers:\n\t' % name
    err_str += '\n\t'.join(sorted(optims))
    raise ValueError(err_str)
optim = Optimizer(name)
return optim
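# The multi_precision flag documented above matches MXNet's optimizer API.
# Minimal sketch, assuming MXNet is the backend for these optimizers
# (mx.optimizer.create and gluon.Trainer are MXNet APIs, not defined in this
# module): train float16 weights while the optimizer keeps a float32 master copy.
import mxnet as mx
from mxnet.gluon import nn

net = nn.Dense(1)
net.initialize()
net.cast('float16')  # store the model weights in half precision
opt = mx.optimizer.create('sgd', learning_rate=0.01, multi_precision=True)
trainer = mx.gluon.Trainer(net.collect_params(), opt)  # updates use the 32-bit copy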
def FTML(**kwargs):
    return Optimizer('ftml')
def _get_search_space_strs(self):
    """Return the optimizers in the search space as a list of name strings."""
    optim_strs = []
    for optim in self.optim_list:
        if isinstance(optim, Optimizer):
            optim_strs.append(optim.name)
        elif isinstance(optim, str):
            optim_strs.append(optim)
        else:
            raise NotImplementedError
    return optim_strs
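# Hypothetical illustration of the normalization above: optim_list may mix
# Optimizer objects and plain strings, and both collapse to name strings
# (assumes Optimizer stores the constructor argument as its .name attribute).
optim_list = [Optimizer('adam'), 'sgd']
optim_strs = [o.name if isinstance(o, Optimizer) else o for o in optim_list]
print(optim_strs)  # expected: ['adam', 'sgd']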
def Adam(**kwargs):
    return Optimizer('adam')

def DCASGD(**kwargs):
    return Optimizer('dcasgd')

def LBSGD(**kwargs):
    return Optimizer('lbsgd')

def Signum(**kwargs):
    return Optimizer('signum')

def Nadam(**kwargs):
    return Optimizer('nadam')
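# Usage sketch for the factory wrappers above, assuming they are used to declare
# a set of candidate optimizers (e.g. an optim_list later normalized by
# _get_search_space_strs); the mixed list of objects and strings is illustrative.
candidate_optims = [Adam(), Nadam(), 'sgd']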