def LayerSetup(self, bottom):
    # Local Response Normalization layer.
    return _ops.LRN(bottom, **self.arguments)
def LayerSetup(self, bottom):
    # Scaled Exponential Linear Unit (SELU) activation layer.
    return _ops.SElu(bottom, **self.arguments)
def shape(input, name=None, out_type=dtypes.int64):
    # Note: ``out_type`` is accepted for API compatibility but not forwarded to the op.
    return _ops.Shape(input, name=name)
def __call__(self, shape, dtype=None, **kwargs):
    # Fall back to the initializer's default dtype when none is given.
    if dtype is None:
        dtype = self.dtype
    # Dispatch to the normal- or uniform-distribution variance-scaling op.
    if self.distribution == "normal":
        return _ops.GlorotNormal(
            shape=shape,
            scale=self.scale * 2.,
            mode=self.mode,
            dtype=dtype.name,
        )
    else:
        return _ops.GlorotUniform(
            shape=shape,
            scale=self.scale * 3.,
            mode=self.mode,
            dtype=dtype.name,
        )
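
The initializer above routes a variance-scaling setup to either a Glorot-normal or a Glorot-uniform backend op. As a rough illustration only, here is a plain NumPy sketch of the same variance-scaling idea; the fan estimate and the distributions are assumptions made for the sketch, not the framework's actual backend.

import numpy as np

def variance_scaling(shape, scale=1.0, distribution="normal"):
    # Illustrative fan estimate: average of the first two dims (assumption).
    fan = (shape[0] + shape[1]) / 2.0 if len(shape) > 1 else float(shape[0])
    if distribution == "normal":
        # Normal draw with variance scale / fan.
        return np.random.normal(0.0, np.sqrt(scale / fan), size=shape)
    # Uniform draw over [-limit, limit]; limit chosen so the variance is also scale / fan.
    limit = np.sqrt(3.0 * scale / fan)
    return np.random.uniform(-limit, limit, size=shape)

w = variance_scaling((64, 32), scale=2.0, distribution="normal")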
    The ``y`` should be a 1d vector.

    Parameters
    ----------
    y : Tensor
        The input tensor.
    nb_class : int
        The number of classes.

    Returns
    -------
    Tensor
        The one hot matrix.

    """
    flat_y = _ops.Flatten(y, keep_axes=1)
    return _ops.OneHot(flat_y, depth=nb_class)
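
The flatten-then-encode pipeline above can be mirrored in plain NumPy for reference. The snippet does not show the enclosing function, so the one_hot(y, nb_class) wrapper below is an assumed signature taken from the docstring, and _ops.Flatten / _ops.OneHot are only imitated, not called.

import numpy as np

def one_hot(y, nb_class):  # assumed signature, mirroring the docstring above
    # Flatten ``y`` to a 1d vector of class indices.
    flat_y = np.asarray(y).reshape(-1).astype(np.int64)
    # Build the (len(flat_y), nb_class) one hot matrix.
    out = np.zeros((flat_y.size, nb_class), dtype=np.float32)
    out[np.arange(flat_y.size), flat_y] = 1.0
    return out

print(one_hot([2, 0, 1], nb_class=3))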
def ones(shape, dtype=dtypes.float32, name=None):
    # Fill a tensor of the given shape with the constant 1.
    # Note: ``dtype`` is accepted but not forwarded to the op here.
    return ops.Fill(shape, value=1.0, name=name)
def LayerSetup(self, bottom):
    # Element-wise power layer.
    return _ops.Pow(bottom, **self.arguments)
def argmax(input, axis=None, name=None, dimension=None):
    # ``dimension`` is a legacy alias for ``axis``; specifying both is an error.
    if dimension is not None:
        if axis is not None:
            raise ValueError("cannot specify both 'axis' and 'dimension'.")
        axis = dimension
    elif axis is None:
        axis = 0
    return ops.Argmax(input, axis=axis, name=name)
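
The branching above is a common way to keep a legacy keyword working while rejecting ambiguous calls. A standalone sketch of the same aliasing pattern, with NumPy's argmax standing in for ops.Argmax (an assumption for illustration, not the framework's backend):

import numpy as np

def argmax_compat(input, axis=None, name=None, dimension=None):
    # ``name`` is kept only for signature parity in this sketch.
    if dimension is not None:
        if axis is not None:
            raise ValueError("cannot specify both 'axis' and 'dimension'.")
        axis = dimension
    elif axis is None:
        axis = 0
    return np.argmax(np.asarray(input), axis=axis)

print(argmax_compat([[1, 5, 2], [7, 0, 3]], dimension=1))  # -> [1 0], via the legacy keyword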
def LayerSetup(self, bottom):
    # Parametric ReLU: the input plus the layer's learnable slope blob(s).
    inputs = [bottom] + [blob['data'] for blob in self._blobs]
    return _ops.PRelu(inputs, **self.arguments)
def divide(x, y, name=None):
    # Element-wise division; the op takes both operands as a list.
    return _ops.Div([x, y], name=name)