large_height = (image_height // 2) - (image_height // 4)
large_width = (image_width // 2) - (image_width // 4)
weight[large_height:-large_height, large_width:-large_width] = weight_value
elif region == "circle":  # compare string values, not identity
radius = image_width // 3
cy = image_height // 2  # centre row
cx = image_width // 2   # centre column
y, x = np.ogrid[-radius: radius, -radius: radius]
index = x**2 + y**2 <= radius**2
weight[cy-radius:cy+radius, cx-radius:cx+radius][index] = 1
if n_channels is not None and weight.ndim == 2:
weight = np.concatenate([weight[..., None]]*n_channels, axis=-1)
return T.tensor(weight)
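# Illustrative, self-contained NumPy sketch of the "rectangle" branch above.
# The image size and weight value are assumptions for the example only:
import numpy as np

image_height, image_width, weight_value = 64, 64, 1.0
weight = np.zeros((image_height, image_width))
margin_h = (image_height // 2) - (image_height // 4)  # roughly a quarter of the height
margin_w = (image_width // 2) - (image_width // 4)    # roughly a quarter of the width
weight[margin_h:-margin_h, margin_w:-margin_w] = weight_value
# The central half of each dimension (rows/cols 16..47 for a 64x64 image) is weighted.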
if isinstance(rank, int):
rank = [rank for _ in shape]
if orthogonal:
for i, (s, r) in enumerate(zip(shape, rank)):
if r > s:
warnings.warn('Selected orthogonal=True, but the requested rank is larger than the tensor size for mode {0}: '
              'rank[{0}]={1} > shape[{0}]={2}.'.format(i, r, s))
factors = []
for (s, r) in zip(shape, rank):
if orthogonal:
factor = T.tensor(rns.random_sample((s, s)), **context)
Q, _ = T.qr(factor)
factors.append(T.tensor(Q[:, :r]))
else:
factors.append(T.tensor(rns.random_sample((s, r)), **context))
core = T.tensor(rns.random_sample(rank), **context)
if full:
return tucker_to_tensor((core, factors))
else:
return core, factors
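# Illustrative NumPy sketch of what the Tucker construction above returns when
# full=True: a random core of shape `rank` contracted with one factor per mode,
# i.e. the multi-mode product that tucker_to_tensor computes. The shapes below
# are assumptions for the example only:
import numpy as np

rng = np.random.RandomState(0)
shape, rank = (4, 5, 6), (2, 3, 3)
core = rng.random_sample(rank)
factors = [rng.random_sample((s, r)) for s, r in zip(shape, rank)]
full = np.einsum('abc,ia,jb,kc->ijk', core, *factors)  # mode-wise products
assert full.shape == shape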
for i in range(1, T.ndim(X)): # The first dimension of X is the number of samples
W.append(T.tensor(rng.randn(X.shape[i], self.weight_rank), **T.context(X)))
# Norm of the weight tensor at each iteration
norm_W = []
weights = T.ones(self.weight_rank, **T.context(X))
for iteration in range(self.n_iter_max):
# Optimise each factor of W
for i in range(len(W)):
phi = T.reshape(
T.dot(partial_unfold(X, i, skip_begin=1),
khatri_rao(W, skip_matrix=i)),
(X.shape[0], -1))
inv_term = T.dot(T.transpose(phi), phi) + self.reg_W*T.tensor(np.eye(phi.shape[1]), **T.context(X))
W[i] = T.reshape(T.solve(inv_term, T.dot(T.transpose(phi), y)), (X.shape[i + 1], self.weight_rank))
weight_tensor_ = kruskal_to_tensor((weights, W))
norm_W.append(T.norm(weight_tensor_, 2))
# Convergence check
if iteration > 1:
weight_evolution = abs(norm_W[-1] - norm_W[-2]) / norm_W[-1]
if (weight_evolution <= self.tol):
if self.verbose:
print('\nConverged in {} iterations'.format(iteration))
break
self.weight_tensor_ = weight_tensor_
self.kruskal_weight_ = (weights, W)
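# Illustrative NumPy sketch of the per-factor update in the ALS loop above:
# each W[i] solves a ridge-regularised normal equation
# (phi^T phi + reg_W * I) w = phi^T y, where phi couples the partially unfolded
# data with the Khatri-Rao product of the other factors. The shapes below are
# assumptions for the example only:
import numpy as np

rng = np.random.RandomState(0)
n_samples, mode_size, weight_rank, reg_W = 100, 20, 3, 1e-2
phi = rng.randn(n_samples, mode_size * weight_rank)
y = rng.randn(n_samples, 1)
lhs = phi.T @ phi + reg_W * np.eye(phi.shape[1])
W_i = np.linalg.solve(lhs, phi.T @ y).reshape(mode_size, weight_rank)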
random_state : `np.random.RandomState`
context : dict
context in which to create the tensor
Returns
-------
random_kruskal : ND-array or 2D-array list
ND-array : full tensor if `full` is True
2D-array list : list of factors otherwise
"""
if (rank > min(shape)) and orthogonal:
warnings.warn('Can only construct orthogonal tensors when rank <= min(shape) but got '
'a tensor with min(shape)={} < rank={}'.format(min(shape), rank))
rns = check_random_state(random_state)
factors = [T.tensor(rns.random_sample((s, rank)), **context) for s in shape]
weights = T.ones(rank, **context)
if orthogonal:
factors = [T.qr(factor)[0] for factor in factors]
if full:
return kruskal_to_tensor((weights, factors))
elif normalise_factors:
return kruskal_normalise((weights, factors))
else:
return KruskalTensor((weights, factors))
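# Illustrative NumPy sketch of the CP/Kruskal tensor built above: with unit
# weights, full=True amounts to summing `rank` rank-one terms formed from
# matching factor columns, which is what kruskal_to_tensor computes. Shapes
# below are assumptions for the example only:
import numpy as np

rng = np.random.RandomState(0)
shape, rank = (4, 5, 6), 3
factors = [rng.random_sample((s, rank)) for s in shape]
full = np.einsum('ir,jr,kr->ijk', *factors)  # sum over the shared rank index r
assert full.shape == shape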
len(rank), n_dim + 1)
raise ValueError(message)
# Make sure it's not a tuple but a list
rank = list(rank)
# Initialization
if rank[0] != 1:
message = 'Provided rank[0] == {} but boundary conditions dictate rank[0] == rank[-1] == 1.'.format(rank[0])
raise ValueError(message)
if rank[-1] != 1:
message = 'Provided rank[-1] == {} but boundary conditions dictate rank[0] == rank[-1] == 1.'.format(rank[-1])
raise ValueError(message)
rns = check_random_state(random_state)
factors = [T.tensor(rns.random_sample((rank[i], s, rank[i+1])), **context)
           for i, s in enumerate(shape)]
if full:
return mps_to_tensor(factors)
else:
return factors
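# Illustrative NumPy sketch of the MPS/tensor-train cores generated above:
# core i has shape (rank[i], shape[i], rank[i+1]) with boundary ranks fixed to 1,
# so contracting neighbouring cores (as mps_to_tensor does) recovers a tensor of
# the requested shape. Shapes below are assumptions for the example only:
import numpy as np

rng = np.random.RandomState(0)
shape, rank = (4, 5, 6), [1, 2, 3, 1]
cores = [rng.random_sample((rank[i], s, rank[i + 1])) for i, s in enumerate(shape)]
full = np.einsum('aib,bjc,ckd->ijk', *cores)  # boundary indices a, d have size 1
assert full.shape == shape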
"""
rns = check_random_state(random_state)
if isinstance(rank, int):
rank = [rank for _ in shape]
if orthogonal:
for i, (s, r) in enumerate(zip(shape, rank)):
if r > s:
warnings.warn('Selected orthogonal=True, but the requested rank is larger than the tensor size for mode {0}: '
              'rank[{0}]={1} > shape[{0}]={2}.'.format(i, r, s))
factors = []
for (s, r) in zip(shape, rank):
if orthogonal:
factor = T.tensor(rns.random_sample((s, s)), **context)
Q, _ = T.qr(factor)
factors.append(T.tensor(Q[:, :r]))
else:
factors.append(T.tensor(rns.random_sample((s, r)), **context))
core = T.tensor(rns.random_sample(rank), **context)
if full:
return tucker_to_tensor((core, factors))
else:
return core, factors
register_sparse_backend(backend_name)
return [k for k in dir(_LOADED_BACKENDS[backend_name]) if not k.startswith('_')]
override_module_dispatch(__name__, _get_backend_method, _get_backend_dir)
def dispatch_sparse(func):
@functools.wraps(func, assigned=('__name__', '__qualname__',
'__doc__', '__annotations__'))
def inner(*args, **kwargs):
with sparse_context():
return func(*args, **kwargs)
return inner
tensor = dispatch_sparse(backend.tensor)
is_tensor = dispatch_sparse(backend.is_tensor)
context = dispatch_sparse(backend.context)
shape = dispatch_sparse(backend.shape)
ndim = dispatch_sparse(backend.ndim)
to_numpy = dispatch_sparse(backend.to_numpy)
copy = dispatch_sparse(backend.copy)
concatenate = dispatch_sparse(backend.concatenate)
reshape = dispatch_sparse(backend.reshape)
moveaxis = dispatch_sparse(backend.moveaxis)
transpose = dispatch_sparse(backend.transpose)
arange = dispatch_sparse(backend.arange)
ones = dispatch_sparse(backend.ones)
zeros = dispatch_sparse(backend.zeros)
zeros_like = dispatch_sparse(backend.zeros_like)
eye = dispatch_sparse(backend.eye)
clip = dispatch_sparse(backend.clip)
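# Usage note (illustrative; names taken from this snippet, behaviour assumed):
# each wrapper above runs the corresponding backend function inside
# sparse_context(), while functools.wraps preserves the wrapped function's
# name, docstring and annotations, so the sparse module mirrors the dense API.
# For example:
#   x = tensor(data)          # created through the sparse backend
#   y = reshape(x, (4, -1))   # still dispatched inside sparse_context()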