# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_matrix_from_rotation_vector(self):
    """Check the 2D rotation matrix built from a pi/3 rotation vector."""
    angle = gs.pi / 3
    # cos(pi/3) = 1/2 and sin(pi/3) = sqrt(3)/2.
    cos_val = 1. / 2
    sin_val = gs.sqrt(3.) / 2
    expected = gs.array([[cos_val, -sin_val],
                         [sin_val, cos_val]])
    result = self.group.matrix_from_rotation_vector(gs.array([angle]))
    self.assertAllClose(result, expected)
def test_log(self):
"""
The Riemannian exp and log are inverse functions of each other.
This test is the inverse of test_exp's.
"""
n = 3
# NOTE(review): `group` is assigned but unused in the visible lines --
# presumably used in the truncated remainder of this test.
group = self.so[n]
metric = self.metrics[3]['canonical']
theta = gs.pi / 5.
# Base point: rotation of angle theta about the axis (1, 1, 1)/sqrt(3).
rot_vec_base_point = theta / gs.sqrt(3.) * gs.array([1., 1., 1.])
# Note: the rotation vector for the reference point
# needs to be regularized.
# The Logarithm of a point at itself gives 0.
rot_vec_1 = rot_vec_base_point
expected_1 = gs.array([0, 0, 0])
log_1 = metric.log(base_point=rot_vec_base_point,
point=rot_vec_1)
self.assertTrue(gs.allclose(log_1, expected_1))
# General case: this is the inverse test of test 1 for riemannian exp
expected_2 = gs.pi / 4 * gs.array([1, 0, 0])
phi = (gs.pi / 10) / (gs.tan(gs.pi / 10))
skew = gs.array([[0., -1., 1.],
[1., 0., -1.],
[-1., 1., 0.]])
# NOTE(review): the test body appears truncated here -- expected_2, phi
# and skew are never used in the visible lines; the computation of the
# second log and its assertion are missing from this chunk.
# NOTE(review): this fragment starts mid-statement -- the call that
# `order=order)` closes is not visible in this chunk.
order=order)
# Converting to Tait-Bryan angles and back must recover the quaternion.
expected = quaternion
self.assertTrue(gs.allclose(result, expected, atol=1e-5),
' for {} Tait-Bryan angles with order {}\n'
'for point {}:\n'
' result = {};'
' expected = {}.'.format(
extrinsic_or_intrinsic,
order,
angle_type,
result,
expected))
# Round trip: rotation vector -> quaternion -> Tait-Bryan angles ->
# quaternion; the reconstruction must match the starting quaternion.
point = gs.pi / (6. * gs.sqrt(3.)) * gs.array([1., 1., 1.])
quaternion = self.group.quaternion_from_rotation_vector(point)
tait_bryan_angles = self.group.tait_bryan_angles_from_quaternion(
quaternion,
extrinsic_or_intrinsic=extrinsic_or_intrinsic,
order=order)
result = self.group.quaternion_from_tait_bryan_angles(
tait_bryan_angles,
extrinsic_or_intrinsic=extrinsic_or_intrinsic,
order=order)
expected = quaternion
self.assertTrue(gs.allclose(result, expected),
' for {} Tait-Bryan angles with order {}\n'
'for point {}:\n'
# NOTE(review): the assertion message is cut off here -- the rest of
# this statement is not visible in this chunk.
def fibonnaci_points(self, n_points=16000):
    """Spherical Fibonacci point sets yield nearly uniform point
    distributions on the unit sphere."""
    # Vertical step between consecutive samples and golden-angle-like
    # azimuthal increment.
    offset = 2. / n_points
    increment = gs.pi * (3. - gs.sqrt(5.))
    x_vals = []
    y_vals = []
    z_vals = []
    for idx in range(n_points):
        # Height in [-1, 1], sampled at mid-steps.
        y = ((idx * offset) - 1) + (offset / 2)
        # Radius of the horizontal circle at that height.
        r = gs.sqrt(1 - pow(y, 2))
        phi = ((idx + 1) % n_points) * increment
        x_vals.append(gs.cos(phi) * r)
        y_vals.append(y)
        z_vals.append(gs.sin(phi) * r)
    # Scale the unit-sphere samples to the sphere's radius.
    x_vals = [(self.radius * coord) for coord in x_vals]
    y_vals = [(self.radius * coord) for coord in y_vals]
    z_vals = [(self.radius * coord) for coord in z_vals]
    return gs.array([x_vals, y_vals, z_vals])
Parameters
----------
tangent_vec : array-like, shape=[..., n_sampling_points, ambient_dim]
Tangent vector to discrete curve.
base_curve : array-like, shape=[..., n_sampling_points, ambient_dim]
Point representing a discrete curve.
Returns
-------
norm : array-like, shape=[..., n_sampling_points]
Point-wise norms.
"""
# The point-wise norm is the square root of the point-wise inner
# product of the tangent vector with itself.
sq_norm = self.pointwise_inner_product(
tangent_vec_a=tangent_vec, tangent_vec_b=tangent_vec,
base_curve=base_curve)
return gs.sqrt(sq_norm)
def symmetric_matrix_from_vector(self, vec):
"""
Convert a vector into a symmetric matrix.

The vector is expected to carry the mat_dim * (mat_dim + 1) / 2
independent entries of a symmetric matrix.
"""
vec = gs.to_ndarray(vec, to_ndim=2)
_, vec_dim = vec.shape
# Invert vec_dim = mat_dim * (mat_dim + 1) / 2 to recover mat_dim.
mat_dim = int((gs.sqrt(8 * vec_dim + 1) - 1) / 2)
mat = gs.zeros((mat_dim,) * 2)
lower_triangle_indices = gs.tril_indices(mat_dim)
diag_indices = gs.diag_indices(mat_dim)
# Off-diagonal entries are doubled here; presumably make_symmetric
# averages mat with its transpose, halving them back -- confirm.
mat[lower_triangle_indices] = 2 * vec
# NOTE(review): `vec` holds vec_dim values but the diagonal has only
# mat_dim slots, so this assignment looks shape-inconsistent for
# mat_dim > 1 -- verify the intended diagonal values.
mat[diag_indices] = vec
mat = make_symmetric(mat)
return mat
# NOTE(review): this fragment starts mid-call -- `vec)` closes an
# einsum invocation whose opening is not visible in this chunk.
vec)
# Cross product of the third basis vector with each input vector,
# expressed through the Levi-Civita symbol.
cross_prod_3 = gs.einsum(
'nijk,ni,nj->nk',
levi_civita_symbol,
basis_vec_3,
vec)
# Stack the three cross products as rows to form one skew-symmetric
# matrix per input vector.
cross_prod_1 = gs.to_ndarray(cross_prod_1, to_ndim=3, axis=1)
cross_prod_2 = gs.to_ndarray(cross_prod_2, to_ndim=3, axis=1)
cross_prod_3 = gs.to_ndarray(cross_prod_3, to_ndim=3, axis=1)
skew_mat = gs.concatenate(
[cross_prod_1, cross_prod_2, cross_prod_3], axis=1)
else: # SO(n)
# General case: fill the strict upper triangle with the vector
# entries, then antisymmetrize row by row.
mat_dim = gs.cast(
((1. + gs.sqrt(1. + 8. * vec_dim)) / 2.), gs.int32)
# NOTE(review): the matrices are allocated with size self.n while the
# indices use mat_dim -- presumably equal; confirm.
skew_mat = gs.zeros((n_vecs,) + (self.n,) * 2)
upper_triangle_indices = gs.triu_indices(mat_dim, k=1)
for i in range(n_vecs):
skew_mat[i][upper_triangle_indices] = vec[i]
skew_mat[i] = skew_mat[i] - gs.transpose(skew_mat[i])
return skew_mat
# NOTE(review): no `def` line is visible -- this looks like the body of
# an exponential map for the square-root-velocity metric on discrete
# curves; parameter docs belong on the (unseen) signature.
base_point = gs.to_ndarray(base_point, to_ndim=3)
tangent_vec = gs.to_ndarray(tangent_vec, to_ndim=3)
n_sampling_points = base_point.shape[1]
# SRV representation of the base curve.
base_curve_srv = self.square_root_velocity(base_point)
# Finite-difference derivatives, scaled by the number of intervals so
# they approximate derivatives w.r.t. a unit parameterization.
tangent_vec_derivative = (n_sampling_points - 1) * (
tangent_vec[:, 1:, :] - tangent_vec[:, :-1, :])
base_curve_velocity = (n_sampling_points - 1) * (
base_point[:, 1:, :] - base_point[:, :-1, :])
base_curve_velocity_norm = self.pointwise_norm(
base_curve_velocity, base_point[:, :-1, :])
inner_prod = self.pointwise_inner_product(
tangent_vec_derivative, base_curve_velocity, base_point[:, :-1, :])
# Differential of the SRV transform c' / |c'|^(1/2) applied to the
# tangent vector: h' / |c'|^(1/2) - (1/2) <c', h'> c' / |c'|^(5/2).
coef_1 = 1 / gs.sqrt(base_curve_velocity_norm)
coef_2 = -1 / (2 * base_curve_velocity_norm**(5 / 2)) * inner_prod
term_1 = gs.einsum('ij,ijk->ijk', coef_1, tangent_vec_derivative)
term_2 = gs.einsum('ij,ijk->ijk', coef_2, base_curve_velocity)
srv_initial_derivative = term_1 + term_2
# Exponential in SRV (L2) space, plus the ambient exponential for the
# curve's starting point, then map back from SRV space to a curve.
end_curve_srv = self.l2_metric(n_sampling_points - 1).exp(
tangent_vec=srv_initial_derivative, base_point=base_curve_srv)
end_curve_starting_point = self.ambient_metric.exp(
tangent_vec=tangent_vec[:, 0, :], base_point=base_point[:, 0, :])
end_curve = self.square_root_velocity_inverse(
end_curve_srv, end_curve_starting_point)
return end_curve
weighted_pdf : array-like, shape=[n_precision, n_gaussians,]
Probability density function computed for each point of
the mesh data, for each component of the GMM.
"""
# Riemannian distances from every mesh point to every component mean.
distance_to_mean = metric.dist_broadcast(mesh_data, means)
# Broadcast the per-component variances over all mesh points.
variances_units = gs.expand_dims(variances, 0)
variances_units = gs.repeat(
variances_units, distance_to_mean.shape[0], axis=0)
# Unnormalized Gaussian kernel in the Riemannian distance.
distribution_normal = gs.exp(
-(distance_to_mean ** 2) / (2 * variances_units ** 2))
# Normalization constant zeta(sigma) of each component.
zeta_sigma = PI_2_3 * variances
# NOTE(review): gs.erf(...) is multiplied INSIDE the gs.exp argument
# here; the usual normalization factor is exp(sigma**2 / 2) *
# erf(sigma / sqrt(2)) as two separate factors -- confirm this
# parenthesization is intended.
zeta_sigma = zeta_sigma * gs.exp(
(variances ** 2 / 2) * gs.erf(variances / gs.sqrt(2)))
# Numerator: mixture coefficient times kernel value, per mesh point.
result_num = gs.expand_dims(mixture_coefficients, 0)
result_num = gs.repeat(
result_num, len(distribution_normal), axis=0)
result_num = result_num * distribution_normal
# Denominator: each component's normalization constant.
result_denum = gs.expand_dims(zeta_sigma, 0)
result_denum = gs.repeat(
result_denum, len(distribution_normal), axis=0)
weighted_pdf = result_num / result_denum
return weighted_pdf