# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# get the same mesh sudivided so every edge is shorter
# than a factor of our pitch
v, f = remesh.subdivide_to_size(mesh.vertices,
mesh.faces,
max_edge=max_edge,
max_iter=max_iter)
# convert the vertices to their voxel grid position
hit = v / pitch
# Provided edge_factor > 1 and max_iter is large enough, this is
# sufficient to preserve 6-connectivity at the level of voxels.
hit = np.round(hit).astype(int)
# remove duplicates
unique, inverse = grouping.unique_rows(hit)
# get the voxel centers in model space
occupied_index = hit[unique]
origin_index = occupied_index.min(axis=0)
origin_position = origin_index * pitch
voxels_sparse = (occupied_index - origin_index)
return voxels_sparse, origin_position
def merge_vertices(self):
    """
    Remove duplicated vertices, keeping the first
    occurrence of each unique row (tolerance comes
    from grouping.unique_rows, default: 1e-8).
    """
    # indexes of the first occurrence of each unique vertex row
    mask, inverse = grouping.unique_rows(self.vertices)
    # keep only the unique vertices
    self.vertices = self.vertices[mask]
    # if per-vertex colors are present and sized to the
    # original vertex count, mask them the same way
    colors = self.colors
    if colors is not None and len(colors) == len(inverse):
        self.colors = colors[mask]
def edges_unique(self):
    """
    The unique edges of the mesh.

    Returns
    ----------
    edges_unique : (n, 2) int
      Vertex indices for unique edges
    """
    # collapse the sorted edge list to its unique rows
    index, inverse = grouping.unique_rows(self.edges_sorted)
    result = self.edges_sorted[index]
    # the decorator caches the returned value by itself;
    # any side products have to be stored in the cache by hand
    self._cache['edges_unique_idx'] = index
    self._cache['edges_unique_inverse'] = inverse
    return result
def remove_duplicate_entities(self):
    """
    Remove entities that are duplicated

    Alters
    -------
    self.entities: length same or shorter
    """
    # hash every entity so duplicates collapse to equal values
    hashes = np.array([hash(e) for e in self.entities])
    keep, _ = grouping.unique_rows(hashes)
    # only reassign when at least one duplicate was found
    if len(keep) != len(self.entities):
        self.entities = self.entities[keep]