# the (c, 3) int set of vertex indices
faces = faces[face_index]
# the (c, 3, 3) float set of points in the triangles
triangles = vertices[faces]
# the 3 midpoints of each triangle edge
# stacked to a (3 * c, 3) float
mid = np.vstack([triangles[:, g, :].mean(axis=1)
                 for g in [[0, 1],
                           [1, 2],
                           [2, 0]]])
# for adjacent faces we are going to be generating
# the same midpoint twice so merge them here
mid_idx = (np.arange(len(face_index) * 3)).reshape((3, -1)).T
unique, inverse = grouping.unique_rows(mid)
mid = mid[unique]
mid_idx = inverse[mid_idx] + len(vertices)
# the new faces with correct winding
f = np.column_stack([faces[:, 0],
                     mid_idx[:, 0],
                     mid_idx[:, 2],
                     mid_idx[:, 0],
                     faces[:, 1],
                     mid_idx[:, 1],
                     mid_idx[:, 2],
                     mid_idx[:, 1],
                     faces[:, 2],
                     mid_idx[:, 0],
                     mid_idx[:, 1],
                     mid_idx[:, 2]]).reshape((-1, 3))
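
# A minimal usage sketch, assuming the snippet above is the core of
# trimesh.remesh.subdivide: unique_rows merges the edge midpoints that
# adjacent faces would otherwise generate twice.
import trimesh

mesh = trimesh.creation.box()
new_vertices, new_faces = trimesh.remesh.subdivide(mesh.vertices,
                                                   mesh.faces)
# every subdivided face is split into four faces
assert len(new_faces) == 4 * len(mesh.faces)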
locations : (h, 3) float
  [optional] Position of intersection in space
"""
(index_tri,
 index_ray,
 locations) = ray_triangle_id(
     triangles=self.mesh.triangles,
     ray_origins=ray_origins,
     ray_directions=ray_directions,
     tree=self.mesh.triangles_tree,
     multiple_hits=multiple_hits,
     triangles_normal=self.mesh.face_normals)
if return_locations:
    if len(index_tri) == 0:
        return index_tri, index_ray, locations
    unique = grouping.unique_rows(
        np.column_stack((locations, index_ray)))[0]
    return index_tri[unique], index_ray[unique], locations[unique]
return index_tri, index_ray
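
# If the snippet above is trimesh's pure-Python
# RayMeshIntersector.intersects_id, the public entry point is
# mesh.ray.intersects_id; a rough usage sketch:
import numpy as np
import trimesh

mesh = trimesh.creation.icosphere()
index_tri, index_ray, locations = mesh.ray.intersects_id(
    ray_origins=np.array([[0.0, 0.0, -2.0]]),
    ray_directions=np.array([[0.0, 0.0, 1.0]]),
    multiple_hits=True,
    return_locations=True)
# a ray through the sphere center should hit twice
print(len(index_tri))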
Returns
------------
color : (4,) uint8
  Most common color
"""
if self.kind is None:
    return DEFAULT_COLOR
elif self.kind == 'face':
    colors = self.face_colors
elif self.kind == 'vertex':
    colors = self.vertex_colors
else:
    raise ValueError('color kind incorrect!')
# find the unique colors
unique, inverse = grouping.unique_rows(colors)
# the most commonly occurring color, or mode
# this will be an index of inverse, not colors
mode_index = np.bincount(inverse).argmax()
color = colors[unique[mode_index]]
return color
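
# The same unique_rows + bincount idiom standalone: find the most
# common row (the mode) of an (n, 4) uint8 color array.
import numpy as np
from trimesh import grouping

colors = np.array([[255, 0, 0, 255],
                   [0, 255, 0, 255],
                   [255, 0, 0, 255]], dtype=np.uint8)
unique, inverse = grouping.unique_rows(colors)
mode_index = np.bincount(inverse).argmax()
print(colors[unique[mode_index]])  # the red row, which occurs twice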
def add_boundary(boundary, start):
    # coords is an (n, 2) ordered list of points on the polygon boundary
    # the first and last points are the same, and there are no
    # guarantees on points not being duplicated (which will
    # later cause meshpy/triangle to fail)
    coords = np.array(boundary.coords)
    # find the indices of the first occurrence of each unique point,
    # sorted to maintain the boundary order
    unique = np.sort(grouping.unique_rows(coords)[0])
    cleaned = coords[unique]
    vertices.append(cleaned)
    facets.append(round_trip(start, len(cleaned)))
    # holes require points inside the region of the hole, which we find
    # by creating a polygon from the cleaned boundary region and then
    # using a representative point; you could take the mean of the
    # points instead, but this is more robust (to things like
    # concavity), if slower
    test = Polygon(cleaned)
    holes.append(np.array(test.representative_point().coords)[0])
    return len(cleaned)
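
# Why representative_point beats the vertex mean: for a concave
# boundary the mean can fall outside (or on the edge of) the polygon.
# A standalone check, assuming shapely is installed:
import numpy as np
from shapely.geometry import Point, Polygon

# an L-shaped (concave) boundary
poly = Polygon([(0, 0), (2, 0), (2, 1), (1, 1), (1, 2), (0, 2)])
mean = np.array(poly.exterior.coords)[:-1].mean(axis=0)
print(poly.contains(Point(*mean)))                 # False: mean is (1, 1)
print(poly.contains(poly.representative_point()))  # always True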
def components_csgraph():
    """
    Find connected components using scipy.sparse.csgraph.
    """
    # label each node
    labels = connected_component_labels(edges,
                                        node_count=node_count)
    # we have to remove results that contain nodes outside
    # of the specified node set and reindex
    contained = np.zeros(node_count, dtype=bool)
    contained[nodes] = True
    index = np.arange(node_count, dtype=np.int64)[contained]
    components = grouping.group(labels[contained], min_len=min_len)
    components = np.array([index[c] for c in components])
    return components
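
# The scipy machinery this wraps, standalone: label connected
# components from an edge list using scipy.sparse.csgraph.
import numpy as np
from scipy.sparse import coo_matrix
from scipy.sparse.csgraph import connected_components

edges = np.array([[0, 1], [1, 2], [3, 4]])
node_count = 5
matrix = coo_matrix((np.ones(len(edges), dtype=bool),
                     (edges[:, 0], edges[:, 1])),
                    shape=(node_count, node_count))
count, labels = connected_components(matrix, directed=False)
print(labels)  # [0 0 0 1 1]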
# check the winding of the closed loop of path vertices
ccw_check = is_ccw(vertices[np.append(vertex_path,
                                      vertex_path[0])])
ccw_direction = (ccw_check * 2) - 1
# make sure vertex path is correct type
vertex_path = np.asanyarray(vertex_path, dtype=np.int64)
# we will be saving entity indexes
entity_path = []
# loop through pairs of vertices
for i in np.arange(len(vertex_path) + 1):
    # get two wrapped vertex positions
    vertex_path_pos = np.mod(np.arange(2) + i, len(vertex_path))
    vertex_index = vertex_path[vertex_path_pos]
    entity_index = graph.get_edge_data(*vertex_index)['entity_index']
    entity_path.append(entity_index)
# remove duplicate entities and order CCW
entity_path = grouping.unique_ordered(entity_path)[::ccw_direction]
# check to make sure there is more than one entity
if len(entity_path) == 1:
    # apply CCW reverse in place if necessary
    if ccw_direction < 0:
        index = entity_path[0]
        entities[index].reverse()
    return entity_path
# traverse the entity path and reverse entities in place to
# align with this path ordering
round_trip = np.append(entity_path, entity_path[0])
round_trip = zip(round_trip[:-1], round_trip[1:])
for ea, eb in round_trip:
    da, db = edge_direction(entities[ea].end_points,
                            entities[eb].end_points)
    if da is not None:
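
# grouping.unique_ordered, used above to deduplicate the entity path,
# keeps the first occurrence of each value and preserves order:
from trimesh import grouping

print(grouping.unique_ordered([3, 1, 3, 2, 1]))  # [3 1 2]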
"""
lines = np.asanyarray(lines, dtype=np.float64)
if util.is_shape(lines, (-1, (2, 3))):
# the case where we have a list of points
# we are going to assume they are connected
result = {'entities': np.array([Line(np.arange(len(lines)))]),
'vertices': lines}
return result
elif util.is_shape(lines, (-1, 2, (2, 3))):
# case where we have line segments in 2D or 3D
dimension = lines.shape[-1]
# convert lines to even number of (n, dimension) points
lines = lines.reshape((-1, dimension))
# merge duplicate vertices
unique, inverse = grouping.unique_rows(lines)
# use scipy edges_to_path to skip creating
# a bajillion individual line entities which
# will be super slow vs. fewer polyline entities
return edges_to_path(edges=inverse.reshape((-1, 2)),
vertices=lines[unique])
else:
raise ValueError('Lines must be (n,(2|3)) or (n,2,(2|3))')
return result
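
# A rough usage sketch: trimesh.load_path routes (n, 2, 2|3) segment
# arrays through this function, merging shared endpoints on the way.
import numpy as np
import trimesh

segments = np.array([[[0, 0], [1, 0]],
                     [[1, 0], [1, 1]]], dtype=np.float64)
path = trimesh.load_path(segments)
# the shared endpoint (1, 0) should be merged into a single vertex
print(len(path.vertices))  # 3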
Parameters
------------
segments : (n, 2, (2|3)) float
  Line segments in space
digits : int
  How many digits to consider when merging vertices

Returns
-----------
unique : (m, 2, (2|3)) float
  Segments with duplicates merged
"""
segments = np.asanyarray(segments, dtype=np.float64)
# reduce segments to unique vertex indexes so we can find duplicates
inverse = grouping.unique_rows(
    segments.reshape((-1, segments.shape[2])),
    digits=digits)[1].reshape((-1, 2))
# sort each row so reversed copies of a segment hash the same
inverse.sort(axis=1)
# boolean mask of segments to keep
mask = np.zeros(len(segments), dtype=bool)
# only include the first occurrence of a segment
mask[grouping.unique_rows(inverse)[0]] = True
# remove segments where both indexes are the same (zero-length)
mask[inverse[:, 0] == inverse[:, 1]] = False
# apply the unique mask
unique = segments[mask]
return unique
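
# A standalone check of the dedup logic above (reversed duplicates and
# zero-length segments are dropped); assumes trimesh for unique_rows.
import numpy as np
from trimesh import grouping

segments = np.array([[[0., 0.], [1., 1.]],
                     [[1., 1.], [0., 0.]],    # duplicate, reversed
                     [[2., 2.], [2., 2.]]])   # zero-length
inverse = grouping.unique_rows(
    segments.reshape((-1, 2)))[1].reshape((-1, 2))
inverse.sort(axis=1)
mask = np.zeros(len(segments), dtype=bool)
mask[grouping.unique_rows(inverse)[0]] = True
mask[inverse[:, 0] == inverse[:, 1]] = False
print(segments[mask])  # only the first segment survives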
# preallocate transforms and geometries
nodes = self.graph.nodes_geometry
transforms = np.zeros((len(nodes), 4, 4))
geometries = [None] * len(nodes)
# collect list of transforms
for i, node in enumerate(nodes):
    transforms[i], geometries[i] = self.graph[node]
# result is a copy
result = self.copy()
# remove all existing transforms
result.graph.clear()
for group in grouping.group(geometries):
    # hashable reference to self.geometry
    geometry = geometries[group[0]]
    # original transform from world to geometry
    original = transforms[group[0]]
    # new transform for the geometry
    new_geom = np.dot(scale_3D, original)
    if result.geometry[geometry].vertices.shape[1] == 2:
        # if our scene is 2D only scale in 2D
        result.geometry[geometry].apply_transform(scale_2D)
    else:
        # otherwise apply the full transform
        result.geometry[geometry].apply_transform(new_geom)
    for node, T in zip(self.graph.nodes_geometry[group],
                       transforms[group]):
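
# A rough usage sketch, assuming this fragment is from Scene.scaled,
# which returns a rescaled copy of the scene:
import trimesh

scene = trimesh.Scene(trimesh.creation.box())
bigger = scene.scaled(2.0)
print(bigger.extents / scene.extents)  # approximately [2. 2. 2.]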
def remove_duplicate_faces(self):
    """
    On the current mesh remove any faces which are duplicates.

    Alters
    ----------
    self.faces : removes duplicates
    """
    unique, inverse = grouping.unique_rows(np.sort(self.faces, axis=1))
    self.update_faces(unique)
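
# The sort + unique_rows idiom standalone: faces containing the same
# vertices in any winding hash identically after a row-wise sort.
import numpy as np
from trimesh import grouping

faces = np.array([[0, 1, 2],
                  [2, 1, 0],   # same face, reversed winding
                  [0, 2, 3]])
unique = grouping.unique_rows(np.sort(faces, axis=1))[0]
print(faces[unique])  # one copy of the duplicated face is kept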