# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
import os
import platform
import shutil
from distutils.spawn import find_executable

from .generic import MeshScript
from ..constants import log
# build the search path for the VHACD binary from the environment,
# adding the common install locations on Windows.
# use .get so a missing PATH doesn't raise KeyError at import time
_search_path = os.environ.get('PATH', '')
if platform.system() == 'Windows':
    # split existing path by delimiter, dropping empty entries
    _search_path = [i for i in _search_path.split(';') if len(i) > 0]
    _search_path.append(r'C:\Program Files')
    _search_path.append(r'C:\Program Files (x86)')
    _search_path = ';'.join(_search_path)
log.debug('searching for vhacd in: %s', _search_path)

# look for the VHACD executable under either of its common names
# NOTE: shutil.which replaces distutils.spawn.find_executable,
# which was deprecated by PEP 632 and removed in Python 3.12
_vhacd_executable = None
for _name in ['vhacd', 'testVHACD']:
    _vhacd_executable = shutil.which(_name, path=_search_path)
    if _vhacd_executable is not None:
        break

# flag consumers can check to see whether VHACD is available
exists = _vhacd_executable is not None
def convex_decomposition(mesh, **kwargs):
"""
Run VHACD to generate an approximate convex decomposition
of a single mesh.
source='world'):
# collect all the transform on the path
transforms = graph.multigraph_collect(G=g,
traversal=path,
attrib='matrix')
# combine them into a single transform
if len(transforms) == 1:
transform = transforms[0]
else:
transform = util.multi_dot(transforms)
# the last element of the path should be the geometry
last = path[-1][0]
# if someone included an undefined component, skip it
if last not in id_name:
log.debug('id {} included but not defined!'.format(last))
continue
# frame names unique
name = id_name[last] + util.unique_id()
# index in meshes
geom = id_name[last]
# collect parents if we want to combine later
if len(path) > 2:
parent = path[-2][0]
parents[parent].add(last)
graph_args.append({'frame_from': 'world',
'frame_to': name,
'matrix': transform,
'geometry': geom})
meshes[mesh_id]['metadata']['units'] = 'inches'
meshes[mesh_id]['metadata']['name'] = path_name[-1]
meshes[mesh_id]['metadata']['paths'] = np.array(path_str)
meshes[mesh_id]['metadata']['quantity'] = len(transforms_all)
meshes[mesh_id]['metadata'][
'transforms'] = np.array(transforms_all)
except:
log.error(
'STEP load processing error, aborting metadata!',
exc_info=True)
return meshes.values()
# register the STEP loaders only when the faceter was located;
# otherwise expose an empty mapping and note why in the debug log
if _STEP_FACETER is not None:
    _step_loaders = dict.fromkeys(('step', 'stp'), load_step)
else:
    log.debug('STEP loading unavailable!')
    _step_loaders = {}
edges = faces_to_edges(pair)
overlap = group_rows(np.sort(edges, axis=1),
require_count=2)
if len(overlap) == 0:
# only happens on non-watertight meshes
continue
edge_pair = edges[overlap[0]]
if edge_pair[0][0] == edge_pair[1][0]:
# if the edges aren't reversed, invert the order of one face
flipped += 1
faces[face_pair[1]] = faces[face_pair[1]][::-1]
if flipped > 0:
mesh.faces = faces
log.debug('flipped %d/%d edges', flipped, len(mesh.faces) * 3)
if util.is_file(file_obj):
file_obj.close()
if not isinstance(results, list):
results = [results]
loaded = []
for result in results:
kwargs.update(result)
loaded.append(load_kwargs(kwargs))
loaded[-1].metadata.update(metadata)
if len(loaded) == 1:
loaded = loaded[0]
# show the repr for loaded
log.debug('loaded {} using {}'.format(
str(loaded),
mesh_loaders[file_type].__name__))
finally:
# if we failed to load close file
if opened:
file_obj.close()
return loaded
"""
Verify that the cached values are still for the same
value of id_function and delete all stored items if
the value of id_function has changed.
"""
# if we are in a lock don't check anything
if self._lock != 0:
return
# check the hash of our data
id_new = self._id_function()
# things changed
if id_new != self.id_current:
if len(self.cache) > 0:
log.debug('%d items cleared from cache: %s',
len(self.cache),
str(list(self.cache.keys())))
# hash changed, so dump the cache
# do it manually rather than calling clear()
# as we are internal logic and can avoid function calls
self.cache = {}
# set the id to the new data hash
self.id_current = id_new
try:
convert_text(dict(chunk_raw))
except BaseException:
log.warning('failed to load text entity!',
exc_info=True)
# if the entity contains all relevant data we can
# cleanly load it from inside a single function
elif entity_type in loaders:
# the chunker converts an (n,2) list into a dict
chunker, loader = loaders[entity_type]
# convert data to dict
entity_data = chunker(chunk)
# append data to the lists we're collecting
loader(entity_data)
else:
log.debug('Entity type %s not supported',
entity_type)
# stack vertices into single array
vertices = util.vstack_empty(vertices).astype(np.float64)
# return result as kwargs for trimesh.path.Path2D constructor
result = {'vertices': vertices,
'entities': np.array(entities),
'metadata': metadata}
return result
def apply_transform(self, matrix):
    """
    Apply a transform to the current primitive by
    setting self.transform

    Parameters
    ------------
    matrix : (4, 4) float
      Homogeneous transformation

    Returns
    ------------
    self
      The primitive, returned to allow call chaining.
    """
    matrix = np.asanyarray(matrix, order='C', dtype=np.float64)
    if matrix.shape != (4, 4):
        raise ValueError('Transformation matrix must be (4,4)!')

    if util.allclose(matrix, np.eye(4), 1e-8):
        # identity transform: nothing to update, but still return
        # self so chained calls behave the same on every path
        # (previously this branch returned None, breaking chaining)
        log.debug('apply_transform received identity matrix')
        return self

    # left-multiply so `matrix` is applied after the existing transform
    new_transform = np.dot(matrix, self.primitive.transform)
    self.primitive.transform = new_transform
    return self
angle = segment / R
if (angle > tol.seg_angle).any():
if verbose:
log.debug('circle fit error: angle %s', str(angle))
return None
if final and (angle > tol.seg_angle_min).sum() < 3:
log.debug('final: angle %s', str(angle))
return None
# check segment length as a fraction of drawing scale
scaled = segment / scale
if (scaled > tol.seg_frac).any():
if verbose:
log.debug('circle fit error: segment %s', str(scaled))
return None
# check to make sure the line segments on the ends are actually
# tangent with the candidate circle fit
mid_pt = points[[0, -2]] + (vectors[[0, -1]] * .5)
radial = util.unitize(mid_pt - C)
ends = util.unitize(vectors[[0, -1]])
tangent = np.abs(np.arccos(util.diagonal_dot(radial, ends)))
tangent = np.abs(tangent - np.pi / 2).max()
if tangent > tol.tangent:
if verbose:
log.debug('circle fit error: tangent %f',
np.degrees(tangent))
return None