_search_path = os.environ['PATH']
if platform.system() == 'Windows':
    # split existing path by delimiter
    _search_path = [i for i in _search_path.split(';') if len(i) > 0]
    _search_path.append(r'C:\Program Files\Blender Foundation\Blender')
    _search_path.append(r'C:\Program Files (x86)\Blender Foundation\Blender')
    _search_path = ';'.join(_search_path)
    log.debug('searching for blender in: %s', _search_path)
if platform.system() == 'Darwin':
    # split existing path by delimiter and add the
    # common macOS install locations for Blender
    _search_path = [i for i in _search_path.split(':') if len(i) > 0]
    _search_path.append('/Applications/blender.app/Contents/MacOS')
    _search_path.append('/Applications/Blender.app/Contents/MacOS')
    _search_path.append('/Applications/Blender/blender.app/Contents/MacOS')
    _search_path = ':'.join(_search_path)
    log.debug('searching for blender in: %s', _search_path)

_blender_executable = find_executable('blender', path=_search_path)
exists = _blender_executable is not None
def boolean(meshes, operation='difference', debug=False):
    """
    Run a boolean operation on a sequence of meshes
    using Blender as the backend.
    """
    if not exists:
        raise ValueError('No blender available!')
    operation = str.upper(operation)
    if operation == 'INTERSECTION':
        operation = 'INTERSECT'
    # get the template from our resources folder
    template = resources.get('blender.py.template')
    script = template.replace('$OPERATION', operation)
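
# Usage sketch (an assumption, not part of the original module): calling
# the Blender-backed boolean through trimesh, assuming trimesh is installed
# and a Blender executable is on the search path built above.
import trimesh

a = trimesh.creation.box(extents=(2, 2, 2))
b = trimesh.creation.box(extents=(1, 1, 3))
# subtract b from a; operation accepts 'difference', 'union', 'intersection'
carved = trimesh.interfaces.blender.boolean([a, b], operation='difference')
print(carved.is_watertight)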
def solid_to_mesh(solid, process=True):
    # have openCASCADE tessellate the solid and clean up the result
    occ_mesh = solid.createMesh()
    occ_mesh.optimize()
    # convert the flat triangle and vertex buffers into (n, 3) arrays
    faces = np.array(list(occ_mesh.triangles)).reshape((-1, 3)).astype(int)
    vertices = np.array(list(occ_mesh.vertices)).reshape((-1, 3)).astype(float)
    mesh = Trimesh(vertices=vertices, faces=faces, process=process)
    if process and (not mesh.is_watertight):
        log.warning('Mesh returned from openCASCADE isn\'t watertight!')
    return mesh
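
# Minimal standalone sketch of the same pattern (hypothetical data, no
# openCASCADE involved): build a Trimesh directly from vertex and face arrays.
import numpy as np
from trimesh import Trimesh

vertices = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=float)
faces = np.array([[0, 1, 2], [0, 3, 1], [0, 2, 3], [1, 3, 2]], dtype=int)
tetra = Trimesh(vertices=vertices, faces=faces, process=True)
print(tetra.is_watertight)  # a closed tetrahedron, so True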
tic.append(time.time())
# since we already explored the global space, set the bounds to be
# just around the sample that had the lowest volume
step = 2 * np.pi / sample_count
bounds = [(best[0] - step, best[0] + step),
          (best[1] - step, best[1] + step)]
# run the local optimization
r = optimize.minimize(volume_from_angles,
                      best,
                      tol=angle_tol,
                      method='SLSQP',
                      bounds=bounds)
tic.append(time.time())
log.info('Performed search in %f and minimize in %f', *np.diff(tic))
# actually chunk the information about the cylinder
transform, radius, height = volume_from_angles(r['x'], return_data=True)
result = {'transform': transform,
          'radius': radius,
          'height': height}
return result
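
# Usage sketch (an assumption; the fragment above is from a bounding-cylinder
# search): trimesh exposes the same result dict through
# trimesh.bounds.minimum_cylinder, assuming trimesh and scipy are installed.
import trimesh

mesh = trimesh.creation.icosphere(radius=1.0)
fit = trimesh.bounds.minimum_cylinder(mesh, sample_count=6)
print(fit['radius'], fit['height'], fit['transform'].shape)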
# figure out which triangles are in the cross section,
# and which of the three intersection cases they are in
cases = triangle_cases(signs)
# handlers for each case
handlers = (handle_basic,
            handle_on_vertex,
            handle_on_edge)
# the (m, 2, 3) line segments
lines = np.vstack([h(signs[c],
                     mesh.faces[c],
                     mesh.vertices)
                   for c, h in zip(cases, handlers)])
log.debug('mesh_cross_section found %i intersections',
          len(lines))
if return_faces:
    face_index = np.hstack([np.nonzero(c)[0] for c in cases])
    return lines, face_index
return lines
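
# Usage sketch (an assumption; the logic above is the core of trimesh's
# mesh/plane intersection): slice a box with the XY plane, assuming
# trimesh is installed.
import trimesh

box = trimesh.creation.box(extents=(1, 1, 1))
lines, face_index = trimesh.intersections.mesh_plane(
    box,
    plane_normal=[0, 0, 1],
    plane_origin=[0, 0, 0],
    return_faces=True)
print(lines.shape, face_index.shape)  # (m, 2, 3) segments and their source faces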
file_type : str
  What kind of file type do we have (eg: 'stl')
resolver : trimesh.resolvers.Resolver
  Object to load referenced assets like materials and textures
kwargs : dict
  Passed to geometry __init__

Returns
---------
geometry : Trimesh, Path2D, Path3D, Scene
  Loaded geometry as trimesh classes
"""
# check to see if we're trying to load something
# that is already a native trimesh Geometry subclass
if isinstance(file_obj, Geometry):
    log.info('Load called on %s object, returning input',
             file_obj.__class__.__name__)
    return file_obj

# parse the file arguments into clean loadable form
(file_obj,   # file-like object
 file_type,  # str, what kind of file
 metadata,   # dict, any metadata from file name
 opened,     # bool, did we open the file ourselves
 resolver    # object to load referenced resources
 ) = parse_file_args(file_obj=file_obj,
                     file_type=file_type,
                     resolver=resolver)

try:
    if isinstance(file_obj, dict):
        # if we've been passed a dict treat it as kwargs
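
# Usage sketch (an assumption; the body above is from trimesh.load):
# load a file and see which geometry class came back, assuming trimesh
# is installed and 'model.stl' is a hypothetical local file.
import trimesh

geometry = trimesh.load('model.stl')
print(type(geometry).__name__, geometry.bounds)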
    # one is index of texture coordinate (`vt`)
    # count how many delimiters are in the first face line
    # to see if our second value is texture or normals
    count = sample_line.count('/')
    if count == columns:
        # case where each face line looks like:
        # ' 75//139 76//141 77//141'
        # which is vertex/nothing/normal
        faces_norm = array[:, index + 1]
    elif count == int(columns / 2):
        # case where each face line looks like:
        # '75/139 76/141 77/141'
        # which is vertex/texture
        faces_tex = array[:, index + 1]
    else:
        log.warning('face lines are weird: {}'.format(
            sample_line))
elif columns == 9:
    # if we have three values per vertex
    # second value is always texture
    faces_tex = array[:, index + 1]
    # third value is reference to vertex normal (`vn`)
    faces_norm = array[:, index + 2]
return faces, faces_tex, faces_norm
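
# Standalone sketch of the delimiter-counting idea above (hypothetical
# face lines, not the original parser): the number of '/' characters in a
# face row distinguishes the OBJ face formats.
for line in ('75//139 76//141 77//141',         # vertex//normal
             '75/139 76/141 77/141',            # vertex/texture
             '75/139/25 76/141/26 77/141/26'):  # vertex/texture/normal
    print(line, '->', line.count('/'), 'slashes')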
    # load the bytes into a PIL image
    image = PIL.Image.open(
        util.wrap_as_stream(file_data))
    # create a texture object
    loaded['visual'] = visual.texture.TextureVisuals(
        uv=uv, image=image)
except BaseException:
    log.debug('failed to load texture: {}'.format(usemtl),
              exc_info=True)

# apply the vertex order to the visual object
if 'visual' in loaded:
    try:
        loaded['visual'].update_vertices(vert_order)
    except BaseException:
        log.error('failed to update vertices',
                  exc_info=True)
        loaded.pop('visual')
# this mesh is done so append the loaded mesh kwarg dict
meshes.append(loaded)
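
# Minimal sketch of the texture path above (an assumption, using names from
# trimesh and Pillow): attach an image and per-vertex UV coordinates to a
# mesh via TextureVisuals.
import numpy as np
import PIL.Image
import trimesh

box = trimesh.creation.box()
image = PIL.Image.new('RGB', (4, 4), color=(255, 0, 0))
uv = np.random.random((len(box.vertices), 2))
box.visual = trimesh.visual.texture.TextureVisuals(uv=uv, image=image)
print(type(box.visual).__name__)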
if ext != '':
    raise ValueError('URDF path must be a directory!')

# Create directory if needed
if not os.path.exists(fullpath):
    os.mkdir(fullpath)
elif not os.path.isdir(fullpath):
    raise ValueError('URDF path must be a directory!')

# Perform a convex decomposition
try:
    convex_pieces = convex_decomposition(mesh, **kwargs)
    if not isinstance(convex_pieces, list):
        convex_pieces = [convex_pieces]
except BaseException:
    log.error('problem with convex decomposition, using hull',
              exc_info=True)
    convex_pieces = [mesh.convex_hull]

# Get the effective density of the mesh
effective_density = mesh.volume / sum([
    m.volume for m in convex_pieces])

# open an XML tree
root = et.Element('robot', name='root')

# Loop through all pieces, adding each as a link
prev_link_name = None
for i, piece in enumerate(convex_pieces):
    # Save each nearly convex mesh out to a file
    piece_name = '{}_convex_piece_{}'.format(name, i)
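
# Usage sketch (an assumption; the block above is part of a URDF export
# routine): trimesh wraps this as trimesh.exchange.urdf.export_urdf, which
# needs a V-HACD backend for the convex decomposition; './urdf_out' is a
# hypothetical output directory.
import trimesh

mesh = trimesh.creation.icosphere(radius=0.5)
trimesh.exchange.urdf.export_urdf(mesh, './urdf_out')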
# two perpendicular unit vectors spanning the plane of the arc
V1 = util.unitize(points[0] - center)
V2 = util.unitize(np.cross(-N, V1))
# the angle parameter for each discretized point
t = np.linspace(0, angle, count)
# evaluate the circle parametrically: center + R*cos(t)*V1 + R*sin(t)*V2
discrete = np.tile(center, (count, 1))
discrete += R * np.cos(t).reshape((-1, 1)) * V1
discrete += R * np.sin(t).reshape((-1, 1)) * V2

# do an in-process check to make sure result endpoints
# match the endpoints of the source arc
if not close:
    arc_dist = util.row_norm(points[[0, -1]] - discrete[[0, -1]])
    arc_ok = (arc_dist < tol.merge).all()
    if not arc_ok:
        log.warning(
            'failed to discretize arc (endpoint distance %s)',
            str(arc_dist))
        log.warning('Failed arc points: %s', str(points))
        raise ValueError('Arc endpoints diverging!')

# if the source points were 2D, drop the Z column
discrete = discrete[:, :(3 - is_2D)]
return discrete
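
# Standalone sketch of the same parametrization (hypothetical values):
# discretize a unit-radius quarter arc in the XY plane.
import numpy as np

center = np.array([0.0, 0.0, 0.0])
V1 = np.array([1.0, 0.0, 0.0])   # toward the arc start point
V2 = np.array([0.0, 1.0, 0.0])   # perpendicular, in the plane of the arc
R, angle, count = 1.0, np.pi / 2, 16
t = np.linspace(0, angle, count)
discrete = (center
            + R * np.cos(t).reshape((-1, 1)) * V1
            + R * np.sin(t).reshape((-1, 1)) * V2)
print(discrete[0], discrete[-1])  # arc start ~[1, 0, 0] and end ~[0, 1, 0]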
def _create_mesh(self):
    log.debug('creating mesh for Extrusion primitive')
    # extrude the polygon along Z
    mesh = creation.extrude_polygon(
        polygon=self.primitive.polygon,
        height=self.primitive.height,
        transform=self.primitive.transform,
        triangle_args=self.triangle_args)
    # check volume here in unit tests
    if tol.strict and mesh.volume < 0.0:
        raise ValueError('matrix inverted mesh!')
    # cache mesh geometry in the primitive
    self._cache['vertices'] = mesh.vertices
    self._cache['faces'] = mesh.faces
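
# Usage sketch (an assumption; the primitive above calls the same helper):
# extrude a 2D polygon along +Z into a 3D mesh, assuming trimesh and
# shapely are installed.
import trimesh
from shapely.geometry import Polygon

square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
extruded = trimesh.creation.extrude_polygon(square, height=2.0)
print(extruded.volume)  # close to 2.0 for this unit square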