Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): this chunk is a whitespace-stripped paste of fragments
# from several functions -- no enclosing ``def`` is visible here, and
# names such as ``requirings``, ``required``, ``copied2orig``,
# ``procd_requirings`` and ``lib_path`` come from the missing context.
# Code below is left byte-identical; only comments were added.
# Set requiring lib install names to point to local copy
for requiring, orig_install_name in requirings.items():
set_install_name(requiring,
orig_install_name,
'@loader_path/' + basename(required))
# Make processed version of ``dependings_dict``
# Record the requirement against ``copied2orig``'s mapping of the
# requiring library -- presumably its original (pre-copy) path when it
# was itself copied; TODO confirm against the full source.
mapped_requiring = copied2orig.get(requiring, requiring)
procd_requirings[mapped_requiring] = orig_install_name
if required in copied_libs:
# Have copied this already, add any new requirings
copied_libs[required].update(procd_requirings)
continue
# Haven't seen this one before, add entry to copied_libs
out_path = pjoin(lib_path, basename(required))
if exists(out_path):
# Two different libraries sharing a basename cannot coexist in the
# flat lib directory -- fail loudly rather than overwrite.
raise DelocationError(out_path + ' already exists')
shutil.copy(required, lib_path)
copied2orig[out_path] = required
copied_libs[required] = procd_requirings
# NOTE(review): fragment of a wheel-delocation routine (no enclosing
# ``def`` visible); appears to be the tail of a per-package loop plus
# the function's final return.  Code left byte-identical; comments only.
copied_libs = delocate_path(package_path, lib_path,
lib_filt_func, copy_filt_func)
if copied_libs and lib_path_exists:
# A pre-existing lib dir plus newly copied libraries would mix old
# and new contents -- refuse rather than silently merge.
raise DelocationError(
'{0} already exists in wheel but need to copy '
'{1}'.format(lib_path, '; '.join(copied_libs)))
if len(os.listdir(lib_path)) == 0:
# Nothing was copied; do not ship an empty lib dir in the wheel.
shutil.rmtree(lib_path)
# Check architectures
if require_archs is not None:
# Verbose mode gathers every failure for the report below;
# otherwise stop at the first bad architecture.
stop_fast = not check_verbose
bads = check_archs(copied_libs, require_archs, stop_fast)
if len(bads) != 0:
if check_verbose:
print(bads_report(bads, pjoin(tmpdir, 'wheel')))
raise DelocationError(
"Some missing architectures in wheel")
# Change install ids to be unique within Python space
install_id_root = (DLC_PREFIX +
relpath(package_path, wheel_dir) +
'/')
for lib in copied_libs:
lib_base = basename(lib)
copied_path = pjoin(lib_path, lib_base)
set_install_id(copied_path, install_id_root + lib_base)
validate_signature(copied_path)
_merge_lib_dict(all_copied, copied_libs)
if len(all_copied):
# Wheel contents changed -- presumably refreshes the RECORD file
# hashes; TODO confirm ``rewrite_record`` semantics.
rewrite_record(wheel_dir)
if len(all_copied) or not in_place:
dir2zip(wheel_dir, out_wheel)
return stripped_lib_dict(all_copied, wheel_dir + os.path.sep)
# NOTE(review): this looks like the *prologue* of the same
# wheel-delocation function, pasted after its own tail -- fragment order
# in this chunk is jumbled.  Default the output wheel to an in-place
# rewrite of the input wheel.
if out_wheel is None:
out_wheel = in_wheel
else:
out_wheel = abspath(out_wheel)
# Plain string equality of the two paths decides in-place operation.
in_place = in_wheel == out_wheel
# NOTE(review): second copy of the per-package delocation loop -- this
# fragment duplicates lines above and is truncated mid-expression at
# the ``install_id_root`` assignment.  Code left byte-identical.
with TemporaryDirectory() as tmpdir:
all_copied = {}
# Unpack the wheel into a scratch tree and process each package dir.
wheel_dir = realpath(pjoin(tmpdir, 'wheel'))
zip2dir(in_wheel, wheel_dir)
for package_path in find_package_dirs(wheel_dir):
lib_path = pjoin(package_path, lib_sdir)
lib_path_exists = exists(lib_path)
copied_libs = delocate_path(package_path, lib_path,
lib_filt_func, copy_filt_func)
if copied_libs and lib_path_exists:
raise DelocationError(
'{0} already exists in wheel but need to copy '
'{1}'.format(lib_path, '; '.join(copied_libs)))
if len(os.listdir(lib_path)) == 0:
# Nothing was copied; drop the empty lib dir.
shutil.rmtree(lib_path)
# Check architectures
if require_archs is not None:
stop_fast = not check_verbose
bads = check_archs(copied_libs, require_archs, stop_fast)
if len(bads) != 0:
if check_verbose:
print(bads_report(bads, pjoin(tmpdir, 'wheel')))
raise DelocationError(
"Some missing architectures in wheel")
# Change install ids to be unique within Python space
# NOTE(review): the expression below is cut off mid-parenthesis in
# this chunk -- the trailing ``+`` has no right operand here.
install_id_root = (DLC_PREFIX +
relpath(package_path, wheel_dir) +
# NOTE(review): fragment of a tree-delocation routine (no enclosing
# ``def`` visible); ``lib_dict``, ``root_path``, ``lib_path`` and
# ``copied_libs`` come from the missing context.  Code left
# byte-identical; comments only.
delocated_libs = set()
copied_basenames = set()
rp_root_path = realpath(root_path)
rp_lib_path = realpath(lib_path)
# Test for errors first to avoid getting half-way through changing the tree
for required, requirings in lib_dict.items():
if required.startswith('@'): # assume @rpath etc are correct
# But warn, because likely they are not
warnings.warn('Not processing required path {0} because it '
'begins with @'.format(required))
continue
r_ed_base = basename(required)
if relpath(required, rp_root_path).startswith('..'):
# Not local (path lies outside the root tree), plan to copy
if r_ed_base in copied_basenames:
# The target lib dir is flat, so two libraries with the same
# basename would collide there.
raise DelocationError('Already planning to copy library with '
'same basename as: ' + r_ed_base)
if not exists(required):
raise DelocationError('library "{0}" does not exist'.format(
required))
copied_libs[required] = requirings
copied_basenames.add(r_ed_base)
else: # Is local, plan to set relative loader_path
delocated_libs.add(required)
# Modify in place now that we've checked for errors
for required in copied_libs:
shutil.copy(required, lib_path)
# Set rpath and install names for this copied library
# NOTE(review): the call below is truncated mid-argument in this
# chunk -- the ``format(`` has no arguments or closing parens here.
for requiring, orig_install_name in lib_dict[required].items():
req_rel = relpath(rp_lib_path, dirname(requiring))
set_install_name(requiring, orig_install_name,
'@loader_path/{0}/{1}'.format(
# NOTE(review): duplicate of the fragment above (repeated paste),
# starting mid-function and truncated at the end -- the final loop over
# ``delocated_libs`` has no body in this chunk.  Code left
# byte-identical; comments only.
rp_lib_path = realpath(lib_path)
# Test for errors first to avoid getting half-way through changing the tree
for required, requirings in lib_dict.items():
if required.startswith('@'): # assume @rpath etc are correct
# But warn, because likely they are not
warnings.warn('Not processing required path {0} because it '
'begins with @'.format(required))
continue
r_ed_base = basename(required)
if relpath(required, rp_root_path).startswith('..'):
# Not local, plan to copy
if r_ed_base in copied_basenames:
raise DelocationError('Already planning to copy library with '
'same basename as: ' + r_ed_base)
if not exists(required):
raise DelocationError('library "{0}" does not exist'.format(
required))
copied_libs[required] = requirings
copied_basenames.add(r_ed_base)
else: # Is local, plan to set relative loader_path
delocated_libs.add(required)
# Modify in place now that we've checked for errors
for required in copied_libs:
shutil.copy(required, lib_path)
# Set rpath and install names for this copied library
for requiring, orig_install_name in lib_dict[required].items():
req_rel = relpath(rp_lib_path, dirname(requiring))
set_install_name(requiring, orig_install_name,
'@loader_path/{0}/{1}'.format(
req_rel, basename(required)))
for required in delocated_libs:
# Set relative path for local library