Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_rebuild_rootfs_simple(self):
    """Rebuilding the rootfs of the hello-world test image yields only /hello."""
    extracted_dir = self.extract_test_tar('rootfs/hello-world.tar')
    first_image = list(image.Image.get_images_from_dir(extracted_dir))[0]
    rebuilt_dir = self.get_temp_dir()
    rebuild_rootfs(first_image, rebuilt_dir)
    # Strip the temp-dir prefix so paths are stable across runs.
    found = [loc.replace(rebuilt_dir, '') for loc in fileutils.resource_iter(rebuilt_dir)]
    found.sort()
    assert ['/hello'] == found
# NOTE(review): fragment — the enclosing test function, its walk loop header
# (something like `for top, dirs, files in os.walk(test_dir)`) and the
# initialization of `result` are not visible in this view; `dirs`, `files`,
# `top`, `test_dir` and `result` come from that missing context.
not_ignored = []
# Record each directory with its ignore status; keep only non-ignored
# directories so the walk does not descend into ignored ones.
for d in dirs:
p = os.path.join(top, d)
ign = ignore.is_ignored(p, ignore.default_ignores, {})
# paths are collected POSIX-style, relative to the test directory
tp = fileutils.as_posixpath(p.replace(test_dir, ''))
result.append((tp, ign,))
if not ign:
not_ignored.append(d)
# skip ignored things
dirs[:] = not_ignored
# Same ignore bookkeeping for the files of the current directory.
for f in files:
p = os.path.join(top, f)
ign = ignore.is_ignored(p, ignore.default_ignores, {})
tp = fileutils.as_posixpath(p.replace(test_dir, ''))
result.append((tp, ign,))
# Expected (path, is_ignored) pairs: VCS metadata directories/files are
# ignored by default. NOTE(review): this literal is truncated in this view.
expected = [
('/vcs', False),
('/vcs/.bzr', True),
('/vcs/.git', True),
('/vcs/.hg', True),
('/vcs/.repo', True),
('/vcs/.svn', True),
('/vcs/CVS', True),
('/vcs/_darcs', True),
('/vcs/_MTN', True),
('/vcs/.bzrignore', True),
('/vcs/.cvsignore', True),
('/vcs/.gitignore', True),
('/vcs/.hgignore', True),
def remove_backslashes_and_dotdots(directory):
    """
    Walk `directory` and rename any file whose name contains backslashes
    (Windows path separators) or ".." sequences, recreating the implied
    sub-directory structure under its current location.
    Return a list of original paths that could not be renamed, if any.
    """
    if on_linux:
        directory = path_to_bytes(directory)
    errors = []
    for top, _, files in os.walk(directory):
        for filename in files:
            if not (WIN_PATH_SEP in filename or DOTDOT in filename):
                continue
            try:
                # Convert backslashes to POSIX separators, neutralize ".."
                # sequences and normalize. The strip/normpath pass is done
                # twice because replacing DOTDOT with a separator can itself
                # reintroduce leading/duplicate separators to clean up.
                new_path = fileutils.as_posixpath(filename)
                new_path = new_path.strip(POSIX_PATH_SEP)
                new_path = posixpath.normpath(new_path)
                new_path = new_path.replace(DOTDOT, POSIX_PATH_SEP)
                new_path = new_path.strip(POSIX_PATH_SEP)
                new_path = posixpath.normpath(new_path)
                segments = new_path.split(POSIX_PATH_SEP)
                # Use a distinct local here: the original code rebound
                # `directory`, shadowing the function argument being walked.
                parent_dir = os.path.join(top, *segments[:-1])
                fileutils.create_dir(parent_dir)
                shutil.move(os.path.join(top, filename), os.path.join(top, *segments))
            except Exception:
                # best-effort: record the failing path and keep going
                errors.append(os.path.join(top, filename))
    return errors
# NOTE(review): fragment — this `else:` belongs to an `if` that is not
# visible in this view; `location` is a parameter of the enclosing
# (not shown) parse function.
else:
file_name = fileutils.file_name(location)
# Map file names and archive extensions to dedicated parser callables.
parsers = {
'setup.py': parse_setup_py,
'metadata.json': parse_metadata,
'PKG-INFO': parse_pkg_info,
'.whl': parse_wheel,
'.egg': parse_egg_binary,
'.tar.gz': parse_source_distribution,
'.zip': parse_source_distribution,
}
# Dispatch on suffix: the first parser whose key the file name ends with
# is tried; if it yields a package, augment it with sibling dependencies
# and return it.
for name, parser in parsers.items():
if file_name.endswith(name):
package = parser(location)
if package:
parent_directory = fileutils.parent_directory(location)
parse_dependencies(parent_directory, package)
return package
"""
Return an string built from a list of `scanned_files` results and
the provided `template` identifier. The template defaults to the standard HTML
template format or can point to the path of a custom template file.
"""
# FIXME: This code is highly coupled with actual scans and may not
# support adding new scans at all
from licensedcode.cache import get_licenses_db
# FIXME: factor out the html vs custom from this function: we should get a template path
if template == 'html':
template = get_template(get_template_dir('html'))
else:
# load a custom template
tpath = fileutils.as_posixpath(os.path.abspath(os.path.expanduser(template)))
assert os.path.isfile(tpath)
tdir = fileutils.parent_directory(tpath)
tfile = fileutils.file_name(tpath)
template = get_template(tdir, tfile)
converted = OrderedDict()
converted_infos = OrderedDict()
converted_packages = OrderedDict()
licenses = {}
LICENSES = 'licenses'
COPYRIGHTS = 'copyrights'
PACKAGES = 'packages'
URLS = 'urls'
EMAILS = 'emails'
def is_node_modules(location):
    """Return True if `location` is a directory named "node_modules" (any case)."""
    if not filetype.is_dir(location):
        return False
    return fileutils.file_name(location).lower() == 'node_modules'
warnings and errors contains warnings and errors if any. The `done` flag
is True.
If `recurse` is True, extract recursively archives nested inside other
archives. If `recurse` is False, then do not extract further an already
extracted archive identified by the corresponding extract suffix location.
Note that while the original file system is walked top-down, breadth-first,
if recurse and a nested archive is found, it is extracted to full depth
first before resuming the file system walk.
"""
# NOTE(review): fragment — the opening of this docstring and the enclosing
# `def` (taking at least `location` and `recurse`) are above this view.
# Pre-bind the ignore check with the default ignores for reuse in the walk.
ignored = partial(ignore.is_ignored, ignores=ignore.default_ignores, unignores={})
if TRACE:
logger.debug('extract:start: %(location)r recurse: %(recurse)r\n' % locals())
abs_location = abspath(expanduser(location))
for top, dirs, files in fileutils.walk(abs_location, ignored):
if TRACE:
# NOTE(review): format bug — 'r(files)r' looks like it should be
# '%(files)r'; as written the files value is never interpolated.
logger.debug('extract:walk: top: %(top)r dirs: %(dirs)r files: r(files)r' % locals())
if not recurse:
if TRACE:
drs = set(dirs)
# Prune already-extracted dirs in place so the walk skips them.
for d in dirs[:]:
if extractcode.is_extraction_path(d):
dirs.remove(d)
if TRACE:
logger.debug('extract:walk: not recurse: removed dirs:' + repr(drs.symmetric_difference(set(dirs))))
for f in files:
loc = join(top, f)
if not recurse and extractcode.is_extraction_path(loc):
if TRACE:
logger.debug('extract:walk not recurse: skipped file: %(loc)r' % locals())
# NOTE(review): fragment — this loop body continues past the end of
# this view.