            else:
                restrictions.append(restricts.SubSlotDep(subslot))

    tsplit = text.rsplit("/", 1)
    if len(tsplit) == 1:
        ops, text = collect_ops(text)
        if not ops:
            if "*" in text:
                r = convert_glob(text)
                if r is None:
                    restrictions.append(packages.AlwaysTrue)
                else:
                    restrictions.append(packages.PackageRestriction("package", r))
                if len(restrictions) == 1:
                    return restrictions[0]
                return packages.AndRestriction(*restrictions)
        elif text.startswith("*"):
            raise ParseError(
                f"cannot do prefix glob matches with version ops: {orig_text}")
        # ok... fake category. whee.
        try:
            r = list(collect_package_restrictions(
                atom.atom(f"{ops}category/{text}").restrictions,
                attrs=("category",), invert=True))
        except errors.MalformedAtom as e:
            e.atom = orig_text
            raise ParseError(str(e)) from e
        if not restrictions and len(r) == 1:
            return r[0]
        restrictions.extend(r)
        return packages.AndRestriction(*restrictions)
    elif text[0] in atom.valid_ops or '*' not in text:
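
# The snippet above builds pkgcore restriction objects and combines them with
# packages.AndRestriction. A minimal usage sketch (not from the pkgcore
# sources; it assumes the pkgcore.restrictions.packages/values module layout
# implied by the names used above):
from pkgcore.restrictions import packages, values

# restrict the "category" and "package" attributes to exact values
cat_restrict = packages.PackageRestriction(
    "category", values.StrExactMatch("sys-devel"))
name_restrict = packages.PackageRestriction(
    "package", values.StrExactMatch("gcc"))

# AndRestriction matches only packages satisfying every child restriction
combined = packages.AndRestriction(cat_restrict, name_restrict)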
def _internal_format_depends(out, node, func):
    prefix = None
    if isinstance(node, boolean.OrRestriction):
        prefix = '|| ('
        children = node.restrictions
    elif (isinstance(node, boolean.AndRestriction) and not
          isinstance(node, atom.atom)):
        prefix = '('
        children = node.restrictions
    elif isinstance(node, packages.Conditional):
        assert len(node.restriction.vals) == 1
        prefix = '%s%s? (' % (node.restriction.negate and '!' or '',
                              list(node.restriction.vals)[0])
        children = node.payload
    if prefix:
        children = list(children)
        if len(children) == 1:
            out.write(prefix, ' ', autoline=False)
            out.first_prefix.append(' ')
            newline = _internal_format_depends(out, children[0], func)
            out.first_prefix.pop()
            if newline:
                out.write()
            out.write(')', autoline=False)
            return True
        else:
            return i

    def index(self, frame, start=0, stop=None):
        # lazily slice the stack when a sub-range is requested
        # (requires: from itertools import islice)
        i = self
        if start != 0 or stop is not None:
            i = islice(i, start, stop)
        for idx, x in enumerate(i):
            if x == frame:
                return idx + start
        return -1
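
# The start/stop handling in index() relies on itertools.islice: enumerating
# the sliced iterator yields offsets relative to `start`, so adding `start`
# back recovers the absolute position. A standalone sketch of the same
# pattern (plain Python, no pkgcore types; illustrative only):
from itertools import islice

def index_of(seq, target, start=0, stop=None):
    # enumerate the sliced view; idx is relative to `start`
    for idx, x in enumerate(islice(seq, start, stop)):
        if x == target:
            return idx + start
    return -1

assert index_of(['a', 'b', 'c', 'b'], 'b', start=2) == 3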
class merge_plan:

    vdb_restrict = packages.PackageRestriction("repo.livefs", values.EqualityMatch(True))

    def __init__(self, dbs, per_repo_strategy, global_strategy=None,
                 depset_reorder_strategy=None, process_built_depends=False,
                 drop_cycles=False, debug=False, debug_handle=None):
        if debug:
            if debug_handle is None:
                debug_handle = sys.stdout
            self._dprint = partial(dprint, debug_handle)
        else:
            # don't run debug func when debugging is disabled
            self._dprint = lambda *args, **kwargs: None

        if not isinstance(dbs, (util.RepositoryGroup, list, tuple)):
            dbs = [dbs]

        if global_strategy is None:

    virtuals = {}
    update = False
    cache = _read_mtime_cache(pjoin(cache_basedir, 'virtuals.cache'))

    existing = _get_mtimes(repo.location)
    for cat, mtime in existing.items():
        d = cache.pop(cat, None)
        if d is not None and int(d[0]) == int(mtime):
            d = _convert_cached_virtuals(d)
            if d is not None:
                _merge_virtuals(virtuals, d)
                continue
        update = True
        _collect_virtuals(virtuals, repo.itermatch(
            packages.PackageRestriction("category",
                values.StrExactMatch(cat))))

    if update or cache:
        _write_mtime_cache(existing, virtuals,
            pjoin(cache_basedir, 'virtuals.cache'))

    defaults = _collect_default_providers(virtuals)
    # _finalize_virtuals(virtuals)
    return defaults, virtuals
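
# The loop above implements a per-category mtime cache: a cached entry is
# reused only while its stored mtime still matches the on-disk mtime;
# otherwise the category is rescanned and the cache rewritten. A generic,
# self-contained sketch of that invalidation pattern (hypothetical helper
# names, standard library only -- not the pkgcore implementation):
import os

def load_with_mtime_cache(path, cache, rebuild):
    # cache maps path -> (mtime, data); rebuild recomputes data from disk
    mtime = os.stat(path).st_mtime
    entry = cache.get(path)
    if entry is not None and entry[0] == mtime:
        return entry[1]          # cache hit: mtime unchanged
    data = rebuild(path)         # stale or missing: recompute
    cache[path] = (mtime, data)
    return data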
def _add_all_if_needed(namespace, attr):
    val = [packages.AlwaysTrue]
    for query_attr in _query_items:
        if getattr(namespace, f'_{query_attr}', None):
            val = None
            break
    setattr(namespace, attr, val)
    def _cmd_api_regen_cache(self, observer=None, threads=1, **kwargs):
        cache = getattr(self.repo, 'cache', None)
        if not cache and not kwargs.get('force', False):
            return
        sync_rate = getattr(cache, 'sync_rate', None)
        try:
            if sync_rate is not None:
                cache.set_sync_rate(1000000)
            ret = 0

            # Force usage of unfiltered repo to include pkgs with metadata issues.
            # Matches are collapsed directly to a list to avoid threading issues such
            # as EBADF since the repo iterator isn't thread-safe.
            pkgs = list(self.repo.itermatch(packages.AlwaysTrue, pkg_filter=None))

            observer = self._get_observer(observer)
            for pkg, e in regen.regen_repository(
                    self.repo, pkgs, observer=observer, threads=threads, **kwargs):
                observer.error(f'caught exception {e} while processing {pkg.cpvstr}')
                ret = 1

            # report pkgs with bad metadata -- relies on iterating over the
            # unfiltered repo to populate the masked repo
            pkgs = frozenset(pkg.cpvstr for pkg in self.repo)
            for pkg in sorted(self.repo._bad_masked):
                observer.error(f'{pkg.cpvstr}: {pkg.data.msg(verbosity=observer.verbosity)}')
                ret = 1

            # remove old/invalid cache entries
            self._cmd_implementation_clean_cache(pkgs)
        finally:
            # restore the cache's original sync rate
            if sync_rate is not None:
                cache.set_sync_rate(sync_rate)
        return ret
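
# The sync_rate handling above is a save/raise/restore pattern; the same idea
# can be packaged as a context manager so the restore step cannot be skipped.
# A minimal sketch (hypothetical wrapper, not part of pkgcore; it only assumes
# the cache.sync_rate attribute and cache.set_sync_rate() call seen above):
from contextlib import contextmanager

@contextmanager
def temporary_sync_rate(cache, rate):
    # temporarily raise the cache flush threshold, restoring it afterwards
    old = getattr(cache, 'sync_rate', None)
    if old is not None:
        cache.set_sync_rate(rate)
    try:
        yield cache
    finally:
        if old is not None:
            cache.set_sync_rate(old)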
    def get_data(self, repo, options):
        owners = defaultdict(set)
        iterable = repo.itermatch(packages.AlwaysTrue, sorter=sorted)
        items = {}
        for key, subiter in groupby(iterable, attrgetter("key")):
            for pkg in subiter:
                if not options.include_restricted and 'fetch' in pkg.restrict:
                    continue
                if not options.include_nonmirrored and 'mirror' in pkg.restrict:
                    continue
                for fetchable in iflatten_instance(pkg.fetchables, fetch.fetchable):
                    owners[fetchable.filename].add(key)
                    items[fetchable.filename] = fetchable.chksums.get("size", 0)

        data = defaultdict(lambda: 0)
        for filename, keys in owners.items():
            for key in keys:
                data[key] += items[filename]
        unique = sum(items.values())
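
# get_data() charges each distfile's size to every package key that owns it,
# then sums per key; `unique` counts each file once. The same aggregation in
# plain Python with illustrative data (standard library only):
from collections import defaultdict

sizes = {'foo-1.0.tar.gz': 100, 'bar-2.0.tar.gz': 250}
owners = {'foo-1.0.tar.gz': {'dev-util/foo'},
          'bar-2.0.tar.gz': {'dev-util/bar', 'dev-util/foo'}}

per_key = defaultdict(int)
for filename, keys in owners.items():
    for key in keys:
        per_key[key] += sizes[filename]   # shared files counted per owner

unique = sum(sizes.values())              # each file counted exactly once
assert per_key['dev-util/foo'] == 350 and unique == 350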