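# Assumed imports for the pkgcore excerpts below.  The module paths follow
# pkgcore's layout and the :obj:`...` references in the docstrings; treat them
# as assumptions rather than part of the original snippets.
import os
from functools import partial
from itertools import chain

from pkgcore.ebuild.atom import atom
from pkgcore.repository import misc
from pkgcore.resolver import plan
from pkgcore.restrictions import packages, values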
    :return: :obj:`pkgcore.resolver.plan.merge_plan` instance
    """
    if nodeps:
        # ignore dependency metadata everywhere: neither the vdb nor the
        # source repos contribute deps to the resolution
        vdbs = list(map(misc.nodeps_repo, vdbs))
        dbs = list(map(misc.nodeps_repo, dbs))
    elif not verify_vdb:
        # don't re-verify what is already installed: strip deps from the vdb
        # only, leaving the source repos fully resolved
        vdbs = list(map(misc.nodeps_repo, vdbs))
        dbs = list(dbs)

    if force_replace:
        resolver_cls = generate_replace_resolver_kls(resolver_cls)
    return resolver_cls(vdbs + dbs, plan.pkg_sort_highest,
                        plan.merge_plan.prefer_reuse_strategy, **kwds)
# Matches packages that are not from the livefs (installed) repo, or virtual
# packages with no real package backing them.
_vdb_restrict = packages.OrRestriction(
    packages.PackageRestriction("repo.livefs", values.EqualityMatch(False)),
    packages.AndRestriction(
        packages.PackageRestriction(
            "category", values.StrExactMatch("virtual")),
        packages.PackageRestriction(
            "package_is_real", values.EqualityMatch(False)),
    ),
)
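# Hedged sketch (not in the original source): pkgcore restrictions can be
# evaluated against any object exposing the referenced attributes, so a
# SimpleNamespace stand-in is enough to see _vdb_restrict fire.  The fake
# package below is purely illustrative.
def _demo_vdb_restrict():
    from types import SimpleNamespace
    fake_pkg = SimpleNamespace(
        repo=SimpleNamespace(livefs=False),   # not from the installed-packages repo
        category="dev-lang",
        package_is_real=True,
    )
    return _vdb_restrict.match(fake_pkg)      # True: the first OR branch matches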
class empty_tree_merge_plan(plan.merge_plan):

    _vdb_restriction = _vdb_restrict

    def __init__(self, dbs, *args, **kwds):
        """
def masked(self):
    """Base package mask restriction."""
    return packages.OrRestriction(*self.pkg_masks)
def parse_description(value):
    """Value is used as a regexp matching description or longdescription."""
    matcher = values.StrRegex(value, case_sensitive=False)
    return packages.OrRestriction(*list(
        packages.PackageRestriction(attr, matcher)
        for attr in ('description', 'longdescription')))
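# Hedged usage sketch (not in the original source): feeding the restriction
# built by parse_description() to a configured pkgcore domain.  The
# load_config()/get_default("domain") lookup and the all_source_repos
# attribute are assumptions about the caller's environment.
def _demo_description_search(pattern="window manager"):
    from pkgcore.config import load_config
    domain = load_config().get_default("domain")
    restrict = parse_description(pattern)
    # the repository group's match() returns the matching package instances
    return sorted(pkg.cpvstr for pkg in domain.all_source_repos.match(restrict))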
if not repo.operations.supports("digests"):
    digest.error("no repository support for digests")

# filesystem paths become repo path restrictions; anything else is parsed as a
# package atom/match
for target in targets:
    if os.path.exists(target):
        try:
            restrictions.append(repo.path_restrict(target))
        except ValueError as e:
            digest.error(e)
    else:
        try:
            restrictions.append(parse_match(target))
        except ValueError:
            digest.error(f"invalid atom: {target!r}")

restriction = packages.OrRestriction(*restrictions)
namespace.restriction = restriction
namespace.repo = repo
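# Hedged sketch (not in the original source): the same OrRestriction-over-targets
# idea without the argparse plumbing.  parse_match() is assumed to come from
# pkgcore.util.parserestrict, as in pkgcore's own scripts.
def _demo_target_restriction(targets=(">=dev-lang/python-3", "dev-vcs/git")):
    from pkgcore.util.parserestrict import parse_match
    return packages.OrRestriction(*(parse_match(t) for t in targets))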
def pkg_upgrade(_value, namespace):
    pkgs = []
    for pkg in namespace.domain.all_installed_repos:
        # highest-sorting match available for this package's slot
        matches = sorted(namespace.domain.all_source_repos.match(pkg.slotted_atom))
        if matches and matches[-1] != pkg:
            # the best match is not what's installed, so an upgrade is available
            pkgs.append(matches[-1].versioned_atom)
    return packages.OrRestriction(*pkgs)
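# Hedged sketch (not in the original source): pkg_upgrade() is written as an
# argparse-style callback (value, namespace) and only reads namespace.domain,
# so a bare Namespace wrapped around an existing domain object is enough to
# drive it outside that machinery.
def _demo_upgrade_restriction(domain):
    import argparse
    ns = argparse.Namespace(domain=domain)
    return pkg_upgrade(None, ns)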
def generate_filter(masks, unmasks, *extra):
    # unmasking is only considered when masks are given; with nothing masked
    # there is nothing for an unmask to override
    masking = make_mask_filter(masks, negate=True)
    unmasking = make_mask_filter(unmasks, negate=False)
    r = ()
    if masking:
        if unmasking:
            r = (packages.OrRestriction(masking, unmasking, disable_inst_caching=True),)
        else:
            r = (masking,)
    return packages.AndRestriction(disable_inst_caching=True, finalize=True, *(r + extra))
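# Hedged sketch (not in the original source): generate_filter() composed from
# literal atom restrictions.  It relies on make_mask_filter() from the
# surrounding module (not shown here), and the atom import path is an
# assumption based on pkgcore's layout.
def _demo_visibility_filter():
    from pkgcore.ebuild.atom import atom
    masks = [atom("dev-lang/spidermonkey")]
    unmasks = [atom("=dev-lang/spidermonkey-1.8.5*")]
    return generate_filter(masks, unmasks)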
def parse_description(value):
    """Value is used as a regexp matching description or longdescription."""
    matcher = values.StrRegex(value, case_sensitive=False)
    return packages.OrRestriction(finalize=True, *list(
        packages.PackageRestriction(attr, matcher)
        for attr in ('description', 'longdescription')))
def downgrade_resolver(
        vdbs, dbs, verify_vdb=True, nodeps=False, force_replace=False,
        resolver_cls=plan.merge_plan, **kwds):
    """
    Generate and configure a resolver for downgrading all processed nodes.

    :param vdbs: list of :obj:`pkgcore.repository.prototype.tree` instances
        that represent the livefs
    :param dbs: list of :obj:`pkgcore.repository.prototype.tree` instances
        representing sources of pkgs
    :param verify_vdb: should we stop resolving once we hit the vdb,
        or do full resolution?
    :return: :obj:`pkgcore.resolver.plan.merge_plan` instance
    """
    # '>=installed-version' atoms for everything in the vdb; the downgrade
    # strategy and the source-repo wrapper below both key off this restriction
    restrict = packages.OrRestriction(
        *list(atom(f'>={x.cpvstr}') for x in chain.from_iterable(vdbs)))
    f = partial(plan.merge_plan.prefer_downgrade_version_strategy, restrict)
    dbs = list(map(partial(misc.restrict_repo, restrict), dbs))
    # hack.
    if nodeps:
        # ignore dependency metadata everywhere
        vdbs = list(map(misc.nodeps_repo, vdbs))
        dbs = list(map(misc.nodeps_repo, dbs))
    elif not verify_vdb:
        # don't re-verify what is already installed; only the vdb loses deps
        vdbs = list(map(misc.nodeps_repo, vdbs))
        dbs = list(dbs)
    if force_replace:
        resolver_cls = generate_replace_resolver_kls(resolver_cls)
    return resolver_cls(dbs + vdbs, plan.pkg_sort_highest, f, **kwds)
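# Hedged usage sketch (not in the original source): vdb_repos and src_repos
# stand for whatever installed/source repository lists the caller already has
# (e.g. gathered from a pkgcore domain); only the downgrade_resolver() call
# itself comes from the function above.
def _demo_downgrade_resolver(vdb_repos, src_repos):
    # nodeps=True exercises the branch that strips dependency metadata from
    # both repo sets before the plan is built
    return downgrade_resolver(list(vdb_repos), list(src_repos), nodeps=True)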