Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def _start_with_fetchlist(self, fetchlist_task):
    """
    Completion callback for the asynchronous fetchlist computation.

    On failure, report any InvalidDependString (bad SRC_URI) and finish
    via _async_wait(); on success, unwrap the future's result and start
    a ManifestProcess for this cp as the next task in the chain.

    @param fetchlist_task: the finished task that computed the fetchlist
    """
    if self._default_exit(fetchlist_task) != os.EX_OK:
        if not self.fetchlist_dict.cancelled():
            try:
                # result() on a failed future re-raises its exception,
                # letting us surface the SRC_URI problem to the user.
                self.fetchlist_dict.result()
            except InvalidDependString as e:
                writemsg(
                    _("!!! %s%s%s: SRC_URI: %s\n") %
                    (self.cp, _repo_separator, self.repo_config.name, e),
                    noiselevel=-1)
        self._async_wait()
        return
    # Replace the future with its concrete dict result for later use.
    self.fetchlist_dict = self.fetchlist_dict.result()
    manifest_proc = ManifestProcess(cp=self.cp, distdir=self.distdir,
        fetchlist_dict=self.fetchlist_dict, repo_config=self.repo_config,
        scheduler=self.scheduler)
    self._start_task(manifest_proc, self._manifest_proc_exit)
# NOTE(review): fragment — the enclosing function, `result` (presumably a
# REQUIRED_USE check result object with a tounicode() method), and `pkg`
# are defined outside this chunk; indentation reconstructed.
if not result:
    # Unsatisfied REQUIRED_USE: show the minimal failing subset first.
    reduced_noise = result.tounicode()
    writemsg("\n  %s\n" % _("The following REQUIRED_USE flag" + \
        " constraints are unsatisfied:"), noiselevel=-1)
    writemsg("    %s\n" % reduced_noise,
        noiselevel=-1)
    # Collapse whitespace so the comparison below isn't fooled by layout.
    normalized_required_use = \
        " ".join(pkg._metadata["REQUIRED_USE"].split())
    if reduced_noise != normalized_required_use:
        # Also show the full expression when we only printed a subset.
        writemsg("\n  %s\n" % _("The above constraints " + \
            "are a subset of the following complete expression:"),
            noiselevel=-1)
        writemsg("    %s\n" % \
            human_readable_required_use(normalized_required_use),
            noiselevel=-1)
    writemsg("\n", noiselevel=-1)
    return 1
return os.EX_OK
# NOTE(review): fragment — `msg`, `first`, `all_involved_flags`, `config`,
# and `all_conflict_atoms_by_slotatom` are built outside this chunk;
# indentation reconstructed. Renders the involved USE flags as a list of
# {flag: state} dicts, then computes required USE changes per package.
for involved_flags in all_involved_flags:
    if first:
        first = False
    else:
        msg += ", "
    msg += "{"
    inner_first = True
    for flag, state in involved_flags.items():
        if inner_first:
            inner_first = False
        else:
            msg += ", "
        msg += flag + ": %s" % (state,)
    msg += "}"
msg += "]\n"
writemsg(msg, noiselevel=-1)
required_changes = {}
for id, pkg in enumerate(config):
    if not pkg.installed:
        #We can't change the USE of installed packages.
        for flag in all_involved_flags[id]:
            if not pkg.iuse.is_valid_flag(flag):
                continue
            state = all_involved_flags[id][flag]
            self._force_flag_for_package(required_changes, pkg, flag, state)
    #Go through all (parent, atom) pairs for the current slot conflict.
    for ppkg, atom in all_conflict_atoms_by_slotatom[id]:
        if not atom.package:
            continue
        use = atom.unevaluated_atom.use
# NOTE(review): fragment — the opening `try:` and the branch test that
# pairs with the `else:`/`except` below (presumably an existence check on
# lockfilename) are outside this chunk; indentation assumes that context.
        # Try to take an exclusive, non-blocking lock; raises if someone
        # else currently holds the lock.
        locking_method(myfd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        # We won the lock, so there isn't competition for it.
        # We can safely delete the file.
        writemsg(_("Got the lockfile...\n"), 1)
        # Only unlink when no hardlink-based lock reference remains.
        if _fstat_nlink(myfd) == 1:
            os.unlink(lockfilename)
            writemsg(_("Unlinked lockfile...\n"), 1)
        locking_method(myfd, fcntl.LOCK_UN)
    else:
        writemsg(_("lockfile does not exist '%s'\n") % lockfilename, 1)
        _open_fds[myfd].close()
        return False
except SystemExit:
    raise
except Exception as e:
    # Another process beat us to the lock; treat as non-fatal.
    writemsg(_("Failed to get lock... someone took it.\n"), 1)
    writemsg(str(e) + "\n", 1)
# why test lockfilename? because we may have been handed an
# fd originally, and the caller might not like having their
# open fd closed automatically on them.
if isinstance(lockfilename, str):
    _open_fds[myfd].close()
return True
# NOTE(review): fragment — the enclosing def, its docstring opening, and
# `src_path`/`src_st` are outside this chunk; indentation reconstructed.
@param dest_path: destination file path
@type dest_path: str
"""
# Remove destination file in order to ensure that the following
# symlink or copy2 call won't fail (see bug #535850).
try:
    os.unlink(dest_path)
except OSError:
    pass
try:
    # Preserve symlinks as symlinks; copy regular files with metadata.
    if stat.S_ISLNK(src_st.st_mode):
        os.symlink(os.readlink(src_path), dest_path)
    else:
        shutil.copy2(src_path, dest_path)
except EnvironmentError as e:
    # Best-effort copy: report the failure instead of propagating it.
    portage.util.writemsg(
        _('dispatch-conf: Error copying %(src_path)s to '
        '%(dest_path)s: %(reason)s\n') % {
            "src_path": src_path,
            "dest_path": dest_path,
            "reason": e
        }, noiselevel=-1)
def create_cleanlist():
    """
    Build the list of packages to remove for the current clean action.

    NOTE(review): this definition appears truncated in this chunk — the
    remainder of the function (including its return of pkgs_to_remove,
    presumably) is outside this view.
    """
    if "--debug" in myopts:
        writemsg("\ndigraph:\n\n", noiselevel=-1)
        graph.debug_print()
        writemsg("\n", noiselevel=-1)
    pkgs_to_remove = []
    if action == "depclean":
        if args_set:
            for pkg in sorted(vardb, key=cmp_sort_key(cmp_pkg_cpv)):
                arg_atom = None
                try:
                    arg_atom = args_set.findAtomForPackage(pkg)
                except portage.exception.InvalidDependString:
                    # this error has already been displayed by now
                    continue
def makedirs(dir_path):
    """
    Create dir_path and any missing parent directories.

    @param dir_path: directory path to create
    @type dir_path: str
    @return: True on success or if the path already exists; False (after
        writing a message) when creation is not permitted
    @raise OSError: re-raised for any error other than EEXIST or EPERM
    """
    try:
        os.makedirs(dir_path)
    except OSError as oe:
        if oe.errno == errno.EEXIST:
            # Already present — treat as success.
            return True
        if oe.errno == errno.EPERM:
            writemsg("%s\n" % oe, noiselevel=-1)
            writemsg(_("Operation Not Permitted: makedirs('%s')\n") % \
                dir_path, noiselevel=-1)
            return False
        raise
    return True
# NOTE(review): fragment — the enclosing def, the opening of its
# docstring, and the loop this code's `continue` belongs to (presumably a
# per-file fetch loop) are outside this chunk; indentation reconstructed.
2 completely downloaded
"""
fetched = 0
orig_digests = mydigests.get(myfile, {})
if not (allow_missing_digests or listonly):
    # Require at least one digest of a hash type we can verify locally.
    verifiable_hash_types = set(orig_digests).intersection(valid_hashes)
    if not verifiable_hash_types:
        expected = " ".join(sorted(valid_hashes))
        got = set(orig_digests)
        got.discard("size")
        got = " ".join(sorted(got))
        reason = (_("Insufficient data for checksum verification"),
            got, expected)
        writemsg(_("!!! Fetched file: %s VERIFY FAILED!\n") % myfile,
            noiselevel=-1)
        writemsg(_("!!! Reason: %s\n") % reason[0],
            noiselevel=-1)
        writemsg(_("!!! Got:      %s\n!!! Expected: %s\n") % \
            (reason[1], reason[2]), noiselevel=-1)
        if fetchonly:
            # Record the failure but keep fetching the remaining files.
            failed_files.add(myfile)
            continue
        else:
            return 0
size = orig_digests.get("size")
if size == 0:
    # Zero-byte distfiles are always invalid, so discard their digests.
    del mydigests[myfile]
# NOTE(review): fragment — `pretend`, `fetchonly`, `mtimedb`, `myopts`,
# `settings`, `bad`, and `resume` come from the enclosing (out-of-view)
# function; indentation reconstructed.
if pretend or fetchonly:
    # make the mtimedb readonly
    mtimedb.filename = None
if '--digest' in myopts or 'digest' in settings.features:
    # Warn that auto-digesting can mask Manifest corruption.
    if '--digest' in myopts:
        msg = "The --digest option"
    else:
        msg = "The FEATURES=digest setting"
    msg += " can prevent corruption from being" + \
        " noticed. The `repoman manifest` command is the preferred" + \
        " way to generate manifests and it is capable of doing an" + \
        " entire repository or category at once."
    prefix = bad(" * ")
    writemsg(prefix + "\n")
    for line in textwrap.wrap(msg, 72):
        writemsg("%s%s\n" % (prefix, line))
    writemsg(prefix + "\n")
if resume:
    favorites = mtimedb["resume"].get("favorites")
    if not isinstance(favorites, list):
        favorites = []
    resume_data = mtimedb["resume"]
    mergelist = resume_data["mergelist"]
    if mergelist and "--skipfirst" in myopts:
        # NOTE(review): this deletes from mergelist while enumerating it;
        # presumably a `break` after the del was lost at the chunk
        # boundary — confirm against the full file.
        for i, task in enumerate(mergelist):
            if isinstance(task, list) and \
                task and task[-1] == "merge":
                del mergelist[i]
def write(msg):
    """Emit msg via writemsg; noiselevel=-1 forces output regardless of
    the configured quietness level."""
    writemsg(msg, noiselevel=-1)