    # End of the get_tree_mem_usage helper passed to the Timer below:
    # remember the peak RSS observed.
    if memory_usage[0] is None or rss > memory_usage[0]:
        memory_usage[0] = rss

# Start a daemon Timer to sample memory while the subprocess runs, then log
# the peak once the process has exited.
mem_tm = Timer(interval=1, function=get_tree_mem_usage, args=(memory_usage,))
mem_tm.daemon = True
mem_tm.start()
sproc.wait()
mem_tm.cancel()
if memory_usage[0] is not None:
    _logger.info(
        "[job %s] Max memory used: %iMiB",
        self.name,
        round(memory_usage[0] / (2 ** 20)),
    )
else:
    _logger.debug(
        "Could not collect memory usage, job ended before monitoring began."
    )
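# Note that threading.Timer fires its function only once after `interval`
# seconds, so truly periodic sampling needs the callback to re-arm itself.
# A minimal, self-contained sketch of that pattern, assuming a POSIX `ps`
# and a caller-supplied PID (the real code walks the whole process tree):
import subprocess
from threading import Timer
from typing import List, Optional

def sample_rss(pid: int, memory_usage: List[Optional[int]]) -> None:
    try:
        out = subprocess.check_output(["ps", "-o", "rss=", "-p", str(pid)])
        rss = int(out) * 1024  # ps reports KiB; convert to bytes
    except (subprocess.CalledProcessError, ValueError):
        return  # process already gone or unparsable output
    if memory_usage[0] is None or rss > memory_usage[0]:
        memory_usage[0] = rss
    # Re-arm so sampling repeats every second until the owner stops it.
    timer = Timer(interval=1, function=sample_rss, args=(pid, memory_usage))
    timer.daemon = True
    timer.start()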
# Local resolver: direct file hits first, then the XDG data directories.
if pathobj.is_file():
    if frag:
        return "{}#{}".format(pathobj.as_uri(), frag)
    return pathobj.as_uri()

sharepaths = [
    os.environ.get(
        "XDG_DATA_HOME", os.path.join(os.path.expanduser("~"), ".local", "share")
    )
]
sharepaths.extend(
    os.environ.get("XDG_DATA_DIRS", "/usr/local/share/:/usr/share/").split(":")
)
shares = [os.path.join(s, "commonwl", uri) for s in sharepaths]
_logger.debug("Search path is %s", shares)
for path in shares:
    if os.path.exists(path):
        # Return the URI of the file actually found on the search path;
        # Path(uri) would be a relative path here and as_uri() would raise.
        return Path(path).as_uri()
    if os.path.exists("{}.cwl".format(path)):
        return Path("{}.cwl".format(path)).as_uri()
return None
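# Hypothetical walk-through of the search above: with XDG_DATA_HOME unset and
# the default XDG_DATA_DIRS, a tool reference "my-tool" is probed at
# ~/.local/share/commonwl/my-tool, /usr/local/share/commonwl/my-tool and
# /usr/share/commonwl/my-tool (each also with a ".cwl" suffix); the first hit
# is returned as a file:// URI:
from pathlib import Path
print(Path("/usr/share/commonwl/my-tool.cwl").as_uri())
# file:///usr/share/commonwl/my-tool.cwl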
        # End of the stderr validation: redirection targets must stay inside
        # the job's output directory.
        if os.path.isabs(j.stderr) or ".." in j.stderr:
            raise ValidationException(
                "stderr must be a relative path, got '%s'" % j.stderr
            )

if self.tool.get("stdout"):
    with SourceLine(self.tool, "stdout", ValidationException, debug):
        j.stdout = cast(str, builder.do_eval(self.tool["stdout"]))
        if j.stdout:
            if os.path.isabs(j.stdout) or ".." in j.stdout:
                raise ValidationException(
                    "stdout must be a relative path, got '%s'" % j.stdout
                )

if debug:
    _logger.debug(
        "[job %s] command line bindings is %s",
        j.name,
        json_dumps(builder.bindings, indent=4),
    )
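# Quick illustration (made-up paths) of the relative-path rule enforced above:
# absolute paths and parent-directory escapes are rejected so redirected
# output cannot land outside the job's output directory.
import os
for candidate in ("logs/out.txt", "/tmp/out.txt", "../escape.txt"):
    ok = not (os.path.isabs(candidate) or ".." in candidate)
    print(candidate, "accepted" if ok else "rejected")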
dockerReq, _ = self.get_requirement("DockerRequirement")
if dockerReq is not None and runtimeContext.use_container:
    # Containerized run: create fresh host-side scratch directories from the
    # configured prefixes.
    out_dir, out_prefix = os.path.split(runtimeContext.tmp_outdir_prefix)
    j.outdir = runtimeContext.outdir or tempfile.mkdtemp(
        prefix=out_prefix, dir=out_dir
    )
    tmpdir_dir, tmpdir_prefix = os.path.split(runtimeContext.tmpdir_prefix)
    j.tmpdir = runtimeContext.tmpdir or tempfile.mkdtemp(
        prefix=tmpdir_prefix, dir=tmpdir_dir
    )
    j.stagedir = tempfile.mkdtemp(prefix=tmpdir_prefix, dir=tmpdir_dir)
else:
    # Non-container run: reuse the directories already prepared by the builder.
    j.outdir = builder.outdir
    j.tmpdir = builder.tmpdir
    j.stagedir = builder.stagedir
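# Illustration (assumed prefix value) of the prefix handling above: a prefix
# such as "/tmp/out" is split into a parent directory and a name prefix, and
# mkdtemp creates a uniquely named scratch directory from the two parts.
import os
import tempfile
out_dir, out_prefix = os.path.split("/tmp/out")
scratch = tempfile.mkdtemp(prefix=out_prefix, dir=out_dir)
print(scratch)  # e.g. /tmp/outk3j2_9xq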
def __init__(self, basedir, cache=None, insecure=False):
    # type: (Text, Optional[Dict[Any, Any]], bool) -> None
    super(FtpFsAccess, self).__init__(basedir)
    self.cache = cache or {}
    self.netrc = None
    self.insecure = insecure
    try:
        if 'HOME' in os.environ:
            if os.path.exists(os.path.join(os.environ['HOME'], '.netrc')):
                self.netrc = netrc.netrc(
                    os.path.join(os.environ['HOME'], '.netrc'))
        elif os.path.exists(os.path.join(os.curdir, '.netrc')):
            self.netrc = netrc.netrc(os.path.join(os.curdir, '.netrc'))
    except netrc.NetrcParseError as err:
        _logger.debug(err)
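# A minimal sketch (host name assumed) of how the netrc object parsed above
# is typically consulted: netrc.authenticators() returns a (login, account,
# password) tuple, or None when the machine has no entry.
import netrc
try:
    creds = netrc.netrc()  # defaults to ~/.netrc
    auth = creds.authenticators("ftp.example.com")
    if auth is not None:
        login, _account, password = auth
except (FileNotFoundError, netrc.NetrcParseError):
    pass  # fall back to anonymous FTP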
manifest = "tagmanifest"
# Add checksums to corresponding manifest files
for (method, hash_value) in checksums.items():
    # File not in manifest because we bailed out on
    # existence in bagged_size above
    manifestpath = os.path.join(
        self.folder, "%s-%s.txt" % (manifest, method.lower())
    )
    # encoding: match Tag-File-Character-Encoding: UTF-8
    # newline: ensure LF also on Windows
    with open(
        manifestpath, "a", encoding=ENCODING, newline="\n"
    ) as checksum_file:
        line = "%s %s\n" % (hash_value, rel_path)
        _logger.debug("[provenance] Added to %s: %s", manifestpath, line)
        checksum_file.write(line)
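# Illustration (made-up digest and path) of the manifest line format written
# above: BagIt manifests pair a hash with a path relative to the bag root.
import hashlib
hash_value = hashlib.sha256(b"example payload").hexdigest()
line = "%s %s\n" % (hash_value, "metadata/manifest.json")
# e.g. appended to tagmanifest-sha256.txt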
def resolve_ga4gh_tool(document_loader: Loader, uri: str) -> Optional[str]:
    path, version = uri.partition(":")[::2]
    if not version:
        version = "latest"
    for reg in ga4gh_tool_registries:
        ds = GA4GH_TRS_FILES.format(
            reg, urllib.parse.quote(path, ""), urllib.parse.quote(version, "")
        )
        try:
            _logger.debug("Head path is %s", ds)
            resp = document_loader.session.head(ds)
            resp.raise_for_status()
            _logger.debug("Passed head path of %s", ds)
            resp = document_loader.session.get(ds)
            for file_listing in resp.json():
                if file_listing.get("file_type") == "PRIMARY_DESCRIPTOR":
                    primary_path = file_listing.get("path")
                    ds2 = GA4GH_TRS_PRIMARY_DESCRIPTOR.format(
                        reg,
                        urllib.parse.quote(path, ""),
                        urllib.parse.quote(version, ""),
                        urllib.parse.quote(primary_path, ""),
                    )
                    _logger.debug("Resolved %s", ds2)
                    return ds2
        except requests.exceptions.HTTPError:
            # This registry does not serve the tool; try the next one.
            pass
    return None
def _runner(self, job, runtime_context, TMPDIR_LOCK):
    # type: (Union[JobBase, WorkflowJob, CallbackJob, ExpressionJob], RuntimeContext, threading.Lock) -> None
    """Job running thread."""
    try:
        _logger.debug(
            "job: {}, runtime_context: {}, TMPDIR_LOCK: {}".format(
                job, runtime_context, TMPDIR_LOCK
            )
        )
        job.run(runtime_context, TMPDIR_LOCK)
    except WorkflowException as err:
        _logger.exception("Got workflow error: {}".format(err))
        self.exceptions.append(err)
    except Exception as err:  # pylint: disable=broad-except
        # Wrap unexpected errors so callers only handle WorkflowException.
        _logger.exception("Got workflow error: {}".format(err))
        self.exceptions.append(WorkflowException(str(err)))
    finally:
        if runtime_context.workflow_eval_lock:
            with runtime_context.workflow_eval_lock:
                self.threads.remove(threading.current_thread())
                if isinstance(job, JobBase):
                    ...
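# A minimal sketch (executor attributes assumed) of how a runner method like
# the one above is typically driven: one daemon thread per job, recorded in
# the same self.threads collection that the finally block removes from.
import threading

def start_job(executor, job, runtime_context, tmpdir_lock):
    thread = threading.Thread(
        target=executor._runner, args=(job, runtime_context, tmpdir_lock)
    )
    thread.daemon = True
    executor.threads.add(thread)
    thread.start()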
# Construct the job object; the runner class is chosen by make_job_runner.
j = self.make_job_runner(runtimeContext)(
    builder,
    builder.job,
    self.make_path_mapper,
    self.requirements,
    self.hints,
    jobname,
)
j.prov_obj = self.prov_obj
j.successCodes = self.tool.get("successCodes", [])
j.temporaryFailCodes = self.tool.get("temporaryFailCodes", [])
j.permanentFailCodes = self.tool.get("permanentFailCodes", [])
debug = _logger.isEnabledFor(logging.DEBUG)
if debug:
    _logger.debug(
        "[job %s] initializing from %s%s",
        j.name,
        self.tool.get("id", ""),
        " as part of %s" % runtimeContext.part_of
        if runtimeContext.part_of
        else "",
    )
    _logger.debug("[job %s] %s", j.name, json_dumps(builder.job, indent=4))
builder.pathmapper = self.make_path_mapper(
    reffiles, builder.stagedir, runtimeContext, True
)
builder.requirements = j.requirements
_check_adjust = partial(check_adjust, builder)
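# Generic illustration of the functools.partial pattern used above: binding
# the first argument yields a one-argument callable that can then be applied
# to each file object in turn.
from functools import partial

def check(builder, fileobj):
    return (builder, fileobj)

check_bound = partial(check, "builder-instance")
print(check_bound("file-entry"))  # ('builder-instance', 'file-entry')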
def packed_workflow(self, packed: str) -> None:
    """Pack CWL description to generate re-runnable CWL object in RO."""
    self.self_check()
    rel_path = str(PurePosixPath(WORKFLOW) / "packed.cwl")
    # Write as binary: the bag file is opened without an encoding, so the
    # packed text must be encoded explicitly.
    with self.write_bag_file(rel_path, encoding=None) as write_pack:
        write_pack.write(packed.encode("utf-8"))
    _logger.debug("[provenance] Added packed workflow: %s", rel_path)