# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
'{0}: Embedded tool outputs: \n {1}'.format(self.task_id, json.dumps(output, indent=4)))
promises = {}
for out in self.cwl_step.tool["outputs"]:
out_id = shortname(out["id"])
jobout_id = out_id.split("/")[-1]
try:
promises[out_id] = output[jobout_id]
except:
continue
# Unsetting the Generation from final output object
visit_class(promises, ("File",), MutationManager().unset_generation)
data = {"promises": promises, "outdir": self.outdir}
_logger.info(
'{0}: Output: \n {1}'.format(self.task_id, json.dumps(data, indent=4)))
return data
# Gather secondaryFiles for workflow inputs that declare "default"
# values.  NOTE(review): this call normally sits inside a
# discover_default_secondary_files helper -- the enclosing def is not
# visible in this fragment.
discover_secondary_files(
    obj["inputs"],
    {shortname(t["id"]): t["default"] for t in obj["inputs"]
     if "default" in t},
    discovered)
# Run the discovery callback over every tool and workflow in the document.
visit_class(workflowobj, ("CommandLineTool", "Workflow"),
            discover_default_secondary_files)
for entry in list(discovered.keys()):
    # Only interested in discovered secondaryFiles which are local
    # files that need to be uploaded.
    if not entry.startswith("file:"):
        del discovered[entry]
# Upload every File/Directory referenced by the workflow document and by
# the discovered secondaryFiles to the remote FTP storage location.
# NOTE(review): ("Directory") and ("File") are plain strings, not
# 1-tuples; this only works if visit_class's membership test accepts a
# substring match -- consider ("Directory",) / ("File",).  Confirm
# against the visit_class implementation.
visit_class(workflowobj, ("Directory"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
visit_class(workflowobj, ("File"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
visit_class(discovered, ("Directory"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
visit_class(discovered, ("File"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
# Tail of a visit_default helper (its def is outside this view): make
# sure each default File/Directory carries a "location", and if the
# location turned out to be missing, drop the whole "default" entry.
visit_class(obj["default"], ("File", "Directory"),
            ensure_default_location)
if remove[0]:
    del obj["default"]
# Apply visit_default to every input default in the workflow document.
find_defaults(workflowobj, visit_default)
discovered = {}

def discover_default_secondary_files(obj):
    """Collect secondaryFiles for inputs of *obj* that declare a default.

    Results are accumulated into the enclosing ``discovered`` mapping.
    """
    defaults = {}
    for inp in obj["inputs"]:
        if "default" in inp:
            defaults[shortname(inp["id"])] = inp["default"]
    discover_secondary_files(obj["inputs"], defaults, discovered)
# Run the secondaryFiles discovery callback over every tool and workflow
# in the document.
visit_class(workflowobj, ("CommandLineTool", "Workflow"),
            discover_default_secondary_files)
for entry in list(discovered.keys()):
    # Only interested in discovered secondaryFiles which are local
    # files that need to be uploaded.
    if not entry.startswith("file:"):
        del discovered[entry]
# Push every File/Directory referenced by the workflow document and by
# the discovered secondaryFiles up to the remote FTP storage location.
# NOTE(review): ("Directory") / ("File") are strings, not 1-tuples;
# verify visit_class tolerates this, or use ("Directory",) / ("File",).
visit_class(workflowobj, ("Directory"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
visit_class(workflowobj, ("File"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
visit_class(discovered, ("Directory"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
visit_class(discovered, ("File"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
def import_files(tool):
    """Normalize the File/Directory objects in *tool* and import the files.

    Mutates *tool* in place: converts legacy "path" fields to "location",
    records file sizes, expands directory listings recursively, then
    uploads each file through ``toil.importFile``.
    """
    visit_class(tool, ("File", "Directory"), path_to_loc)
    record_size = functools.partial(add_sizes, fs_access)
    visit_class(tool, ("File", ), record_size)
    normalizeFilesDirs(tool)
    expand_listing = functools.partial(get_listing, fs_access, recursive=True)
    adjustDirObjs(tool, expand_listing)
    # skip_broken: tolerate files that can no longer be resolved.
    do_import = functools.partial(
        uploadFile, toil.importFile, fileindex, existing, skip_broken=True)
    adjustFileObjs(tool, do_import)
def discover_default_secondary_files(obj):
    """Find secondaryFiles for any input of *obj* that declares a default."""
    inputs = obj["inputs"]
    default_values = {
        shortname(inp["id"]): inp["default"]
        for inp in inputs
        if "default" in inp
    }
    discover_secondary_files(inputs, default_values, discovered)
# Apply the secondaryFiles discovery callback to every tool and workflow
# in the document.
visit_class(workflowobj, ("CommandLineTool", "Workflow"),
            discover_default_secondary_files)
for entry in list(discovered.keys()):
    # Only interested in discovered secondaryFiles which are local
    # files that need to be uploaded.
    if not entry.startswith("file:"):
        del discovered[entry]
# Upload the workflow document's and the discovered secondaryFiles'
# File/Directory objects to remote FTP storage.
# NOTE(review): ("Directory") / ("File") are plain strings rather than
# 1-tuples; confirm visit_class accepts this form.
visit_class(workflowobj, ("Directory"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
visit_class(workflowobj, ("File"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
visit_class(discovered, ("Directory"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
visit_class(discovered, ("File"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
# Continuation of a discover_secondary_files(...) call whose opening
# line is outside this view: builds {input id -> default value} for
# inputs that declare a "default".
{shortname(t["id"]): t["default"] for t in obj["inputs"]
 if "default" in t},
discovered)
# Run discovery over every tool/workflow, then keep only local files.
visit_class(workflowobj, ("CommandLineTool", "Workflow"),
            discover_default_secondary_files)
for entry in list(discovered.keys()):
    # Only interested in discovered secondaryFiles which are local
    # files that need to be uploaded.
    if not entry.startswith("file:"):
        del discovered[entry]
# Upload all referenced File/Directory objects to remote FTP storage.
# NOTE(review): ("Directory") / ("File") are strings, not 1-tuples;
# verify against the visit_class implementation.
visit_class(workflowobj, ("Directory"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
visit_class(workflowobj, ("File"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
visit_class(discovered, ("Directory"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
visit_class(discovered, ("File"), functools.partial(
    ftp_upload, remote_storage_url, ftp_access))
# Stage one path-mapper entry `p` to its final on-disk destination,
# dispatching on the entry type.
if p.type == "File":
    # p.resolved[7:] strips a 7-character URI scheme prefix (presumably
    # "file://" or a similar scheme -- TODO confirm against the caller)
    # before unpacking the Toil FileID and exporting it locally.
    file_store.exportFile(FileID.unpack(p.resolved[7:]), "file://" + p.target)
elif p.type == "Directory" and not os.path.exists(p.target):
    # Create missing output directories with rwxr-xr-x permissions.
    os.makedirs(p.target, 0o0755)
elif p.type == "CreateFile":
    # Literal file contents are carried inline in p.resolved; write them
    # out as UTF-8 bytes.
    with open(p.target, "wb") as n:
        n.write(p.resolved.encode("utf-8"))
def _check_adjust(f):
    """Rewrite a File/Directory object's location through the path mapper.

    Maps f["location"] via the enclosing path mapper ``pm`` and
    normalizes the result to a file:// URI.  Inline "contents" are
    dropped because the data now lives at the mapped location.
    Returns the (mutated) object.
    """
    f["location"] = schema_salad.ref_resolver.file_uri(
        pm.mapper(f["location"])[1])
    if "contents" in f:
        del f["contents"]
    return f
# Apply the rewrite to every File and Directory in the CWL job object.
visit_class(cwljob, ("File", "Directory"), _check_adjust)
def import_files(tool):
    """Prepare and import the File/Directory objects referenced by *tool*.

    In-place pipeline: "path" -> "location" conversion, size recording,
    path normalization, recursive directory listing, and finally file
    upload via ``toil.importFile`` (broken files are skipped).
    """
    visit_class(tool, ("File", "Directory"), path_to_loc)
    add_file_sizes = functools.partial(add_sizes, fs_access)
    visit_class(tool, ("File", ), add_file_sizes)
    normalizeFilesDirs(tool)
    list_directories = functools.partial(
        get_listing, fs_access, recursive=True)
    adjustDirObjs(tool, list_directories)
    import_one = functools.partial(
        uploadFile, toil.importFile, fileindex, existing, skip_broken=True)
    adjustFileObjs(tool, import_one)
def visit_default(obj):
    """Normalize the "default" File/Directory entries of one input.

    Promotes a bare "path" to "location"; if any resulting location does
    not exist on the FTP server, the whole "default" entry is deleted so
    the input simply has no default.
    """
    # One-element list so the nested callback can set the flag.
    default_is_missing = [False]

    def ensure_default_location(fileobj):
        if "location" not in fileobj and "path" in fileobj:
            # Promote legacy "path" into a "location" field.
            fileobj["location"] = fileobj.pop("path")
        if "location" in fileobj \
                and not ftp_access.exists(fileobj["location"]):
            # Flag the "default" entry for removal from workflowobj.
            default_is_missing[0] = True

    visit_class(obj["default"], ("File", "Directory"),
                ensure_default_location)
    if default_is_missing[0]:
        del obj["default"]

# Run the normalization over every input default in the workflow.
find_defaults(workflowobj, visit_default)