Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def remove_at_id(doc: CWLObjectType) -> None:
    """Recursively strip every "@id" key from *doc*, in place.

    Walks nested mappings and the mapping elements of nested
    sequences; non-mapping values are left untouched.  An "@id"
    entry is deleted without descending into its value.
    """
    for key, value in list(doc.items()):
        if key == "@id":
            del doc[key]
        elif isinstance(value, MutableMapping):
            remove_at_id(value)
        elif isinstance(value, MutableSequence):
            for item in value:
                if isinstance(item, MutableMapping):
                    remove_at_id(item)
remove_at_id(out)
visit_class(
out,
("File",),
functools.partial(add_sizes, runtimeContext.make_fs_access("")),
)
def loc_to_path(obj: CWLObjectType) -> None:
    """Drop derived path fields and re-derive "path" from "location".

    Removes any existing "path", "nameext", "nameroot" and "dirname"
    entries, then — only when the object's "location" is a file://
    URI — sets "path" to the corresponding local filesystem path.
    """
    for derived in ("path", "nameext", "nameroot", "dirname"):
        obj.pop(derived, None)
    location = cast(str, obj["location"])
    if location.startswith("file://"):
        obj["path"] = uri_file_path(location)
visit_class(out, ("File", "Directory"), loc_to_path)
# Unsetting the Generation from final output object
visit_class(out, ("File",), MutationManager().unset_generation)
ns = {} # type: ContextType
ns.update(cast(ContextType, job_order_object.get("$namespaces", {})))
ns.update(cast(ContextType, process.metadata.get("$namespaces", {})))
ld = Loader(ns)
def expand_formats(p: CWLObjectType) -> None:
    # Expand a File "format" value (possibly a namespaced short form)
    # to a full URI, using the $namespaces-aware loader ``ld`` captured
    # from the enclosing scope.  Objects without a "format" entry are
    # left unchanged.
    if "format" in p:
        p["format"] = ld.expand_url(cast(str, p["format"]), "")
visit_class(job_order_object, ("File", "Directory"), path_to_loc)
visit_class(
job_order_object,
("File",),
functools.partial(add_sizes, make_fs_access(input_basedir)),
)
visit_class(job_order_object, ("File",), expand_formats)
adjustDirObjs(job_order_object, trim_listing)
normalizeFilesDirs(job_order_object)
if secret_store and secrets_req:
secret_store.store(
[shortname(sc) for sc in cast(List[str], secrets_req["secrets"])],
job_order_object,
)
if "cwl:tool" in job_order_object:
del job_order_object["cwl:tool"]
if "id" in job_order_object:
del job_order_object["id"]
return job_order_object
datum.get("secondaryFiles", []),
("File", "Directory"),
_capture_files,
)
if schema["type"] == "Directory":
datum = cast(CWLObjectType, datum)
ll = schema.get("loadListing") or self.loadListing
if ll and ll != "no_listing":
get_listing(
self.fs_access, datum, (ll == "deep_listing"),
)
self.files.append(datum)
if schema["type"] == "Any":
visit_class(datum, ("File", "Directory"), _capture_files)
# Position to front of the sort key
if binding:
for bi in bindings:
bi["position"] = cast(List[int], binding["position"]) + cast(
List[int], bi["position"]
)
bindings.append(binding)
return bindings
# Validate job order
try:
fill_in_defaults(self.tool["inputs"], job, fs_access)
normalizeFilesDirs(job)
schema = self.names.get_name("input_record_schema", None)
if schema is None:
raise WorkflowException(
"Missing input record schema: " "{}".format(self.names)
)
validate_ex(schema, job, strict=False, logger=_logger_validation_warnings)
if load_listing and load_listing != "no_listing":
get_listing(fs_access, job, recursive=(load_listing == "deep_listing"))
visit_class(job, ("File",), functools.partial(add_sizes, fs_access))
if load_listing == "deep_listing":
for i, inparm in enumerate(self.tool["inputs"]):
k = shortname(inparm["id"])
if k not in job:
continue
v = job[k]
dircount = [0]
def inc(d):  # type: (List[int]) -> None
    # Mutable-counter idiom: bump the single-element list in place so
    # the enclosing scope observes the new count.
    d[0] = d[0] + 1
visit_class(v, ("Directory",), lambda x: inc(dircount))
if dircount[0] == 0:
continue
filecount = [0]
raise SourceLine(
initialWorkdir, "listing", WorkflowException
).makeError(
"Entry at index %s of listing is not a Dirent, File or Directory object, was %s"
% (i, t2)
)
normalizeFilesDirs(ls)
j.generatefiles["listing"] = ls
for entry in ls:
self.updatePathmap(
builder.outdir, cast(PathMapper, builder.pathmapper), entry
)
visit_class(
[builder.files, builder.bindings],
("File", "Directory"),
partial(check_adjust, builder),
)
):
fragment = shortname(port["id"])
ret[fragment] = self.collect_output(
port,
builder,
outdir,
fs_access,
compute_checksum=compute_checksum,
)
if ret:
revmap = partial(revmap_file, builder, outdir)
adjustDirObjs(ret, trim_listing)
visit_class(ret, ("File", "Directory"), revmap)
visit_class(ret, ("File", "Directory"), remove_path)
normalizeFilesDirs(ret)
visit_class(
ret,
("File", "Directory"),
partial(check_valid_locations, fs_access),
)
if compute_checksum:
adjustFileObjs(ret, partial(compute_checksums, fs_access))
expected_schema = cast(
Schema, self.names.get_name("outputs_record_schema", None)
)
validate_ex(
expected_schema, ret, strict=False, logger=_logger_validation_warnings
)
if ret is not None and builder.mutation_manager is not None:
adjustFileObjs(ret, builder.mutation_manager.set_generation)
return ret if ret is not None else {}
def v1_2_0dev2todev3(
    doc: CommentedMap, loader: Loader, baseuri: str
) -> Tuple[CommentedMap, str]:  # pylint: disable=unused-argument
    """Public updater for v1.2.0-dev2 to v1.2.0-dev3.

    Renames the step-input ``pickValue`` value ``only_non_null`` to
    ``the_only_non_null`` and removes per-process ``cwlVersion``
    entries from ``$graph`` documents.

    :param doc: the document to upgrade; a deep copy is modified, the
        input object is left untouched.
    :param loader: unused, kept for the common updater signature.
    :param baseuri: unused, kept for the common updater signature.
    :return: the upgraded copy and the new version string.
    """
    doc = copy.deepcopy(doc)

    def update_pickvalue(t: CWLObjectType) -> None:
        # dev3 renamed the "only_non_null" pick value.
        for step in cast(MutableSequence[CWLObjectType], t["steps"]):
            # "in" may be absent on a step; skip rather than KeyError.
            for inp in cast(MutableSequence[CWLObjectType], step.get("in", [])):
                if inp.get("pickValue") == "only_non_null":
                    inp["pickValue"] = "the_only_non_null"

    # Pass the class names as a tuple: visit_class tests membership
    # with ``in``, and a bare string would substring-match any class
    # name contained in "Workflow".
    visit_class(doc, ("Workflow",), update_pickvalue)
    upd = doc
    if isinstance(upd, MutableMapping) and "$graph" in upd:
        upd = cast(CommentedMap, upd["$graph"])
    for proc in aslist(upd):
        if "cwlVersion" in proc:
            del proc["cwlVersion"]
    return (doc, "v1.2.0-dev3")
out,
("File",),
functools.partial(add_sizes, runtimeContext.make_fs_access("")),
)
def loc_to_path(obj: CWLObjectType) -> None:
    """Replace derived path fields with a "path" computed from "location".

    Deletes "path", "nameext", "nameroot" and "dirname" when present,
    then — only for file:// locations — sets "path" to the local
    filesystem path of the location URI.  Non-file locations end up
    with no "path" entry at all.
    """
    for field in ("path", "nameext", "nameroot", "dirname"):
        if field in obj:
            del obj[field]
    if cast(str, obj["location"]).startswith("file://"):
        obj["path"] = uri_file_path(cast(str, obj["location"]))
visit_class(out, ("File", "Directory"), loc_to_path)
# Unsetting the Generation from final output object
visit_class(out, ("File",), MutationManager().unset_generation)
if isinstance(out, str):
stdout.write(out)
else:
stdout.write(json_dumps(out, indent=4, ensure_ascii=False))
stdout.write("\n")
if hasattr(stdout, "flush"):
stdout.flush()
if status != "success":
_logger.warning("Final process status is %s", status)
return 1
_logger.info("Final process status is %s", status)
return 0
except (ValidationException) as exc:
i,
partial(ParameterOutputWorkflowException, port=port),
debug,
):
fragment = shortname(port["id"])
ret[fragment] = self.collect_output(
port,
builder,
outdir,
fs_access,
compute_checksum=compute_checksum,
)
if ret:
revmap = partial(revmap_file, builder, outdir)
adjustDirObjs(ret, trim_listing)
visit_class(ret, ("File", "Directory"), revmap)
visit_class(ret, ("File", "Directory"), remove_path)
normalizeFilesDirs(ret)
visit_class(
ret,
("File", "Directory"),
partial(check_valid_locations, fs_access),
)
if compute_checksum:
adjustFileObjs(ret, partial(compute_checksums, fs_access))
expected_schema = cast(
Schema, self.names.get_name("outputs_record_schema", None)
)
validate_ex(
expected_schema, ret, strict=False, logger=_logger_validation_warnings
)