if secret_store and secrets_req:
secret_store.store(
[shortname(sc) for sc in cast(List[str], secrets_req["secrets"])],
job_order_object,
)
if _logger.isEnabledFor(logging.DEBUG):
_logger.debug(
"Parsed job order from command line: %s",
json_dumps(job_order_object, indent=4),
)
for inp in process.tool["inputs"]:
if "default" in inp and (
not job_order_object or shortname(inp["id"]) not in job_order_object
):
if not job_order_object:
job_order_object = {}
job_order_object[shortname(inp["id"])] = inp["default"]
if job_order_object is None:
if process.tool["inputs"]:
if toolparser is not None:
print("\nOptions for {} ".format(args.workflow))
toolparser.print_help()
_logger.error("")
_logger.error("Input object required, use --help for details")
exit(1)
else:
job_order_object = {}
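# For reference: shortname() above strips the URI namespace from a CWL
# identifier. A minimal approximation of it, plus a toy run of the
# default back-filling loop (hypothetical tool and job order):
def shortname_sketch(inputid):
    fragment = inputid.split("#")[-1]
    return fragment.split("/")[-1]

tool_inputs = [
    {"id": "file:///wf.cwl#threads", "default": 1},
    {"id": "file:///wf.cwl#infile"},
]
job_order = {"infile": {"class": "File", "location": "data.txt"}}
for inp in tool_inputs:
    key = shortname_sketch(inp["id"])
    if "default" in inp and key not in job_order:
        job_order[key] = inp["default"]
assert job_order["threads"] == 1  # default applied; user-supplied value kept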
if (
    self.embedded_tool.tool["class"] == "Workflow"
    and runtimeContext.research_obj
    and self.prov_obj
    and self.embedded_tool.provenance_object
):
self.embedded_tool.parent_wf = self.prov_obj
process_name = self.tool["id"].split("#")[1]
self.prov_obj.start_process(
process_name,
datetime.datetime.now(),
self.embedded_tool.provenance_object.workflow_run_uri,
)
step_input = {}
for inp in self.tool["inputs"]:
field = shortname(inp["id"])
if not inp.get("not_connected"):
step_input[field] = job_order[inp["id"]]
try:
for tool in self.embedded_tool.job(
step_input,
functools.partial(self.receive_output, output_callbacks),
runtimeContext,
):
yield tool
except WorkflowException:
_logger.error("Exception on step '%s'", runtimeContext.name)
raise
except Exception as exc:
_logger.exception("Unexpected exception")
raise WorkflowException(str(exc)) from exc
if not result and optional:
# Don't convert zero or empty string to None
if result in [0, ""]:
return result
# For [] or None, return None
else:
return None
if (
not empty_and_optional
and isinstance(schema["type"], MutableMapping)
and schema["type"]["type"] == "record"
):
out = {}
for field in cast(List[CWLObjectType], schema["type"]["fields"]):
out[shortname(cast(str, field["name"]))] = self.collect_output(
field, builder, outdir, fs_access, compute_checksum=compute_checksum
)
return out
return result
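# The `result in [0, ""]` test above is deliberate: 0 and "" are falsy
# but are valid CWL output values, whereas [] and None collapse to None.
# Self-contained sketch of that rule:
def empty_optional_sketch(result, optional):
    if not result and optional:
        return result if result in [0, ""] else None
    return result

assert empty_optional_sketch(0, True) == 0
assert empty_optional_sketch("", True) == ""
assert empty_optional_sketch([], True) is None
assert empty_optional_sketch(None, True) is None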
param["id"] = inputid
param.lc.line = toolpath_object[stepfield].lc.data[index][0]
param.lc.col = toolpath_object[stepfield].lc.data[index][1]
param.lc.filename = toolpath_object[stepfield].lc.filename
toolpath_object[toolfield].append(param)
missing_values = []
for tool_entry in self.embedded_tool.tool["inputs"]:
if shortname(tool_entry["id"]) not in bound:
if "null" not in tool_entry["type"] and "default" not in tool_entry:
missing_values.append(shortname(tool_entry["id"]))
if missing_values:
validation_errors.append(
SourceLine(self.tool, "in").makeError(
"Step is missing required parameter%s '%s'"
% (
"s" if len(missing_values) > 1 else "",
"', '".join(missing_values),
)
)
)
if validation_errors:
raise ValidationException("\n".join(validation_errors))
super(WorkflowStep, self).__init__(toolpath_object, loadingContext)
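# Standalone sketch of the required-parameter check above, with
# hypothetical step inputs: a parameter is reported missing only if it
# is unbound, non-nullable, and has no default.
bound = {"infile"}
tool_inputs = [
    {"id": "#step/infile", "type": ["File"]},
    {"id": "#step/threads", "type": ["null", "int"]},  # optional
    {"id": "#step/mode", "type": ["string"]},  # required but unbound
]
missing = [
    t["id"].split("/")[-1]
    for t in tool_inputs
    if t["id"].split("/")[-1] not in bound
    and "null" not in t["type"]
    and "default" not in t
]
assert missing == ["mode"]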
def _link_merge_source(promises, in_out_obj, source_obj):
to_merge = [(shortname(s), promises[s].rv()) for s in aslist(source_obj)]
link_merge = in_out_obj.get("linkMerge", "merge_nested")
if link_merge == "merge_nested":
merged = MergeInputsNested(to_merge)
elif link_merge == "merge_flattened":
merged = MergeInputsFlattened(to_merge)
else:
raise validate.ValidationException(
"Unsupported linkMerge '%s'" % link_merge)
return merged
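# The two linkMerge modes in miniature: merge_nested keeps one list
# entry per source, merge_flattened concatenates list sources
# (MergeInputsNested / MergeInputsFlattened resolve these lazily):
sources = [("a", [1, 2]), ("b", [3])]
merge_nested = [v for _, v in sources]  # [[1, 2], [3]]
merge_flattened = [x for _, v in sources for x in v]  # [1, 2, 3]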
def copy_job_order(
    job: Union[Process, JobsType], job_order_object: CWLObjectType
) -> CWLObjectType:
"""Create copy of job object for provenance."""
if not isinstance(job, WorkflowJob):
# direct command line tool execution
return job_order_object
customised_job = {} # type: CWLObjectType
# new job object for RO
for index, inp in enumerate(job.tool["inputs"]):
    with SourceLine(
        job.tool["inputs"],
        index,
        WorkflowException,
        _logger.isEnabledFor(logging.DEBUG),
    ):
        iid = shortname(inp["id"])
        if iid in job_order_object:
            # add the input element in dictionary for provenance
            customised_job[iid] = copy.deepcopy(job_order_object[iid])
        elif "default" in inp:
            # add the default elements in the dictionary for provenance
            customised_job[iid] = copy.deepcopy(inp["default"])
        # inputs with neither a supplied value nor a default are omitted
return customised_job
if "format" in p:
p["format"] = ld.expand_url(cast(str, p["format"]), "")
visit_class(job_order_object, ("File", "Directory"), path_to_loc)
visit_class(
job_order_object,
("File",),
functools.partial(add_sizes, make_fs_access(input_basedir)),
)
visit_class(job_order_object, ("File",), expand_formats)
adjustDirObjs(job_order_object, trim_listing)
normalizeFilesDirs(job_order_object)
if secret_store and secrets_req:
secret_store.store(
[shortname(sc) for sc in cast(List[str], secrets_req["secrets"])],
job_order_object,
)
if "cwl:tool" in job_order_object:
del job_order_object["cwl:tool"]
if "id" in job_order_object:
del job_order_object["id"]
return job_order_object
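# Assumed behaviour of the path_to_loc visitor used above: migrate a
# legacy "path" field on File/Directory objects to "location" (sketch,
# not the exact cwltool implementation).
def path_to_loc_sketch(obj):
    if "location" not in obj and "path" in obj:
        obj["location"] = obj.pop("path")

f = {"class": "File", "path": "/data/input.txt"}
path_to_loc_sketch(f)
assert f == {"class": "File", "location": "/data/input.txt"}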
def postScatterEval(io):
shortio = {shortname(k): v for k, v in iteritems(io)}
for k in valueFrom:
io.setdefault(k, None)
def valueFromFunc(k, v):
if k in valueFrom:
return cwltool.expression.do_eval(
valueFrom[k], shortio, self.step.requirements,
None, None, {}, context=v)
else:
return v
return {k: valueFromFunc(k, v) for k, v in list(io.items())}
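# Toy stand-in for the valueFrom evaluation above: the real code calls
# cwltool.expression.do_eval; here a plain callable plays that role,
# receiving the shortnamed inputs and the current value (CWL's `self`).
io = {"#step/n": 3}
valueFrom = {"#step/n": lambda shortio, self_value: self_value * 2}
shortio = {k.split("/")[-1]: v for k, v in io.items()}
evaluated = {
    k: valueFrom[k](shortio, v) if k in valueFrom else v
    for k, v in io.items()
}
assert evaluated == {"#step/n": 6}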
if scatterMethod == "dotproduct":
for i in range(0, len(cwljob[shortname(scatter[0])])):
copyjob = copy.copy(cwljob)
for sc in [shortname(x) for x in scatter]:
copyjob[sc] = cwljob[sc][i]
copyjob = postScatterEval(copyjob)
(subjob, follow_on) = makeJob(
self.step.embedded_tool, copyjob, None,
self.runtime_context)
self.addChild(subjob)
outputs.append(follow_on.rv())
elif scatterMethod == "nested_crossproduct":
outputs = self.nested_crossproduct_scatter(
cwljob, scatter, postScatterEval)
elif scatterMethod == "flat_crossproduct":
self.flat_crossproduct_scatter(
cwljob, scatter, outputs, postScatterEval)
else:
    # only the three CWL scatter methods are defined; anything else is invalid
    raise validate.ValidationException(
        "Unsupported scatter method '%s'" % scatterMethod)
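# Scatter semantics in miniature, for a step scattering over two
# hypothetical inputs a and b:
a, b = [1, 2], ["x", "y"]
dotproduct = list(zip(a, b))  # pairs i-th with i-th: [(1, 'x'), (2, 'y')]
flat_crossproduct = [(i, j) for i in a for j in b]  # 4 jobs, flat output list
nested_crossproduct = [[(i, j) for j in b] for i in a]  # outputs stay nested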
if debug:
_logger.debug(
"Raw output from %s: %s",
custom_output,
json_dumps(ret, indent=4),
)
else:
for i, port in enumerate(ports):
with SourceLine(
ports,
i,
partial(ParameterOutputWorkflowException, port=port),
debug,
):
fragment = shortname(port["id"])
ret[fragment] = self.collect_output(
port,
builder,
outdir,
fs_access,
compute_checksum=compute_checksum,
)
if ret:
revmap = partial(revmap_file, builder, outdir)
adjustDirObjs(ret, trim_listing)
visit_class(ret, ("File", "Directory"), revmap)
visit_class(ret, ("File", "Directory"), remove_path)
normalizeFilesDirs(ret)
visit_class(
    ret,
    ("File", "Directory"),
    partial(check_valid_locations, fs_access),
)
def job(
self,
job_order: CWLObjectType,
output_callbacks: Optional[OutputCallbackType],
runtimeContext: RuntimeContext,
) -> Generator[Union[JobBase, CallbackJob], None, None]:
workReuse, _ = self.get_requirement("WorkReuse")
enableReuse = workReuse.get("enableReuse", True) if workReuse else True
jobname = uniquename(
runtimeContext.name or shortname(self.tool.get("id", "job"))
)
if runtimeContext.cachedir and enableReuse:
cachecontext = runtimeContext.copy()
cachecontext.outdir = "/out"
cachecontext.tmpdir = "/tmp" # nosec
cachecontext.stagedir = "/stage"
cachebuilder = self._init_job(job_order, cachecontext)
cachebuilder.pathmapper = PathMapper(
cachebuilder.files,
runtimeContext.basedir,
cachebuilder.stagedir,
separateDirs=False,
)
_check_adjust = partial(check_adjust, cachebuilder)
visit_class(
    [cachebuilder.files, cachebuilder.bindings],
    ("File", "Directory"),
    _check_adjust,
)
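# Sketch of the get_requirement() lookup used above, assuming the usual
# CWL precedence: an entry in `requirements` is mandatory and wins over
# an entry in `hints`; the returned flag records which one matched.
def get_requirement_sketch(tool, feature):
    for entry in tool.get("requirements", []):
        if entry["class"] == feature:
            return entry, True
    for entry in tool.get("hints", []):
        if entry["class"] == feature:
            return entry, False
    return None, None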