# NOTE(review): scraper residue (Snyk snippet-site banner), not part of the original source.
# This file is a concatenation of disjoint, truncated fragments from the cwltool/toil
# codebase and is not independently runnable.
runtimeContext: Optional[RuntimeContext] = None,
input_required: bool = True,
) -> int:
if not stdout: # force UTF-8 even if the console is configured differently
if hasattr(sys.stdout, "encoding") and sys.stdout.encoding.upper() not in (
"UTF-8",
"UTF8",
):
if hasattr(sys.stdout, "detach"):
stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8")
else:
stdout = getwriter("utf-8")(sys.stdout) # type: ignore
else:
stdout = sys.stdout
_logger.removeHandler(defaultStreamHandler)
stderr_handler = logger_handler
if stderr_handler is not None:
_logger.addHandler(stderr_handler)
else:
coloredlogs.install(logger=_logger, stream=stderr)
stderr_handler = _logger.handlers[-1]
workflowobj = None
prov_log_handler = None # type: Optional[logging.StreamHandler]
try:
if args is None:
if argsl is None:
argsl = sys.argv[1:]
addl = [] # type: List[str]
if "CWLTOOL_OPTIONS" in os.environ:
addl = os.environ["CWLTOOL_OPTIONS"].split(" ")
args = arg_parser().parse_args(addl + argsl)
for inp in process.tool["inputs"]:
if "default" in inp and (
not job_order_object or shortname(inp["id"]) not in job_order_object
):
if not job_order_object:
job_order_object = {}
job_order_object[shortname(inp["id"])] = inp["default"]
if job_order_object is None:
if process.tool["inputs"]:
if toolparser is not None:
print("\nOptions for {} ".format(args.workflow))
toolparser.print_help()
_logger.error("")
_logger.error("Input object required, use --help for details")
exit(1)
else:
job_order_object = {}
if print_input_deps:
basedir = None # type: Optional[str]
uri = cast(str, job_order_object["id"])
if uri == args.workflow:
basedir = os.path.dirname(uri)
uri = ""
printdeps(
job_order_object,
loader,
stdout,
relative_deps,
uri,
"UTF8",
):
if hasattr(sys.stdout, "detach"):
stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8")
else:
stdout = getwriter("utf-8")(sys.stdout) # type: ignore
else:
stdout = sys.stdout
_logger.removeHandler(defaultStreamHandler)
stderr_handler = logger_handler
if stderr_handler is not None:
_logger.addHandler(stderr_handler)
else:
coloredlogs.install(logger=_logger, stream=stderr)
stderr_handler = _logger.handlers[-1]
workflowobj = None
prov_log_handler = None # type: Optional[logging.StreamHandler]
try:
if args is None:
if argsl is None:
argsl = sys.argv[1:]
addl = [] # type: List[str]
if "CWLTOOL_OPTIONS" in os.environ:
addl = os.environ["CWLTOOL_OPTIONS"].split(" ")
args = arg_parser().parse_args(addl + argsl)
if args.record_container_id:
if not args.cidfile_dir:
args.cidfile_dir = os.getcwd()
del args.record_container_id
if runtimeContext is None:
# File literal outputs with no path, we don't write these and will fail
# with unsupportedRequirement when retrieving later with getFile
elif x.startswith("_:"):
return x
else:
x = existing.get(x, x)
if x not in index:
if not urlparse.urlparse(x).scheme:
rp = os.path.realpath(x)
else:
rp = x
try:
index[x] = "toilfs:" + writeFunc(rp).pack()
existing[index[x]] = x
except Exception as e:
cwllogger.error("Got exception '%s' while copying '%s'", e, x)
raise
return index[x]
}
for g in sorted(
fs_access.glob(fs_access.join(outdir, gb)),
key=cmp_to_key(
cast(
Callable[[str, str], int],
locale.strcoll,
)
),
)
]
)
except (OSError) as e:
_logger.warning(str(e))
except Exception:
_logger.error(
"Unexpected error from fs_access", exc_info=True
)
raise
for files in cast(List[Dict[str, Optional[CWLOutputType]]], r):
rfile = files.copy()
revmap(rfile)
if files["class"] == "Directory":
ll = schema.get("loadListing") or builder.loadListing
if ll and ll != "no_listing":
get_listing(fs_access, files, (ll == "deep_listing"))
else:
if binding.get("loadContents"):
with fs_access.open(
cast(str, rfile["location"]), "rb"
) as f:
if p.type == "File" and not os.path.isfile(p[0]) and p.staged:
raise WorkflowException(
"Input file %s (at %s) not found or is not a regular "
"file." % (knownfile, self.pathmapper.mapper(knownfile)[0])
)
if "listing" in self.generatefiles:
runtimeContext = runtimeContext.copy()
runtimeContext.outdir = self.outdir
self.generatemapper = self.make_path_mapper(
self.generatefiles["listing"],
self.builder.outdir,
runtimeContext,
False,
)
if _logger.isEnabledFor(logging.DEBUG):
_logger.debug(
"[job %s] initial work dir %s",
self.name,
json_dumps(
{
p: self.generatemapper.mapper(p)
for p in self.generatemapper.files()
},
indent=4,
),
loadingContext.prov_obj = self.provenance_object
loadingContext = loadingContext.copy()
loadingContext.requirements = self.requirements
loadingContext.hints = self.hints
self.steps = [] # type: List[WorkflowStep]
validation_errors = []
for index, step in enumerate(self.tool.get("steps", [])):
try:
self.steps.append(
self.make_workflow_step(
step, index, loadingContext, loadingContext.prov_obj
)
)
except ValidationException as vexc:
if _logger.isEnabledFor(logging.DEBUG):
_logger.exception("Validation failed at")
validation_errors.append(vexc)
if validation_errors:
raise ValidationException("\n".join(str(v) for v in validation_errors))
random.shuffle(self.steps)
# statically validate data links instead of doing it at runtime.
workflow_inputs = self.tool["inputs"]
workflow_outputs = self.tool["outputs"]
step_inputs = [] # type: List[CWLObjectType]
step_outputs = [] # type: List[CWLObjectType]
param_to_step = {} # type: Dict[str, CWLObjectType]
for step in self.steps:
) # str
use_conda_dependencies = getattr(
args, "beta_conda_dependencies", None
) # str
if conf_file or use_conda_dependencies:
runtimeContext.job_script_provider = DependenciesConfiguration(args)
else:
runtimeContext.find_default_container = functools.partial(
find_default_container,
default_container=runtimeContext.default_container,
use_biocontainers=args.beta_use_biocontainers,
)
(out, status) = real_executor(
tool, initialized_job_order_object, runtimeContext, logger=_logger
)
if out is not None:
if runtimeContext.research_obj is not None:
runtimeContext.research_obj.create_job(out, True)
def remove_at_id(doc: CWLObjectType) -> None:
for key in list(doc.keys()):
if key == "@id":
del doc[key]
else:
value = doc[key]
if isinstance(value, MutableMapping):
remove_at_id(value)
elif isinstance(value, MutableSequence):
for entry in value: