def test_non_supported_parse_from_str(self):
    class MyTObj(object):
        pass

    actual = register_value_type(InlineValueType(MyTObj))
    with pytest.raises(DatabandRuntimeError):
        actual.parse_from_str("a")
def failed_to_run_spark_script(task, cmd, application, return_code, error_snippets):
    return DatabandRuntimeError(
        "spark_submit failed with return code %s. Failed to run: %s"
        % (return_code, cmd),
        show_exc_info=False,
        nested_exceptions=error_snippets,
        help_msg="Check your %s logic and input data %s. Inspect spark logs for more info."
        % (application, list(task.relations.task_inputs_user.values())),
    )
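Error builders like the one above return a DatabandRuntimeError instead of raising it, leaving that decision to the call site. A minimal sketch of such a call site follows; the names task, cmd, application, return_code and error_snippets are assumptions standing in for whatever the surrounding spark-submit code provides.

# Hypothetical call site for the error builder above; the local variables
# (task, cmd, application, return_code, error_snippets) are assumed, not
# taken from the original source.
if return_code != 0:
    raise failed_to_run_spark_script(
        task, cmd, application, return_code, error_snippets
    )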
dbnd_context = get_databand_context()
conn_string = dbnd_context.settings.core.get_sql_alchemy_conn()
if conn_string.startswith("sqlite:"):
    from dbnd_web.utils.dbnd_db import get_sqlite_db_location

    db_file = get_sqlite_db_location(conn_string)
    logger.info("Exporting DB=%s", db_file)
    tar.add(db_file, arcname="dbnd.db")
elif conn_string.startswith("postgresql"):
    with tempfile.NamedTemporaryFile(prefix="dbdump.", suffix=".sql") as tf:
        from dbnd_web.utils.dbnd_db import dump_postgres

        dump_postgres(conn_string, tf.name)
        tar.add(tf.name, arcname="postgres-dbnd.sql")
else:
    raise DatabandRuntimeError(
        "Can not export db! "
        "Currently, we support only sqlite and postgres db in automatic export"
    )

if include_logs:
    context = get_databand_context()
    local_env = context.settings.get_env_config(CloudType.local)
    logs_folder = local_env.dbnd_local_root.folder("logs").path
    if os.path.exists(logs_folder):
        logger.info("Adding run folder from '%s'", logs_folder)
        tar.add(logs_folder, "run")
    else:
        logger.error("Logs dir '%s' is not found", logs_folder)
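The export logic above clearly runs with an already-open tar archive, a configured logger, and tempfile/os imported. A minimal sketch of that assumed surrounding setup; the wrapper name export_db_dump and the archive path are hypothetical, not from the original source.

# Sketch of the assumed context for the export snippet above: it relies on
# `tar`, `logger`, `tempfile` and `os` already being in scope.
import logging
import os
import tarfile
import tempfile

logger = logging.getLogger(__name__)

def export_db_dump(archive_path, include_logs=False):  # hypothetical wrapper
    with tarfile.open(archive_path, "w:gz") as tar:
        ...  # the sqlite/postgres export logic shown above goes here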
def wrong_return_value_len(task, names, result):
    return DatabandRuntimeError(
        "Returned result from '{task}' doesn't match expected schema. "
        "Expected tuple of '{names}', got tuple of length '{result}'".format(
            task=_run_name(task), names=names, result=len(result)
        )
    )
def failed_to_run_qubole_job(status_code, log_url, spark_log):
    return DatabandRuntimeError(
        "Qubole run failed with code %s." % status_code,
        show_exc_info=False,
        nested_exceptions=spark_log,
        help_msg="Check spark log for more info: %s." % log_url,
    )
def _build_submit_task(self, run):
    if run.root_task:
        raise DatabandRuntimeError(
            "Can't send to remote execution task created via code, only command line is supported"
        )

    # don't describe in local run, do it in remote run
    settings = self.settings
    settings.system.describe = False

    cmd_line_args = (
        ["run"] + _get_dbnd_run_relative_cmd() + ["--run-driver", str(run.run_uid)]
    )

    args = run.remote_engine.dbnd_executable + cmd_line_args
    root_task = run.remote_engine.submit_to_engine_task(
        env=run.env,
        args=args,
        # ... remaining arguments truncated in the source snippet
    )
def attempt_number(self, value):
    if not value:
        raise DatabandRuntimeError("cannot set None as the attempt number")

    if value != self._attempt_number:
        self._attempt_number = value
        self.init_attempt()
        self.run.tracker.tracking_store.add_task_runs(
            run=self.run, task_runs=[self]
        )
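This method reads like the setter half of a property on a task-run object. A minimal sketch of that pattern under stated assumptions: the class name, getter, and import path are guesses for illustration, not taken from the snippet above.

# Hypothetical class showing the property/setter wiring assumed around the
# attempt_number setter above; validation only, no tracking-store calls.
from dbnd._core.errors import DatabandRuntimeError  # import path assumed

class TaskRunLike(object):
    def __init__(self):
        self._attempt_number = 1

    @property
    def attempt_number(self):
        return self._attempt_number

    @attempt_number.setter
    def attempt_number(self, value):
        if not value:
            raise DatabandRuntimeError("cannot set None as the attempt number")
        self._attempt_number = value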
def dump_to_multi_target(target, value):
    return DatabandRuntimeError(
        "Can't dump %s to '%s': target is multi target (read only)"
        % (type(value), target)
    )
def task_has_not_complete_but_all_outputs_exists(task):
    return DatabandRuntimeError(
        "Something is wrong: task %s has been executed, "
        "but the _complete function returns False while all outputs exist!"
        % _safe_task_family(task),
        help_msg="Check your implementation of %s and validate that your _complete override is correct"
        % _safe_task_family(task),
        show_exc_info=False,
    )
def get_pod_status_v1(self):
    from requests import HTTPError

    try:
        return self.kube_client.read_namespaced_pod(self.name, self.namespace)
    except HTTPError as e:
        raise DatabandRuntimeError(
            "There was an error reading pod status for %s at namespace %s via kubernetes API: %s"
            % (self.name, self.namespace, e)
        )
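A hedged usage sketch for the method above: pod_ctrl is a hypothetical instance of the pod-controller class, and only the try/except shape around DatabandRuntimeError is the point.

# Hypothetical caller: read the pod status once and report failures.
import logging

from dbnd._core.errors import DatabandRuntimeError  # import path assumed

try:
    pod = pod_ctrl.get_pod_status_v1()  # pod_ctrl is an assumed instance
    logging.getLogger(__name__).info("Pod phase: %s", pod.status.phase)
except DatabandRuntimeError as err:
    logging.getLogger(__name__).error("Failed to read pod status: %s", err)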