def create_enum_type_mismatch_error(context, config_value):
    check.inst_param(context, 'context', ValidationContext)
    return EvaluationError(
        stack=context.stack,
        reason=DagsterEvaluationErrorReason.RUNTIME_TYPE_MISMATCH,
        message='Value for enum type {type_name} must be a string'.format(
            type_name=context.config_type.name
        ),
        error_data=RuntimeMismatchErrorData(context.config_type, repr(config_value)),
    )
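
All of these snippets guard their arguments with dagster's check helpers (inst_param, str_param, and friends). As a rough, self-contained sketch of what such a guard does (not the actual check module):

def inst_param(obj, param_name, expected_type):
    # Raise if obj is not an instance of expected_type; otherwise pass it through.
    if not isinstance(obj, expected_type):
        raise TypeError(
            'Param "{name}" is not a {type_name}. Got {value!r}.'.format(
                name=param_name, type_name=expected_type.__name__, value=obj
            )
        )
    return obj
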
def observable(self, run_id, step_key, io_type, cursor=None):
    check.str_param(run_id, 'run_id')
    check.str_param(step_key, 'step_key')
    check.inst_param(io_type, 'io_type', ComputeIOType)
    check.opt_str_param(cursor, 'cursor')

    if cursor:
        cursor = int(cursor)
    else:
        cursor = 0

    subscription = ComputeLogSubscription(self, run_id, step_key, io_type, cursor)
    self.on_subscribe(subscription)
    return Observable.create(subscription)  # pylint: disable=E1101
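
Note how the cursor is normalized before the subscription is built: an optional string offset becomes an int, defaulting to 0. In isolation, that idiom is just (illustrative helper name):

def normalize_cursor(cursor=None):
    # Empty or missing cursors start from offset 0; otherwise parse the string offset.
    return int(cursor) if cursor else 0

# normalize_cursor() -> 0, normalize_cursor('42') -> 42
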
def scaffold_type(config_type, skip_optional=True):
    check.inst_param(config_type, 'config_type', ConfigType)
    check.bool_param(skip_optional, 'skip_optional')

    # Right now selectors and composites have the same
    # scaffolding logic, which might not be wise.
    if config_type.has_fields:
        default_dict = {}
        for field_name, field in config_type.fields.items():
            if skip_optional and field.is_optional:
                continue
            default_dict[field_name] = scaffold_type(field.config_type, skip_optional)
        return default_dict
    elif config_type.is_any:
        return 'AnyType'
    elif config_type.is_scalar:
        # Scaffold a placeholder value for the scalar, keyed by its type name.
        defaults = {'String': '', 'Path': 'path/to/something', 'Int': 0, 'Bool': True}
        return defaults[config_type.name]
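
For intuition, here is a toy, self-contained version of the same recursive scaffolding idea, using a made-up FieldSpec class instead of dagster's ConfigType:

class FieldSpec:
    # Illustrative stand-in for a config type: either a dict of named child
    # fields or a scalar kind ('string', 'int', 'bool').
    def __init__(self, kind, fields=None, is_optional=False):
        self.kind = kind
        self.fields = fields or {}
        self.is_optional = is_optional

def scaffold(spec, skip_optional=True):
    if spec.kind == 'dict':
        return {
            name: scaffold(child, skip_optional)
            for name, child in spec.fields.items()
            if not (skip_optional and child.is_optional)
        }
    return {'string': '', 'int': 0, 'bool': True}[spec.kind]

# scaffold(FieldSpec('dict', {'path': FieldSpec('string'),
#                             'retries': FieldSpec('int', is_optional=True)}))
# -> {'path': ''}
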
def step_materialization(step_context, materialization):
    check.inst_param(materialization, 'materialization', Materialization)
    return DagsterEvent.from_step(
        event_type=DagsterEventType.STEP_MATERIALIZATION,
        step_context=step_context,
        event_specific_data=StepMaterializationData(materialization),
        message=materialization.description
        if materialization.description
        else 'Materialized value{label_clause}.'.format(
            label_clause=' {label}'.format(label=materialization.label)
            if materialization.label
            else ''
        ),
    )

def validate(self, dataframe, column_name):
    check.inst_param(dataframe, 'dataframe', DataFrame)
    check.str_param(column_name, 'column_name')
    if column_name not in dataframe.columns:
        raise ConstraintViolationException(
            constraint_name=self.name,
            constraint_description=self.description,
            column_name=column_name,
        )
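
The validate hook follows a common pattern: check a precondition on the DataFrame and raise a descriptive constraint violation when it fails. A self-contained sketch of that pattern (toy class and exception, not dagster_pandas' actual types):

import pandas as pd

class ColumnMissingError(Exception):
    pass

class ColumnExistsConstraint:
    name = 'column_exists'
    description = 'The named column must be present in the DataFrame.'

    def validate(self, dataframe, column_name):
        # Raise if the DataFrame does not contain the required column.
        if column_name not in dataframe.columns:
            raise ColumnMissingError(
                '{name}: column "{col}" not found'.format(name=self.name, col=column_name)
            )

# ColumnExistsConstraint().validate(pd.DataFrame({'a': [1]}), 'b')  # raises ColumnMissingError
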
def create_execution_plan_from_steps(pipeline_def, steps, environment_config):
    check.inst_param(pipeline_def, 'pipeline_def', PipelineDefinition)
    check.list_param(steps, 'steps', of_type=ExecutionStep)
    check.inst_param(environment_config, 'environment_config', EnvironmentConfig)

    step_dict = {step.key: step for step in steps}
    deps = {step.key: set() for step in steps}

    seen_keys = set()
    for step in steps:
        if step.key in seen_keys:
            keys = [s.key for s in steps]
            check.failed(
                'Duplicated key {key}. Full list: {key_list}.'.format(key=step.key, key_list=keys)
            )
        seen_keys.add(step.key)

        for step_input in step.step_inputs:
            # Record each input's upstream step keys in the dependency map
            # (assumes step inputs expose their upstream keys via dependency_keys).
            deps[step.key].update(step_input.dependency_keys)
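
The duplicate-key guard and the deps map above amount to building an adjacency map keyed by step key. A standalone toy version (hypothetical Step tuples, not dagster's ExecutionStep) looks like this:

from collections import namedtuple

Step = namedtuple('Step', 'key upstream_keys')

def build_deps(steps):
    # Map each step key to the set of keys it depends on, rejecting duplicates.
    deps, seen = {}, set()
    for step in steps:
        if step.key in seen:
            raise ValueError('Duplicated key {key}.'.format(key=step.key))
        seen.add(step.key)
        deps[step.key] = set(step.upstream_keys)
    return deps

# build_deps([Step('load', []), Step('transform', ['load'])])
# -> {'load': set(), 'transform': {'load'}}
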
def resolve_to_runtime_set(set_type):
    from .python_set import create_typed_runtime_set

    check.inst_param(set_type, 'set_type', WrappingSetType)
    return create_typed_runtime_set(set_type.inner_type)


def resolve_to_runtime_tuple(tuple_type):
    from .python_tuple import create_typed_tuple, PythonTuple

    check.inst_param(tuple_type, 'tuple_type', WrappingTupleType)
    if tuple_type.inner_type is None:
        return PythonTuple
    return create_typed_tuple(*tuple_type.inner_type)

def create_system_storage_data(
    context_creation_data, system_storage_data, scoped_resources_builder
):
    check.inst_param(context_creation_data, 'context_creation_data', ContextCreationData)

    environment_config, pipeline_def, system_storage_def, pipeline_run = (
        context_creation_data.environment_config,
        context_creation_data.pipeline_def,
        context_creation_data.system_storage_def,
        context_creation_data.pipeline_run,
    )

    system_storage_data = (
        system_storage_data
        if system_storage_data
        else construct_system_storage_data(
            InitSystemStorageContext(
                pipeline_def=pipeline_def,
                mode_def=context_creation_data.mode_def,
                system_storage_def=system_storage_def,
                # pipeline_run and environment_config are unpacked above and assumed
                # to be passed through here; the real InitSystemStorageContext takes
                # additional fields (e.g. resources built by scoped_resources_builder).
                pipeline_run=pipeline_run,
                environment_config=environment_config,
            )
        )
    )
    return system_storage_data

def transform_fn(context, _args):
    check.inst_param(context, 'context', TransformExecutionContext)
    # sql_text is closed over from the enclosing scope that defines this transform.
    return execute_sql_text_on_context(context.get_system_context(), sql_text)