def tearDown(self):
    nx.write_gpickle = self.real_write_gpickle
    dbt.utils.dependency_projects = self.real_dependency_projects
    dbt.clients.system.find_matching = self.real_find_matching
    dbt.clients.system.load_file_contents = self.real_load_file_contents
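# A possible setUp counterpart to the tearDown above, sketched from the
# attributes it restores; the MagicMock replacements are an assumption, not
# the test suite's actual doubles.
from unittest import mock

def setUp(self):
    self.real_write_gpickle = nx.write_gpickle
    self.real_dependency_projects = dbt.utils.dependency_projects
    self.real_find_matching = dbt.clients.system.find_matching
    self.real_load_file_contents = dbt.clients.system.load_file_contents

    nx.write_gpickle = mock.MagicMock()
    dbt.utils.dependency_projects = mock.MagicMock(return_value=[])
    dbt.clients.system.find_matching = mock.MagicMock(return_value=[])
    dbt.clients.system.load_file_contents = mock.MagicMock(return_value='')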
def test__noop(self):
    actual = dbt.utils.deep_map(lambda x, _: x, self.input_value)
    self.assertEqual(actual, self.input_value)
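# Illustrative sketch (not dbt's implementation) of the deep_map contract the
# no-op test exercises: the callback receives each leaf value plus its key
# path, and containers are rebuilt around the mapped leaves, so an identity
# callback reproduces the input structure.
def deep_map_sketch(func, value, keypath=()):
    if isinstance(value, dict):
        return {k: deep_map_sketch(func, v, keypath + (k,))
                for k, v in value.items()}
    elif isinstance(value, (list, tuple)):
        return [deep_map_sketch(func, v, keypath + (i,))
                for i, v in enumerate(value)]
    return func(value, keypath)

assert deep_map_sketch(lambda x, _: x, {'a': [1, {'b': 2}]}) == {'a': [1, {'b': 2}]}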
try:
    self.before_hooks(adapter)
    started = time.time()
    self.before_run(adapter, selected_uids)
    res = self.execute_nodes()
    self.after_run(adapter, res)
    elapsed = time.time() - started
    self.after_hooks(adapter, res, elapsed)
finally:
    adapter.cleanup_connections()

result = self.get_result(
    results=res,
    elapsed_time=elapsed,
    generated_at=dbt.utils.timestring()
)
return result
def _parse_template_docs(self, template, docfile):
    for key, item in template.module.__dict__.items():
        if type(item) != jinja2.runtime.Macro:
            continue

        if not key.startswith(dbt.utils.DOCS_PREFIX):
            continue

        name = key.replace(dbt.utils.DOCS_PREFIX, '')
        # because docs are in their own graph namespace, node type doesn't
        # need to be part of the unique ID.
        unique_id = '{}.{}'.format(docfile.package_name, name)

        merged = dbt.utils.deep_merge(
            docfile.to_dict(),
            {
                'name': name,
                'unique_id': unique_id,
                'block_contents': item().strip(),
            }
        )
        yield ParsedDocumentation.from_dict(merged)
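# A hedged example of the input that feeds this generator: a docs block from
# a project .md file. dbt's Jinja extension compiles each block into a macro
# whose name carries dbt.utils.DOCS_PREFIX, so item().strip() above returns
# the block body. The block name and text here are invented.
EXAMPLE_DOCS_FILE = '''
{% docs order_status %}
One of "placed", "shipped", or "returned".
{% enddocs %}
'''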
def __init__(self, args, config):
    super(DebugTask, self).__init__(args, config)
    self.profiles_dir = getattr(self.args, 'profiles_dir',
                                dbt.config.PROFILES_DIR)
    self.profile_path = os.path.join(self.profiles_dir, 'profiles.yml')
    self.project_path = os.path.join(os.getcwd(), 'dbt_project.yml')
    self.cli_vars = dbt.utils.parse_cli_vars(
        getattr(self.args, 'vars', '{}')
    )

    # set by _load_*
    self.profile = None
    self.profile_fail_details = ''
    self.raw_profile_data = None
    self.profile_name = None
    self.project = None
    self.project_fail_details = ''
    self.messages = []
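# Hedged illustration of parse_cli_vars: it turns the --vars string (YAML)
# into a dict, with the '{}' default yielding an empty dict. The keys and
# values here are invented.
cli_vars = dbt.utils.parse_cli_vars('{env: ci, run_date: "2019-01-01"}')
# -> {'env': 'ci', 'run_date': '2019-01-01'}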
def get_fqn(cls, node, package_project_config, extra=[]):
    parts = dbt.utils.split_path(node.path)
    name, _ = os.path.splitext(parts[-1])
    fqn = ([package_project_config.project_name] +
           parts[:-1] +
           extra +
           [name])
    return fqn
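# Worked example with made-up values: for node.path 'marts/core/dim_customers.sql'
# in a project named 'jaffle_shop' and no extras, split_path gives
# ['marts', 'core', 'dim_customers.sql'], the extension is dropped, and the
# resulting fqn is ['jaffle_shop', 'marts', 'core', 'dim_customers'].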
context = {'doc': dbt.context.parser.docs(source, refs.docrefs)}
description = table.get('description', '')
source_description = source.get('description', '')
get_rendered(description, context)
get_rendered(source_description, context)

freshness = dbt.utils.deep_merge(source.get('freshness', {}),
                                 table.get('freshness', {}))

loaded_at_field = table.get('loaded_at_field',
                            source.get('loaded_at_field'))

# use 'or {}' to allow quoting: null
source_quoting = source.get('quoting') or {}
table_quoting = table.get('quoting') or {}
quoting = dbt.utils.deep_merge(source_quoting, table_quoting)

default_database = self.root_project_config.credentials.database
return ParsedSourceDefinition(
    package_name=package_name,
    database=source.get('database', default_database),
    schema=source.get('schema', source.name),
    identifier=table.get('identifier', table.name),
    root_path=root_dir,
    path=path,
    original_file_path=path,
    columns=refs.column_info,
    unique_id=unique_id,
    name=table.name,
    description=description,
    source_name=source.name,
    source_description=source_description,
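# A hedged sketch of the schema.yml source entry this code walks; the keys
# mirror the .get() calls above (description, freshness, loaded_at_field,
# quoting, database, identifier), while the concrete names and values are
# invented. Table-level settings override source-level ones via deep_merge.
EXAMPLE_SOURCE_SCHEMA = '''
version: 2
sources:
  - name: raw_events
    database: analytics
    loaded_at_field: _etl_loaded_at
    freshness:
      warn_after: {count: 12, period: hour}
    quoting:
      identifier: false
    tables:
      - name: pageviews
        identifier: pageviews_v1
        description: Raw pageview events.
        quoting:
          identifier: true
'''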
while parser.stream.skip_if('comma'):
    target = parser.parse_assign_target(name_only=True)

    if target.name == 'default':
        pass
    elif target.name == 'adapter':
        parser.stream.expect('assign')
        value = parser.parse_expression()
        adapter_name = value.value
    else:
        dbt.exceptions.invalid_materialization_argument(
            materialization_name, target.name)

node.name = dbt.utils.get_materialization_macro_name(
    materialization_name, adapter_name)

node.body = parser.parse_statements(('name:endmaterialization',),
                                    drop_needle=True)

return node
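# The parse loop above accepts dbt's two materialization block signatures,
# shown here as a reminder (the adapter name is just an example):
EXAMPLE_MATERIALIZATION_BLOCKS = '''
{% materialization incremental, default %}
  ...
{% endmaterialization %}

{% materialization incremental, adapter='bigquery' %}
  ...
{% endmaterialization %}
'''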
root_path=root_path,
)
try:
    template = dbt.clients.jinja.get_template(
        macro_file_contents, context, node=base_node)
except dbt.exceptions.CompilationException as e:
    e.node = base_node
    raise e

for key, item in template.module.__dict__.items():
    if type(item) != jinja2.runtime.Macro:
        continue

    node_type = None
    if key.startswith(dbt.utils.MACRO_PREFIX):
        node_type = NodeType.Macro
        name = key.replace(dbt.utils.MACRO_PREFIX, '')

    elif key.startswith(dbt.utils.OPERATION_PREFIX):
        node_type = NodeType.Operation
        name = key.replace(dbt.utils.OPERATION_PREFIX, '')

    if node_type != resource_type:
        continue

    unique_id = cls.get_path(resource_type, package_name, name)

    merged = dbt.utils.deep_merge(
        base_node.serialize(),
        {
            'name': name,
:param args argparse.Namespace: The arguments as parsed from the cli.
:param cli_vars dict: The command-line variables passed as arguments, as
    a dict.
:param project_profile_name Optional[str]: The profile name, if specified
    in a project.
:raises DbtProjectError: If there is no profile name specified in the
    project or the command line arguments, or if the specified profile is
    not found.
:raises DbtProfileError: If the profile is invalid or missing, or the
    target could not be found.
:returns Profile: The new Profile object.
"""
if cli_vars is None:
    cli_vars = dbt.utils.parse_cli_vars(getattr(args, 'vars', '{}'))

threads_override = getattr(args, 'threads', None)
# TODO(jeb): is it even possible for this to not be set?
profiles_dir = getattr(args, 'profiles_dir', PROFILES_DIR)
target_override = getattr(args, 'target', None)
raw_profiles = read_profile(profiles_dir)
profile_name = cls._pick_profile_name(getattr(args, 'profile', None),
                                       project_profile_name)

return cls.from_raw_profiles(
    raw_profiles=raw_profiles,
    profile_name=profile_name,
    cli_vars=cli_vars,
    target_override=target_override,
    threads_override=threads_override
)
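# A hedged usage sketch for Profile.from_args: build an argparse namespace
# carrying the attributes read above and hand it over. The flag set, target
# name, and profile name are invented; a matching profiles.yml in the default
# profiles directory is assumed to exist.
import argparse
from dbt.config import Profile

parser = argparse.ArgumentParser()
parser.add_argument('--profile')
parser.add_argument('--target')
parser.add_argument('--threads', type=int)
parser.add_argument('--vars', default='{}')
args = parser.parse_args(['--target', 'dev'])

profile = Profile.from_args(args, project_profile_name='my_profile')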