import os

# generate_template_mapping and get_formatted_template are flintrock helpers;
# FLINTROCK_ROOT_DIR points at the repository root.
def test_templates(dummy_cluster, spark_version):
    """Verify that every bundled template renders without raising."""
    template_dir = os.path.join(FLINTROCK_ROOT_DIR, 'flintrock', 'templates')
    for (dirpath, dirnames, filenames) in os.walk(template_dir):
        if filenames:
            for filename in filenames:
                template_path = os.path.join(dirpath, filename)
                mapping = generate_template_mapping(
                    cluster=dummy_cluster,
                    hadoop_version='',
                    spark_version=spark_version,
                    spark_executor_instances=0,
                )
                get_formatted_template(
                    path=template_path,
                    mapping=mapping,
                )
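For context, here is a minimal sketch of what the two helpers this test exercises might look like. The real implementations live in flintrock itself; the mapping keys and function bodies below are illustrative assumptions, not flintrock's actual code.

# Hypothetical minimal stand-ins for the helpers the test exercises,
# assuming templates use str.format-style {placeholder} fields.
def generate_template_mapping(*, cluster, hadoop_version, spark_version,
                              spark_executor_instances):
    # The real function presumably derives additional keys from the cluster;
    # the keys shown here are illustrative assumptions.
    return {
        'hadoop_version': hadoop_version,
        'spark_version': spark_version,
        'spark_executor_instances': str(spark_executor_instances),
    }

def get_formatted_template(*, path, mapping):
    # Read the template and substitute placeholders. A KeyError here means a
    # template references a key missing from the mapping -- exactly the
    # regression the test above is meant to catch.
    with open(path) as f:
        return f.read().format(**mapping)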
# Hadoop configuration: create the remote config directory, then render each
# Hadoop config template locally and write it to the cluster node over SSH.
# (template_paths, ssh_client, and THIS_DIR are defined elsewhere in the module.)
ssh_check_output(
    client=ssh_client,
    command="mkdir -p hadoop/conf",
)
for template_path in template_paths:
    ssh_check_output(
        client=ssh_client,
        command="""
            echo {f} > {p}
        """.format(
            f=shlex.quote(
                get_formatted_template(
                    path=os.path.join(THIS_DIR, "templates", template_path),
                    mapping=generate_template_mapping(
                        cluster=cluster,
                        hadoop_version=self.version,
                        # Hadoop doesn't need to know what
                        # Spark version we're using.
                        spark_version='',
                        spark_executor_instances=0,
                    ))),
            p=shlex.quote(template_path)))
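For reference, a rough sketch of what ssh_check_output might look like on top of paramiko, which flintrock uses for SSH. The naming and error handling here are assumptions for illustration, not flintrock's actual implementation.

# Hypothetical sketch: run a command over an existing paramiko SSHClient
# and fail loudly on a non-zero exit status.
def ssh_check_output(*, client, command):
    stdin, stdout, stderr = client.exec_command(command)
    exit_status = stdout.channel.recv_exit_status()  # blocks until done
    if exit_status != 0:
        raise RuntimeError(
            "Remote command failed with status {s}: {e}".format(
                s=exit_status,
                e=stderr.read().decode('utf-8')))
    return stdout.read().decode('utf-8')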
# Same pattern for Spark: create spark/conf remotely, then render and upload
# each Spark config template. Unlike the Hadoop case, the mapping carries the
# real Spark version (or git commit) and the executor count.
ssh_check_output(
    client=ssh_client,
    command="mkdir -p spark/conf",
)
for template_path in template_paths:
    ssh_check_output(
        client=ssh_client,
        command="""
            echo {f} > {p}
        """.format(
            f=shlex.quote(
                get_formatted_template(
                    path=os.path.join(THIS_DIR, "templates", template_path),
                    mapping=generate_template_mapping(
                        cluster=cluster,
                        spark_executor_instances=self.spark_executor_instances,
                        hadoop_version=self.hadoop_version,
                        spark_version=self.version or self.git_commit,
                    ))),
            p=shlex.quote(template_path)))
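The shlex.quote calls are what make this echo-over-SSH approach safe: rendered config files contain newlines, quotes, and XML, all of which must reach the remote file unmangled. A quick standalone illustration (the path and contents are invented for the example):

import shlex

rendered = '<configuration>\n  <property>example</property>\n</configuration>'
command = "echo {f} > {p}".format(
    f=shlex.quote(rendered),
    p=shlex.quote('spark/conf/example.xml'),
)
# `command` is a single string that a POSIX shell will execute verbatim,
# writing the multi-line content to the target path.
print(command)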