def test_get_latest_commit():
    sha = get_latest_commit("https://github.com/apache/spark")
    # A full Git SHA-1 hash is 40 hex characters.
    assert len(sha) == 40
    with pytest.raises(UsageError):
        get_latest_commit("https://google.com")
    with pytest.raises(Exception):
        get_latest_commit("https://github.com/apache/nonexistent-repo")
def test_validate_invalid_download_source():
    with pytest.raises(Error):
        validate_download_source("https://www.apache.org/dyn/closer.lua?action=download&filename=hadoop/common/hadoop-2.8.3/hadoop-2.8.3.tar.gz")
def test_validate_valid_download_source():
    validate_download_source("https://www.apache.org/dyn/closer.lua?action=download&filename=hadoop/common/hadoop-2.8.5/hadoop-2.8.5.tar.gz")
    validate_download_source("https://www.apache.org/dyn/closer.lua?action=download&filename=spark/spark-2.4.4/spark-2.4.4-bin-hadoop2.7.tgz")
def test_operations_against_stopped_cluster(stopped_cluster):
    p = subprocess.run(
        ['flintrock', 'run-command', stopped_cluster, 'ls'],
        stderr=subprocess.PIPE)
    expected_error_message = str(
        ClusterInvalidState(
            attempted_command='run-command',
            state='stopped'))
    assert p.returncode == 1
    assert p.stderr.decode('utf-8').strip() == expected_error_message

    p = subprocess.run(
        ['flintrock', 'copy-file', stopped_cluster, __file__, '/remote/path'],
        stderr=subprocess.PIPE)
    expected_error_message = str(
        ClusterInvalidState(
            attempted_command='copy-file',
            state='stopped'))
    assert p.returncode == 1
    assert p.stderr.decode('utf-8').strip() == expected_error_message
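The test compares the CLI's stderr against str() of the exception, which implies the exception builds its own user-facing message from the keyword arguments. A minimal stand-in (the exact wording of the message is an assumption):

class ClusterInvalidState(Exception):
    def __init__(self, *, attempted_command, state):
        # Hypothetical message format; only the str()-comparison contract
        # is established by the test above.
        super().__init__(
            f"Cannot execute {attempted_command} against a cluster in the {state} state.")
        self.attempted_command = attempted_command
        self.state = state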
def test_option_requires():
    some_option = 'some-value'
    set_option = 'set-value'
    unset_option = None

    # Passes: the one required option is set.
    option_requires(
        option='--some-option',
        requires_all=['--set-option'],
        scope=locals()
    )

    # Passes: at least one of the required options is set.
    option_requires(
        option='--some-option',
        requires_any=[
            '--set-option',
            '--unset-option'],
        scope=locals()
    )

    # Fails: requires_all demands that every listed option be set.
    with pytest.raises(UsageError):
        option_requires(
            option='--some-option',
            requires_all=[
                '--set-option',
                '--unset-option'],
            scope=locals()
        )

    # Fails: none of the requires_any options is set.
    with pytest.raises(UsageError):
        option_requires(
            option='--some-option',
            requires_any=[
                '--unset-option'],
            scope=locals()
        )
def test_option_requires_conditional_value():
    unset_option = None

    # The option's value does not match the conditional value, so the
    # requirements are not enforced.
    some_option = 'not magic'
    option_requires(
        option='--some-option',
        conditional_value='magic',
        requires_any=[
            '--unset-option'],
        scope=locals()
    )

    # An empty (falsy) option value is not enforced either.
    some_option = ''
    option_requires(
        option='--some-option',
        conditional_value='',
        requires_any=[
            '--unset-option'],
        scope=locals()
    )

    # The option matches the conditional value and no required option is
    # set, so this must fail.
    with pytest.raises(UsageError):
        some_option = 'magic'
        option_requires(
            option='--some-option',
            conditional_value='magic',
            requires_any=[
                '--unset-option'],
            scope=locals()
        )
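Taken together, these tests pin down option_requires closely enough to sketch it: the option's value is looked up in the caller's scope, and the requirements are enforced only when that value is truthy and, if a conditional_value is given, equal to it. This is an inference from the test cases alone, not the project's actual code; it reuses the UsageError stand-in above and the name-conversion helpers exercised further below.

def option_requires(
        *,
        option,
        conditional_value=None,
        requires_all=(),
        requires_any=(),
        scope):
    """Raise UsageError if `option` is set but its companion options are not."""
    option_value = scope.get(option_name_to_variable_name(option))
    # Enforce nothing if the option is unset, or if it doesn't match the
    # conditional value that gates the requirement.
    if not option_value:
        return
    if conditional_value is not None and option_value != conditional_value:
        return
    if requires_all:
        if not all(scope.get(option_name_to_variable_name(o)) for o in requires_all):
            raise UsageError(f"{option} requires all of: {', '.join(requires_all)}")
    if requires_any:
        if not any(scope.get(option_name_to_variable_name(o)) for o in requires_any):
            raise UsageError(f"{option} requires at least one of: {', '.join(requires_any)}")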
def test_templates(dummy_cluster, spark_version):
    template_dir = os.path.join(FLINTROCK_ROOT_DIR, 'flintrock', 'templates')
    for (dirpath, dirnames, filenames) in os.walk(template_dir):
        if filenames:
            for filename in filenames:
                template_path = os.path.join(dirpath, filename)
                mapping = generate_template_mapping(
                    cluster=dummy_cluster,
                    hadoop_version='',
                    spark_version=spark_version,
                    spark_executor_instances=0,
                )
                # The return value is discarded; the test only checks that
                # every template renders without raising.
                get_formatted_template(
                    path=template_path,
                    mapping=mapping,
                )
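A plausible reading of get_formatted_template, assuming the templates are plain text files with str.format-style placeholders (an assumption; only the call signature appears above):

def get_formatted_template(*, path, mapping):
    # Hypothetical sketch: read the template and substitute every
    # placeholder from the mapping.
    with open(path) as f:
        return f.read().format(**mapping)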
def test_option_name_to_variable_name_conversions():
    test_cases = [
        ('--ec2-user', 'ec2_user'),
        ('--provider', 'provider'),
        ('--spark-git-commit', 'spark_git_commit')
    ]
    for option_name, variable_name in test_cases:
        assert option_name_to_variable_name(option_name) == variable_name
        assert variable_name_to_option_name(variable_name) == option_name
        assert option_name == variable_name_to_option_name(
            option_name_to_variable_name(option_name))
        assert variable_name == option_name_to_variable_name(
            variable_name_to_option_name(variable_name))
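The round-trip assertions only hold if the two conversions are exact inverses. A sketch that satisfies all of the listed test cases, assuming stripping the leading dashes and swapping hyphens for underscores is the whole transformation:

def option_name_to_variable_name(option_name):
    # '--spark-git-commit' -> 'spark_git_commit'
    return option_name.lstrip('-').replace('-', '_')

def variable_name_to_option_name(variable_name):
    # 'spark_git_commit' -> '--spark-git-commit'
    return '--' + variable_name.replace('_', '-')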