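Every snippet on this page loads its expected output through expected_json_results_from_file; the snippets also assume pytest and the project's test helpers (get_configuration, get_configuration_single, get_ftp_settings, get_local_settings, get_crawler_instance) are already imported. The helper itself is not shown here, so as a point of reference, here is a minimal sketch of what such a helper might look like, assuming the positional arguments are path segments under a per-suite fixture directory (the tests/ root and the 'functional' default are assumptions, not confirmed by the snippets):

import json
import os


def expected_json_results_from_file(*path_chunks, **kwargs):
    # Minimal sketch, not the real implementation. One snippet below
    # passes test_suite='unit', so a 'functional' default is assumed
    # for the rest; the tests/ root is likewise an assumption.
    test_suite = kwargs.get('test_suite', 'functional')
    fixture_path = os.path.join('tests', test_suite, *path_chunks)
    with open(fixture_path) as fixture_file:
        return json.load(fixture_file)
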
@pytest.mark.parametrize(
    'expected_results, config, spider',  # argnames inferred from the tuples below
    [
        # 'smoke' case reconstructed by analogy with 'smoke_single';
        # fixture name, config factory and spider name are assumptions
        # (the original snippet truncates here).
        (
            expected_json_results_from_file(
                'cds',
                'fixtures',
                'cds_expected.json',
            ),
            get_configuration(),
            'CDS',
        ),
        (
            expected_json_results_from_file(
                'cds',
                'fixtures',
                'cds_single_expected.json',
            ),
            get_configuration_single(),
            'CDS_single',
        ),
    ],
    ids=[
        'smoke',
        'smoke_single',
    ]
)
def test_cds(
    expected_results,
    config,
    spider,  # inferred third argname
):
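
Because each case carries an id, pytest exposes it in the test node name (e.g. test_cds[smoke_single]), so a single case can be selected with -k. A hypothetical invocation, assuming the file lives at tests/functional/cds/test_cds.py:

import pytest

# Hypothetical: run only the 'smoke_single' case; the file path is an
# assumption about the repository layout.
pytest.main(['tests/functional/cds/test_cds.py', '-k', 'smoke_single'])
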
@pytest.mark.parametrize(
    'expected_results, settings',  # `cleanup` below is a separate fixture
    [
        # 'ftp' case reconstructed; reusing the same fixture file with
        # get_ftp_settings() is an assumption (the snippet truncates here).
        (
            expected_json_results_from_file(
                'wsp',
                'fixtures',
                'wsp_smoke_records.json',
            ),
            get_ftp_settings(),
        ),
        (
            expected_json_results_from_file(
                'wsp',
                'fixtures',
                'wsp_smoke_records.json',
            ),
            get_local_settings(),
        ),
    ],
    ids=[
        'ftp',
        'local',
    ]
)
def test_wsp_ftp_crawl_twice(expected_results, settings, cleanup):
    crawler = get_crawler_instance(
        settings.get('CRAWLER_HOST_URL'),
    )
@pytest.mark.parametrize(
    'expected_results, config',
    [
        (
            expected_json_results_from_file(
                'pos',
                'fixtures',
                'pos_conference_proceedings_records.json',
            ),
            get_configuration(),
        ),
    ],
    ids=[
        'smoke',
    ]
)
def test_pos_conference_paper_record_and_proceedings_record(
    expected_results,
    config,
):
    crawler = get_crawler_instance(config['CRAWLER_HOST_URL'])
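
The tests read the crawler endpoint from the parametrized configuration in two ways, config['CRAWLER_HOST_URL'] and settings.get('CRAWLER_HOST_URL'), so the factories evidently return mappings with at least that key. A hypothetical sketch of one such factory (the URL value and any further keys are assumptions):

def get_configuration():
    # Hypothetical sketch: the snippets only prove that the returned
    # mapping exposes 'CRAWLER_HOST_URL'; the value is an assumption.
    return {
        'CRAWLER_HOST_URL': 'http://localhost:6800',
    }
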
def get_expected_fixture(response_file_name):
    # Load the expected DESY record for `response_file_name` from the
    # unit-test fixtures under responses/desy.
    expected_record = expected_json_results_from_file(
        'responses/desy',
        response_file_name,
        test_suite='unit',
    )
    return expected_record
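
A hypothetical call, assuming a fixture file named desy_record.json exists under the unit suite's responses/desy directory:

# Hypothetical usage; the fixture file name is an assumption.
expected = get_expected_fixture('desy_record.json')
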
@pytest.mark.parametrize(
    'expected_results, settings',  # `cleanup` below is a separate fixture
    [
        (
            expected_json_results_from_file(
                'desy',
                'fixtures',
                'desy_records_ftp_expected.json',
            ),
            get_ftp_settings(),
        ),
    ],
    ids=[
        'ftp package crawl twice',
    ]
)
def test_desy_crawl_twice(expected_results, settings, cleanup):
    crawler = get_crawler_instance(
        settings.get('CRAWLER_HOST_URL')
    )
@pytest.mark.parametrize(
    'expected_results, config, spider',  # argnames inferred from the tuples
    [
        (
            expected_json_results_from_file(
                'arxiv',
                'fixtures',
                'arxiv_expected.json',
            ),
            get_configuration(),
            'arXiv',
        ),
        (
            expected_json_results_from_file(
                'arxiv',
                'fixtures',
                'arxiv_expected_single.json',
            ),
            get_configuration_single(),
            'arXiv_single',
        ),
    ],
    ids=[
        'smoke',
        'smoke_single',
    ]
)
def test_arxiv(
    expected_results,
    config,
    spider,  # inferred third argname
):
@pytest.mark.parametrize(
    'expected_results, settings',  # argnames inferred from the test signature
    [
        # 'ftp package' case reconstructed from the crawl-twice snippet
        # above; the original snippet truncates here.
        (
            expected_json_results_from_file(
                'desy',
                'fixtures',
                'desy_records_ftp_expected.json',
            ),
            get_ftp_settings(),
        ),
        (
            expected_json_results_from_file(
                'desy',
                'fixtures',
                'desy_records_local_expected.json',
            ),
            get_local_settings(),
        ),
    ],
    ids=[
        'ftp package',
        'local package',
    ]
)
def test_desy(
    expected_results,
    settings,
):
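
All of the truncated test bodies share the same shape: obtain a crawler client for CRAWLER_HOST_URL, trigger a crawl, and compare the harvested records against the expected JSON. A generic, hypothetical sketch of that final comparison step (the helper name and the sorting strategy are assumptions, not part of the snippets above):

import json


def assert_records_match(crawled_records, expected_results):
    # Hypothetical helper: serialize each record deterministically so
    # the comparison does not depend on crawl order.
    def sort_key(record):
        return json.dumps(record, sort_keys=True)

    assert sorted(crawled_records, key=sort_key) == \
        sorted(expected_results, key=sort_key)
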