def test_curl_plugin_fetch_evil_archive(self):
    # There are several evil archives checked in under tests/resources. The
    # others are checked directly as part of test_curl_plugin.py.
    fields = {
        'url': (shared.test_resources / '.tar').as_uri(),
        'unpack': 'tar',
    }
    with self.assertRaises(plugin.PluginRuntimeError):
        self.do_plugin_test('curl', fields, {})
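# Hypothetical illustration (not part of the test above): an "evil" archive is
# one whose entries try to escape the extraction directory, for example via
# '..' path components. A minimal sketch of building such a tarball with the
# standard library, purely to show the kind of input the curl plugin is
# expected to reject with plugin.PluginRuntimeError:
import io
import tarfile

def build_evil_tar():
    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode='w') as archive:
        data = b'oops'
        info = tarfile.TarInfo(name='../escaped.txt')  # path traversal entry
        info.size = len(data)
        archive.addfile(info, io.BytesIO(data))
    return buf.getvalue()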
def setUp(self):
    self.content = {"some": "stuff", "foo/bar": "baz"}
    self.content_dir = shared.create_dir(self.content)
    self.cache_root = shared.create_dir()
    self.plugin_context = plugin.PluginContext(
        cwd='.',
        plugin_cache_root=self.cache_root,
        parallelism_semaphore=asyncio.BoundedSemaphore(
            plugin.DEFAULT_PARALLEL_FETCH_LIMIT),
        plugin_cache_locks=defaultdict(asyncio.Lock),
        tmp_root=shared.create_dir())
    plugin.debug_assert_clean_parallel_count()
def assert_parallel(n):
    # The plugin module keeps a global counter of all the jobs that run in
    # parallel, so that we can write these tests.
    if plugin.DEBUG_PARALLEL_MAX != n:
        raise AssertionError('Expected {} parallel {}. Counted {}.'.format(
            n, 'job' if n == 1 else 'jobs', plugin.DEBUG_PARALLEL_MAX))
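# Hypothetical, self-contained demonstration of assert_parallel (not taken
# from the real tests, which drive the counter by fetching several modules at
# once): DEBUG_PARALLEL_MAX is set directly here just to show the check.
plugin.DEBUG_PARALLEL_MAX = 2
assert_parallel(2)      # passes: the recorded high-water mark matches
# assert_parallel(3)    # would raise AssertionError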
def test_plugin_get_reup_fields(context, type, fields):
    handle = TestDisplayHandle()
    return run_task(
        plugin.plugin_get_reup_fields(context, type, fields, handle))
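# Hypothetical usage sketch, assumed to run inside a test method after the
# setUp() shown above (the 'some_file' resource name is made up): reup a
# 'curl' module and inspect whatever updated fields the plugin reports.
def example_reup(self):
    fields = {'url': (shared.test_resources / 'some_file').as_uri()}
    new_fields = test_plugin_get_reup_fields(
        self.plugin_context, 'curl', fields)
    print(new_fields)  # the original fields plus anything the reup adds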
def _get_parallel_fetch_limit(args):
    jobs = args.get('--jobs')
    if jobs is None:
        return plugin.DEFAULT_PARALLEL_FETCH_LIMIT
    try:
        parallel = int(jobs)
        if parallel <= 0:
            raise PrintableError('Argument to --jobs must be 1 or more.')
        return parallel
    except ValueError:
        raise PrintableError('Argument to --jobs must be a number.')
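# Hypothetical usage sketch: '--jobs' is assumed to come from a docopt-style
# args dict, and the resulting limit is the kind of value that feeds the
# asyncio.BoundedSemaphore used in the PluginContext construction above.
args = {'--jobs': '4'}
fetch_limit = _get_parallel_fetch_limit(args)  # -> 4
fetch_semaphore = asyncio.BoundedSemaphore(fetch_limit)

# With no --jobs flag, the plugin module's default limit is used.
assert (_get_parallel_fetch_limit({'--jobs': None})
        == plugin.DEFAULT_PARALLEL_FETCH_LIMIT)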
def get_plugin_context(self):
    return plugin.PluginContext(
        # Plugin cwd is always the directory containing peru.yaml, even if
        # the sync_dir has been explicitly set elsewhere. That's because
        # relative paths in peru.yaml should respect the location of that
        # file.
        cwd=str(Path(self.peru_file).parent),
        plugin_cache_root=self.cache.plugins_root,
        parallelism_semaphore=self.fetch_semaphore,
        plugin_cache_locks=self.plugin_cache_locks,
        tmp_root=self._tmp_root)
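# Hypothetical illustration of the comment above: relative paths in peru.yaml
# resolve against the directory containing peru.yaml, not against wherever
# peru happens to be invoked from. The paths below are made up.
import os
from pathlib import Path

peru_file = '/home/user/project/peru.yaml'
plugin_cwd = str(Path(peru_file).parent)       # '/home/user/project'
relative_field = '../sibling_repo'
resolved = os.path.normpath(os.path.join(plugin_cwd, relative_field))
# resolved == '/home/user/sibling_repo' (on a POSIX filesystem)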