@pytest.fixture
def test_manager_instance(loop, dbi, tmpdir):
    files_path = str(tmpdir.mkdir("files"))
    watch_path = str(tmpdir.mkdir("watch"))
    executor = concurrent.futures.ThreadPoolExecutor()
    scheduler = loop.run_until_complete(aiojobs.create_scheduler())
    manager = virtool.files.manager.Manager(executor, dbi, files_path, watch_path)
    loop.run_until_complete(scheduler.spawn(manager.run()))
    # Yield-style fixture: everything after the yield runs as teardown.
    yield manager
    loop.run_until_complete(scheduler.close())
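The fixture above drives an async scheduler from synchronous pytest code via loop.run_until_complete. For reference, the same create/spawn/close lifecycle in a purely async context looks like this (a minimal self-contained sketch using the same pre-1.0 create_scheduler API as the snippets, not code from the fixture itself):

import asyncio
import aiojobs

async def main():
    scheduler = await aiojobs.create_scheduler()
    try:
        # spawn() schedules the coroutine as a background job and
        # returns a Job handle immediately.
        job = await scheduler.spawn(asyncio.sleep(0.1))
        await job.wait()  # optional: block until the job finishes
    finally:
        # close() cancels any jobs that are still running or pending.
        await scheduler.close()

asyncio.run(main())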
# This block presumably runs in a loop: tag_list_url is cleared each pass,
# then re-set when the registry returns a 'next' Link header.
async with sess.get(tag_list_url,
                    **rqst_args) as resp:
    data = json.loads(await resp.read())
    if 'tags' in data:
        # sometimes there are dangling image names in the hub.
        tags.extend(data['tags'])
    tag_list_url = None
    next_page_link = resp.links.get('next')
    if next_page_link:
        next_page_url = next_page_link['url']
        tag_list_url = (
            registry_url
            .with_path(next_page_url.path)
            .with_query(next_page_url.query)
        )
scheduler = await aiojobs.create_scheduler(limit=4)
try:
    jobs = await asyncio.gather(*[
        scheduler.spawn(_scan_tag(sess, rqst_args, image, tag))
        for tag in tags])
    await asyncio.gather(*[job.wait() for job in jobs])
finally:
    await scheduler.close()
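The resp.links lookup parses the HTTP Link response header into yarl.URL objects, which is what makes the .with_path()/.with_query() re-rooting above work. A condensed, self-contained version of that pagination idiom (fetch_all_tags, base_url, and first_url are illustrative names, not from the original):

import json

async def fetch_all_tags(sess, base_url, first_url, **rqst_args):
    # sess: aiohttp.ClientSession; base_url/first_url: yarl.URL
    tags = []
    url = first_url
    while url is not None:
        async with sess.get(url, **rqst_args) as resp:
            data = json.loads(await resp.read())
            tags.extend(data.get('tags') or [])
            url = None
            next_link = resp.links.get('next')  # parsed Link header, if any
            if next_link:
                # Rebase the next page's path/query onto the registry origin.
                next_url = next_link['url']
                url = base_url.with_path(next_url.path).with_query(next_url.query)
    return tags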
async def _scan_image(sess, image):
    rqst_args = await registry_login(
        sess, registry_url,
        credentials, f'repository:{image}:pull')
    tags = []
    rqst_args['headers'].update(**base_hdrs)
    async with sess.get(registry_url / f'v2/{image}/tags/list',
                        **rqst_args) as resp:
        data = json.loads(await resp.read())
        if 'tags' in data:
            # sometimes there are dangling image names in the hub.
            tags.extend(data['tags'])
    scheduler = await aiojobs.create_scheduler(limit=8)
    try:
        jobs = await asyncio.gather(*[
            scheduler.spawn(_scan_tag(sess, rqst_args, image, tag))
            for tag in tags])
        await asyncio.gather(*[job.wait() for job in jobs])
    finally:
        await scheduler.close()
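Both variants share the same bounded fan-out shape; only the limit (4 vs. 8) differs, capping how many _scan_tag jobs run at once. That shape in isolation (a sketch; work is a stand-in coroutine, not part of the original):

import asyncio
import aiojobs

async def work(n):
    await asyncio.sleep(0.01)
    return n * 2

async def fan_out(items, limit=8):
    # At most `limit` jobs execute concurrently; further spawns queue
    # inside the scheduler until a slot frees up.
    scheduler = await aiojobs.create_scheduler(limit=limit)
    try:
        jobs = [await scheduler.spawn(work(i)) for i in items]
        # Job.wait() returns the coroutine's result and re-raises its exception.
        return await asyncio.gather(*(job.wait() for job in jobs))
    finally:
        await scheduler.close()

print(asyncio.run(fan_out(range(20))))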
def __init__(
    self,
    path: str,
    *,
    name: str = None,
    errors: Sequence[Type[BaseError]] = None,
    dependencies: Sequence[Depends] = None,
    common_dependencies: Sequence[Depends] = None,
    scheduler_factory: Callable[..., Awaitable[aiojobs.Scheduler]] = aiojobs.create_scheduler,
    scheduler_kwargs: dict = None,
    **kwargs,
) -> None:
    super().__init__(redirect_slashes=False)
    if errors is None:
        errors = self.default_errors
    # Scheduler creation is deferred: only the factory and its kwargs
    # are stored here.
    self.scheduler_factory = scheduler_factory
    self.scheduler_kwargs = scheduler_kwargs
    self.scheduler = None
    # Remember which module constructed this object.
    self.callee_module = inspect.getmodule(inspect.stack()[1][0]).__name__
    self.entrypoint_route = self.entrypoint_route_class(
        self,
        path,
        name=name,
        errors=errors,
        dependencies=dependencies,
        # ... (call continues; snippet truncated)
async def resume(self):
    # Lazily create the scheduler on first resume; later calls are no-ops.
    if self._scheduler is None:
        self._scheduler = await aiojobs.create_scheduler()
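The matching suspend step is not shown in the snippet; one plausible counterpart (a method sketch assuming the same _scheduler attribute; pause is a hypothetical name, not the original API):

async def pause(self):
    # close() cancels active and pending jobs; dropping the reference
    # lets resume() build a fresh scheduler next time.
    if self._scheduler is not None:
        await self._scheduler.close()
        self._scheduler = None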
async def setupOrmDb(self, dbpath):
    self.scheduler = await aiojobs.create_scheduler(
        close_timeout=1.0,
        limit=150,
        pending_limit=1000
    )
    # Old database, just for Atom feeds right now
    self.sqliteDb = SqliteDatabase(self._sqliteDbLocation)
    ensure(self.sqliteDb.setup())
    self.modelAtomFeeds = AtomFeedsModel(self.sqliteDb.feeds, parent=self)
    self.urlHistory = history.URLHistory(
        self.sqliteDb,
        enabled=self.settingsMgr.urlHistoryEnabled,
        parent=self
    )
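For context on the three keyword arguments passed to create_scheduler here: limit caps the number of concurrently running jobs, pending_limit caps the queue of jobs waiting for a slot (spawn() waits once that queue is full), and close_timeout bounds how long close() waits for jobs to shut down.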
async def checker():
    scheduler = await aiojobs.create_scheduler(limit=CONCURRENT_CHECKS)
    while True:
        state.myloop()
        try:
            params = settings.proxy_checker
            headers = {"Authorization": params.auth}
            async with aiohttp.ClientSession(headers=headers) as s:
                async with s.get(params.list, timeout=5) as r:
                    # Spawn one check job per proxy in the fetched list.
                    for proxy in await r.json():
                        p = Prodict.from_dict(proxy)
                        await scheduler.spawn(chech(p, params))
        except Exception:
            logger.exception('err')
        # Busy-wait until all spawned jobs finish (reads a private attribute).
        jobs = scheduler._jobs
        while True:
            await asyncio.sleep(0.1)
            if len(jobs) == 0:
                break  # presumably exits the drain loop once no jobs remain
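The drain loop above polls the private scheduler._jobs set. A sketch of the same wait using only the scheduler's public counters (drain is a hypothetical helper, an alternative rather than the original code):

import asyncio

async def drain(scheduler):
    # Wait until the scheduler has no running or queued jobs,
    # using public counters instead of the private _jobs set.
    while scheduler.active_count or scheduler.pending_count:
        await asyncio.sleep(0.1)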