async def test_timeout_default(self):
    self.page.setDefaultNavigationTimeout(1)
    with self.assertRaises(TimeoutError):
        await self.page.goto(self.url + 'long')

async def test_timeout(self):
    with self.assertRaises(TimeoutError):
        await self.page.goto(self.url + 'long', timeout=1)

async def test_timeout(self):
    with self.assertRaises(TimeoutError) as cm:
        await self.page.waitForXPath('//div', timeout=10)
    self.assertIn(
        'Waiting for XPath "//div" failed: timeout',
        cm.exception.args[0],
    )
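
The three tests above assume a harness that provides self.page and a deliberately slow 'long' route on self.url. As a self-contained illustration of the same two navigation-timeout patterns, here is a minimal sketch; the slow URL is a placeholder you would need to supply:

import asyncio

from pyppeteer import launch
from pyppeteer.errors import TimeoutError

SLOW_URL = 'http://localhost:8000/long'  # placeholder: any endpoint that responds slowly

async def main():
    browser = await launch(headless=True)
    page = await browser.newPage()
    # Per-call timeout: only this navigation is limited to 1 ms.
    try:
        await page.goto(SLOW_URL, timeout=1)
    except TimeoutError:
        print('per-call navigation timed out')
    # Default timeout: every later navigation on this page is limited to 1 ms.
    page.setDefaultNavigationTimeout(1)
    try:
        await page.goto(SLOW_URL)
    except TimeoutError:
        print('default navigation timed out')
    await browser.close()

asyncio.get_event_loop().run_until_complete(main())

setDefaultNavigationTimeout changes the limit for every subsequent navigation on that page, while the timeout keyword overrides it for a single goto call.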

def handle(self, *args, **options):
    self.source = DataSource.objects.get(name='cambridge')
    while True:
        try:
            asyncio.get_event_loop().run_until_complete(self.sock_it())
        except (
            websockets.exceptions.ConnectionClosed,
            asyncio.InvalidStateError,
            pyppeteer.errors.TimeoutError
        ) as e:
            print(e)
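
The management command above simply restarts its coroutine whenever the websocket drops or pyppeteer times out. A minimal sketch of that reconnect loop in isolation, assuming a scrape() coroutine of your own and adding a short pause so repeated failures do not spin:

import asyncio
import time

import pyppeteer.errors
import websockets.exceptions

async def scrape():
    ...  # placeholder for the real websocket/pyppeteer work

def run_forever():
    while True:
        try:
            asyncio.get_event_loop().run_until_complete(scrape())
        except (
            websockets.exceptions.ConnectionClosed,
            asyncio.InvalidStateError,
            pyppeteer.errors.TimeoutError,
        ) as e:
            print(e)
            time.sleep(5)  # brief pause before reconnecting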

try:
    if self._azure_kmsi:
        await page.waitForSelector(
            'form[action="/kmsi"]', timeout=self._AWAIT_TIMEOUT)
        await page.waitForSelector('#idBtn_Back')
        await page.click('#idBtn_Back')

    page.on('request', _saml_response)
    await page.setRequestInterception(True)

    # Poll for the SAML response until the MFA deadline passes.
    wait_time = time.time() + self._MFA_TIMEOUT
    while time.time() < wait_time and not self.saml_response:
        if await self._querySelector(page, '.has-error'):
            raise FormError
    if not self.saml_response:
        raise TimeoutError
except (TimeoutError, BrowserError, FormError) as e:
    print('An error occurred while authenticating, check credentials.')
    print(e)
    if self._debug:
        debugfile = 'aadaerror-{}.png'.format(
            datetime.now().strftime("%Y-%m-%dT%H%M%SZ"))
        await page.screenshot({'path': debugfile})
        print('See screenshot {} for clues.'.format(debugfile))
    exit(1)
finally:
    await browser.close()
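
The reusable part of this flow is the error handling: pyppeteer raises TimeoutError when a selector never appears, and a screenshot taken at that moment usually shows what the headless browser was actually displaying. A minimal sketch of that screenshot-on-failure idea, assuming an existing page object and an illustrative selector:

from datetime import datetime

from pyppeteer.errors import BrowserError, TimeoutError

async def wait_with_screenshot(page, selector, timeout=10000):
    try:
        await page.waitForSelector(selector, timeout=timeout)
    except (TimeoutError, BrowserError) as e:
        debugfile = 'error-{}.png'.format(datetime.now().strftime('%Y-%m-%dT%H%M%SZ'))
        await page.screenshot({'path': debugfile})
        print('{}; see screenshot {} for clues.'.format(e, debugfile))
        raise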

@page.on("request")
async def _(request: p_network_manager.Request) -> None:
    if request.resourceType in ["document", "stylesheet", "image", "font"]:
        await request.continue_()
    else:
        await request.abort()

try:
    await page.goto(url, {
        # Maximum navigation time in milliseconds (5 * 1000 ms = 5 s):
        "timeout": 5 * 1000,
        # Consider navigation finished when there are no more than
        # 2 network connections for at least 500 ms:
        "waitUntil": "networkidle2",
    })
except p_errors.TimeoutError:
    await page.close()
    raise Error("timeout")
except p_errors.PageError as exc:
    await page.close()
    raise Error("navigation: {}", exc.args[0])  # e.g. "net::ERR_NAME_NOT_RESOLVED"

return page
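
The same interception-plus-navigation pattern as a self-contained sketch using pyppeteer's real module paths (the snippet above imports them under the aliases p_network_manager and p_errors, and Error is that project's own exception class; a plain RuntimeError stands in for it here). The resource-type whitelist and the 5-second timeout are illustrative choices:

from pyppeteer import errors
from pyppeteer.network_manager import Request

async def open_page(browser, url):
    page = await browser.newPage()
    await page.setRequestInterception(True)

    @page.on('request')
    async def _(request: Request) -> None:
        # Let the document and static assets through, drop everything else.
        if request.resourceType in ['document', 'stylesheet', 'image', 'font']:
            await request.continue_()
        else:
            await request.abort()

    try:
        # 5 * 1000 ms navigation budget; 'networkidle2' waits until at most
        # two network connections remain open for 500 ms.
        await page.goto(url, timeout=5 * 1000, waitUntil='networkidle2')
    except errors.TimeoutError:
        await page.close()
        raise RuntimeError('timeout')
    except errors.PageError as exc:
        await page.close()
        raise RuntimeError('navigation: {}'.format(exc.args[0]))
    return page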

async def _timeout_func() -> None:
    await asyncio.sleep(self._timeout / 1000)
    self._maximumTimer.set_exception(TimeoutError(errorMessage))

async def timer(timeout: Union[int, float]) -> None:
    await asyncio.sleep(timeout / 1000)
    self._timeoutError = True
    self.terminate(TimeoutError(
        f'Waiting for {title} failed: timeout {timeout}ms exceeds.'
    ))
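
These two coroutines come from pyppeteer's own internals: a background task sleeps for the configured timeout and, if the awaited condition has not been satisfied by then, fails the pending future or wait task with a TimeoutError. From calling code that detail surfaces simply as an exception to catch; a small sketch with an illustrative selector and timeout:

from pyppeteer.errors import TimeoutError

async def wait_briefly(page):
    try:
        await page.waitForSelector('#spinner', timeout=500)
    except TimeoutError as e:
        # Message resembles: Waiting for selector "#spinner" failed: timeout 500ms exceeds.
        print(e)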

async def fetch(self, url, **kwargs):
    try:
        max_tries = kwargs.pop('max_tries')
    except KeyError:
        # default 3
        max_tries = 3
    # Create a new page to load the url.
    page = await self.browser.newPage()
    try:
        logger.info("trying to get {url} via browser.".format(url=url))
        await page.goto(url, **kwargs)
        # break
    except TimeoutError:
        pass
    except PageError:
        return emptyBrowserResponse(url)
    # TimeoutError is not raised only when the page cannot be reached:
    # it also happens when JavaScript has not fully loaded, in which case
    # most of the page content is still available.
    url = page.url
    for i in range(max_tries):
        try:
            text = await page.content()
            cookies = await page.cookies()
            break
        except NetworkError:
            # If the timeout is too small, this error is sometimes raised.
            try:
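
The excerpt above is cut off inside its retry loop. As a hedged, self-contained sketch of the same idea (a hypothetical helper, not the original project's code): ignore TimeoutError because a partially loaded page can still be read, give up on PageError, and retry content()/cookies() a few times because NetworkError can appear when the navigation timeout was too tight:

import asyncio

from pyppeteer.errors import NetworkError, PageError, TimeoutError

async def fetch_page(browser, url, max_tries=3, **kwargs):
    page = await browser.newPage()
    try:
        await page.goto(url, **kwargs)
    except TimeoutError:
        pass  # the page may still be partially loaded and readable
    except PageError:
        await page.close()
        return None
    text, cookies = None, None
    for _ in range(max_tries):
        try:
            text = await page.content()
            cookies = await page.cookies()
            break
        except NetworkError:
            await asyncio.sleep(1)  # give the page a moment, then retry
    final_url = page.url
    await page.close()
    return final_url, text, cookies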