Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def download_error(er):
    """Handle a failed download.

    A 429 (rate-limit) response cannot be recovered by the retry
    machinery, so after a short pause the error is re-raised to the
    caller; every other error is delegated to ``crawler.handle_error``.
    """
    if is_http(er, code=429):
        # retry doesn't work with 429 error — back off, then propagate
        sleep(5)
        raise er
    else:
        crawler.handle_error(er)
        sleep(5)
def download_error(er):
    """Handle a failed download.

    Non-429 errors go through the crawler's normal error handling.
    A 429 (rate-limit) response is re-raised after a pause, because
    the retry mechanism cannot deal with it.
    """
    if not is_http(er, code=429):
        crawler.handle_error(er)
        sleep(5)
        return
    # retry doesn't work with 429 error — back off, then propagate
    sleep(5)
    raise er
def errorhandler(err, crawler):
    """Skip the current episode when a weibo page URL returns 404.

    Only URLs matching ``weibo.com/<user>/<post>`` trigger the skip;
    other 404s fall through for the default handling.

    Raises:
        SkipEpisodeError: for a 404 on a weibo post page.
    """
    # Raw string: "\." in a plain string is an invalid escape sequence
    # (SyntaxWarning on Python 3.12+); the pattern value is unchanged.
    if is_http(err, 404) and re.search(r"weibo\.com/[^/]+/[^/]+$", err.request.url):
        raise SkipEpisodeError
def errorhandler(err, crawler):
    """Recover from flickr 404/410 responses.

    If a size-specific static image URL is gone, drop the cached image
    and force the page to be refetched; if the photo page itself is
    gone, skip the episode.

    Raises:
        SkipEpisodeError: when the photo page URL returns 404/410.
    """
    if not (is_http(err, 410) or is_http(err, 404)):
        return
    url = err.response.url
    size_gone = re.match(
        r"https://(live|farm\d+)\.staticflickr\.com/\d+/\d+_[a-z0-9]+_[a-z0-9]{1,2}\.\w+",
        url,
    )
    if size_gone and crawler.ep.image:
        # a specific size is deleted?
        crawler.ep.image = None
        # clear html to refetch the page
        crawler.html = None
        return
    if re.match(r"https://www\.flickr\.com/photos/[^/]+/\d+/", url):
        raise SkipEpisodeError
def errorhandler(err, crawler):
    """Skip a pixiv episode whose page returns 404.

    A 404 on an episode URL usually means the work was deleted by the
    author, e.g.
    https://www.pixiv.net/member_illust.php?mode=medium&illust_id=68059323

    Raises:
        SkipEpisodeError: for a 404 on an episode URL.
    """
    if not is_http(err, 404):
        return
    # the error may carry no response at all — tolerate a missing attribute
    try:
        url = err.response.url
    except AttributeError:
        url = None
    if url and is_ep_url(url):
        # deleted by author?
        print("Skip {}: {}".format(err.response.url, 404))
        raise SkipEpisodeError