# handler for CTRL+C
signal.signal(signal.SIGINT, double_ctrl_c_handler)
try:
while tor.core.is_running:
try:
func(config)
except APIException as e:
if e.error_type == 'RATELIMIT':
log.warning(
'Ratelimit - artificially limited by Reddit. Sleeping'
' for requested time!'
)
handle_rate_limit(e)
except (RequestException, ServerError, Forbidden) as e:
log.warning(f'{e} - Issue communicating with Reddit. Sleeping for 60s!')
time.sleep(60)
log.info('User triggered shutdown. Shutting down.')
sys.exit(0)
except Exception as e:
log.error(e)
sys.exit(1)
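# The loop above defers RATELIMIT errors to handle_rate_limit(); the helper's
# body is not part of this snippet, so the following is only a minimal sketch
# of what it might do, assuming the APIException message carries Reddit's usual
# "try again in N minutes/seconds" wording.
import re
import time

def handle_rate_limit(exc):
    match = re.search(r'(\d+) (minute|second)', str(exc))
    if match is None:
        # Could not parse the message; fall back to a flat minute.
        time.sleep(60)
        return
    amount = int(match.group(1))
    delay = amount * 60 if match.group(2) == 'minute' else amount
    time.sleep(delay + 1)  # small buffer so the retry is not a second too early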
# will only ever apply to the options class
try:
if hasattr(arg, 'init_time'):
if check_time(arg.init_time, arg.timer):
if arg.log:
arg.db.update_log_entry(arg, 'Timer')
sys.exit()
except NotFound:
print('Encountered NotFound error when extracting attributes. This will be handled later.')
continue
return f(*args, **kwargs)
except (InternalError, ProgrammingError) as e:
print(sys.exc_info())
raise e
except (RequestException, ServerError):
print(sys.exc_info())
print('sleeping...')
time.sleep(10)
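# The fragment above (return f(*args, **kwargs) plus the two except clauses)
# reads like the inside of a retry decorator. This is a minimal self-contained
# sketch of that pattern, not the project's actual decorator; the exception
# imports below are assumptions about where those classes come from.
import sys
import time
import functools

from sqlalchemy.exc import InternalError, ProgrammingError
from requests.exceptions import RequestException
from prawcore.exceptions import ServerError

def retry_on_transient_errors(f):
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        while True:
            try:
                return f(*args, **kwargs)
            except (InternalError, ProgrammingError):
                # Database problems are logged and re-raised, not retried.
                print(sys.exc_info())
                raise
            except (RequestException, ServerError):
                # Transient Reddit/network trouble: wait a little and try again.
                print(sys.exc_info())
                print('sleeping...')
                time.sleep(10)
    return wrapper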
configure_logging(config, log_name='archiver.log')
tor = configure_tor(r, config)
initialize(tor, config)
logging.info('Initialization complete.')
archive = r.subreddit('ToR_Archive')
try:
while True:
try:
run(tor, config, archive)
time.sleep(300) # 5 minutes
except (
prawcore.exceptions.RequestException,
prawcore.exceptions.ServerError,
prawcore.exceptions.Forbidden
) as e:
logging.warning(
'{} - Issue communicating with Reddit. Sleeping for 60s!'
''.format(e)
)
time.sleep(60)
except KeyboardInterrupt:
logging.info('Received keyboard interrupt! Shutting down!')
sys.exit(0)
except Exception as e:
explode_gracefully('ToR_archivist', e, tor)
def process_error(message, exception, traceback):
is_transient = isinstance(exception, prawcore.exceptions.ServerError)
log.warning(f"{message}: {exception}")
if is_transient:
log.info(traceback)
counters.errors.labels(type='api').inc()
else:
log.warning(traceback)
counters.errors.labels(type='other').inc()
return is_transient
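# process_error() above increments counters.errors with a "type" label. A
# minimal sketch of how such a counter could be declared with prometheus_client;
# the counters module and the metric name are assumptions, only the
# labels(...).inc() calls appear in the snippet itself.
from prometheus_client import Counter

errors = Counter(
    'bot_errors_total',                         # metric name is illustrative
    'Errors raised while talking to Reddit',
    ['type'],                                   # matches labels(type='api'/'other')
)

# Usage mirroring the snippet: errors.labels(type='api').inc()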
async def _start_service_feed(self):
while not self.should_stop and self.connect_attempts < self.MAX_CONNECTION_ATTEMPTS:
try:
await self._start_listener()
except RequestException:
# probably a connection loss, try again
await asyncio.sleep(self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC)
except InvalidToken as e:
# expired, try again
self.logger.exception(e, True, f"Error when receiving Reddit feed: '{e}'")
self.logger.info(f"Trying to continue after {self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC} seconds.")
await asyncio.sleep(self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC)
except ServerError as e:
# server error, try again
self.logger.exception(e, True, f"Error when receiving Reddit feed: '{e}'")
self.logger.info(f"Trying to continue after {self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC} seconds.")
await asyncio.sleep(self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC)
except OAuthException as e:
self.logger.exception(e, True, f"Error when receiving Reddit feed: '{e}'. This may mean that the Reddit "
                               f"login info in config.json is wrong.")
self.keep_running = False
except ResponseException as e:
message_complement = "This may mean that the Reddit login info in config.json is invalid." \
    if not self.credentials_ok else \
    f"Trying to continue after {self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC} seconds."
self.logger.exception(e, True,
                      f"Error when receiving Reddit feed: '{e}'. {message_complement}")
if not self.credentials_ok:
self.connect_attempts += 1
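# The feed above retries with a fixed delay and a hard cap on connect_attempts.
# A common variant is exponential backoff between attempts; this is only an
# illustrative sketch (base_delay and max_delay are assumed names, not part of
# the original service), using asyncio.sleep so the event loop stays free.
import asyncio

async def sleep_with_backoff(attempt, base_delay=5, max_delay=300):
    # 5s, 10s, 20s, ... capped at max_delay
    await asyncio.sleep(min(base_delay * (2 ** attempt), max_delay))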
# Perform Secondary Objectives (check inbox)
chkinbox()
time.sleep(5)
reflairing = reflair()
if reflairing:
print('Reflairing complete.\n')
else:
print('Restarting reflairing session...\n')
# Exception list for when Reddit inevitably screws up
except praw.exceptions.APIException:
print('\nAn API exception happened.\nTaking a coffee break.\n')
time.sleep(30)
except prawcore.exceptions.ServerError:
print('\nReddit\'s famous 503 error occurred.\nTaking a coffee break.\n')
time.sleep(180)
except prawcore.exceptions.InvalidToken:
print('\n401 error: Token needs refreshing.\nTaking a coffee break.\n')
time.sleep(30)
# Probably another goddamn Snoosletter that the bot can't reply to.
except prawcore.exceptions.Forbidden:
print(' Unable to respond. Marking as read.\n')
for item in r.inbox.unread(limit=100):
if item in r.inbox.messages(limit=100):
item.mark_read()
except (KeyboardInterrupt, SystemExit):
raise
except Exception:
print('\nException happened (OC-Bot).\nTaking a coffee break.\n')
time.sleep(30)
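# The except-ladder above pairs each Reddit failure mode with its own pause.
# The same policy can be written as a lookup table; this is only a sketch,
# with the durations copied from the handlers above and everything else assumed.
import praw.exceptions
import prawcore.exceptions

COFFEE_BREAKS = {
    praw.exceptions.APIException: 30,        # generic API hiccup
    prawcore.exceptions.ServerError: 180,    # Reddit's famous 503
    prawcore.exceptions.InvalidToken: 30,    # 401, token needs refreshing
}

def coffee_break_for(exc, default=30):
    # Exact-type lookup is enough for the handful of classes handled above.
    return COFFEE_BREAKS.get(type(exc), default)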
return suggestion.url
except StopIteration:
results = subreddits.search(country_name, sort='top', time_filter='week')
except prawcore.exceptions.ServerError:
log.debug("503 error, redirecting to Global Voices")
return "http://globalvoicesonline.org"
try:
suggestion = next(results)
while suggestion.link_flair_text == 'Unconfirmed':
    suggestion = next(results)
log.debug("Article found: " + suggestion.title)
return suggestion.url
except StopIteration:
results = subreddits.search(country_name, sort='new')
except prawcore.exceptions.ServerError:
log.debug("503 error, redirecting to Global Voices")
return "http://globalvoicesonline.org"
try:
suggestion = next(results)
while suggestion.link_flair_text == 'Unconfirmed':
    suggestion = next(results)
log.debug("Article found: " + suggestion.title)
return suggestion.url
except StopIteration:
log.debug("No article found, redirecting to Global Voices")
return "http://globalvoicesonline.org"
except prawcore.exceptions.ServerError:
log.debug("503 error, redirecting to Global Voices")
return "http://globalvoicesonline.org"
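# The three near-identical blocks above walk search results, skip submissions
# flaired 'Unconfirmed', and fall back to Global Voices whenever the listing
# runs dry or Reddit answers with a 503. A consolidated sketch of that logic;
# the function name and the search strategies are assumptions (the initial
# search parameters are not shown in the snippet).
import prawcore.exceptions

FALLBACK_URL = "http://globalvoicesonline.org"

def find_article(subreddits, country_name, log):
    searches = (
        {'sort': 'top', 'time_filter': 'week'},
        {'sort': 'new'},
    )
    for params in searches:
        try:
            for suggestion in subreddits.search(country_name, **params):
                if suggestion.link_flair_text != 'Unconfirmed':
                    log.debug("Article found: " + suggestion.title)
                    return suggestion.url
        except prawcore.exceptions.ServerError:
            log.debug("503 error, redirecting to Global Voices")
            return FALLBACK_URL
    log.debug("No article found, redirecting to Global Voices")
    return FALLBACK_URL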
RETRY_EXCEPTIONS = (ChunkedEncodingError, ConnectionError, ReadTimeout)
RETRY_STATUSES = {
520,
522,
codes["bad_gateway"],
codes["gateway_timeout"],
codes["internal_server_error"],
codes["service_unavailable"],
}
STATUS_EXCEPTIONS = {
codes["bad_gateway"]: ServerError,
codes["bad_request"]: BadRequest,
codes["conflict"]: Conflict,
codes["found"]: Redirect,
codes["forbidden"]: authorization_error_class,
codes["gateway_timeout"]: ServerError,
codes["internal_server_error"]: ServerError,
codes["media_type"]: SpecialError,
codes["not_found"]: NotFound,
codes["request_entity_too_large"]: TooLarge,
codes["service_unavailable"]: ServerError,
codes["unauthorized"]: authorization_error_class,
codes["unavailable_for_legal_reasons"]: UnavailableForLegalReasons,
# Cloudflare status (not named in requests)
520: ServerError,
522: ServerError,
}
SUCCESS_STATUSES = {codes["created"], codes["ok"]}
@staticmethod
def _log_request(data, method, params, url):
log.debug("Fetching: {} {}".format(method, url))
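# The tables above drive prawcore-style response handling. A sketch of how a
# status code could be dispatched against them; classify_response() is an
# illustrative helper, not the library's actual request loop.
def classify_response(response, retries_left):
    status = response.status_code
    if status in SUCCESS_STATUSES:
        return 'ok'
    if status in RETRY_STATUSES and retries_left > 0:
        return 'retry'  # transient 5xx plus Cloudflare's 520/522
    if status in STATUS_EXCEPTIONS:
        # prawcore's exception classes are constructed from the failed response
        raise STATUS_EXCEPTIONS[status](response)
    return 'unknown'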
def _start_dispatcher(self):
while self.keep_running and self.connect_attempts < self.MAX_CONNECTION_ATTEMPTS:
try:
self._start_listener()
except RequestException:
# probably a connection loss, try again
time.sleep(self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC)
except InvalidToken as e:
# expired, try again
self.logger.error(f"Error when receiving Reddit feed: '{e}'")
self.logger.exception(e)
self.logger.info(f"Trying to continue after {self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC} seconds.")
time.sleep(self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC)
except ServerError as e:
# server error, try again
self.logger.error(f"Error when receiving Reddit feed: '{e}'")
self.logger.exception(e)
self.logger.info(f"Trying to continue after {self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC} seconds.")
time.sleep(self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC)
except OAuthException as e:
self.logger.error(f"Error when receiving Reddit feed: '{e}'. This may mean that the Reddit login info "
                  f"in config.json is wrong.")
self.logger.exception(e)
self.keep_running = False
except ResponseException as e:
message_complement = "This may mean that the Reddit login info in config.json is invalid." \
    if not self.credentials_ok else \
    f"Trying to continue after {self._SLEEPING_TIME_BEFORE_RECONNECT_ATTEMPT_SEC} seconds."
self.logger.error(f"Error when receiving Reddit feed: '{e}'. {message_complement}")
self.logger.exception(e)
)
tor_post = config.r.submission(id=clean_id(tor_post_id))
thing_to_reply_to = tor_post.reply(_(base_comment))
for chunk in chunks(result, 9000):
# end goal: if something is over 9000 characters long, we
# should post a top level comment, then keep replying to
# the comments we make until we run out of chunks.
thing_to_reply_to = thing_to_reply_to.reply(_(chunk))
config.redis.delete(new_post)
except (
prawcore.exceptions.RequestException,
prawcore.exceptions.ServerError
) as e:
logging.warning(
'{} - Issue communicating with Reddit. Sleeping for 60s!'
''.format(e)
)
time.sleep(60)
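# chunks() used in the comment-chain loop above is not shown in the snippet;
# a minimal sketch of a helper that yields fixed-size slices of a long string
# so each Reddit comment stays under the requested length (9000 characters here).
def chunks(text, size):
    for start in range(0, len(text), size):
        yield text[start:start + size]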