            # Get postIDs
            PostIDs = getPostIDs()
            # Check if already processed
            if submission.id in PostIDs:
                continue
            # Add current postID to data frame and save back to csv
            updatePostIDs(submission.id)
            # Save post to appropriate csv file
            SavePosts(str(submission.subreddit), [submission.id, submission.title, submission.created_utc, submission.score, submission.num_comments, submission.author])
            PostCounter += 1
            Logger(submission.subreddit, submission.created_utc, CommentCounter, PostCounter)
    except KeyboardInterrupt:
        log('Termination received. Goodbye!')
        return False
    except PrawcoreException:
        log(PrawcoreException, newline=True)
def runCleaning(reddit):
    running = True
    while running:
        try:
            # log.info("Checking bot's comments")
            redditor = reddit.redditor(c.BOT_REDDIT_USERNAME)
            for comment in redditor.comments.new(limit=50):
                if comment.score <= c.SCORE_DELETE_THRESH:
                    comment.delete()
                    log.info("Comment to {} deleted (score: {})".format(comment.parent_id, comment.score))
            time.sleep(c.SCORE_CHECK_TIME)
        except (PrawcoreException, APIException, ClientException) as e:
            log.exception("Error parsing bot's comments: {}".format(str(e)))
            log.info("Waiting {} seconds".format(str(c.TIMEOUT)))
            time.sleep(c.TIMEOUT)
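# A minimal usage sketch (not part of the original snippet): wiring runCleaning
# above to an authenticated PRAW client. The config module `c` mirrors the
# fragment above; the credential strings are placeholders, not real settings.
import praw

if __name__ == "__main__":
    reddit = praw.Reddit(
        client_id="CLIENT_ID",          # placeholder
        client_secret="CLIENT_SECRET",  # placeholder
        username=c.BOT_REDDIT_USERNAME,
        password="PASSWORD",            # placeholder
        user_agent="comment-cleanup bot (sketch, by u/example)",
    )
    runCleaning(reddit)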
"""Initialize a RequestException instance.
:param original_exception: The original exception that occurred.
:param request_args: The arguments to the request function.
:param request_kwargs: The keyword arguments to the request function.
"""
self.original_exception = original_exception
self.request_args = request_args
self.request_kwargs = request_kwargs
super(RequestException, self).__init__(
"error with request {}".format(original_exception)
)
class ResponseException(PrawcoreException):
"""Indicate that there was an error with the completed HTTP request."""
def __init__(self, response):
"""Initialize a ResponseException instance.
:param response: A requests.response instance.
"""
self.response = response
super(ResponseException, self).__init__(
"received {} HTTP response".format(response.status_code)
)
class OAuthException(PrawcoreException):
"""Indicate that there was an OAuth2 related error with the request."""
    def __init__(self, response, error, description):
        """Initialize an OAuthException instance.

        :param response: A requests.response instance.
        :param error: The error type returned by reddit.
        :param description: A description of the error when provided.

        """
        self.error = error
        self.description = description
        self.response = response
        message = "{} error processing request".format(error)
        if description:
            message += " ({})".format(description)
        PrawcoreException.__init__(self, message)
DB_FILE = os.environ.get("DATABASE", "snapshill.sqlite3")
LEN_MAX = 35
REDDIT_API_WAIT = 2
WARN_TIME = 300 # warn after spending 5 minutes on a post
REDDIT_PATTERN = re.compile(
    r"https?://(([A-z]{2})(-[A-z]{2})?|beta|i|m|pay|ssl|www|old|new|alpha)\.?reddit\.com"
)
SUBREDDIT_OR_USER = re.compile(r"/(u|user|r)/[^\/]+/?$")
# we have to do some manual ratelimiting because we are tunnelling through
# some other websites.
RECOVERABLE_EXC = (
APIException,
ClientException,
PRAWException,
PrawcoreException,
ConnectionError,
)
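# A hedged sketch (not part of the original bot): one way to pair
# RECOVERABLE_EXC with the manual rate limiting mentioned above. The helper
# name call_with_retries is hypothetical; it assumes `time` is imported and
# that `log` (configured just below) exists by the time it is called.
def call_with_retries(func, *args, attempts=3, **kwargs):
    """Call func, sleeping REDDIT_API_WAIT seconds between recoverable failures."""
    for attempt in range(1, attempts + 1):
        try:
            return func(*args, **kwargs)
        except RECOVERABLE_EXC as e:
            if attempt == attempts:
                raise
            log.warning("Recoverable error (attempt %d/%d): %s", attempt, attempts, e)
            time.sleep(REDDIT_API_WAIT)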
loglevel = logging.DEBUG if os.environ.get("DEBUG") == "true" else logging.INFO
TESTING = os.environ.get("TEST") == "true"
logging.basicConfig(level=loglevel, format="[%(asctime)s] [%(levelname)s] %(message)s")
log = logging.getLogger("snapshill")
logging.getLogger("requests").setLevel(loglevel)
warnings.simplefilter("ignore") # Ignore ResourceWarnings (because screw them)
def get_footer():
return "\n\n*I am just a simple bot, __not__ a moderator of this subreddit* | [*bot subreddit*]({info}) | [*contact the maintainers*]({contact})".format(
                return
            reply_r.append(tmp_s)
        reply_r = ' '.join(reply_r)
        reply = unidecode(reply_r)
        if com.subreddit.display_name == 'EVEX':
            target = target + random.choice(['-senpai', '-kun', '-chan', '-san', '-sama'])
        log('%s: (%d) %s (%d) by %s in %s on %s, reply' % (id, index, target, sentence_avg, author, sub, ctime), additional='\n%s\n' % reply)
        if target[:3] != '/r/':
            target = target.replace('_', '\\_')
        try_reply(com, '%s\n\n ~ %s%s' % (reply, target, FOOTER))
        # log('%s: Finished' % id)
    except prawcore.exceptions.Forbidden as ex:
        log("Could not reply to comment by %s in %s: %s" % (author, sub, str(ex)))
    except praw.exceptions.APIException:
        log("Parent comment by %s in %s was deleted" % (author, sub))
    except prawcore.exceptions.PrawcoreException as ex:
        log("%s: (%d) %s (%d) by %s in %s on %s: could not reply, will retry: %s" % (id, index, target, sentence_avg, author, sub, ctime, str(ex)))
        q.put(id)
            if item.id in seen_deque['inbox']:
                logger.debug('[Inbox] Skip: seen item: t4_{}'.format(item.id))
                continue
            if item.created_utc < check_time['inbox']:
                if item.created_utc < start_time:
                    logger.debug('[Inbox] Skip: item was submitted before bot started: t4_{}'.format(item.id))
                else:
                    logger.debug('[Inbox] Skip: timestamp was supplanted: t4_{}'.format(item.id))
                continue
            check_time['inbox'] += control_checkpoint_progression(item.created_utc - check_time['inbox'])
            seen_deque['inbox'].append(item.id)
            process_inbox_item(item)
    except (praw.exceptions.PRAWException, prawcore.exceptions.PrawcoreException) as e:
        if isinstance(e, praw.exceptions.APIException):
            if e.error_type == 'RATELIMIT':
                logger.info('Exception: ratelimit exceeded: {}'.format(e.message))
                time.sleep(11 * 60)
            else:
                logger.warning('Exception: unhandled PRAW APIException exception:', exc_info=True)
        elif isinstance(e, prawcore.exceptions.ResponseException):
            logger.info('Exception: ResponseException: {}'.format(e.response))
            time.sleep(5)
        elif isinstance(e, prawcore.exceptions.RequestException):
            logger.info('Exception: RequestException: {}'.format(e.original_exception))
            time.sleep(5)
        else:
"""Provide exception classes for the prawcore package."""
from urllib.parse import urlparse
class PrawcoreException(Exception):
"""Base exception class for exceptions that occur within this package."""
class InvalidInvocation(PrawcoreException):
"""Indicate that the code to execute cannot be completed."""
class RequestException(PrawcoreException):
"""Indicate that there was an error with the incomplete HTTP request."""
def __init__(self, original_exception, request_args, request_kwargs):
"""Initialize a RequestException instance.
:param original_exception: The original exception that occurred.
:param request_args: The arguments to the request function.
:param request_kwargs: The keyword arguments to the request function.
"""
self.original_exception = original_exception
self.request_args = request_args
                # success, reset fails
                failCount = 0
            except praw.exceptions.APIException as e:
                # https://github.com/reddit/reddit/blob/master/r2/r2/lib/errors.py
                if 'RATELIMIT' in e.error_type:
                    reset = self.r.auth.limits.get('reset_timestamp')
                    if reset:
                        self.rateSleep = reset - _now() + 5
                        log.warning('run() rate exceeded, going to sleep %s',
                                    self.rateSleep)
                else:
                    log.exception('run() reddit responded with error: %s', e)
            except prawcore.exceptions.PrawcoreException:
                # connection errors if bot or reddit is offline
                log.exception('run() error in core while redditing')
                failCount += 1
                if failCount >= self.__failLimit:
                    # some error/python version/praw version combinations never recover
                    log.error('run() consecutive fails reached limit, leaving to restart')
                    self.killed = True
            except KeyboardInterrupt:
                log.warning('run() interrupt, leaving')
                self.killed = True
            # sleep before next round/attempt
            if not self.killed:
                self.__sleep()