from rq import Connection, Queue, Worker

# Management-command handler: run an rq worker over the requested queues and
# report unhandled job exceptions to Sentry via a custom exception handler.
# (get_redis_connection is the project's redis helper, e.g. django-redis's.)
def handle_sentry(self, *args, **options):
    conn = get_redis_connection()
    with Connection(conn):
        queues = [Queue(name) for name in options['queues']]
        worker = Worker(queues, exception_handlers=[sentry_handler])
        worker.work(burst=options['burst'])
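# A minimal sketch of the `sentry_handler` referenced above, assuming the
# sentry_sdk client. rq calls exception handlers with
# (job, exc_type, exc_value, traceback); returning True lets the next
# handler (or the default failure handler) run as well.
import sentry_sdk

def sentry_handler(job, exc_type, exc_value, traceback):
    sentry_sdk.capture_exception(exc_value)  # report the job failure to Sentry
    return True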
from rq import Connection, Queue

from . import (
    RedisHash,
    get_build_object,
    get_pkg_object,
    status,
    get_repo_object,
)

logger = status.logger
doc_util = DockerUtils(status)
doc = doc_util.doc
pkg_cache_obj = PacmanPackageCache()

# Module-level queue for repo-update jobs, bound to the app's redis connection.
with Connection(status.db):
    repo_queue = Queue('update_repo')
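# With the queue bound as above, producers elsewhere can enqueue work onto
# it. A minimal sketch; `update_repo` is a hypothetical job function and
# 'antergos' a placeholder argument, neither taken from the snippet above.
def update_repo(repo_name):
    logger.info('updating repo database for %s', repo_name)

job = repo_queue.enqueue(update_repo, 'antergos')
logger.debug('queued repo update as job %s', job.id)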
class TransactionMeta(RedisHash):
    """
    Base class for `Transaction` objects. It sets up the attributes that are
    stored in redis so they can be accessed like normal instance attributes.
    This class should not be used directly.

    Args:
        See `Transaction` docstring.

    Attributes:
        See `Transaction` docstring.
    """
from rq import Connection, Queue, Worker

def start_worker(queue_name):
    print("starting worker '{}'...".format(queue_name))
    with Connection(redis_rq_conn):
        worker = Worker(Queue(queue_name), exc_handler=failed_job_handler)
        worker.work()
# Use the explicit version string when given, otherwise fall back to the
# package object's version string.
if version_str:
    self.version_str = version_str
else:
    self.version_str = self._pkg_obj.version_str

pkg_link = '<a href="/package/{0}">{0}</a>'.format(self._pkg_obj.pkgname)
tpl = 'Build <a href="/build/{0}">{0}</a> for {1} <strong>{2}</strong> started.'
tlmsg = tpl.format(self.bnum, pkg_link, self.version_str)
get_timeline_object(msg=tlmsg, tl_type=3, ret=False)

self._pkg_obj.builds.append(self.bnum)
status.now_building.append(self.bnum)

# Record which build this rq job is working on in the job's metadata.
with Connection(self.db):
    current_job = get_current_job()
    current_job.meta['building_num'] = self.bnum
    current_job.save()
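# The metadata written above can be read back outside the worker process.
# A minimal sketch, assuming `conn` is the same redis connection; the
# helper name is hypothetical.
from rq.job import Job

def building_num_for(job_id, conn):
    job = Job.fetch(job_id, connection=conn)
    return job.meta.get('building_num')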
from webhook import Webhook
from transaction_handler import handle_hook, update_repo_databases
from iso_utility import iso_release_job
from extensions import (
    FlaskView,
    route,
    current_user,
)

logger = status.logger

# Setup rq (background task queue manager): one queue per job type, with a
# custom exception handler attached so worker failures are logged.
exc_handler = RQWorkerCustomExceptionHandler(status, logger)

with Connection(db):
    transaction_queue = Queue('transactions')
    repo_queue = Queue('update_repo')
    webhook_queue = Queue('webhook')

    w1 = Worker([transaction_queue], exception_handlers=[exc_handler.handle_worker_exception])
    w2 = Worker([repo_queue])
    w3 = Worker([webhook_queue], exception_handlers=[exc_handler.handle_worker_exception])
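# Worker.work() blocks, so each of the three workers above would normally
# run in its own OS process. A minimal sketch with multiprocessing; in
# practice the `rq worker` CLI is the usual way to start these.
from multiprocessing import Process

for w in (w1, w2, w3):
    Process(target=w.work).start()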
from flask import abort, render_template
from jinja2 import TemplateNotFound

def try_render_template(*args, **kwargs):
    try:
        return render_template(*args, **kwargs)
    except TemplateNotFound:
        abort(500)
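# Typical use of the helper above in a Flask view, so a missing template
# surfaces as a 500 rather than an unhandled TemplateNotFound. The app and
# route are hypothetical.
from flask import Flask

app = Flask(__name__)

@app.route('/about')
def about():
    return try_render_template('about.html')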
def get_paginated(item_list, per_page, page, reverse=True):
import rq

# Pyramid tween factory: run each request inside an rq Connection.
def rq_tween_factory(handler, registry):
    def rq_tween(request):
        with rq.Connection(registry.settings['rq.redis']):
            return handler(request)
    return rq_tween
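# Registering the factory with Pyramid's tween machinery; a sketch assuming
# the factory lives at myapp.tweens.rq_tween_factory (the dotted path is
# hypothetical).
def includeme(config):
    config.add_tween('myapp.tweens.rq_tween_factory')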
from datetime import datetime

from bson.objectid import ObjectId
from redis import Redis
from rq import Connection, get_current_job

def execute(self, job_id):
    with Connection(Redis(host=REDIS_HOST)):
        self._worker_job = get_current_job()
        self._connect_db()

        self.job = self._db.jobs.find_one({'_id': ObjectId(job_id)})
        self.app = self._db.apps.find_one({'_id': ObjectId(self.job['app_id'])})
        self._init_log_file()

        # Record the rq job id and start time on the job document.
        self._db.jobs.update({'_id': self.job['_id']}, {'$set': {
            'log_id': self._worker_job.id,
            'started_at': datetime.utcnow(),
        }})

        # Resolve the command class (e.g. 'deploy' -> Deploy) by name.
        klass_name = self.job['command'].title()
        mod = __import__('commands.' + self.job['command'],
                         fromlist=[klass_name, 'RELATED_APP_FIELDS'])
        command = getattr(mod, klass_name)(self)

        # Execute command and always mark the job as 'failed' in case of an unexpected exception
        try:
            if self.job['status'] == 'init':
# from totalimpactwebapp.profile import Profile
# from totalimpactwebapp.product import Product
from rq import Connection, Queue, Worker
from totalimpact import default_settings
from totalimpact.providers.provider import ProviderFactory, ProviderError, ProviderTimeout

redis_rq_conn = tiredis.from_url(os.getenv("REDIS_URL"), db=tiredis.REDIS_RQ_NUMBER)

if __name__ == '__main__':
    parser = optparse.OptionParser("usage: %prog [options]")
    parser.add_option('-q', '--queue', dest='queue', type="str",
                      help='profile or product')
    (options, args) = parser.parse_args()

    with Connection(redis_rq_conn):
        # Listen on the requested queue first, falling back to "default".
        queue_name = options.queue
        queues = [queue_name, "default"]
        worker = Worker([Queue(name) for name in queues])
        worker.work()
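# Example invocation, assuming REDIS_URL points at the rq redis instance
# (the file name is hypothetical):
#
#   REDIS_URL=redis://localhost:6379 python worker.py --queue profile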
import logging

import feedparser
import redis
from rq import Connection, Queue

def fetch_rss():
    logger = logging.getLogger(__name__)
    rss_feeds = Rss.objects.all()
    with Connection(redis.Redis(**settings.RQ_DATABASE)):
        queue = Queue('link_indexing')
        for rss_feed in rss_feeds:
            feed = feedparser.parse(rss_feed.link, etag=rss_feed.etag)
            if feed.bozo:
                logger.warning('Problem while parsing feed %s (%s)',
                               rss_feed.link, feed.bozo_exception)
                continue
            if 'status' not in feed:
                logger.warning('Feed %s has no status in it. Is that an HTTP error?',
                               rss_feed.link)
                continue
            if feed.status > 399:
                logger.warning('Got bad http status while fetching %s', rss_feed.link)
                continue
            if feed.status == 301:
import json
import time

from flask import jsonify, request
from rq import Connection, Queue

def get_git_references(projectId):
    # request.args values are strings; default to '0' so a missing
    # ?force= parameter does not raise a TypeError in int().
    forceRefresh = bool(int(request.args.get('force', '0')))
    project = Project.query.get(projectId)
    fullRefs = None

    if forceRefresh is False:
        fullRefs = redis_conn.get('project:{}:refs'.format(projectId))

    if fullRefs is None:
        with Connection(redis_conn):
            queue = Queue('low', connection=redis_conn)
            action = 'worker.get_git_references'
            job = queue.enqueue_call(
                func=action,
                timeout=900,
                args=(project.git_address, project.id)
            )
            # Busy-wait until the worker publishes a result.
            while job.result is None:
                time.sleep(0.5)
            fullRefs = str(job.result)
            redis_conn.set('project:{}:refs'.format(projectId), fullRefs)

    return jsonify({'gitReferences': json.loads(fullRefs)})
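# The 0.5-second polling loop above waits forever if the worker dies; a
# bounded variant using rq's job-status properties is sketched below (the
# helper name and timeout are assumptions).
def wait_for_result(job, timeout=900, poll=0.5):
    waited = 0.0
    while not job.is_finished:
        if job.is_failed or waited >= timeout:
            return None  # give up instead of blocking the request forever
        time.sleep(poll)
        waited += poll
    return job.result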