Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
index_chunk(cls, id_list, reraise=True)
except Exception:
rec.text = (u'%s: Errored out %s %s' % (
rec.text, sys.exc_type, sys.exc_value))
# Some exceptions aren't pickleable and we need this to throw
# things that are pickleable.
raise IndexingTaskError()
finally:
unpin_this_thread()
rec.endtime = datetime.datetime.now()
rec.save()
try:
client = redis_client('default')
client.decr(OUTSTANDING_INDEX_CHUNKS, 1)
except RedisError:
# If Redis isn't running, then we just log that the task
# was completed.
log.info('Index task %s completed.', task_name)
if delete_index_first:
# Coming from the delete form, so we reindex all models.
mapping_types_to_index = None
else:
# Coming from the reindex form, so we reindex whatever we're
# told.
mapping_types_to_index = [name.replace('check_', '')
for name in request.POST.keys()
if name.startswith('check_')]
# TODO: If this gets fux0rd, then it's possible this could be
# non-zero and we really want to just ignore it. Need the ability
# to ignore it.
try:
client = redis_client('default')
val = client.get(OUTSTANDING_INDEX_CHUNKS)
if val is not None and int(val) > 0:
raise ReindexError('There are %s outstanding chunks.' % val)
# We don't know how many chunks we're building, but we do want
# to make sure another reindex request doesn't slide in here
# and kick off a bunch of chunks.
#
# There is a race condition here.
client.set(OUTSTANDING_INDEX_CHUNKS, 1)
except RedisError:
log.warning('Redis not running. Can not check if there are '
'outstanding tasks.')
batch_id = create_batch_id()
# chunkifies by class then by chunk size.
chunks = []
for cls, indexable in get_indexable(mapping_types=mapping_types_to_index):
chunks.extend(
(cls, chunk) for chunk in chunked(indexable, CHUNK_SIZE))
if delete_index_first:
# The previous lines do a lot of work and take some time to
# execute. So we wait until here to wipe and rebuild the
# index. That reduces the time that there is no index by a little.
recreate_index()
chunks_count = len(chunks)
try:
client = redis_client('default')
client.set(OUTSTANDING_INDEX_CHUNKS, chunks_count)
except RedisError:
log.warning('Redis not running. Can\'t denote outstanding tasks.')
for chunk in chunks:
index_chunk_task.delay(write_index, batch_id, chunk)
return HttpResponseRedirect(request.path)
def _document_lock_steal(document_id, user_name, expire_time=60 * 15):
    """Lock a document for a user.

    Note that this does not check if the page is already locked, and simply
    sets the lock on the page.

    :arg document_id: id of the document to lock
    :arg user_name: name of the user taking the lock
    :arg expire_time: seconds until the lock expires (default: 15 minutes)

    :returns: the Redis SET result on success, ``False`` on Redis error
    """
    try:
        redis = redis_client(name='default')
        key = _document_lock_key.format(id=document_id)
        # NOTE(review): SET and EXPIRE are two separate commands, so if the
        # process dies between them the lock is left with no TTL and never
        # expires. redis.set(key, user_name, ex=expire_time) would be atomic
        # on modern redis-py — confirm client version before changing.
        it_worked = redis.set(key, user_name)
        redis.expire(key, expire_time)
        return it_worked
    except RedisError as e:
        # NOTE(review): 'errror' is a typo in the metric name, but fixing it
        # would break any dashboards keyed on the existing name — coordinate
        # before renaming.
        statsd.incr('redis.errror')
        # Lazy %-args: formatting is deferred until the record is emitted.
        log.error('Redis error: %s', e)
        return False
def _document_lock_check(document_id):
    """Check for a lock on a document.

    Returns the username of the user that has the page locked, or ``None`` if
    no user has a lock.

    :arg document_id: id of the document to check

    :returns: the lock holder's name as stored in Redis, or ``None`` if the
        document is unlocked or Redis is unavailable
    """
    try:
        redis = redis_client(name='default')
        key = _document_lock_key.format(id=document_id)
        # GET returns None for a missing key, which doubles as the
        # "not locked" answer.
        return redis.get(key)
    except RedisError as e:
        # NOTE(review): 'errror' is a typo in the metric name, but fixing it
        # would break any dashboards keyed on the existing name — coordinate
        # before renaming.
        statsd.incr('redis.errror')
        # Lazy %-args: formatting is deferred until the record is emitted.
        log.error('Redis error: %s', e)
        return None
(ERROR, 'Exception while looking at ElasticSearch: %s' % str(exc)))
status['ElasticSearch'] = es_results
# Check Celery.
# start = time.time()
# pong = celery.task.ping()
# rabbit_results = r = {'duration': time.time() - start}
# status_summary['rabbit'] = pong == 'pong' and r['duration'] < 1
# Check Redis.
redis_results = []
if hasattr(settings, 'REDIS_BACKENDS'):
for backend in settings.REDIS_BACKENDS:
try:
redis_client(backend)
redis_results.append((INFO, '%s: Pass!' % backend))
except RedisError:
redis_results.append((ERROR, '%s: Fail!' % backend))
status['Redis'] = redis_results
status_code = 200
status_summary = {}
for component, output in status.items():
if ERROR in [item[0] for item in output]:
status_code = 500
status_summary[component] = False
else:
status_summary[component] = True
return render(request, 'services/monitor.html', {