class CrashArchiveFilter(admin.SimpleListFilter):  # base class assumed; fragment referenced by CrashAdmin below
    title = 'Instrumental file'
    parameter_name = 'archive'
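    # A minimal sketch of the remaining SimpleListFilter hooks, assuming the
    # filter toggles on whether a crash has an archive file attached. The
    # lookup labels and the empty-string check are illustrative, not taken
    # from the original source.
    def lookups(self, request, model_admin):
        return (('yes', 'Yes'), ('no', 'No'))

    def queryset(self, request, queryset):
        if self.value() == 'yes':
            return queryset.exclude(archive='')
        if self.value() == 'no':
            return queryset.filter(archive='')
        return queryset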
@admin.register(CrashDescription)
class CrashDescriptionAdmin(admin.ModelAdmin):
    readonly_fields = ('created', 'modified')
    list_display = ('created', 'modified', 'summary')
    list_display_links = ('created', 'modified', 'summary')


class CrashDescriptionInline(admin.StackedInline):
    model = CrashDescription
@admin.register(Crash)
class CrashAdmin(admin.ModelAdmin):
    list_display = ('id', 'created', 'modified', 'archive_field', 'signature', 'appid', 'userid',
                    'summary_field', 'os', 'build_number', 'channel', 'cpu_architecture_field',)
    list_select_related = ['crash_description']
    list_display_links = ('id', 'created', 'modified', 'signature', 'appid', 'userid',
                          'cpu_architecture_field',)
    list_filter = (('id', TextInputFilter,), 'created', CrashArchiveFilter, 'os', 'build_number', 'channel')
    search_fields = ('appid', 'userid', 'archive',)
    form = CrashFrom
    readonly_fields = ['sentry_link_field', 'os', 'build_number', 'channel']
    exclude = ('groupid', 'eventid')
    actions = ('regenerate_stacktrace',)
    inlines = [CrashDescriptionInline]

    def archive_field(self, obj):
        # Render the archive column as a boolean flag: is a crash file attached?
        return bool(obj.archive)
    archive_field.short_description = 'Instrumental file'
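    # A hedged sketch of the `regenerate_stacktrace` action named in `actions`
    # above; its implementation is not part of this fragment. It assumes a
    # Celery task (here called `processing_crash_dump`, a hypothetical name)
    # that re-parses a single crash dump by primary key.
    def regenerate_stacktrace(self, request, queryset):
        for crash in queryset:
            processing_crash_dump.delay(crash.pk)  # hypothetical task name
    regenerate_stacktrace.short_description = 'Regenerate stacktrace'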
def get_prefix(model_name):
    # Map each model class (the keys are classes, despite the parameter name)
    # to the storage path prefixes its files live under.
    model_path_prefix = {
        Crash: ('minidump', 'minidump_archive'),
        Feedback: ('blackbox', 'system_logs', 'feedback_attach', 'screenshot'),
        Symbols: ('symbols',),
        Version: ('build',),
        SparkleVersion: ('sparkle',),
    }
    return model_path_prefix[model_name]
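# For illustration, given the mapping above (assuming the model classes are
# importable in this module):
#
#   >>> get_prefix(Crash)
#   ('minidump', 'minidump_archive')
#   >>> get_prefix(Feedback)
#   ('blackbox', 'system_logs', 'feedback_attach', 'screenshot')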
# Fragment of a deduplication cleanup task; the signature below is inferred
# from the body's use of `limit` and the trailing `return` (the original
# function name and any task decorator are not shown in this fragment).
def delete_duplicate_crashes(limit=None):
    full_result = dict(count=0, size=0, elements=[])
    if not limit:
        preference_key = '__'.join(['Crash', 'duplicate_number'])
        limit = gpm[preference_key]
    duplicated = Crash.objects.values('signature').annotate(count=Count('signature'))
    # Materialize the result: under Python 3 a bare filter() is a lazy iterator
    # that the log call below would consume before the loop ever ran.
    duplicated = [x for x in duplicated if x['count'] > limit]
    logger.info('Duplicated signatures: %r', duplicated)

    for group in duplicated:
        qs = Crash.objects.filter(signature=group['signature']).order_by('created')
        dup_elements = []
        dup_count = qs.count()
        while dup_count > limit:
            # Delete the oldest duplicates first, at most 1000 per pass.
            bulk_size = min(dup_count - limit, 1000)
            # Materialize the ids: some backends reject a sliced subquery in __in.
            bulk_ids = list(qs[:bulk_size].values_list('id', flat=True))
            bulk = qs.filter(id__in=bulk_ids)
            result = bulk_delete(Crash, bulk)
            full_result['count'] += result['count']
            full_result['size'] += result['size']
            full_result['elements'] += result['elements']
            dup_elements += result['elements']
            dup_count -= bulk_size
    return full_result
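# Illustrative invocation: keep at most 10 crashes per signature and inspect
# how much was reclaimed. When `limit` is omitted, the `gpm` global-preferences
# lookup above supplies the default.
#
#   result = delete_duplicate_crashes(limit=10)
#   logger.info('deleted %s crashes, freed %s bytes', result['count'], result['size'])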
size = SparkleVersion.objects.get_size()
if size > gpm['SparkleVersion__limit_size'] * 1024 * 1024 * 1024:
    raven.captureMessage(
        "[Limitation]Size limit of sparkle versions is exceeded. Current size is %s [%d]" %
        (filters.filesizeformat(size).replace(u'\xa0', u' '), time.time()),
        data={'level': 30, 'logger': 'limitation'})
cache.set('sparkle_version_size', size)

size = Feedback.objects.get_size()
if size > gpm['Feedback__limit_size'] * 1024 * 1024 * 1024:
    raven.captureMessage(
        "[Limitation]Size limit of feedbacks is exceeded. Current size is %s [%d]" %
        (filters.filesizeformat(size).replace(u'\xa0', u' '), time.time()),
        data={'level': 30, 'logger': 'limitation'})
cache.set('feedbacks_size', size)

size = Crash.objects.get_size()
if size > gpm['Crash__limit_size'] * 1024 * 1024 * 1024:
    raven.captureMessage(
        "[Limitation]Size limit of crashes is exceeded. Current size is %s [%d]" %
        (filters.filesizeformat(size).replace(u'\xa0', u' '), time.time()),
        data={'level': 30, 'logger': 'limitation'})
cache.set('crashes_size', size)

size = Symbols.objects.get_size()
if size > gpm['Symbols__limit_size'] * 1024 * 1024 * 1024:
    raven.captureMessage(
        "[Limitation]Size limit of symbols is exceeded. Current size is %s [%d]" %
        (filters.filesizeformat(size).replace(u'\xa0', u' '), time.time()),
        data={'level': 30, 'logger': 'limitation'})
cache.set('symbols_size', size)
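# The four checks above share one shape. A hedged sketch of how they could be
# folded into a helper; the names `check_size_limit`, `pref_key`, `cache_key`,
# and `label` are mine, not from the original source:
def check_size_limit(model_cls, pref_key, cache_key, label):
    size = model_cls.objects.get_size()
    if size > gpm[pref_key] * 1024 ** 3:
        raven.captureMessage(
            "[Limitation]Size limit of %s is exceeded. Current size is %s [%d]" %
            (label, filters.filesizeformat(size).replace(u'\xa0', u' '), time.time()),
            data={'level': 30, 'logger': 'limitation'})
    cache.set(cache_key, size)

# e.g. check_size_limit(Crash, 'Crash__limit_size', 'crashes_size', 'crashes')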
@bulk_delete.register(Crash)
def _(cls, qs):
    if settings.DEFAULT_FILE_STORAGE == 'omaha_server.s3utils.S3Storage':
        qs = s3_bulk_delete(qs, file_fields=['archive', 'upload_file_minidump'],
                            s3_fields=['minidump_archive', 'minidump'])

    result = dict()
    result['count'] = qs.count()
    result['size'] = qs.get_size()
    elements = list(qs.values_list('id', 'created', 'signature', 'userid', 'appid'))
    # Build plain dicts before deleting; under Python 3 a lazy map() object
    # would be exhausted after a single pass, breaking callers (such as the
    # dedup loop above) that read result['elements'] more than once.
    result['elements'] = [dict(id=pk, element_created=created.strftime("%d. %B %Y %I:%M%p"),
                               signature=signature, userid=userid, appid=appid)
                          for pk, created, signature, userid, appid in elements]
    qs.delete()
    return result
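# Illustrative call, matching how the dedup loop above invokes the dispatcher:
#
#   stale = Crash.objects.filter(signature='0xDEADBEEF')  # hypothetical queryset
#   summary = bulk_delete(Crash, stale)
#   logger.info('removed %s crashes (%s bytes)', summary['count'], summary['size'])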