def _migrate_h5(self, old_store_path):
    '''Copy every key from an old HDF5 store into this store, then delete the old file'''
    try:
        old_store = HDF5Store(old_store_path, flush=5)
        old_info = [(key, old_store.load(key)) for key in old_store.keys()]
        for key, val in old_info:
            self.store.dump(key, val)
        self.store.flush()
        old_store.close()
        os.remove(old_store_path)
    except Exception:
        import sys
        app_log.exception('FATAL: Cannot migrate: {}'.format(old_store_path))
        sys.exit(1)
def log(conf):
    '''Set up logging using Python's standard logging.config.dictConfig()'''
    # Create the folders for log files mentioned by handlers that are actually in use
    active_handlers = set(conf.get('root', {}).get('handlers', []))
    for logger in conf.get('loggers', {}).values():
        active_handlers |= set(logger.get('handlers', []))
    for handler, handler_conf in conf.get('handlers', {}).items():
        if handler in active_handlers:
            filename = handler_conf.get('filename', None)
            if filename is not None:
                folder = os.path.dirname(os.path.abspath(filename))
                if not os.path.exists(folder):
                    try:
                        os.makedirs(folder)
                    except OSError:
                        app_log.exception('log: %s: cannot create folder %s', handler, folder)
    try:
        logging.config.dictConfig(conf)
    except (ValueError, TypeError, AttributeError, ImportError):
        app_log.exception('Error in log: configuration')
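# A minimal sketch (illustrative, not from the source) of a conf dict that log() above
# could accept: standard logging.config.dictConfig() format. The handler names, path
# and levels here are assumptions. log() would create the missing logs/ folder for the
# 'logfile' handler before applying the configuration.
sample_log_conf = {
    'version': 1,
    'handlers': {
        'console': {'class': 'logging.StreamHandler'},
        'logfile': {'class': 'logging.FileHandler', 'filename': 'logs/app.log'},
    },
    'root': {'level': 'INFO', 'handlers': ['console', 'logfile']},
}
# log(sample_log_conf)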
def initialize(self, **kwargs):
    # self.request.arguments does not handle unicode keys well.
    # In Py2, it returns a str (not unicode). In Py3, it returns latin-1 unicode.
    # Convert this to proper unicode using UTF-8 and store in self.args
    self.args = {}
    for k in self.request.arguments:
        key = (k if isinstance(k, six.binary_type) else k.encode('latin-1')).decode('utf-8')
        # Invalid unicode (e.g. ?x=%f4) raises HTTPError. This disrupts even
        # error handlers. So if there's invalid unicode, log & continue.
        try:
            self.args[key] = self.get_arguments(k)
        except HTTPError:
            app_log.exception('Invalid URL argument %s', k)
    self._session, self._session_json = None, 'null'
    if self.cache:
        self.cachefile = self.cache()
        self.original_get = self.get
        self.get = self._cached_get
    if self._set_xsrf:
        # Accessing the property generates the XSRF token and sets its cookie
        self.xsrf_token
    # Set the method to the ?x-http-method-override argument or the
    # X-HTTP-Method-Override header if they exist
    if 'x-http-method-override' in self.args:
        self.request.method = self.args.pop('x-http-method-override')[0].upper()
    elif 'X-HTTP-Method-Override' in self.request.headers:
        self.request.method = self.request.headers['X-HTTP-Method-Override'].upper()
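# An assumed client-side example (the URL and endpoint are hypothetical) of the method
# override above: both requests arrive as POST but are dispatched as PUT.
import requests

requests.post('http://localhost:9988/data?x-http-method-override=PUT', data={'x': 1})
requests.post('http://localhost:9988/data', data={'x': 1},
              headers={'X-HTTP-Method-Override': 'PUT'})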
        app_log.info(line.decode('utf-8') + ' live (pid=%s)', pid)
        self.started = True
        # Keep logging capture.js output until proc is killed by another thread
        while hasattr(self, 'proc'):
            line = self.proc.stdout.readline().strip()
            if len(line) == 0:
                app_log.info('%s terminated: pid=%d', script, pid)
                self.started = False
                break
            # Capture won't print anything unless there's a problem or debug is on,
            # so log it at warning level, not info.
            app_log.warning(line.decode('utf-8'))
    except Exception:
        app_log.exception('Ran %s. But %s not at %s', self.cmd, script, self.url)
except Exception:
    app_log.exception('Cannot start Capture')
for name, alert in conf.items():
    _key = cache_key('alert', alert)
    if _key in _cache:
        task = info.alert[name] = _cache[_key]
        task.call_later()
        continue
    app_log.info('Initialising alert: %s', name)
    schedule = {key: alert[key] for key in schedule_keys if key in alert}
    if 'thread' in alert:
        schedule['thread'] = alert['thread']
    schedule['function'] = create_alert(name, alert)
    if schedule['function'] is not None:
        try:
            _cache[_key] = scheduler.Task(name, schedule, info.threadpool,
                                          ioloop=info._main_ioloop)
            info.alert[name] = _cache[_key]
        except Exception:
            app_log.exception('Failed to initialize alert: %s', name)
if status_code in self.error:
    try:
        result = self.error[status_code]['function'](
            status_code=status_code, kwargs=kwargs, handler=self)
        headers = self.error[status_code].get('conf', {}).get('headers', {})
        self._write_headers(headers.items())
        # result may be a generator / list from build_transform,
        # or a str/bytes/unicode from Template.generate. Handle both
        if isinstance(result, (six.string_types, six.binary_type)):
            self.write(result)
        else:
            for item in result:
                self.write(item)
        return
    except Exception:
        app_log.exception('url:%s.error.%d error handler raised an exception:',
                          self.name, status_code)
# If error was not written, use the default error
self._write_error(status_code, **kwargs)
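# A minimal, standalone sketch (not the handler's API; names are illustrative) of the
# same dispatch used above: write strings/bytes as-is, iterate anything else chunk by chunk.
import six


def write_result(write, result):
    if isinstance(result, (six.string_types, six.binary_type)):
        write(result)
    else:
        for item in result:
            write(item)


write_result(print, 'whole template output')                    # single string
write_result(print, (chunk for chunk in ['part 1', 'part 2']))  # generator of chunks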
for index, row in each:
    data['index'], data['row'], data['config'] = index, row, alert
    try:
        retval.append(AttrDict(index=index, row=row, mail=create_mail(data)))
    except Exception as e:
        app_log.exception('alert: %s[%s] templating (row=%r)', name, index, row)
        fail.append({'index': index, 'row': row, 'error': e})

callback = mailer.mail if not callable(callback) else callback
done = []
for v in retval:
    try:
        callback(**v.mail)
    except Exception as e:
        fail.append({'index': v.index, 'row': v.row, 'mail': v.mail, 'error': e})
        app_log.exception('alert: %s[%s] delivery (row=%r)', name, v.index, v.row)
    else:
        done.append(v)
        event = {
            'alert': name, 'service': service, 'from': mailer.email or '',
            'to': '', 'cc': '', 'bcc': '', 'subject': '',
            'datetime': datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%SZ')
        }
        event.update({key: val for key, val in v.mail.items() if key in event})
        event['attachments'] = ', '.join(v.mail.get('attachments', []))
        alert_logger.info(event)

# Run notifications
args = {'done': done, 'fail': fail}
for notification_name in alert.get('notify', []):
    notify = info.alert.get(notification_name)
    if notify is not None:
def process_bytes(self, data):
    try:
        text = six.text_type(data, encoding='utf-8')
        message = json.loads(text)
    except UnicodeError:
        app_log.error('TwitterStream unicode error: %s', data)
        return
    except ValueError:
        # When rate limited, text="Exceeded connection limit for user"
        app_log.error('TwitterStream non-JSON data: %s', text)
        return
    # Process the message (which is usually, but not always, a tweet)
    try:
        self.process_json(message)
    except Exception:
        app_log.exception('TwitterStream could not process message: %s', text)
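# A minimal, standalone sketch (assumptions: plain json, no Twitter client) that
# exercises the same decode-then-parse branching as process_bytes() above.
import json

for payload in (b'{"text": "hi"}', b'\xff\xfe', b'Exceeded connection limit for user'):
    try:
        parsed = json.loads(payload.decode('utf-8'))
        print('message:', parsed)               # would be passed to process_json()
    except UnicodeError:
        print('unicode error, dropped:', payload)
    except ValueError:
        print('non-JSON data, dropped:', payload)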
# Create tasks on the ioloop for each configured schedule and store them in info.schedule
from . import scheduler
_stop_all_tasks(info.schedule)
for name, sched in conf.items():
    _key = cache_key('schedule', sched)
    if _key in _cache:
        task = info.schedule[name] = _cache[_key]
        task.call_later()
        continue
    try:
        app_log.info('Initialising schedule: %s', name)
        _cache[_key] = scheduler.Task(name, sched, info.threadpool,
                                      ioloop=info._main_ioloop)
        info.schedule[name] = _cache[_key]
    except Exception:
        app_log.exception('Failed to initialize schedule: %s', name)