Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
gramex.paths['base'] = Path('.')
# If we run with updated parameters, save for next run under the .run config
run_config = app_config.setdefault('run', {})
for key, val in args.items():
if key not in app_keys:
run_config[key] = app_config.pop(key)
save_user_config(appname, app_config)
# Tell the user what configs are used
cline = ' '.join('--%s=%s' % arg for arg in flatten_config(app_config.get('run', {})))
app_log.info('Gramex %s | %s %s | %s | Python %s', gramex.__version__, appname, cline,
os.getcwd(), sys.version.replace('\n', ' '))
gramex.init(cmd=AttrDict(app=app_config['run']))
elif appname in apps_config['user']:
# The user configuration has a wrong path. Inform user
app_log.error('%s: no directory %s', appname, app_config.target)
app_log.error('Run "gramex uninstall %s" and try again.', appname)
else:
app_log.error('%s: no directory %s', appname, app_config.target)
def _run_console(cmd, **kwargs):
    '''
    Run ``cmd`` and pipe its output to the console (sys.stdout / sys.stderr).

    Parameters:
        cmd (str): command line to run. Split into arguments with
            ``shlex.split`` -- no shell is involved, so shell metacharacters
            are not interpreted.
        **kwargs: passed through to ``subprocess.Popen`` (e.g. ``cwd``, ``env``)

    Returns:
        int: the command's exit code (0 on success). The original version
        discarded this, leaving callers unable to detect command failure.

    Raises:
        OSError: if the command executable cannot be found. The error is
            logged before being re-raised.
    '''
    cmd = shlex.split(cmd)
    try:
        # Inherit the console streams directly so the child's output appears
        # live, rather than being buffered through a pipe.
        proc = Popen(cmd, bufsize=-1, stdout=sys.stdout, stderr=sys.stderr,
                     universal_newlines=True, **kwargs)
    except OSError:
        app_log.error('Cannot find command: %s', cmd[0])
        raise
    # Wait for the process to finish (no pipes to drain -- streams are inherited)
    proc.communicate()
    # Report the exit status so callers can tell whether the command failed
    return proc.returncode
self.started = True
except requests.ReadTimeout:
# If capture.js doesn't respond immediately, we haven't started
app_log.error('url: %s timed out', self.url)
except requests.ConnectionError:
# Try starting the process again
app_log.info('Starting %s via %s', script, self.cmd)
self.close()
# self.cmd is taken from the YAML configuration. Safe to run
self.proc = Popen(shlex.split(self.cmd), stdout=PIPE, stderr=STDOUT) # nosec
self.proc.poll()
atexit.register(self.close)
# TODO: what if readline() does not return quickly?
line = self.proc.stdout.readline().strip()
if not self.first_line_re.search(line):
return app_log.error('cmd: %s invalid. Returned "%s"', self.cmd, line)
app_log.info('Pinging %s at %s', script, self.url)
try:
r = requests.get(self.url, timeout=self.timeout)
self._validate_server(r)
pid = self.proc.pid
app_log.info(line.decode('utf-8') + ' live (pid=%s)', pid)
self.started = True
# Keep logging capture.js output until proc is killed by another thread
while hasattr(self, 'proc'):
line = self.proc.stdout.readline().strip()
if len(line) == 0:
app_log.info('%s terminated: pid=%d', script, pid)
self.started = False
break
# Capture won't print anything, unless there's a problem, or if debug is on.
# So log it at warning level not info.
def url(conf):
'''Set up the tornado web app URL handlers'''
handlers = []
# Sort the handlers in descending order of priority
specs = sorted(conf.items(), key=_sort_url_patterns, reverse=True)
for name, spec in specs:
_key = cache_key('url', spec)
if _key in _cache:
handlers.append(_cache[_key])
continue
if 'handler' not in spec:
app_log.error('url: %s: no handler specified')
continue
app_log.debug('url: %s (%s) %s', name, spec.handler, spec.get('priority', ''))
urlspec = AttrDict(spec)
handler = locate(spec.handler, modules=['gramex.handlers'])
if handler is None:
app_log.error('url: %s: ignoring missing handler %s', name, spec.handler)
continue
# Create a subclass of the handler with additional attributes.
class_vars = {'name': name, 'conf': spec}
# If there's a cache section, get the cache method for use by BaseHandler
if 'cache' in urlspec:
class_vars['cache'] = _cache_generator(urlspec['cache'], name=name)
else:
class_vars['cache'] = None
# PY27 type() requires the class name to be a string, not unicode
app_log.error('TwitterStream HTTP %d (timeout): %s. Retry: %ss',
e.code, e.response, self.delay)
# For server errors, start with 5 seconds and double until 320 seconds
elif INTERNAL_SERVER_ERROR <= e.code <= GATEWAY_TIMEOUT:
self.delay = min(320, self.delay * 2 if self.delay else 1) # noqa: 320 seconds
app_log.error('TwitterStream HTTP %d: %s. Retry: %ss',
e.code, e.response, self.delay)
# For client errors (e.g. wrong params), disable connection
else:
self.delay, self.enabled = 5, False
app_log.error('TwitterStream HTTP %d: %s. Disabling', e.code, e.response)
except Exception as e:
# Other errors are possible, such as IOError.
# Increase the delay in reconnects by 250ms each attempt, up to 16 seconds.
self.delay = min(16, self.delay + 0.25) # noqa: 16 seconds, 0.25 seconds
app_log.error('TwitterStream exception %s. Retry: %ss', e, self.delay)
# Receive the response.
# Note: read_message() cannot be called again while a request is running.
# (Yes, that's odd. Maybe Anand is missing something.)
# So wait until the read_future is cleared.
while self.conn.read_future is not None:
yield sleep(self._delay)
msg = yield self.conn.read_message()
# If node has died, clear the connection to restart it.
if msg is None:
self.conn = None
raise WebSocketClosedError()
# Parse the result as JSON. Log errors if any
result = json.loads(msg)
if result['error']:
app_log.error(result['error']['stack'])
raise Return(result)
Typically, people install Conda AND R (in any order), and use the system R
(rather than the conda R) by placing it before Conda in the PATH.
But the system R does not work with Conda rpy2. So we check if Conda R
exists and return its path, so that it can be used as R_HOME.
'''
try:
from conda.base.context import context
except ImportError:
app_log.error('Anaconda not installed. Cannot use Anaconda R')
return None
r_home = os.path.normpath(os.path.join(context.root_prefix, 'lib', 'R'))
if os.path.isdir(os.path.join(r_home, 'bin')):
return r_home
app_log.error('Anaconda R not installed')
return None
elif isinstance(alert['data'], list):
datasets = {'data': alert['data']}
elif isinstance(alert['data'], dict):
for key, dataset in alert['data'].items():
if isinstance(dataset, string_types):
datasets[key] = {'url': dataset}
elif isinstance(dataset, list) or 'url' in dataset:
datasets[key] = dataset
else:
app_log.error('alert: %s.data: %s is missing url:', name, key)
else:
app_log.error('alert: %s.data: must be a data file or dict. Not %s',
name, repr(alert['data']))
if 'each' in alert and alert['each'] not in datasets:
app_log.error('alert: %s.each: %s is not in data:', name, alert['each'])
return
vars = {key: None for key in datasets}
vars.update({'config': None, 'args': None})
condition = build_transform(
{'function': alert.get('condition', 'True')},
filename='alert: %s' % name, vars=vars, iter=False)
alert_logger = logging.getLogger('gramex.alert')
def load_datasets(data, each):
'''
Modify data by load datasets and filter by condition.
Modify each to apply the each: argument, else return (None, None)
'''
for key, val in datasets.items():
if len(cmd) < 1:
app_log.error(show_usage('uninstall'))
return
if len(cmd) > 1 and args:
app_log.error('Arguments allowed only with single app. Ignoring %s', ', '.join(cmd[1:]))
cmd = cmd[:1]
for appname in cmd:
app_log.info('Uninstalling: %s', appname)
# Delete the target directory if it exists
app_config = get_app_config(appname, args)
if os.path.exists(app_config.target):
safe_rmtree(app_config.target)
else:
app_log.error('No directory %s to remove', app_config.target)
save_user_config(appname, None)