# Parsed lines are grouped per resource id and metric name:
# resources = { resource_id: { metric_name: [ (timestamp, value), … ], … }, … }
resources = collections.defaultdict(
lambda: collections.defaultdict(list))
for line_number, line in enumerate(chunk.split(b"\n")):
# Ignore empty lines
if not line:
continue
try:
measurement, tags, fields, timestamp = (
line_protocol.parseString(line.decode())
)
except (UnicodeDecodeError, SyntaxError,
pyparsing.ParseException):
api.abort(400, {
"cause": "Value error",
"detail": "line",
"reason": "Unable to parse line %d" % (
line_number + 1),
})
if timestamp is None:
timestamp = now
try:
resource_id = tags.pop(tag_to_rid)
except KeyError:
api.abort(400, {
"cause": "Value error",
"detail": "key",
"reason": "Unable to find key `%s' in tags" % (
def _write_get_lines():
encoding = pecan.request.headers.get('Transfer-Encoding', "").lower()
if encoding == "chunked":
# TODO(sileht): Support reading chunk without uwsgi when
# pecan.request.environ['wsgi.input_terminated'] is set.
# https://github.com/unbit/uwsgi/issues/1428
if uwsgi is None:
api.abort(
501, {"cause": "Not implemented error",
"reason": "This server is not running with uwsgi"})
return encoding, uwsgi.chunked_read()
return None, pecan.request.body
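# Sketch of a consumption loop for _write_get_lines() (an assumed usage
# pattern, not lifted verbatim from the project): chunked requests are drained
# chunk by chunk, while a plain body is processed exactly once.
def _iter_request_chunks():
    while True:
        encoding, chunk = _write_get_lines()
        if not chunk:                 # empty chunk/body: nothing left to read
            break
        yield chunk
        if encoding != "chunked":     # a non-chunked body only comes once
            break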
def post_query(self, q=None):
if q is not None:
try:
query = query_parser.parseString(q)
except pyparsing.ParseException:
api.abort(501, {"cause": "Not implemented error",
"detail": "q",
"reason": "Query not implemented"})
resource_type = query[0]
api.enforce("create resource type", {"name": resource_type})
schema = pecan.request.indexer.get_resource_type_schema()
rt = schema.resource_type_from_dict(resource_type, {}, 'creating')
try:
pecan.request.indexer.create_resource_type(rt)
except indexer.ResourceTypeAlreadyExists:
pass
pecan.response.status = 204
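# Minimal pyparsing sketch (an assumed grammar, not the project's actual
# query_parser) matching the usage above: the parser accepts
# "CREATE DATABASE <name>" and yields the name as the first parse result,
# which the handler then treats as a resource-type name.
import pyparsing

_name = pyparsing.Word(pyparsing.alphanums + "-_.")
_create_db = (pyparsing.CaselessKeyword("create").suppress()
              + pyparsing.CaselessKeyword("database").suppress()
              + _name)

parsed = _create_db.parseString("CREATE DATABASE telegraf")
assert parsed[0] == "telegraf"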
def get_current_user(request):
user = request.remote_user
if user is None:
api.abort(401)
return user.decode('iso-8859-1')
try:
# Check if the policy allows the user to list resources linked
# to their created_by_project
api.enforce(rule, target)
except webob.exc.HTTPForbidden:
pass
else:
if project_id:
policy_filter.append(
{"like": {"creator": "%:" + project_id}})
else:
policy_filter.append({"=": {"creator": None}})
if not policy_filter:
# We need to have at least one policy filter in place
api.abort(403, "Insufficient privileges")
return {"or": policy_filter}
metrics = pecan.request.indexer.list_metrics(
attribute_filter={"in": {"id": metric_ids}})
missing_metric_ids = (set(metric_ids)
- set(six.text_type(m.id) for m in metrics))
if missing_metric_ids:
api.abort(404, {"cause": "Unknown metrics",
"reason": "Provided metrics don't exists",
"detail": missing_metric_ids})
number_of_metrics = len(metrics)
if number_of_metrics == 0:
return []
for metric in metrics:
api.enforce("get metric", metric)
metrics_by_ids = dict((six.text_type(m.id), m) for m in metrics)
references = [processor.MetricReference(metrics_by_ids[m], a)
for (m, a) in references]
response = {
"measures": get_measures_or_abort(
references, body["operations"],
start, stop, granularity, needed_overlap, fill)
}
if details:
response["references"] = metrics
return response
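# Standalone sketch (fake objects, assumed shapes) of the lookup built above:
# requested (metric_id, aggregation) pairs are resolved to the indexed metric
# objects keyed by their stringified id before the measures are computed.
class _FakeMetric(object):
    def __init__(self, id):
        self.id = id

requested = [("aaaa-1111", "mean"), ("bbbb-2222", "max")]
fake_metrics = [_FakeMetric("aaaa-1111"), _FakeMetric("bbbb-2222")]
by_id = dict((str(m.id), m) for m in fake_metrics)
resolved = [(by_id[mid], agg) for (mid, agg) in requested]
assert resolved[0] == (fake_metrics[0], "mean")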
def post(self, start=None, stop=None, granularity=None,
needed_overlap=None, fill=None, groupby=None, **kwargs):
details = api.get_bool_param('details', kwargs)
if fill is None and needed_overlap is None:
fill = "dropna"
start, stop, granularity, needed_overlap, fill = api.validate_qs(
start, stop, granularity, needed_overlap, fill)
body = api.deserialize_and_validate(self.FetchSchema)
references = extract_references(body["operations"])
if not references:
api.abort(400, {"cause": "Operations is invalid",
"reason": "At least one 'metric' is required",
"detail": body["operations"]})
if "resource_type" in body:
attr_filter = body["search"]
policy_filter = (
pecan.request.auth_helper.get_resource_policy_filter(
pecan.request, "search resource", body["resource_type"]))
if policy_filter:
if attr_filter:
attr_filter = {"and": [
policy_filter,
if "resource_type" in body:
attr_filter = body["search"]
policy_filter = (
pecan.request.auth_helper.get_resource_policy_filter(
pecan.request, "search resource", body["resource_type"]))
if policy_filter:
if attr_filter:
attr_filter = {"and": [
policy_filter,
attr_filter
]}
else:
attr_filter = policy_filter
groupby = sorted(set(api.arg_to_list(groupby)))
sorts = groupby if groupby else api.RESOURCE_DEFAULT_PAGINATION
try:
resources = pecan.request.indexer.list_resources(
body["resource_type"],
attribute_filter=attr_filter,
sorts=sorts)
except indexer.IndexerException as e:
api.abort(400, six.text_type(e))
if not groupby:
try:
return self._get_measures_by_name(
resources, references, body["operations"], start, stop,
granularity, needed_overlap, fill, details=details)
except indexer.NoSuchMetric as e:
api.abort(400, e)
def ResourceTypeSchema(resource_type):
try:
pecan.request.indexer.get_resource_type(resource_type)
except indexer.NoSuchResourceType as e:
api.abort(400, e)
return resource_type
class AggregatesController(rest.RestController):
FetchSchema = voluptuous.Any({
"operations": OperationsSchema
}, {
"operations": OperationsSchema,
"resource_type": ResourceTypeSchema,
"search": voluptuous.Any(api.ResourceSearchSchema,
api.QueryStringSearchAttrFilter.parse),
})
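# A hypothetical body accepted by the second FetchSchema alternative above.
# The exact operations grammar comes from OperationsSchema (not shown here),
# so the "operations" value below is a placeholder, not a verified example.
example_fetch_body = {
    "operations": "(metric cpu.util mean)",   # placeholder operation string
    "resource_type": "generic",               # assumed resource-type name
    "search": {"=": {"creator": "admin"}},    # assumed attribute filter
}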