# NOTE: extraction artifact — Snyk advertisement banner from the hosting page,
# not part of the original module.
except (UnicodeDecodeError, SyntaxError,
pyparsing.ParseException):
api.abort(400, {
"cause": "Value error",
"detail": "line",
"reason": "Unable to parse line %d" % (
line_number + 1),
})
if timestamp is None:
timestamp = now
try:
resource_id = tags.pop(tag_to_rid)
except KeyError:
api.abort(400, {
"cause": "Value error",
"detail": "key",
"reason": "Unable to find key `%s' in tags" % (
tag_to_rid),
})
tags_str = (("@" if tags else "") +
",".join(("%s=%s" % (k, tags[k]))
for k in sorted(tags)))
for field_name, field_value in six.iteritems(fields):
if isinstance(field_value, str):
# We do not support field value that are not numerical
continue
# Metric name is the:
def get_measures_or_abort(references, operations, start,
                          stop, granularity, needed_overlap, fill):
    """Run the aggregation processor, mapping storage errors to HTTP aborts.

    Delegates to ``processor.get_measures`` and translates the known
    failure modes into API errors: un-aggregable timeseries become a 400,
    unknown metrics or aggregation methods become a 404.
    """
    driver = pecan.request.storage
    try:
        return processor.get_measures(driver, references, operations,
                                      start, stop, granularity,
                                      needed_overlap, fill)
    except exceptions.UnAggregableTimeseries as e:
        api.abort(400, e)
    # TODO(sileht): We currently got only one metric for these exceptions but
    # we can improve processor to returns all missing metrics at once, so we
    # returns a list for the future
    except storage.MetricDoesNotExist as e:
        api.abort(404, {"cause": "Unknown metrics",
                        "detail": [str(e.metric.id)]})
    except storage.AggregationDoesNotExist as e:
        api.abort(404, {"cause": "Metrics with unknown aggregation",
                        "detail": [(str(e.metric.id), e.method)]})
return processor.get_measures(
pecan.request.storage,
references,
operations,
start, stop,
granularity, needed_overlap, fill)
except exceptions.UnAggregableTimeseries as e:
api.abort(400, e)
# TODO(sileht): We currently got only one metric for these exceptions but
# we can improve processor to returns all missing metrics at once, so we
# returns a list for the future
except storage.MetricDoesNotExist as e:
api.abort(404, {"cause": "Unknown metrics",
"detail": [str(e.metric.id)]})
except storage.AggregationDoesNotExist as e:
api.abort(404, {"cause": "Metrics with unknown aggregation",
"detail": [(str(e.metric.id), e.method)]})
project_id = request.headers.get("X-Project-Id")
try:
# Check if the policy allows the user to list metrics linked
# to their created_by_project
api.enforce(rule, {
"created_by_project_id": project_id,
})
except webob.exc.HTTPForbidden:
pass
else:
policy_filter.append(
{"like": {"creator": "%:" + project_id}})
if not policy_filter:
# We need to have at least one policy filter in place
api.abort(403, "Insufficient privileges")
return {"or": policy_filter}
return tuple((attr, r[attr]) for attr in groupby)
results = []
for key, resources in itertools.groupby(resources, groupper):
try:
results.append({
"group": dict(key),
"measures": self._get_measures_by_name(
resources, references, body["operations"],
start, stop, granularity, needed_overlap, fill,
details=details)
})
except indexer.NoSuchMetric:
pass
if not results:
api.abort(
400,
indexer.NoSuchMetric(set((m for (m, a) in references))))
return results
else:
try:
metric_ids = set(six.text_type(utils.UUID(m))
for (m, a) in references)
except ValueError as e:
api.abort(400, {"cause": "Invalid metric references",
"reason": six.text_type(e),
"detail": references})
metrics = pecan.request.indexer.list_metrics(
attribute_filter={"in": {"id": metric_ids}})
missing_metric_ids = (set(metric_ids)
def ResourceTypeSchema(resource_type):
    """Schema validator: check *resource_type* is known to the indexer.

    Aborts the request with a 400 when the indexer reports the type as
    unknown; otherwise returns the value unchanged.
    """
    lookup = pecan.request.indexer.get_resource_type
    try:
        lookup(resource_type)
    except indexer.NoSuchResourceType as e:
        api.abort(400, e)
    return resource_type
def OperationsSchema(v):
    """Schema validator for an aggregation operations expression.

    A textual expression is first parsed into its nested-list form with
    pyparsing (aborting with a 400 on a parse failure); the resulting
    structure is then validated against the operations schema.
    """
    if isinstance(v, six.text_type):
        try:
            grammar = pyparsing.OneOrMore(pyparsing.nestedExpr())
            v = grammar.parseString(v).asList()[0]
        except pyparsing.ParseException as e:
            api.abort(400, {"cause": "Invalid operations",
                            "reason": "Fail to parse the operations string",
                            "detail": six.text_type(e)})
    schema = voluptuous.Schema(voluptuous.Any(*OperationsSchemaBase),
                               required=True)
    return schema(v)