diff --git a/gnocchi/rest/aggregates/api.py b/gnocchi/rest/aggregates/api.py
index e1fd08ca4..d6d8b1118 100644
--- a/gnocchi/rest/aggregates/api.py
+++ b/gnocchi/rest/aggregates/api.py
@@ -612,7 +612,8 @@ def post(self, start=None, stop=None, granularity=None,
                             "detail": references})
 
         metrics = pecan.request.indexer.list_metrics(
-            attribute_filter={"in": {"id": metric_ids}})
+            attribute_filter={"in": {"id": metric_ids}},
+            details=True)
         missing_metric_ids = (set(metric_ids)
                               - set(str(m.id) for m in metrics))
         if missing_metric_ids:
diff --git a/gnocchi/rest/api.py b/gnocchi/rest/api.py
index ad0ff9130..f1b73640a 100644
--- a/gnocchi/rest/api.py
+++ b/gnocchi/rest/api.py
@@ -21,6 +21,7 @@
 import operator
 import uuid
 
+from collections import abc
 import jsonpatch
 from oslo_utils import strutils
 import pecan
@@ -54,10 +55,20 @@
 except ImportError:
     PROMETHEUS_SUPPORTED = False
 
+try:
+    from oslo_db.sqlalchemy import models as sqlalchemy_models
+    SQLALCHEMY_SUPPORTED = True
+except ImportError:
+    SQLALCHEMY_SUPPORTED = False
+
 ATTRGETTER_GRANULARITY = operator.attrgetter("granularity")
 
 LOG = logging.getLogger(__name__)
 
+FLATTEN_DICT_TYPES = ((abc.Mapping, sqlalchemy_models.ModelBase)
+                      if SQLALCHEMY_SUPPORTED
+                      else (abc.Mapping,))
+
 
 def arg_to_list(value):
     if isinstance(value, list):
@@ -91,7 +102,7 @@ def flatten_dict_to_keypairs(d, separator=':'):
     :param separator: symbol between names
     """
     for name, value in sorted(d.items()):
-        if isinstance(value, dict):
+        if isinstance(value, FLATTEN_DICT_TYPES):
             for subname, subvalue in flatten_dict_to_keypairs(value,
                                                               separator):
                 yield ('%s%s%s' % (name, separator, subname), subvalue)
diff --git a/gnocchi/tests/test_rest.py b/gnocchi/tests/test_rest.py
index 118f7ae01..aa565cbe2 100644
--- a/gnocchi/tests/test_rest.py
+++ b/gnocchi/tests/test_rest.py
@@ -1886,6 +1886,58 @@ def test_search_resources_invalid_query(self):
         )
 
 
+class AggregatesTest(RestTest):
+    def test_get_metric_aggregates_with_another_user(self):
+        r = self.app.post_json(
+            "/v1/metric",
+            params={"archive_policy_name": "medium"},
+            status=201)
+        metric_id = r.json['id']
+        self.app.post_json(
+            f"/v1/metric/{metric_id}/measures",
+            params=[{"timestamp": "2013-01-01 12:00:01",
+                     "value": 8},
+                    {"timestamp": "2013-01-01 12:00:02",
+                     "value": 16}])
+        with self.app.use_another_user():
+            self.app.post_json(
+                "/v1/aggregates",
+                params={"operations": ["metric", metric_id, "mean"]},
+                status=403)
+
+    def test_get_metric_aggregates_with_another_user_allowed(self):
+        rid = str(uuid.uuid4())
+        r = self.app.post_json(
+            "/v1/resource/generic",
+            params={
+                "id": rid,
+                "project_id": TestingApp.PROJECT_ID_2,
+                "metrics": {
+                    "disk": {"archive_policy_name": "low"},
+                }
+            })
+        metric_id = r.json['metrics']['disk']
+        self.app.post_json(
+            f"/v1/metric/{metric_id}/measures",
+            params=[{"timestamp": "2013-01-01 12:00:01",
+                     "value": 8},
+                    {"timestamp": "2013-01-01 12:00:02",
+                     "value": 16}])
+        with self.app.use_another_user():
+            r = self.app.post_json(
+                "/v1/aggregates",
+                params={"operations": ["metric", metric_id, "mean"]},
+                status=200)
+        aggregates = r.json
+        self.assertIn(metric_id, aggregates["measures"])
+        measures = aggregates["measures"][metric_id]
+        self.assertIn("mean", measures)
+        self.assertEqual([["2013-01-01T00:00:00+00:00", 86400, 12],
+                          ["2013-01-01T12:00:00+00:00", 3600, 12],
+                          ["2013-01-01T12:00:00+00:00", 300, 12]],
+                         measures["mean"])
+
+
 class QueryStringSearchAttrFilterTest(tests_base.TestCase):
     def _do_test(self, expr, expected):
         req = api.QueryStringSearchAttrFilter._parse(expr)