Merge "Remove "group_filters" parameter from v2 storage interface"
@@ -101,12 +101,12 @@ class ReportController(rest.RestController):
         # enforce it by policy engine
         scope_key = CONF.collect.scope_key
         groupby = [scope_key]
-        group_filters = {scope_key: tenant_id} if tenant_id else None
+        filters = {scope_key: tenant_id} if tenant_id else None
         result = storage.total(
             groupby=groupby,
             begin=begin, end=end,
             metric_types=service,
-            group_filters=group_filters)
+            filters=filters)
 
         if result['total'] < 1:
             return decimal.Decimal('0')
@@ -144,12 +144,12 @@ class ReportController(rest.RestController):
             storage_groupby.append(scope_key)
         if groupby is not None and 'res_type' in groupby:
             storage_groupby.append('type')
-        group_filters = {scope_key: tenant_id} if tenant_id else None
+        filters = {scope_key: tenant_id} if tenant_id else None
         result = storage.total(
             groupby=storage_groupby,
             begin=begin, end=end,
             metric_types=service,
-            group_filters=group_filters)
+            filters=filters)
 
         summarymodels = []
         for res in result['results']:
@@ -62,7 +62,7 @@ class DataFramesController(rest.RestController):
         scope_key = CONF.collect.scope_key
         backend = pecan.request.storage_backend
         dataframes = []
-        group_filters = {scope_key: tenant_id} if tenant_id else None
+        filters = {scope_key: tenant_id} if tenant_id else None
 
         if begin:
             begin = ck_utils.dt2ts(begin)
@@ -71,7 +71,7 @@ class DataFramesController(rest.RestController):
         try:
             resp = backend.retrieve(
                 begin, end,
-                group_filters=group_filters,
+                filters=filters,
                 metric_types=resource_type,
                 paginate=False)
         except storage.NoTimeFrame:
@@ -79,10 +79,10 @@ class V1StorageAdapter(storage_v2.BaseStorage):
         return metric_types
 
     def retrieve(self, begin=None, end=None,
-                 filters=None, group_filters=None,
+                 filters=None,
                  metric_types=None,
                  offset=0, limit=100, paginate=True):
-        tenant_id = group_filters.get('project_id') if group_filters else None
+        tenant_id = filters.get('project_id') if filters else None
         metric_types = self._check_metric_types(metric_types)
         frames = self.storage.get_time_frame(
             begin, end,
@@ -97,9 +97,9 @@ class V1StorageAdapter(storage_v2.BaseStorage):
     def total(self, groupby=None,
               begin=None, end=None,
               metric_types=None,
-              filters=None, group_filters=None,
+              filters=None,
               offset=0, limit=100, paginate=True):
-        tenant_id = group_filters.get('project_id') if group_filters else None
+        tenant_id = filters.get('project_id') if filters else None
 
         storage_gby = []
         if groupby:
@@ -92,7 +92,7 @@ class BaseStorage(object):
 
     @abc.abstractmethod
     def retrieve(self, begin=None, end=None,
-                 filters=None, group_filters=None,
+                 filters=None,
                  metric_types=None,
                  offset=0, limit=1000, paginate=True):
         """Returns the following dict::
@@ -106,10 +106,8 @@ class BaseStorage(object):
         :type begin: datetime
         :param end: End date
         :type end: datetime
-        :param filters: Metadata to filter on. ex: {'flavor_id': '42'}
+        :param filters: Attributes to filter on. ex: {'flavor_id': '42'}
         :type filters: dict
-        :param group_filters: Groupby to filter on. ex: {'project_id': '123ab'}
-        :type group_filters: dict
         :param metric_types: Metric type to filter on.
         :type metric_types: str or list
         :param offset: Offset for pagination
@@ -126,7 +124,7 @@ class BaseStorage(object):
     def total(self, groupby=None,
               begin=None, end=None,
               metric_types=None,
-              filters=None, group_filters=None,
+              filters=None,
               offset=0, limit=1000, paginate=True):
         """Returns a grouped total for given groupby.
 
@@ -139,10 +137,8 @@ class BaseStorage(object):
         :type begin: datetime
         :param end: End date
        :type end: datetime
-        :param filters: Metadata to filter on. ex: {'flavor_id': '42'}
+        :param filters: Attributes to filter on. ex: {'flavor_id': '42'}
         :type filters: dict
-        :param group_filters: Groupby to filter on. ex: {'project_id': '123ab'}
-        :type group_filters: dict
         :param metric_types: Metric type to filter on.
         :type metric_types: str or list
         :param offset: Offset for pagination
@@ -142,10 +142,10 @@ class InfluxClient(object):
 
     @staticmethod
     def _get_filter(key, value):
-        if isinstance(value, six.text_type):
+        if isinstance(value, six.string_types):
             format_string = "{}='{}'"
         elif isinstance(value, (six.integer_types, float)):
-            format_string = "{}='{}'"
+            format_string = "{}={}"
         return format_string.format(key, value)
 
     @staticmethod
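For reference, a standalone sketch of the corrected InfluxClient._get_filter helper from the hunk above: string values are still quoted in the generated InfluxQL predicate, while integers and floats are now rendered without quotes so they are not compared as string literals. The example keys and values are made up.

import six

def _get_filter(key, value):
    # Quote strings, leave numeric values bare (mirrors the fixed method body).
    if isinstance(value, six.string_types):
        format_string = "{}='{}'"
    elif isinstance(value, (six.integer_types, float)):
        format_string = "{}={}"
    return format_string.format(key, value)

print(_get_filter('project_id', '123ab'))  # -> project_id='123ab'
print(_get_filter('qty', 42))              # -> qty=42
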
@@ -258,16 +258,6 @@ class InfluxStorage(v2_storage.BaseStorage):
 
         return begin, end
 
-    @staticmethod
-    def _build_filters(filters, group_filters):
-        output = None
-        if filters and group_filters:
-            output = copy.deepcopy(filters)
-            output.update(group_filters)
-        elif group_filters:
-            output = group_filters
-        return output
-
     @staticmethod
     def _point_to_dataframe_entry(point):
         groupby = (point.pop('groupby', None) or '').split('|')
@@ -310,11 +300,10 @@ class InfluxStorage(v2_storage.BaseStorage):
         return output
 
     def retrieve(self, begin=None, end=None,
-                 filters=None, group_filters=None,
+                 filters=None,
                  metric_types=None,
                  offset=0, limit=1000, paginate=True):
         begin, end = self._check_begin_end(begin, end)
-        filters = self._build_filters(filters, group_filters)
         total, resp = self._conn.retrieve(
             metric_types, filters, begin, end, offset, limit, paginate)
 
@@ -345,11 +334,10 @@ class InfluxStorage(v2_storage.BaseStorage):
     def total(self, groupby=None,
               begin=None, end=None,
               metric_types=None,
-              filters=None, group_filters=None,
+              filters=None,
               offset=0, limit=1000, paginate=True):
 
         begin, end = self._check_begin_end(begin, end)
-        filters = self._build_filters(filters, group_filters)
 
         total = self._conn.get_total(
             metric_types, begin, end, groupby, filters)
@@ -102,22 +102,22 @@ class StorageDataframeTest(StorageTest):
 
     def test_get_frame_on_one_period_and_one_tenant(self):
         self.insert_different_data_two_tenants()
-        group_filters = {'project_id': self._tenant_id}
+        filters = {'project_id': self._tenant_id}
         data = self.storage.retrieve(
             begin=samples.FIRST_PERIOD_BEGIN,
             end=samples.FIRST_PERIOD_END,
-            group_filters=group_filters)['dataframes']
+            filters=filters)['dataframes']
         self.assertEqual(2, len(data))
 
     def test_get_frame_on_one_period_and_one_tenant_outside_data(self):
         self.insert_different_data_two_tenants()
-        group_filters = {'project_id': self._other_tenant_id}
+        filters = {'project_id': self._other_tenant_id}
         self.assertRaises(
             storage.NoTimeFrame,
             self.storage.retrieve,
             begin=samples.FIRST_PERIOD_BEGIN,
             end=samples.FIRST_PERIOD_END,
-            group_filters=group_filters)
+            filters=filters)
 
     def test_get_frame_on_two_periods(self):
         self.insert_different_data_two_tenants()
@@ -174,11 +174,11 @@ class StorageTotalTest(StorageTest):
         begin = ck_utils.ts2dt(samples.FIRST_PERIOD_BEGIN)
         end = ck_utils.ts2dt(samples.FIRST_PERIOD_END)
         self.insert_data()
-        group_filters = {'project_id': self._tenant_id}
+        filters = {'project_id': self._tenant_id}
         total = self.storage.total(
             begin=begin,
             end=end,
-            group_filters=group_filters)['results']
+            filters=filters)['results']
         self.assertEqual(1, len(total))
         self.assertEqual(0.5537, total[0]["rate"])
         self.assertEqual(self._tenant_id, total[0]["tenant_id"])
@@ -119,14 +119,14 @@ class StorageUnitTest(TestCase):
         begin = datetime.datetime(2018, 1, 1)
         end = datetime.datetime(2018, 1, 1, 4)
 
-        group_filters = {'project_id': self._project_id}
+        filters = {'project_id': self._project_id}
         self._compare_get_total_result_with_expected(
             expected_qty,
             expected_total,
             1,
             self.storage.total(begin=begin,
                                end=end,
-                               group_filters=group_filters),
+                               filters=filters),
         )
 
     def test_get_total_all_scopes_one_period(self):
@@ -151,14 +151,14 @@ class StorageUnitTest(TestCase):
         begin = datetime.datetime(2018, 1, 1)
         end = datetime.datetime(2018, 1, 1, 1)
 
-        group_filters = {'project_id': self._project_id}
+        filters = {'project_id': self._project_id}
         self._compare_get_total_result_with_expected(
             expected_qty,
             expected_total,
             1,
             self.storage.total(begin=begin,
                                end=end,
-                               group_filters=group_filters),
+                               filters=filters),
         )
 
     def test_get_total_all_scopes_all_periods_groupby_project_id(self):
@@ -309,9 +309,9 @@ class StorageUnitTest(TestCase):
         begin = datetime.datetime(2018, 1, 1)
         end = datetime.datetime(2018, 1, 1, 1)
 
-        group_filters = {'project_id': self._project_id}
+        filters = {'project_id': self._project_id}
         frames = self.storage.retrieve(begin=begin, end=end,
-                                       group_filters=group_filters,
+                                       filters=filters,
                                        metric_types=['image.size', 'instance'])
         self.assertEqual(frames['total'], expected_length)
 
@@ -103,10 +103,10 @@ class WriteOrchestrator(object):
         if not timeframe_end:
             timeframe_end = timeframe + self._period
         try:
-            group_filters = {'project_id': self._tenant_id}
+            filters = {'project_id': self._tenant_id}
             data = self._storage.retrieve(begin=timeframe,
                                           end=timeframe_end,
-                                          group_filters=group_filters,
+                                          filters=filters,
                                           paginate=False)
             for df in data['dataframes']:
                 for service, resources in df['usage'].items():