Merge "Remove "group_filters" parameter from v2 storage interface"
commit 559a16d710
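
For orientation, a minimal before/after sketch of the call pattern this change targets (names are taken from the hunks below; the snippet itself is illustrative, not part of the diff): the dedicated group_filters dict disappears and scope constraints travel in the generic filters argument instead.

    # before: scope constraint passed through a dedicated group_filters dict
    group_filters = {scope_key: tenant_id} if tenant_id else None
    result = storage.total(groupby=groupby, begin=begin, end=end,
                           metric_types=service, group_filters=group_filters)

    # after: the same constraint is passed in the single 'filters' dict
    filters = {scope_key: tenant_id} if tenant_id else None
    result = storage.total(groupby=groupby, begin=begin, end=end,
                           metric_types=service, filters=filters)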
@@ -101,12 +101,12 @@ class ReportController(rest.RestController):
         # enforce it by policy engine
         scope_key = CONF.collect.scope_key
         groupby = [scope_key]
-        group_filters = {scope_key: tenant_id} if tenant_id else None
+        filters = {scope_key: tenant_id} if tenant_id else None
         result = storage.total(
             groupby=groupby,
             begin=begin, end=end,
             metric_types=service,
-            group_filters=group_filters)
+            filters=filters)
 
         if result['total'] < 1:
             return decimal.Decimal('0')
@@ -144,12 +144,12 @@ class ReportController(rest.RestController):
             storage_groupby.append(scope_key)
         if groupby is not None and 'res_type' in groupby:
             storage_groupby.append('type')
-        group_filters = {scope_key: tenant_id} if tenant_id else None
+        filters = {scope_key: tenant_id} if tenant_id else None
         result = storage.total(
             groupby=storage_groupby,
             begin=begin, end=end,
             metric_types=service,
-            group_filters=group_filters)
+            filters=filters)
 
         summarymodels = []
         for res in result['results']:
@@ -62,7 +62,7 @@ class DataFramesController(rest.RestController):
         scope_key = CONF.collect.scope_key
         backend = pecan.request.storage_backend
         dataframes = []
-        group_filters = {scope_key: tenant_id} if tenant_id else None
+        filters = {scope_key: tenant_id} if tenant_id else None
 
         if begin:
             begin = ck_utils.dt2ts(begin)
@@ -71,7 +71,7 @@ class DataFramesController(rest.RestController):
         try:
             resp = backend.retrieve(
                 begin, end,
-                group_filters=group_filters,
+                filters=filters,
                 metric_types=resource_type,
                 paginate=False)
         except storage.NoTimeFrame:
@@ -79,10 +79,10 @@ class V1StorageAdapter(storage_v2.BaseStorage):
         return metric_types
 
     def retrieve(self, begin=None, end=None,
-                 filters=None, group_filters=None,
+                 filters=None,
                  metric_types=None,
                  offset=0, limit=100, paginate=True):
-        tenant_id = group_filters.get('project_id') if group_filters else None
+        tenant_id = filters.get('project_id') if filters else None
         metric_types = self._check_metric_types(metric_types)
         frames = self.storage.get_time_frame(
             begin, end,
@@ -97,9 +97,9 @@ class V1StorageAdapter(storage_v2.BaseStorage):
     def total(self, groupby=None,
               begin=None, end=None,
               metric_types=None,
-              filters=None, group_filters=None,
+              filters=None,
               offset=0, limit=100, paginate=True):
-        tenant_id = group_filters.get('project_id') if group_filters else None
+        tenant_id = filters.get('project_id') if filters else None
 
         storage_gby = []
         if groupby:
@@ -92,7 +92,7 @@ class BaseStorage(object):
 
     @abc.abstractmethod
     def retrieve(self, begin=None, end=None,
-                 filters=None, group_filters=None,
+                 filters=None,
                  metric_types=None,
                  offset=0, limit=1000, paginate=True):
         """Returns the following dict::
@@ -106,10 +106,8 @@ class BaseStorage(object):
         :type begin: datetime
         :param end: End date
         :type end: datetime
-        :param filters: Metadata to filter on. ex: {'flavor_id': '42'}
+        :param filters: Attributes to filter on. ex: {'flavor_id': '42'}
         :type filters: dict
-        :param group_filters: Groupby to filter on. ex: {'project_id': '123ab'}
-        :type group_filters: dict
         :param metric_types: Metric type to filter on.
         :type metric_types: str or list
         :param offset: Offset for pagination
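
As a brief illustration (hypothetical caller; example values reused from the docstring above), attribute and scope constraints are now passed together in the single filters dict:

    # hypothetical v2 storage call after this change
    backend.retrieve(begin=begin, end=end,
                     filters={'flavor_id': '42', 'project_id': '123ab'},
                     metric_types='instance')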
@@ -126,7 +124,7 @@ class BaseStorage(object):
     def total(self, groupby=None,
               begin=None, end=None,
               metric_types=None,
-              filters=None, group_filters=None,
+              filters=None,
               offset=0, limit=1000, paginate=True):
         """Returns a grouped total for given groupby.
 
@@ -139,10 +137,8 @@ class BaseStorage(object):
         :type begin: datetime
         :param end: End date
         :type end: datetime
-        :param filters: Metadata to filter on. ex: {'flavor_id': '42'}
+        :param filters: Attributes to filter on. ex: {'flavor_id': '42'}
         :type filters: dict
-        :param group_filters: Groupby to filter on. ex: {'project_id': '123ab'}
-        :type group_filters: dict
         :param metric_types: Metric type to filter on.
         :type metric_types: str or list
         :param offset: Offset for pagination
@@ -142,10 +142,10 @@ class InfluxClient(object):
 
     @staticmethod
    def _get_filter(key, value):
-        if isinstance(value, six.text_type):
+        if isinstance(value, six.string_types):
             format_string = "{}='{}'"
         elif isinstance(value, (six.integer_types, float)):
-            format_string = "{}='{}'"
+            format_string = "{}={}"
         return format_string.format(key, value)
 
     @staticmethod
@@ -258,16 +258,6 @@ class InfluxStorage(v2_storage.BaseStorage):
 
         return begin, end
 
-    @staticmethod
-    def _build_filters(filters, group_filters):
-        output = None
-        if filters and group_filters:
-            output = copy.deepcopy(filters)
-            output.update(group_filters)
-        elif group_filters:
-            output = group_filters
-        return output
-
     @staticmethod
     def _point_to_dataframe_entry(point):
         groupby = (point.pop('groupby', None) or '').split('|')
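
With group_filters gone the backend no longer needs to merge two dicts, so _build_filters is dropped. A caller that still keeps attribute and scope constraints in separate dicts can merge them itself before calling the API, e.g. (hypothetical names):

    # hypothetical pre-merge done by the caller instead of the backend
    filters = dict(attribute_filters or {}, **(scope_filters or {}))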
@@ -310,11 +300,10 @@ class InfluxStorage(v2_storage.BaseStorage):
         return output
 
     def retrieve(self, begin=None, end=None,
-                 filters=None, group_filters=None,
+                 filters=None,
                  metric_types=None,
                  offset=0, limit=1000, paginate=True):
         begin, end = self._check_begin_end(begin, end)
-        filters = self._build_filters(filters, group_filters)
         total, resp = self._conn.retrieve(
             metric_types, filters, begin, end, offset, limit, paginate)
 
@@ -345,11 +334,10 @@ class InfluxStorage(v2_storage.BaseStorage):
     def total(self, groupby=None,
               begin=None, end=None,
               metric_types=None,
-              filters=None, group_filters=None,
+              filters=None,
               offset=0, limit=1000, paginate=True):
 
         begin, end = self._check_begin_end(begin, end)
-        filters = self._build_filters(filters, group_filters)
 
         total = self._conn.get_total(
             metric_types, begin, end, groupby, filters)
@@ -102,22 +102,22 @@ class StorageDataframeTest(StorageTest):
 
     def test_get_frame_on_one_period_and_one_tenant(self):
         self.insert_different_data_two_tenants()
-        group_filters = {'project_id': self._tenant_id}
+        filters = {'project_id': self._tenant_id}
         data = self.storage.retrieve(
             begin=samples.FIRST_PERIOD_BEGIN,
             end=samples.FIRST_PERIOD_END,
-            group_filters=group_filters)['dataframes']
+            filters=filters)['dataframes']
         self.assertEqual(2, len(data))
 
     def test_get_frame_on_one_period_and_one_tenant_outside_data(self):
         self.insert_different_data_two_tenants()
-        group_filters = {'project_id': self._other_tenant_id}
+        filters = {'project_id': self._other_tenant_id}
         self.assertRaises(
             storage.NoTimeFrame,
             self.storage.retrieve,
             begin=samples.FIRST_PERIOD_BEGIN,
             end=samples.FIRST_PERIOD_END,
-            group_filters=group_filters)
+            filters=filters)
 
     def test_get_frame_on_two_periods(self):
         self.insert_different_data_two_tenants()
@@ -174,11 +174,11 @@ class StorageTotalTest(StorageTest):
         begin = ck_utils.ts2dt(samples.FIRST_PERIOD_BEGIN)
         end = ck_utils.ts2dt(samples.FIRST_PERIOD_END)
         self.insert_data()
-        group_filters = {'project_id': self._tenant_id}
+        filters = {'project_id': self._tenant_id}
         total = self.storage.total(
             begin=begin,
             end=end,
-            group_filters=group_filters)['results']
+            filters=filters)['results']
         self.assertEqual(1, len(total))
         self.assertEqual(0.5537, total[0]["rate"])
         self.assertEqual(self._tenant_id, total[0]["tenant_id"])
@@ -119,14 +119,14 @@ class StorageUnitTest(TestCase):
         begin = datetime.datetime(2018, 1, 1)
         end = datetime.datetime(2018, 1, 1, 4)
 
-        group_filters = {'project_id': self._project_id}
+        filters = {'project_id': self._project_id}
         self._compare_get_total_result_with_expected(
             expected_qty,
             expected_total,
             1,
             self.storage.total(begin=begin,
                                end=end,
-                               group_filters=group_filters),
+                               filters=filters),
         )
 
     def test_get_total_all_scopes_one_period(self):
@@ -151,14 +151,14 @@ class StorageUnitTest(TestCase):
         begin = datetime.datetime(2018, 1, 1)
         end = datetime.datetime(2018, 1, 1, 1)
 
-        group_filters = {'project_id': self._project_id}
+        filters = {'project_id': self._project_id}
         self._compare_get_total_result_with_expected(
             expected_qty,
             expected_total,
             1,
             self.storage.total(begin=begin,
                                end=end,
-                               group_filters=group_filters),
+                               filters=filters),
         )
 
     def test_get_total_all_scopes_all_periods_groupby_project_id(self):
@@ -309,9 +309,9 @@ class StorageUnitTest(TestCase):
         begin = datetime.datetime(2018, 1, 1)
         end = datetime.datetime(2018, 1, 1, 1)
 
-        group_filters = {'project_id': self._project_id}
+        filters = {'project_id': self._project_id}
         frames = self.storage.retrieve(begin=begin, end=end,
-                                       group_filters=group_filters,
+                                       filters=filters,
                                        metric_types=['image.size', 'instance'])
         self.assertEqual(frames['total'], expected_length)
 
@@ -103,10 +103,10 @@ class WriteOrchestrator(object):
         if not timeframe_end:
             timeframe_end = timeframe + self._period
         try:
-            group_filters = {'project_id': self._tenant_id}
+            filters = {'project_id': self._tenant_id}
             data = self._storage.retrieve(begin=timeframe,
                                           end=timeframe_end,
-                                          group_filters=group_filters,
+                                          filters=filters,
                                           paginate=False)
             for df in data['dataframes']:
                 for service, resources in df['usage'].items():