From 33065c16f6b7ffb1f3595a10559b395542e7e30d Mon Sep 17 00:00:00 2001 From: liu-sheng Date: Thu, 16 Oct 2014 17:02:49 +0800 Subject: [PATCH] Standardize timestamp fields of ceilometer API Currently, the timestamp in query fields is not unified; this patch standardizes the usage of timestamp query fields: - unify the time query field in API to *timestamp* supported only. - unify the time related fields of filters to start_timestamp and end_timestamp to simplify the process logic. - improve the _query_to_kwargs method to give an explicit error message if the user specifies unsupported query fields. Change-Id: Ic3ade312efa89edd01d2ee5ae31d341805b11f79 Closes-bug: #1295104 Closes-bug: #1291171 --- ceilometer/api/controllers/v2.py | 64 ++++++--------- ceilometer/event/storage/impl_sqlalchemy.py | 4 +- ceilometer/storage/__init__.py | 40 +++++----- ceilometer/storage/hbase/utils.py | 17 ++-- ceilometer/storage/impl_db2.py | 4 +- ceilometer/storage/impl_hbase.py | 8 +- ceilometer/storage/impl_mongodb.py | 4 +- ceilometer/storage/impl_sqlalchemy.py | 18 ++--- ceilometer/storage/mongo/utils.py | 8 +- .../tests/api/v2/test_alarm_scenarios.py | 12 +-- ..._compute_duration_by_resource_scenarios.py | 13 ++-- ceilometer/tests/api/v2/test_query.py | 78 ++++++++++++++++++- .../tests/storage/test_storage_scenarios.py | 44 +++++------ 13 files changed, 186 insertions(+), 128 deletions(-) diff --git a/ceilometer/api/controllers/v2.py b/ceilometer/api/controllers/v2.py index a1a0429f..df6b5dc5 100644 --- a/ceilometer/api/controllers/v2.py +++ b/ceilometer/api/controllers/v2.py @@ -411,7 +411,7 @@ def _validate_query(query, db_func, internal_keys=None, :param allow_timestamps: defines whether the timestamp-based constraint is applicable for this query or not - :returns: None, if the query is valid + :returns: valid query keys the db_func supported :raises InvalidInput: if an operator is not supported for a given field :raises InvalidInput: if timestamp constraints are allowed, but @@ 
-428,6 +428,11 @@ def _validate_query(query, db_func, internal_keys=None, valid_keys.remove('alarm_type') valid_keys.append('type') + internal_timestamp_keys = ['end_timestamp', 'start_timestamp', + 'end_timestamp_op', 'start_timestamp_op'] + if 'start_timestamp' in valid_keys: + internal_keys += internal_timestamp_keys + valid_keys += ['timestamp', 'search_offset'] internal_keys.append('self') valid_keys = set(valid_keys) - set(internal_keys) translation = {'user_id': 'user', @@ -470,6 +475,7 @@ def _validate_query(query, db_func, internal_keys=None, msg = ("unrecognized field in query: %s, " "valid keys: %s") % (query, sorted(valid_keys)) raise wsme.exc.UnknownArgument(key, msg) + return valid_keys def _validate_timestamp_fields(query, field_name, operator_list, @@ -516,11 +522,9 @@ def _validate_timestamp_fields(query, field_name, operator_list, def _query_to_kwargs(query, db_func, internal_keys=None, allow_timestamps=True): internal_keys = internal_keys or [] - _validate_query(query, db_func, internal_keys=internal_keys, - allow_timestamps=allow_timestamps) + valid_keys = _validate_query(query, db_func, internal_keys=internal_keys, + allow_timestamps=allow_timestamps) query = _sanitize_query(query, db_func) - internal_keys.append('self') - valid_keys = set(inspect.getargspec(db_func)[0]) - set(internal_keys) translation = {'user_id': 'user', 'project_id': 'project', 'resource_id': 'resource', @@ -553,18 +557,7 @@ def _query_to_kwargs(query, db_func, internal_keys=None, if metaquery and 'metaquery' in valid_keys: kwargs['metaquery'] = metaquery if stamp: - q_ts = _get_query_timestamps(stamp) - if 'start' in valid_keys: - kwargs['start'] = q_ts['query_start'] - kwargs['end'] = q_ts['query_end'] - elif 'start_timestamp' in valid_keys: - kwargs['start_timestamp'] = q_ts['query_start'] - kwargs['end_timestamp'] = q_ts['query_end'] - if 'start_timestamp_op' in stamp: - kwargs['start_timestamp_op'] = stamp['start_timestamp_op'] - if 'end_timestamp_op' in stamp: - 
kwargs['end_timestamp_op'] = stamp['end_timestamp_op'] - + kwargs.update(_get_query_timestamps(stamp)) return kwargs @@ -599,19 +592,14 @@ def _get_query_timestamps(args=None): Returns a dictionary containing: - query_start: First timestamp to use for query - start_timestamp: start_timestamp parameter from request - query_end: Final timestamp to use for query - end_timestamp: end_timestamp parameter from request - search_offset: search_offset parameter from request + start_timestamp: First timestamp to use for query + start_timestamp_op: First timestamp operator to use for query + end_timestamp: Final timestamp to use for query + end_timestamp_op: Final timestamp operator to use for query """ if args is None: - return {'query_start': None, - 'query_end': None, - 'start_timestamp': None, - 'end_timestamp': None, - 'search_offset': 0} + return {} search_offset = int(args.get('search_offset', 0)) def _parse_timestamp(timestamp): @@ -625,20 +613,16 @@ def _get_query_timestamps(args=None): 'invalid timestamp format') return iso_timestamp - start_timestamp = args.get('start_timestamp') - end_timestamp = args.get('end_timestamp') - start_timestamp = _parse_timestamp(start_timestamp) - end_timestamp = _parse_timestamp(end_timestamp) - query_start = start_timestamp - datetime.timedelta( + start_timestamp = _parse_timestamp(args.get('start_timestamp')) + end_timestamp = _parse_timestamp(args.get('end_timestamp')) + start_timestamp = start_timestamp - datetime.timedelta( minutes=search_offset) if start_timestamp else None - query_end = end_timestamp + datetime.timedelta( + end_timestamp = end_timestamp + datetime.timedelta( minutes=search_offset) if end_timestamp else None - return {'query_start': query_start, - 'query_end': query_end, - 'start_timestamp': start_timestamp, + return {'start_timestamp': start_timestamp, 'end_timestamp': end_timestamp, - 'search_offset': search_offset, - } + 'start_timestamp_op': args.get('start_timestamp_op'), + 'end_timestamp_op': 
args.get('end_timestamp_op')} def _flatten_metadata(metadata): @@ -2369,8 +2353,8 @@ def _event_query_to_event_filter(q): evt_model_filter = { 'event_type': None, 'message_id': None, - 'start_time': None, - 'end_time': None + 'start_timestamp': None, + 'end_timestamp': None } traits_filter = [] diff --git a/ceilometer/event/storage/impl_sqlalchemy.py b/ceilometer/event/storage/impl_sqlalchemy.py index 3a6e5de7..2065119a 100644 --- a/ceilometer/event/storage/impl_sqlalchemy.py +++ b/ceilometer/event/storage/impl_sqlalchemy.py @@ -209,8 +209,8 @@ class Connection(base.Connection): :param event_filter: EventFilter instance """ - start = event_filter.start_time - end = event_filter.end_time + start = event_filter.start_timestamp + end = event_filter.end_timestamp session = self._engine_facade.get_session() LOG.debug(_("Getting events that match filter: %s") % event_filter) with session.begin(): diff --git a/ceilometer/storage/__init__.py b/ceilometer/storage/__init__.py index e7197cee..196b2805 100644 --- a/ceilometer/storage/__init__.py +++ b/ceilometer/storage/__init__.py @@ -119,9 +119,9 @@ class SampleFilter(object): :param user: The sample owner. :param project: The sample project. - :param start: Earliest time point in the request. + :param start_timestamp: Earliest time point in the request. :param start_timestamp_op: Earliest timestamp operation in the request. - :param end: Latest time point in the request. + :param end_timestamp: Latest time point in the request. :param end_timestamp_op: Latest timestamp operation in the request. :param resource: Optional filter for resource id. :param meter: Optional filter for meter type using the meter name. 
@@ -130,16 +130,16 @@ class SampleFilter(object): :param metaquery: Optional filter on the metadata """ def __init__(self, user=None, project=None, - start=None, start_timestamp_op=None, - end=None, end_timestamp_op=None, + start_timestamp=None, start_timestamp_op=None, + end_timestamp=None, end_timestamp_op=None, resource=None, meter=None, source=None, message_id=None, metaquery=None): self.user = user self.project = project - self.start = utils.sanitize_timestamp(start) + self.start_timestamp = utils.sanitize_timestamp(start_timestamp) self.start_timestamp_op = start_timestamp_op - self.end = utils.sanitize_timestamp(end) + self.end_timestamp = utils.sanitize_timestamp(end_timestamp) self.end_timestamp_op = end_timestamp_op self.resource = resource self.meter = meter @@ -150,9 +150,9 @@ class SampleFilter(object): def __repr__(self): return ("" % (self.user, self.project, - self.start, + self.start_timestamp, self.start_timestamp_op, - self.end, + self.end_timestamp, self.end_timestamp_op, self.resource, self.meter, @@ -175,8 +175,8 @@ class SampleFilter(object): class EventFilter(object): """Properties for building an Event query. - :param start_time: UTC start datetime (mandatory) - :param end_time: UTC end datetime (mandatory) + :param start_timestamp: UTC start datetime (mandatory) + :param end_timestamp: UTC end datetime (mandatory) :param event_type: the name of the event. None for all. :param message_id: the message_id of the event. None for all. :param traits_filter: the trait filter dicts, all of which are optional. 
@@ -192,20 +192,20 @@ class EventFilter(object): 'op': } """ - def __init__(self, start_time=None, end_time=None, event_type=None, - message_id=None, traits_filter=None): - self.start_time = utils.sanitize_timestamp(start_time) - self.end_time = utils.sanitize_timestamp(end_time) + def __init__(self, start_timestamp=None, end_timestamp=None, + event_type=None, message_id=None, traits_filter=None): + self.start_timestamp = utils.sanitize_timestamp(start_timestamp) + self.end_timestamp = utils.sanitize_timestamp(end_timestamp) self.message_id = message_id self.event_type = event_type self.traits_filter = traits_filter or [] def __repr__(self): - return ("" % - (self.start_time, - self.end_time, + (self.start_timestamp, + self.end_timestamp, self.event_type, six.text_type(self.traits_filter))) diff --git a/ceilometer/storage/hbase/utils.py b/ceilometer/storage/hbase/utils.py index b3c35cc6..3b5d7888 100644 --- a/ceilometer/storage/hbase/utils.py +++ b/ceilometer/storage/hbase/utils.py @@ -65,10 +65,10 @@ def make_events_query_from_filter(event_filter): Query is based on the selected parameter. :param event_filter: storage.EventFilter object. 
""" - start = "%s" % (timestamp(event_filter.start_time, reverse=False) - if event_filter.start_time else "") - stop = "%s" % (timestamp(event_filter.end_time, reverse=False) - if event_filter.end_time else "") + start = "%s" % (timestamp(event_filter.start_timestamp, reverse=False) + if event_filter.start_timestamp else "") + stop = "%s" % (timestamp(event_filter.end_timestamp, reverse=False) + if event_filter.end_timestamp else "") kwargs = {'event_type': event_filter.event_type, 'event_id': event_filter.message_id} res_q = make_query(**kwargs) @@ -240,8 +240,10 @@ def make_sample_query_from_filter(sample_filter, require_meter=True): raise RuntimeError('Missing required meter specifier') start_row, end_row, ts_query = make_timestamp_query( make_general_rowkey_scan, - start=sample_filter.start, start_op=sample_filter.start_timestamp_op, - end=sample_filter.end, end_op=sample_filter.end_timestamp_op, + start=sample_filter.start_timestamp, + start_op=sample_filter.start_timestamp_op, + end=sample_filter.end_timestamp, + end_op=sample_filter.end_timestamp_op, some_id=meter) kwargs = dict(user_id=sample_filter.user, project_id=sample_filter.project, @@ -257,7 +259,8 @@ def make_sample_query_from_filter(sample_filter, require_meter=True): else: res_q = ts_query if ts_query else None - need_timestamp = (sample_filter.start or sample_filter.end) is not None + need_timestamp = (sample_filter.start_timestamp or + sample_filter.end_timestamp) is not None columns = get_meter_columns(metaquery=sample_filter.metaquery, need_timestamp=need_timestamp, **kwargs) return res_q, start_row, end_row, columns diff --git a/ceilometer/storage/impl_db2.py b/ceilometer/storage/impl_db2.py index 4e50b1fd..0f973d59 100644 --- a/ceilometer/storage/impl_db2.py +++ b/ceilometer/storage/impl_db2.py @@ -336,8 +336,8 @@ class Connection(pymongo_base.Connection): q = pymongo_utils.make_query_from_filter(sample_filter) if period: - if sample_filter.start: - period_start = sample_filter.start + if 
sample_filter.start_timestamp: + period_start = sample_filter.start_timestamp else: period_start = self.db.meter.find( limit=1, sort=[('timestamp', diff --git a/ceilometer/storage/impl_hbase.py b/ceilometer/storage/impl_hbase.py index b20a0330..7edc72d6 100644 --- a/ceilometer/storage/impl_hbase.py +++ b/ceilometer/storage/impl_hbase.py @@ -369,15 +369,15 @@ class Connection(hbase_base.Connection, base.Connection): filter=q, row_start=start, row_stop=stop, columns=columns))) - if sample_filter.start: - start_time = sample_filter.start + if sample_filter.start_timestamp: + start_time = sample_filter.start_timestamp elif meters: start_time = meters[-1][0]['timestamp'] else: start_time = None - if sample_filter.end: - end_time = sample_filter.end + if sample_filter.end_timestamp: + end_time = sample_filter.end_timestamp elif meters: end_time = meters[0][0]['timestamp'] else: diff --git a/ceilometer/storage/impl_mongodb.py b/ceilometer/storage/impl_mongodb.py index 3b48d186..4bcc4d86 100644 --- a/ceilometer/storage/impl_mongodb.py +++ b/ceilometer/storage/impl_mongodb.py @@ -860,8 +860,8 @@ class Connection(pymongo_base.Connection): q = pymongo_utils.make_query_from_filter(sample_filter) if period: - if sample_filter.start: - period_start = sample_filter.start + if sample_filter.start_timestamp: + period_start = sample_filter.start_timestamp else: period_start = self.db.meter.find( limit=1, sort=[('timestamp', diff --git a/ceilometer/storage/impl_sqlalchemy.py b/ceilometer/storage/impl_sqlalchemy.py index 8cc587fa..7f552d03 100644 --- a/ceilometer/storage/impl_sqlalchemy.py +++ b/ceilometer/storage/impl_sqlalchemy.py @@ -147,14 +147,14 @@ def make_query_from_filter(session, query, sample_filter, require_meter=True): if sample_filter.source: query = query.filter( models.Resource.source_id == sample_filter.source) - if sample_filter.start: - ts_start = sample_filter.start + if sample_filter.start_timestamp: + ts_start = sample_filter.start_timestamp if 
sample_filter.start_timestamp_op == 'gt': query = query.filter(models.Sample.timestamp > ts_start) else: query = query.filter(models.Sample.timestamp >= ts_start) - if sample_filter.end: - ts_end = sample_filter.end + if sample_filter.end_timestamp: + ts_end = sample_filter.end_timestamp if sample_filter.end_timestamp_op == 'le': query = query.filter(models.Sample.timestamp <= ts_end) else: @@ -401,9 +401,9 @@ class Connection(base.Connection): s_filter = storage.SampleFilter(user=user, project=project, source=source, - start=start_timestamp, + start_timestamp=start_timestamp, start_timestamp_op=start_timestamp_op, - end=end_timestamp, + end_timestamp=end_timestamp, end_timestamp_op=end_timestamp_op, metaquery=metaquery, resource=resource) @@ -687,7 +687,7 @@ class Connection(base.Connection): aggregate) return - if not sample_filter.start or not sample_filter.end: + if not (sample_filter.start_timestamp and sample_filter.end_timestamp): res = self._make_stats_query(sample_filter, None, aggregate).first() @@ -703,8 +703,8 @@ class Connection(base.Connection): # stats by period. We would like to use GROUP BY, but there's no # portable way to manipulate timestamp in SQL, so we can't. for period_start, period_end in base.iter_period( - sample_filter.start or res.tsmin, - sample_filter.end or res.tsmax, + sample_filter.start_timestamp or res.tsmin, + sample_filter.end_timestamp or res.tsmax, period): q = query.filter(models.Sample.timestamp >= period_start) q = q.filter(models.Sample.timestamp < period_end) diff --git a/ceilometer/storage/mongo/utils.py b/ceilometer/storage/mongo/utils.py index 40e1eb21..ecee1248 100644 --- a/ceilometer/storage/mongo/utils.py +++ b/ceilometer/storage/mongo/utils.py @@ -74,8 +74,8 @@ def make_events_query_from_filter(event_filter): :param event_filter: storage.EventFilter object. 
""" q = {} - ts_range = make_timestamp_range(event_filter.start_time, - event_filter.end_time) + ts_range = make_timestamp_range(event_filter.start_timestamp, + event_filter.end_timestamp) if ts_range: q['timestamp'] = ts_range if event_filter.event_type: @@ -130,8 +130,8 @@ def make_query_from_filter(sample_filter, require_meter=True): elif require_meter: raise RuntimeError('Missing required meter specifier') - ts_range = make_timestamp_range(sample_filter.start, - sample_filter.end, + ts_range = make_timestamp_range(sample_filter.start_timestamp, + sample_filter.end_timestamp, sample_filter.start_timestamp_op, sample_filter.end_timestamp_op) diff --git a/ceilometer/tests/api/v2/test_alarm_scenarios.py b/ceilometer/tests/api/v2/test_alarm_scenarios.py index feddeb7e..8bf6b9f9 100644 --- a/ceilometer/tests/api/v2/test_alarm_scenarios.py +++ b/ceilometer/tests/api/v2/test_alarm_scenarios.py @@ -2170,10 +2170,8 @@ class TestAlarms(v2.FunctionalTest, expect_errors=True, status=400) self.assertEqual('Unknown argument: "alarm_id": unrecognized' " field in query: [], valid keys: ['end_timestamp'," - " 'end_timestamp_op', 'project'," - " 'start_timestamp', 'start_timestamp_op'," - " 'type', 'user']", + " u'b' Unset>], valid keys: ['project', " + "'search_offset', 'timestamp', 'type', 'user']", resp.json['error_message']['faultstring']) def test_get_alarm_history_constrained_by_not_supported_rule(self): @@ -2183,10 +2181,8 @@ class TestAlarms(v2.FunctionalTest, expect_errors=True, status=400) self.assertEqual('Unknown argument: "abcd": unrecognized' " field in query: [], valid keys: ['end_timestamp'," - " 'end_timestamp_op', 'project'," - " 'start_timestamp', 'start_timestamp_op'," - " 'type', 'user']", + " u'abcd' Unset>], valid keys: ['project', " + "'search_offset', 'timestamp', 'type', 'user']", resp.json['error_message']['faultstring']) def test_get_nonexistent_alarm_history(self): diff --git a/ceilometer/tests/api/v2/test_compute_duration_by_resource_scenarios.py 
b/ceilometer/tests/api/v2/test_compute_duration_by_resource_scenarios.py index e962f7ff..83ffeb1c 100644 --- a/ceilometer/tests/api/v2/test_compute_duration_by_resource_scenarios.py +++ b/ceilometer/tests/api/v2/test_compute_duration_by_resource_scenarios.py @@ -49,13 +49,14 @@ class TestComputeDurationByResource(v2.FunctionalTest, self.late2 = datetime.datetime(2012, 8, 29, 19, 0) def _patch_get_interval(self, start, end): - def get_interval(event_filter, period, groupby, aggregate): - self.assertIsNotNone(event_filter.start) - self.assertIsNotNone(event_filter.end) - if event_filter.start > end or event_filter.end < start: + def get_interval(sample_filter, period, groupby, aggregate): + self.assertIsNotNone(sample_filter.start_timestamp) + self.assertIsNotNone(sample_filter.end_timestamp) + if (sample_filter.start_timestamp > end or + sample_filter.end_timestamp < start): return [] - duration_start = max(event_filter.start, start) - duration_end = min(event_filter.end, end) + duration_start = max(sample_filter.start_timestamp, start) + duration_end = min(sample_filter.end_timestamp, end) duration = timeutils.delta_seconds(duration_start, duration_end) return [ models.Statistics( diff --git a/ceilometer/tests/api/v2/test_query.py b/ceilometer/tests/api/v2/test_query.py index 5967e3b9..afc5ea85 100644 --- a/ceilometer/tests/api/v2/test_query.py +++ b/ceilometer/tests/api/v2/test_query.py @@ -23,8 +23,10 @@ from oslotest import base from oslotest import mockpatch import wsme +from ceilometer.alarm.storage import base as alarm_storage_base from ceilometer.api.controllers import v2 as api from ceilometer import storage +from ceilometer.storage import base as storage_base from ceilometer.tests import base as tests_base @@ -224,8 +226,8 @@ class TestQueryToKwArgs(tests_base.BaseTestCase): value=str(ts_start))] kwargs = api._query_to_kwargs(q, storage.SampleFilter.__init__) self.assertEqual(4, len(kwargs)) - self.assertTimestampEqual(kwargs['start'], ts_start) - 
self.assertTimestampEqual(kwargs['end'], ts_end) + self.assertTimestampEqual(kwargs['start_timestamp'], ts_start) + self.assertTimestampEqual(kwargs['end_timestamp'], ts_end) self.assertEqual('gt', kwargs['start_timestamp_op']) self.assertEqual('lt', kwargs['end_timestamp_op']) @@ -333,3 +335,75 @@ class TestQueryToKwArgs(tests_base.BaseTestCase): expected_exc = wsme.exc.InvalidInput('timestamp', '123', 'invalid timestamp format') self.assertEqual(str(expected_exc), str(exc)) + + def test_get_alarm_changes_filter_valid_fields(self): + q = [api.Query(field='abc', + op='eq', + value='abc')] + exc = self.assertRaises( + wsme.exc.UnknownArgument, + api._query_to_kwargs, q, + alarm_storage_base.Connection.get_alarm_changes) + valid_keys = ['alarm_id', 'on_behalf_of', 'project', 'search_offset', + 'timestamp', 'type', 'user'] + msg = ("unrecognized field in query: %s, " + "valid keys: %s") % (q, valid_keys) + expected_exc = wsme.exc.UnknownArgument('abc', msg) + self.assertEqual(str(expected_exc), str(exc)) + + def test_sample_filter_valid_fields(self): + q = [api.Query(field='abc', + op='eq', + value='abc')] + exc = self.assertRaises( + wsme.exc.UnknownArgument, + api._query_to_kwargs, q, storage.SampleFilter.__init__) + valid_keys = ['message_id', 'metaquery', 'meter', 'project', + 'resource', 'search_offset', 'source', 'timestamp', + 'user'] + msg = ("unrecognized field in query: %s, " + "valid keys: %s") % (q, valid_keys) + expected_exc = wsme.exc.UnknownArgument('abc', msg) + self.assertEqual(str(expected_exc), str(exc)) + + def test_get_meters_filter_valid_fields(self): + q = [api.Query(field='abc', + op='eq', + value='abc')] + exc = self.assertRaises( + wsme.exc.UnknownArgument, + api._query_to_kwargs, q, storage_base.Connection.get_meters) + valid_keys = ['metaquery', 'pagination', 'project', 'resource', + 'source', 'user'] + msg = ("unrecognized field in query: %s, " + "valid keys: %s") % (q, valid_keys) + expected_exc = wsme.exc.UnknownArgument('abc', msg) + 
self.assertEqual(str(expected_exc), str(exc)) + + def test_get_resources_filter_valid_fields(self): + q = [api.Query(field='abc', + op='eq', + value='abc')] + exc = self.assertRaises( + wsme.exc.UnknownArgument, + api._query_to_kwargs, q, storage_base.Connection.get_resources) + valid_keys = ['metaquery', 'pagination', 'project', 'resource', + 'search_offset', 'source', 'timestamp', 'user'] + msg = ("unrecognized field in query: %s, " + "valid keys: %s") % (q, valid_keys) + expected_exc = wsme.exc.UnknownArgument('abc', msg) + self.assertEqual(str(expected_exc), str(exc)) + + def test_get_alarms_filter_valid_fields(self): + q = [api.Query(field='abc', + op='eq', + value='abc')] + exc = self.assertRaises( + wsme.exc.UnknownArgument, + api._query_to_kwargs, q, alarm_storage_base.Connection.get_alarms) + valid_keys = ['alarm_id', 'enabled', 'meter', 'name', 'pagination', + 'project', 'state', 'type', 'user'] + msg = ("unrecognized field in query: %s, " + "valid keys: %s") % (q, valid_keys) + expected_exc = wsme.exc.UnknownArgument('abc', msg) + self.assertEqual(str(expected_exc), str(exc)) diff --git a/ceilometer/tests/storage/test_storage_scenarios.py b/ceilometer/tests/storage/test_storage_scenarios.py index ad6d9152..03fd1181 100644 --- a/ceilometer/tests/storage/test_storage_scenarios.py +++ b/ceilometer/tests/storage/test_storage_scenarios.py @@ -576,7 +576,7 @@ class RawSampleTest(DBTestBase, timestamp = datetime.datetime(2012, 7, 2, 10, 41) f = storage.SampleFilter( user='user-id', - start=timestamp, + start_timestamp=timestamp, ) results = list(self.conn.get_samples(f)) @@ -596,7 +596,7 @@ class RawSampleTest(DBTestBase, timestamp = datetime.datetime(2012, 7, 2, 10, 40) f = storage.SampleFilter( user='user-id', - end=timestamp, + end_timestamp=timestamp, ) results = list(self.conn.get_samples(f)) @@ -616,8 +616,8 @@ class RawSampleTest(DBTestBase, start_ts = datetime.datetime(2012, 7, 2, 10, 42) end_ts = datetime.datetime(2012, 7, 2, 10, 43) f = 
storage.SampleFilter( - start=start_ts, - end=end_ts, + start_timestamp=start_ts, + end_timestamp=end_ts, ) results = list(self.conn.get_samples(f)) @@ -1219,7 +1219,7 @@ class StatisticsTest(DBTestBase, f = storage.SampleFilter( user='user-5', meter='volume.size', - start='2012-09-25T10:28:00', + start_timestamp='2012-09-25T10:28:00', ) results = list(self.conn.get_meter_statistics(f, period=7200)) self.assertEqual(2, len(results)) @@ -1278,7 +1278,7 @@ class StatisticsTest(DBTestBase, f = storage.SampleFilter( user='user-5', meter='volume.size', - start=date + start_timestamp=date ) results = list(self.conn.get_meter_statistics(f, period=7200)) self.assertEqual(2, len(results)) @@ -1293,8 +1293,8 @@ class StatisticsTest(DBTestBase, f = storage.SampleFilter( user='user-5', meter='volume.size', - start='2012-09-25T10:28:00', - end='2012-09-25T11:28:00', + start_timestamp='2012-09-25T10:28:00', + end_timestamp='2012-09-25T11:28:00', ) results = list(self.conn.get_meter_statistics(f, period=1800)) self.assertEqual(1, len(results)) @@ -1320,8 +1320,8 @@ class StatisticsTest(DBTestBase, f = storage.SampleFilter( meter='volume.size', resource='resource-id', - start='2012-09-25T11:30:00', - end='2012-09-25T11:32:00', + start_timestamp='2012-09-25T11:30:00', + end_timestamp='2012-09-25T11:32:00', ) results = list(self.conn.get_meter_statistics(f))[0] self.assertEqual(0, results.duration) @@ -2284,7 +2284,7 @@ class StatisticsGroupByTest(DBTestBase, def test_group_by_start_timestamp_after(self): f = storage.SampleFilter( meter='instance', - start=datetime.datetime(2013, 8, 1, 17, 28, 1), + start_timestamp=datetime.datetime(2013, 8, 1, 17, 28, 1), ) results = list(self.conn.get_meter_statistics(f, groupby=['project_id'])) @@ -2294,7 +2294,7 @@ class StatisticsGroupByTest(DBTestBase, def test_group_by_end_timestamp_before(self): f = storage.SampleFilter( meter='instance', - end=datetime.datetime(2013, 8, 1, 10, 10, 59), + end_timestamp=datetime.datetime(2013, 8, 1, 10, 10, 
59), ) results = list(self.conn.get_meter_statistics(f, groupby=['project_id'])) @@ -2304,7 +2304,7 @@ class StatisticsGroupByTest(DBTestBase, def test_group_by_start_timestamp(self): f = storage.SampleFilter( meter='instance', - start=datetime.datetime(2013, 8, 1, 14, 58), + start_timestamp=datetime.datetime(2013, 8, 1, 14, 58), ) results = list(self.conn.get_meter_statistics(f, groupby=['project_id'])) @@ -2336,7 +2336,7 @@ class StatisticsGroupByTest(DBTestBase, def test_group_by_end_timestamp(self): f = storage.SampleFilter( meter='instance', - end=datetime.datetime(2013, 8, 1, 11, 45), + end_timestamp=datetime.datetime(2013, 8, 1, 11, 45), ) results = list(self.conn.get_meter_statistics(f, groupby=['project_id'])) @@ -2361,8 +2361,8 @@ class StatisticsGroupByTest(DBTestBase, def test_group_by_start_end_timestamp(self): f = storage.SampleFilter( meter='instance', - start=datetime.datetime(2013, 8, 1, 8, 17, 3), - end=datetime.datetime(2013, 8, 1, 23, 59, 59), + start_timestamp=datetime.datetime(2013, 8, 1, 8, 17, 3), + end_timestamp=datetime.datetime(2013, 8, 1, 23, 59, 59), ) results = list(self.conn.get_meter_statistics(f, groupby=['project_id'])) @@ -2395,8 +2395,8 @@ class StatisticsGroupByTest(DBTestBase, f = storage.SampleFilter( meter='instance', project='project-1', - start=datetime.datetime(2013, 8, 1, 11, 1), - end=datetime.datetime(2013, 8, 1, 20, 0), + start_timestamp=datetime.datetime(2013, 8, 1, 11, 1), + end_timestamp=datetime.datetime(2013, 8, 1, 20, 0), ) results = list(self.conn.get_meter_statistics(f, groupby=['resource_id'])) @@ -2427,8 +2427,8 @@ class StatisticsGroupByTest(DBTestBase, def test_group_by_start_end_timestamp_with_period(self): f = storage.SampleFilter( meter='instance', - start=datetime.datetime(2013, 8, 1, 14, 0), - end=datetime.datetime(2013, 8, 1, 17, 0), + start_timestamp=datetime.datetime(2013, 8, 1, 14, 0), + end_timestamp=datetime.datetime(2013, 8, 1, 17, 0), ) results = list(self.conn.get_meter_statistics(f, 
period=3600, @@ -2511,8 +2511,8 @@ class StatisticsGroupByTest(DBTestBase, f = storage.SampleFilter( meter='instance', source='source-1', - start=datetime.datetime(2013, 8, 1, 10, 0), - end=datetime.datetime(2013, 8, 1, 18, 0), + start_timestamp=datetime.datetime(2013, 8, 1, 10, 0), + end_timestamp=datetime.datetime(2013, 8, 1, 18, 0), ) results = list(self.conn.get_meter_statistics(f, period=7200,