Make entities (Resource, User, Project) able to store lists
When one Resource contains several meters we should store all of them.
The same is true about User, Project and Resource regarding sources.
To make it possible it is needed to change the way we store data in Hbase.
Now Resource, User and Project may contain several sources. Besides, Resource may contain several meters.
To store all of them we use ColumnFamily f and columns m_meters and s_sources.
All meters stored as JSON-ed list.
E.g.
resource_1: {f: {meters: [meter1, meter2, meter3]} }
The same for users and projects:
user_1: {f: {sources: [source_1, source_2]}}
We cannot update these lists safely.
In this commit m_meters and s_sources are changed to m_{unique_meter_structure} and s_{unique_source_name}.
So we are getting rid of 'lists' and may use the safe put() method.
All meters in Resource and sources in User, Project, Resource are
stored with stub value=1 to make filtering simpler.
New ColumnFamilies are not introduced here because a CF is a 'namespace' and makes sense in cases when
there is a lot of requests only for this CF. But it's not the case for Ceilometer: each request may
contain _id field or metadata filter. It's better to store all this info in one CF.
Closes bug 1288284
Change-Id: I5814202e3d59fd29f96c8734e445367f766e6a4a
This commit is contained in:
@@ -48,43 +48,64 @@ class HBaseStorage(base.StorageEngine):
|
|||||||
Collections:
|
Collections:
|
||||||
|
|
||||||
- user
|
- user
|
||||||
- { _id: user id
|
- row_key: user_id
|
||||||
s_source_name: each source reported for user is stored with prefix s_
|
- Column Families:
|
||||||
the value of each entry is '1'
|
f: contains all sources with 's' prefix
|
||||||
sources: this field contains the first source reported for user.
|
|
||||||
This data is not used but stored for simplification of impl
|
|
||||||
}
|
|
||||||
- project
|
- project
|
||||||
- { _id: project id
|
- row_key: project_id
|
||||||
s_source_name: the same as for users
|
- Column Families:
|
||||||
sources: the same as for users
|
f: contains all sources with 's' prefix
|
||||||
}
|
|
||||||
- meter
|
- meter (describes sample actually)
|
||||||
- {_id_reverted_ts: row key is constructed in this way for efficient
|
- row-key: consists of reversed timestamp, meter and an md5 of
|
||||||
filtering
|
user+resource+project for purposes of uniqueness
|
||||||
parsed_info_from_incoming_data: e.g. counter_name, counter_type
|
- Column Families:
|
||||||
resource_metadata: raw metadata for corresponding resource
|
f: contains the following qualifiers:
|
||||||
r_metadata_name: flattened metadata for corresponding resource
|
-counter_name : <name of counter>
|
||||||
message: raw incoming data
|
-counter_type : <type of counter>
|
||||||
recorded_at: when the sample has been recorded
|
-counter_unit : <unit of counter>
|
||||||
source: source for the sample
|
-counter_volume : <volume of counter>
|
||||||
}
|
-message: <raw incoming data>
|
||||||
|
-message_id: <id of message>
|
||||||
|
-message_signature: <signature of message>
|
||||||
|
-resource_metadata: raw metadata for corresponding resource
|
||||||
|
of the meter
|
||||||
|
-project_id: <id of project>
|
||||||
|
-resource_id: <id of resource>
|
||||||
|
-user_id: <id of user>
|
||||||
|
-recorded_at: <datetime when sample has been recorded (utc.now)>
|
||||||
|
-flattened metadata with prefix r_metadata. e.g.
|
||||||
|
f:r_metadata.display_name or f:r_metadata.tag
|
||||||
|
-rts: <reversed timestamp of entry>
|
||||||
|
-timestamp: <meter's timestamp (came from message)>
|
||||||
|
-source for meter with prefix 's'
|
||||||
|
|
||||||
- resource
|
- resource
|
||||||
- the metadata for resources
|
- row_key: uuid of resource
|
||||||
- { _id: uuid of resource,
|
- Column Families:
|
||||||
metadata: raw metadata dictionaries
|
f: contains the following qualifiers:
|
||||||
r_metadata: flattened metadata fir quick filtering
|
-resource_metadata: raw metadata for corresponding resource
|
||||||
timestamp: datetime of last update
|
-project_id: <id of project>
|
||||||
user_id: uuid
|
-resource_id: <id of resource>
|
||||||
project_id: uuid
|
-user_id: <id of user>
|
||||||
meter: [ array of {counter_name: string, counter_type: string} ]
|
-flattened metadata with prefix r_metadata. e.g.
|
||||||
source: source of resource
|
f:r_metadata.display_name or f:r_metadata.tag
|
||||||
}
|
-sources for all corresponding meters with prefix 's'
|
||||||
|
-all meters for this resource in format
|
||||||
|
"%s!%s!%s+%s" % (counter_name, counter_type, counter_unit,
|
||||||
|
source)
|
||||||
|
|
||||||
- alarm
|
- alarm
|
||||||
- the raw incoming alarm data
|
- row_key: uuid of alarm
|
||||||
|
- Column Families:
|
||||||
|
f: contains the raw incoming alarm data
|
||||||
|
|
||||||
- alarm_h
|
- alarm_h
|
||||||
- raw incoming alarm_history data. Timestamp becomes now()
|
- row_key: uuid of alarm + "_" + reversed timestamp
|
||||||
if not determined
|
- Column Families:
|
||||||
|
f: raw incoming alarm_history data. Timestamp becomes now()
|
||||||
|
if not determined
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -143,10 +164,10 @@ class Connection(base.Connection):
|
|||||||
|
|
||||||
def upgrade(self):
|
def upgrade(self):
|
||||||
with self.conn_pool.connection() as conn:
|
with self.conn_pool.connection() as conn:
|
||||||
conn.create_table(self.PROJECT_TABLE, {'f': dict()})
|
conn.create_table(self.PROJECT_TABLE, {'f': dict(max_versions=1)})
|
||||||
conn.create_table(self.USER_TABLE, {'f': dict()})
|
conn.create_table(self.USER_TABLE, {'f': dict(max_versions=1)})
|
||||||
conn.create_table(self.RESOURCE_TABLE, {'f': dict()})
|
conn.create_table(self.RESOURCE_TABLE, {'f': dict(max_versions=1)})
|
||||||
conn.create_table(self.METER_TABLE, {'f': dict()})
|
conn.create_table(self.METER_TABLE, {'f': dict(max_versions=1)})
|
||||||
conn.create_table(self.ALARM_TABLE, {'f': dict()})
|
conn.create_table(self.ALARM_TABLE, {'f': dict()})
|
||||||
conn.create_table(self.ALARM_HISTORY_TABLE, {'f': dict()})
|
conn.create_table(self.ALARM_HISTORY_TABLE, {'f': dict()})
|
||||||
|
|
||||||
@@ -284,35 +305,28 @@ class Connection(base.Connection):
|
|||||||
resource_table = conn.table(self.RESOURCE_TABLE)
|
resource_table = conn.table(self.RESOURCE_TABLE)
|
||||||
meter_table = conn.table(self.METER_TABLE)
|
meter_table = conn.table(self.METER_TABLE)
|
||||||
|
|
||||||
# Make sure we know about the user and project
|
|
||||||
if data['user_id']:
|
if data['user_id']:
|
||||||
self._update_sources(user_table, data['user_id'],
|
user_table.put(data['user_id'], serialize_entry(
|
||||||
data['source'])
|
**{'source': data['source']}))
|
||||||
self._update_sources(project_table, data['project_id'],
|
|
||||||
data['source'])
|
project_table.put(data['project_id'], serialize_entry(
|
||||||
|
**{'source': data['source']})
|
||||||
|
)
|
||||||
|
|
||||||
# Get metadata from user's data
|
|
||||||
resource_metadata = data.get('resource_metadata', {})
|
resource_metadata = data.get('resource_metadata', {})
|
||||||
# Determine the name of new meter
|
# Determine the name of new meter
|
||||||
new_meter = _format_meter_reference(
|
new_meter = _format_meter_reference(
|
||||||
data['counter_name'], data['counter_type'],
|
data['counter_name'], data['counter_type'],
|
||||||
data['counter_unit'])
|
data['counter_unit'], data['source'])
|
||||||
flatten_result, sources, meters, metadata = \
|
#TODO(nprivalova): try not to store resource_id
|
||||||
deserialize_entry(resource_table.row(data['resource_id']))
|
resource = serialize_entry(**{
|
||||||
|
'source': data['source'], 'meter': new_meter,
|
||||||
# Update if resource has new information
|
'resource_metadata': resource_metadata,
|
||||||
if (data['source'] not in sources) or (
|
'resource_id': data['resource_id'],
|
||||||
new_meter not in meters) or (
|
'project_id': data['project_id'], 'user_id': data['user_id']})
|
||||||
metadata != resource_metadata):
|
resource_table.put(data['resource_id'], resource)
|
||||||
resource_table.put(data['resource_id'],
|
|
||||||
serialize_entry(
|
|
||||||
**{'sources': [data['source']],
|
|
||||||
'meters': [new_meter],
|
|
||||||
'metadata': resource_metadata,
|
|
||||||
'resource_id': data['resource_id'],
|
|
||||||
'project_id': data['project_id'],
|
|
||||||
'user_id': data['user_id']}))
|
|
||||||
|
|
||||||
|
#TODO(nprivalova): improve uniqueness
|
||||||
# Rowkey consists of reversed timestamp, meter and an md5 of
|
# Rowkey consists of reversed timestamp, meter and an md5 of
|
||||||
# user+resource+project for purposes of uniqueness
|
# user+resource+project for purposes of uniqueness
|
||||||
m = hashlib.md5()
|
m = hashlib.md5()
|
||||||
@@ -323,19 +337,13 @@ class Connection(base.Connection):
|
|||||||
# alphabetically.
|
# alphabetically.
|
||||||
rts = reverse_timestamp(data['timestamp'])
|
rts = reverse_timestamp(data['timestamp'])
|
||||||
row = "%s_%d_%s" % (data['counter_name'], rts, m.hexdigest())
|
row = "%s_%d_%s" % (data['counter_name'], rts, m.hexdigest())
|
||||||
record = serialize_entry(data, **{'metadata': resource_metadata,
|
record = serialize_entry(data, **{'source': data['source'],
|
||||||
'rts': rts,
|
'rts': rts,
|
||||||
'message': data,
|
'message': data,
|
||||||
'recorded_at': timeutils.utcnow(
|
'recorded_at': timeutils.utcnow(
|
||||||
)})
|
)})
|
||||||
meter_table.put(row, record)
|
meter_table.put(row, record)
|
||||||
|
|
||||||
def _update_sources(self, table, id, source):
|
|
||||||
user, sources, _, _ = deserialize_entry(table.row(id))
|
|
||||||
if source not in sources:
|
|
||||||
sources.append(source)
|
|
||||||
table.put(id, serialize_entry(user, **{'sources': sources}))
|
|
||||||
|
|
||||||
def get_users(self, source=None):
|
def get_users(self, source=None):
|
||||||
"""Return an iterable of user id strings.
|
"""Return an iterable of user id strings.
|
||||||
|
|
||||||
@@ -343,7 +351,7 @@ class Connection(base.Connection):
|
|||||||
"""
|
"""
|
||||||
with self.conn_pool.connection() as conn:
|
with self.conn_pool.connection() as conn:
|
||||||
user_table = conn.table(self.USER_TABLE)
|
user_table = conn.table(self.USER_TABLE)
|
||||||
LOG.debug(_("source: %s") % source)
|
LOG.debug(_("Query User table: source=%s") % source)
|
||||||
scan_args = {}
|
scan_args = {}
|
||||||
if source:
|
if source:
|
||||||
scan_args['columns'] = ['f:s_%s' % source]
|
scan_args['columns'] = ['f:s_%s' % source]
|
||||||
@@ -356,7 +364,7 @@ class Connection(base.Connection):
|
|||||||
"""
|
"""
|
||||||
with self.conn_pool.connection() as conn:
|
with self.conn_pool.connection() as conn:
|
||||||
project_table = conn.table(self.PROJECT_TABLE)
|
project_table = conn.table(self.PROJECT_TABLE)
|
||||||
LOG.debug(_("source: %s") % source)
|
LOG.debug(_("Query Project table: source=%s") % source)
|
||||||
scan_args = {}
|
scan_args = {}
|
||||||
if source:
|
if source:
|
||||||
scan_args['columns'] = ['f:s_%s' % source]
|
scan_args['columns'] = ['f:s_%s' % source]
|
||||||
@@ -389,7 +397,6 @@ class Connection(base.Connection):
|
|||||||
resource=resource, source=source, metaquery=metaquery)
|
resource=resource, source=source, metaquery=metaquery)
|
||||||
q, start_row, stop_row = make_sample_query_from_filter(
|
q, start_row, stop_row = make_sample_query_from_filter(
|
||||||
sample_filter, require_meter=False)
|
sample_filter, require_meter=False)
|
||||||
|
|
||||||
with self.conn_pool.connection() as conn:
|
with self.conn_pool.connection() as conn:
|
||||||
meter_table = conn.table(self.METER_TABLE)
|
meter_table = conn.table(self.METER_TABLE)
|
||||||
LOG.debug(_("Query Meter table: %s") % q)
|
LOG.debug(_("Query Meter table: %s") % q)
|
||||||
@@ -406,8 +413,9 @@ class Connection(base.Connection):
|
|||||||
meters = sorted(d_meters, key=_resource_id_from_record_tuple)
|
meters = sorted(d_meters, key=_resource_id_from_record_tuple)
|
||||||
for resource_id, r_meters in itertools.groupby(
|
for resource_id, r_meters in itertools.groupby(
|
||||||
meters, key=_resource_id_from_record_tuple):
|
meters, key=_resource_id_from_record_tuple):
|
||||||
# We need deserialized entry(data[0]) and metadata(data[3])
|
# We need deserialized entry(data[0]), sources (data[1]) and
|
||||||
meter_rows = [(data[0], data[3]) for data in sorted(
|
# metadata(data[3])
|
||||||
|
meter_rows = [(data[0], data[1], data[3]) for data in sorted(
|
||||||
r_meters, key=_timestamp_from_record_tuple)]
|
r_meters, key=_timestamp_from_record_tuple)]
|
||||||
latest_data = meter_rows[-1]
|
latest_data = meter_rows[-1]
|
||||||
min_ts = meter_rows[0][0]['timestamp']
|
min_ts = meter_rows[0][0]['timestamp']
|
||||||
@@ -417,9 +425,9 @@ class Connection(base.Connection):
|
|||||||
first_sample_timestamp=min_ts,
|
first_sample_timestamp=min_ts,
|
||||||
last_sample_timestamp=max_ts,
|
last_sample_timestamp=max_ts,
|
||||||
project_id=latest_data[0]['project_id'],
|
project_id=latest_data[0]['project_id'],
|
||||||
source=latest_data[0]['source'],
|
source=latest_data[1][0],
|
||||||
user_id=latest_data[0]['user_id'],
|
user_id=latest_data[0]['user_id'],
|
||||||
metadata=latest_data[1],
|
metadata=latest_data[2],
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_meters(self, user=None, project=None, resource=None, source=None,
|
def get_meters(self, user=None, project=None, resource=None, source=None,
|
||||||
@@ -444,23 +452,29 @@ class Connection(base.Connection):
|
|||||||
LOG.debug(_("Query Resource table: %s") % q)
|
LOG.debug(_("Query Resource table: %s") % q)
|
||||||
|
|
||||||
gen = resource_table.scan(filter=q)
|
gen = resource_table.scan(filter=q)
|
||||||
|
# We need result set to be sure that user doesn't receive several
|
||||||
|
# same meters. Please see bug
|
||||||
|
# https://bugs.launchpad.net/ceilometer/+bug/1301371
|
||||||
|
result = set()
|
||||||
for ignored, data in gen:
|
for ignored, data in gen:
|
||||||
flatten_result, s, m, md = deserialize_entry(data)
|
flatten_result, s, meters, md = deserialize_entry(data)
|
||||||
if not m:
|
for m in meters:
|
||||||
continue
|
meter_raw, m_source = m.split("+")
|
||||||
# Meter table may have only one "meter" and "source". That's
|
name, type, unit = meter_raw.split('!')
|
||||||
# why only first lists element is get in this method
|
meter_dict = {'name': name,
|
||||||
name, type, unit = m[0].split("!")
|
'type': type,
|
||||||
yield models.Meter(
|
'unit': unit,
|
||||||
name=name,
|
'resource_id': flatten_result['resource_id'],
|
||||||
type=type,
|
'project_id': flatten_result['project_id'],
|
||||||
unit=unit,
|
'user_id': flatten_result['user_id']}
|
||||||
resource_id=flatten_result['resource_id'],
|
frozen_meter = frozenset(meter_dict.items())
|
||||||
project_id=flatten_result['project_id'],
|
if frozen_meter in result:
|
||||||
source=s[0] if s else None,
|
continue
|
||||||
user_id=flatten_result['user_id'],
|
result.add(frozen_meter)
|
||||||
)
|
meter_dict.update({'source':
|
||||||
|
m_source if m_source else None})
|
||||||
|
|
||||||
|
yield models.Meter(**meter_dict)
|
||||||
|
|
||||||
def get_samples(self, sample_filter, limit=None):
|
def get_samples(self, sample_filter, limit=None):
|
||||||
"""Return an iterable of models.Sample instances.
|
"""Return an iterable of models.Sample instances.
|
||||||
@@ -609,7 +623,10 @@ class MTable(object):
|
|||||||
return ((k, self.row(k)) for k in keys)
|
return ((k, self.row(k)) for k in keys)
|
||||||
|
|
||||||
def put(self, key, data):
|
def put(self, key, data):
|
||||||
self._rows[key] = data
|
if key not in self._rows:
|
||||||
|
self._rows[key] = data
|
||||||
|
else:
|
||||||
|
self._rows[key].update(data)
|
||||||
|
|
||||||
def delete(self, key):
|
def delete(self, key):
|
||||||
del self._rows[key]
|
del self._rows[key]
|
||||||
@@ -799,9 +816,14 @@ def make_query(metaquery=None, **kwargs):
|
|||||||
# found in table.
|
# found in table.
|
||||||
for key, value in kwargs.items():
|
for key, value in kwargs.items():
|
||||||
if value is not None:
|
if value is not None:
|
||||||
q.append("SingleColumnValueFilter "
|
if key == 'source':
|
||||||
"('f', '%s', =, 'binary:%s', true, true)" %
|
q.append("SingleColumnValueFilter "
|
||||||
(key, dump(value)))
|
"('f', 's_%s', =, 'binary:%s', true, true)" %
|
||||||
|
(value, dump('1')))
|
||||||
|
else:
|
||||||
|
q.append("SingleColumnValueFilter "
|
||||||
|
"('f', '%s', =, 'binary:%s', true, true)" %
|
||||||
|
(key, dump(value)))
|
||||||
res_q = None
|
res_q = None
|
||||||
if len(q):
|
if len(q):
|
||||||
res_q = " AND ".join(q)
|
res_q = " AND ".join(q)
|
||||||
@@ -869,10 +891,10 @@ def _make_general_rowkey_scan(rts_start=None, rts_end=None, some_id=None):
|
|||||||
return start_row, end_row
|
return start_row, end_row
|
||||||
|
|
||||||
|
|
||||||
def _format_meter_reference(counter_name, counter_type, counter_unit):
|
def _format_meter_reference(counter_name, counter_type, counter_unit, source):
|
||||||
"""Format reference to meter data.
|
"""Format reference to meter data.
|
||||||
"""
|
"""
|
||||||
return "%s!%s!%s" % (counter_name, counter_type, counter_unit)
|
return "%s!%s!%s+%s" % (counter_name, counter_type, counter_unit, source)
|
||||||
|
|
||||||
|
|
||||||
def _timestamp_from_record_tuple(record):
|
def _timestamp_from_record_tuple(record):
|
||||||
@@ -906,14 +928,14 @@ def deserialize_entry(entry, get_raw_meta=True):
|
|||||||
for k, v in entry.items():
|
for k, v in entry.items():
|
||||||
if k.startswith('f:s_'):
|
if k.startswith('f:s_'):
|
||||||
sources.append(k[4:])
|
sources.append(k[4:])
|
||||||
elif k.startswith('f:m_'):
|
|
||||||
meters.append(k[4:])
|
|
||||||
elif k.startswith('f:r_metadata.'):
|
elif k.startswith('f:r_metadata.'):
|
||||||
metadata_flattened[k[len('f:r_metadata.'):]] = load(v)
|
metadata_flattened[k[len('f:r_metadata.'):]] = load(v)
|
||||||
|
elif k.startswith('f:m_'):
|
||||||
|
meters.append(k[4:])
|
||||||
else:
|
else:
|
||||||
flatten_result[k[2:]] = load(v)
|
flatten_result[k[2:]] = load(v)
|
||||||
if get_raw_meta:
|
if get_raw_meta:
|
||||||
metadata = flatten_result.get('metadata', {})
|
metadata = flatten_result.get('resource_metadata', {})
|
||||||
else:
|
else:
|
||||||
metadata = metadata_flattened
|
metadata = metadata_flattened
|
||||||
|
|
||||||
@@ -931,28 +953,24 @@ def serialize_entry(data={}, **kwargs):
|
|||||||
|
|
||||||
result = {}
|
result = {}
|
||||||
for k, v in entry_dict.items():
|
for k, v in entry_dict.items():
|
||||||
if k == 'sources':
|
if k == 'source':
|
||||||
# user and project tables may contain several sources and meters
|
# user, project and resource tables may contain several sources.
|
||||||
# that's why we store it separately as pairs "source/meter name:1".
|
# Besides, resource table may contain several meters.
|
||||||
# Resource and meter table contain only one and it's possible
|
# To make insertion safe we need to store all meters and sources in
|
||||||
# to store pairs like "source/meter:source name/meter name". But to
|
# a separate cell. For this purpose s_ and m_ prefixes are
|
||||||
# keep things simple it's possible to store all variants in all
|
# introduced.
|
||||||
# tables because it doesn't break logic and overhead is not too big
|
result['f:s_%s' % v] = dump('1')
|
||||||
for source in v:
|
|
||||||
result['f:s_%s' % source] = dump('1')
|
elif k == 'meter':
|
||||||
if v:
|
result['f:m_%s' % v] = dump('1')
|
||||||
result['f:source'] = dump(v[0])
|
elif k == 'resource_metadata':
|
||||||
elif k == 'meters':
|
|
||||||
for meter in v:
|
|
||||||
result['f:m_%s' % meter] = dump('1')
|
|
||||||
elif k == 'metadata':
|
|
||||||
# keep raw metadata as well as flattened to provide
|
# keep raw metadata as well as flattened to provide
|
||||||
# capability with API v2. It will be flattened in another
|
# capability with API v2. It will be flattened in another
|
||||||
# way on API level. But we need flattened too for quick filtering.
|
# way on API level. But we need flattened too for quick filtering.
|
||||||
flattened_meta = dump_metadata(v)
|
flattened_meta = dump_metadata(v)
|
||||||
for k, m in flattened_meta.items():
|
for k, m in flattened_meta.items():
|
||||||
result['f:r_metadata.' + k] = dump(m)
|
result['f:r_metadata.' + k] = dump(m)
|
||||||
result['f:metadata'] = dump(v)
|
result['f:resource_metadata'] = dump(v)
|
||||||
else:
|
else:
|
||||||
result['f:' + k] = dump(v)
|
result['f:' + k] = dump(v)
|
||||||
return result
|
return result
|
||||||
|
|||||||
@@ -132,6 +132,22 @@ class TestListMeters(FunctionalTest,
|
|||||||
'util': 0.75,
|
'util': 0.75,
|
||||||
'is_public': False},
|
'is_public': False},
|
||||||
source='test_source'),
|
source='test_source'),
|
||||||
|
sample.Sample(
|
||||||
|
'meter.test.new',
|
||||||
|
'cumulative',
|
||||||
|
'',
|
||||||
|
1,
|
||||||
|
'user-id',
|
||||||
|
'project-id',
|
||||||
|
'resource-id',
|
||||||
|
timestamp=datetime.datetime(2012, 7, 2, 10, 40),
|
||||||
|
resource_metadata={'display_name': 'test-server',
|
||||||
|
'tag': 'self.sample3',
|
||||||
|
'size': 0,
|
||||||
|
'util': 0.75,
|
||||||
|
'is_public': False},
|
||||||
|
source='test_source'),
|
||||||
|
|
||||||
sample.Sample(
|
sample.Sample(
|
||||||
'meter.mine',
|
'meter.mine',
|
||||||
'gauge',
|
'gauge',
|
||||||
@@ -160,13 +176,13 @@ class TestListMeters(FunctionalTest,
|
|||||||
|
|
||||||
def test_list_meters(self):
|
def test_list_meters(self):
|
||||||
data = self.get_json('/meters')
|
data = self.get_json('/meters')
|
||||||
self.assertEqual(4, len(data))
|
self.assertEqual(5, len(data))
|
||||||
self.assertEqual(set(['resource-id',
|
self.assertEqual(set(['resource-id',
|
||||||
'resource-id2',
|
'resource-id2',
|
||||||
'resource-id3',
|
'resource-id3',
|
||||||
'resource-id4']),
|
'resource-id4']),
|
||||||
set(r['resource_id'] for r in data))
|
set(r['resource_id'] for r in data))
|
||||||
self.assertEqual(set(['meter.test', 'meter.mine']),
|
self.assertEqual(set(['meter.test', 'meter.mine', 'meter.test.new']),
|
||||||
set(r['name'] for r in data))
|
set(r['name'] for r in data))
|
||||||
self.assertEqual(set(['test_source', 'test_source1']),
|
self.assertEqual(set(['test_source', 'test_source1']),
|
||||||
set(r['source'] for r in data))
|
set(r['source'] for r in data))
|
||||||
@@ -187,7 +203,7 @@ class TestListMeters(FunctionalTest,
|
|||||||
|
|
||||||
def test_list_samples(self):
|
def test_list_samples(self):
|
||||||
data = self.get_json('/samples')
|
data = self.get_json('/samples')
|
||||||
self.assertEqual(5, len(data))
|
self.assertEqual(6, len(data))
|
||||||
|
|
||||||
def test_query_samples_with_invalid_field_name_and_non_eq_operator(self):
|
def test_query_samples_with_invalid_field_name_and_non_eq_operator(self):
|
||||||
resp = self.get_json('/samples',
|
resp = self.get_json('/samples',
|
||||||
@@ -459,7 +475,7 @@ class TestListMeters(FunctionalTest,
|
|||||||
'value': 'resource-id',
|
'value': 'resource-id',
|
||||||
}])
|
}])
|
||||||
nids = set(r['name'] for r in data)
|
nids = set(r['name'] for r in data)
|
||||||
self.assertEqual(set(['meter.test']), nids)
|
self.assertEqual(set(['meter.test', 'meter.test.new']), nids)
|
||||||
|
|
||||||
sids = set(r['source'] for r in data)
|
sids = set(r['source'] for r in data)
|
||||||
self.assertEqual(set(['test_source']), sids)
|
self.assertEqual(set(['test_source']), sids)
|
||||||
@@ -540,7 +556,8 @@ class TestListMeters(FunctionalTest,
|
|||||||
self.assertEqual(set(['user-id']), uids)
|
self.assertEqual(set(['user-id']), uids)
|
||||||
|
|
||||||
nids = set(r['name'] for r in data)
|
nids = set(r['name'] for r in data)
|
||||||
self.assertEqual(set(['meter.mine', 'meter.test']), nids)
|
self.assertEqual(set(['meter.mine', 'meter.test', 'meter.test.new']),
|
||||||
|
nids)
|
||||||
|
|
||||||
rids = set(r['resource_id'] for r in data)
|
rids = set(r['resource_id'] for r in data)
|
||||||
self.assertEqual(set(['resource-id', 'resource-id2']), rids)
|
self.assertEqual(set(['resource-id', 'resource-id2']), rids)
|
||||||
|
|||||||
Reference in New Issue
Block a user