Add period support in storage drivers and API
This modifies the API to accept a period argument defining the number of seconds over which statistics are grouped. It also changes the return type of the /statistics endpoint so that it always returns an array of statistics.

Change-Id: Iac492f5daf600bd1653da5c95e4184e778978d35
Signed-off-by: Julien Danjou <julien@danjou.info>
parent 6d70a85850
commit bc1bd84b91
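Before the diff, a rough sketch of how a client might exercise the new behaviour; the base URL, meter name, and response handling below are illustrative assumptions, not part of this commit.

# Hypothetical client-side usage after this change: pass "period" (in
# seconds) and expect a JSON array back, one element per period.
import requests

BASE_URL = 'http://localhost:8777/v2'  # placeholder deployment URL

resp = requests.get(BASE_URL + '/meters/instance/statistics',
                    params={'period': 3600})
resp.raise_for_status()

for stat in resp.json():  # always a list now, even without a period
    print(stat['period_start'], stat['period_end'],
          stat['count'], stat['avg'])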
@@ -293,6 +293,15 @@ class Statistics(Base):
duration_end = datetime.datetime
"UTC date and time of the oldest timestamp, or the query end time"

+period = int
+"The difference, in seconds, between the period start and end"
+
+period_start = datetime.datetime
+"UTC date and time of the period start"
+
+period_end = datetime.datetime
+"UTC date and time of the period end"
+
def __init__(self, start_timestamp=None, end_timestamp=None, **kwds):
super(Statistics, self).__init__(**kwds)
self._update_duration(start_timestamp, end_timestamp)

@@ -338,6 +347,9 @@ class Statistics(Base):
count=10,
duration_start=datetime.datetime(2013, 1, 4, 16, 42),
duration_end=datetime.datetime(2013, 1, 4, 16, 47),
+period=7200,
+period_start=datetime.datetime(2013, 1, 4, 16, 00),
+period_end=datetime.datetime(2013, 1, 4, 18, 00),
)

@@ -365,14 +377,19 @@ class MeterController(RestController):
for e in request.storage_conn.get_raw_events(f)
]

-@wsme_pecan.wsexpose(Statistics, [Query])
-def statistics(self, q=[]):
+@wsme_pecan.wsexpose([Statistics], [Query], int)
+def statistics(self, q=[], period=None):
"""Computes the statistics of the meter events in the time range given.

:param q: Filter rules for the data to be returned.
+:param period: Returned result will be an array of statistics for a
+period long of that number of seconds.

"""
kwargs = _query_to_kwargs(q, storage.EventFilter.__init__)
kwargs['meter'] = self._id
f = storage.EventFilter(**kwargs)
-computed = request.storage_conn.get_meter_statistics(f)
+computed = request.storage_conn.get_meter_statistics(f, period)
# Find the original timestamp in the query to use for clamping
# the duration returned in the statistics.
start = end = None

@@ -381,10 +398,11 @@ class MeterController(RestController):
end = timeutils.parse_isotime(i.value).replace(tzinfo=None)
elif i.field == 'timestamp' and i.op in ('gt', 'ge'):
start = timeutils.parse_isotime(i.value).replace(tzinfo=None)
-stat = Statistics(start_timestamp=start,
-end_timestamp=end,
-**computed)
-return stat
+return [Statistics(start_timestamp=start,
+end_timestamp=end,
+**c)
+for c in computed]

class Meter(Base):
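As a reading aid (not part of the diff), the sample() values above suggest how the new fields of a single array element relate to each other:

import datetime

# Mirrors the sample() values shown in the hunk above.
stat = dict(period=7200,
            period_start=datetime.datetime(2013, 1, 4, 16, 0),
            period_end=datetime.datetime(2013, 1, 4, 18, 0),
            duration_start=datetime.datetime(2013, 1, 4, 16, 42),
            duration_end=datetime.datetime(2013, 1, 4, 16, 47))

# "period" is the width of the bucket in seconds...
assert (stat['period_end'] - stat['period_start']).total_seconds() == stat['period']
# ...while duration_start/duration_end bound the samples actually seen,
# so they fall inside the bucket.
assert (stat['period_start'] <= stat['duration_start']
        <= stat['duration_end'] <= stat['period_end'])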
@@ -162,7 +162,7 @@ class Connection(object):
"""

@abc.abstractmethod
-def get_meter_statistics(self, event_filter):
+def get_meter_statistics(self, event_filter, period=None):
"""Return a dictionary containing meter statistics.
described by the query parameters.

@@ -173,6 +173,9 @@ class Connection(object):
'avg':
'sum':
'count':
+'period':
+'period_start':
+'period_end':
'duration':
'duration_start':
'duration_end':
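For orientation, one element of the list a conforming driver returns would carry the keys listed in the docstring above; the values in this sketch are placeholders chosen to be mutually consistent, not taken from the commit.

import datetime

# Placeholder example of a single statistics dict.
example_stat = {
    'min': 1.0,
    'max': 9.0,
    'avg': 5.0,
    'sum': 15.0,
    'count': 3,
    'period': 7200,
    'period_start': datetime.datetime(2012, 9, 25, 10, 0),
    'period_end': datetime.datetime(2012, 9, 25, 12, 0),
    'duration': 3660,  # seconds between the first and last sample seen
    'duration_start': datetime.datetime(2012, 9, 25, 10, 30),
    'duration_end': datetime.datetime(2012, 9, 25, 11, 31),
}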
@@ -131,7 +131,7 @@ class Connection(base.Connection):
matching the event_filter.
"""

-def get_meter_statistics(self, event_filter):
+def get_meter_statistics(self, event_filter, period=None):
"""Return a dictionary containing meter statistics.
described by the query parameters.

@@ -142,6 +142,9 @@ class Connection(base.Connection):
'avg':
'sum':
'count':
+'period':
+'period_start':
+'period_end':
'duration':
'duration_start':
'duration_end':

@@ -182,8 +182,30 @@ class Connection(base.Connection):
max : this.counter_volume,
sum : this.counter_volume,
count : 1,
-timestamp_min : this.timestamp,
-timestamp_max : this.timestamp } )
+duration_start : this.timestamp,
+duration_end : this.timestamp,
+period_start : this.timestamp,
+period_end : this.timestamp} )
}
""")

+MAP_STATS_PERIOD = bson.code.Code("""
+function () {
+var period = %d * 1000;
+var period_first = %d * 1000;
+var period_start = period_first
++ (Math.floor(new Date(this.timestamp.getTime()
+- period_first) / period)
+* period);
+emit(period_start,
+{ min : this.counter_volume,
+max : this.counter_volume,
+sum : this.counter_volume,
+count : 1,
+duration_start : this.timestamp,
+duration_end : this.timestamp,
+period_start : new Date(period_start),
+period_end : new Date(period_start + period) } )
+}
+""")

@@ -197,10 +219,10 @@ class Connection(base.Connection):
res.max = values[i].max;
res.count += values[i].count;
res.sum += values[i].sum;
-if ( values[i].timestamp_min < res.timestamp_min )
-res.timestamp_min = values[i].timestamp_min;
-if ( values[i].timestamp_max > res.timestamp_max )
-res.timestamp_max = values[i].timestamp_max;
+if ( values[i].duration_start < res.duration_start )
+res.duration_start = values[i].duration_start;
+if ( values[i].duration_end > res.duration_end )
+res.duration_end = values[i].duration_end;
}
return res;
}

@@ -209,7 +231,8 @@ class Connection(base.Connection):
FINALIZE_STATS = bson.code.Code("""
function (key, value) {
value.avg = value.sum / value.count;
-value.duration = (value.timestamp_max - value.timestamp_min) / 1000;
+value.duration = (value.duration_end - value.duration_start) / 1000;
+value.period = (value.period_end - value.period_start) / 1000;
return value;
}""")

@@ -452,7 +475,7 @@ class Connection(base.Connection):
del e['_id']
yield e

-def get_meter_statistics(self, event_filter):
+def get_meter_statistics(self, event_filter, period=None):
"""Return a dictionary containing meter statistics.
described by the query parameters.

@@ -463,6 +486,9 @@ class Connection(base.Connection):
'avg':
'sum':
'count':
+'period':
+'period_start':
+'period_end':
'duration':
'duration_start':
'duration_end':

@@ -470,23 +496,24 @@ class Connection(base.Connection):

"""
q = make_query_from_filter(event_filter)
-results = self.db.meter.map_reduce(self.MAP_STATS,
-self.REDUCE_STATS,
-{'inline': 1},
-finalize=self.FINALIZE_STATS,
-query=q,
-)
-if results['results']:
-return results['results'][0]['value']
-
-return {'count': 0,
-'min': None,
-'max': None,
-'avg': None,
-'sum': None,
-'duration': None,
-'duration_start': None,
-'duration_end': None}
+if period:
+map_stats = self.MAP_STATS_PERIOD % \
+(period,
+int(event_filter.start.strftime('%s'))
+if event_filter.start else 0)
+else:
+map_stats = self.MAP_STATS
+
+results = self.db.meter.map_reduce(
+map_stats,
+self.REDUCE_STATS,
+{'inline': 1},
+finalize=self.FINALIZE_STATS,
+query=q,
+)
+
+return [r['value'] for r in results['results']]

def get_volume_sum(self, event_filter):
"""Return the sum of the volume field for the events
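The MAP_STATS_PERIOD map function above buckets every sample into a period anchored at the query start time. The same arithmetic, sketched in Python with datetimes instead of BSON dates (the JS works in milliseconds):

import datetime


def period_bucket(timestamp, period_first, period):
    # Round the sample timestamp down to the start of its period, with
    # periods of `period` seconds anchored at `period_first`.
    offset = (timestamp - period_first).total_seconds()
    start = period_first + datetime.timedelta(
        seconds=(offset // period) * period)
    return start, start + datetime.timedelta(seconds=period)


# A sample at 10:31 with 2-hour periods anchored at 10:28 lands in the
# 10:28-12:28 bucket, matching the test expectations further down.
start, end = period_bucket(datetime.datetime(2012, 9, 25, 10, 31),
                           datetime.datetime(2012, 9, 25, 10, 28),
                           7200)
assert start == datetime.datetime(2012, 9, 25, 10, 28)
assert end == datetime.datetime(2012, 9, 25, 12, 28)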
@@ -20,9 +20,11 @@ from __future__ import absolute_import

import copy
+import datetime
+import math
from sqlalchemy import func

from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils
from ceilometer.storage import base
from ceilometer.storage.sqlalchemy.models import Meter, Project, Resource
from ceilometer.storage.sqlalchemy.models import Source, User

@@ -378,7 +380,34 @@ class Connection(base.Connection):
a_min, a_max = results[0]
return (a_min, a_max)

-def get_meter_statistics(self, event_filter):
+def _make_stats_query(self, event_filter):
+query = self.session.query(
+func.min(Meter.timestamp).label('tsmin'),
+func.max(Meter.timestamp).label('tsmax'),
+func.avg(Meter.counter_volume).label('avg'),
+func.sum(Meter.counter_volume).label('sum'),
+func.min(Meter.counter_volume).label('min'),
+func.max(Meter.counter_volume).label('max'),
+func.count(Meter.counter_volume).label('count'))
+
+return make_query_from_filter(query, event_filter)
+
+@staticmethod
+def _stats_result_to_dict(result, period_start, period_end):
+return {'count': result.count,
+'min': result.min,
+'max': result.max,
+'avg': result.avg,
+'sum': result.sum,
+'duration_start': result.tsmin,
+'duration_end': result.tsmax,
+'duration': timeutils.delta_seconds(result.tsmin,
+result.tsmax),
+'period': timeutils.delta_seconds(period_start, period_end),
+'period_start': period_start,
+'period_end': period_end}
+
+def get_meter_statistics(self, event_filter, period=None):
"""Return a dictionary containing meter statistics.
described by the query parameters.

@@ -389,32 +418,40 @@ class Connection(base.Connection):
'avg':
'sum':
'count':
+'period':
+'period_start':
+'period_end':
'duration':
'duration_start':
'duration_end':
}
"""
-query = self.session.query(func.min(Meter.timestamp),
-func.max(Meter.timestamp),
-func.sum(Meter.counter_volume),
-func.min(Meter.counter_volume),
-func.max(Meter.counter_volume),
-func.count(Meter.counter_volume))
-query = make_query_from_filter(query, event_filter)
-results = query.all()
-res = results[0]
-count = int(res[5])
-return {'count': count,
-'min': res[3],
-'max': res[4],
-'avg': (res[2] / count) if count > 0 else None,
-'sum': res[2],
-'duration': (res[1] - res[0]).seconds,
-'duration_start': res[0],
-'duration_end': res[1],
-}
+res = self._make_stats_query(event_filter).all()[0]
+
+if not period:
+return [self._stats_result_to_dict(res, res.tsmin, res.tsmax)]
+
+query = self._make_stats_query(event_filter)
+# HACK(jd) This is an awful method to compute stats by period, but
+# since we're trying to be SQL agnostic we have to write portable
+# code, so here it is, admire! We're going to do one request to get
+# stats by period. We would like to use GROUP BY, but there's no
+# portable way to manipulate timestamp in SQL, so we can't.
+results = []
+for i in range(int(math.ceil(
+timeutils.delta_seconds(event_filter.start or res.tsmin,
+event_filter.end or res.tsmax)
+/ float(period)))):
+period_start = (event_filter.start
++ datetime.timedelta(seconds=i * period))
+period_end = period_start + datetime.timedelta(seconds=period)
+q = query.filter(Meter.timestamp >= period_start)
+q = q.filter(Meter.timestamp < period_end)
+results.append(self._stats_result_to_dict(q.all()[0],
+period_start,
+period_end))
+return results

############################

def model_query(*args, **kwargs):
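Because there is no portable way to GROUP BY a computed timestamp, the SQL driver above runs one aggregate query per period window. A condensed, standalone sketch of how those windows are derived:

import datetime
import math


def period_windows(start, end, period):
    # ceil((end - start) / period) consecutive [start, end) windows of
    # `period` seconds, beginning at the filter start.
    n = int(math.ceil((end - start).total_seconds() / float(period)))
    for i in range(n):
        window_start = start + datetime.timedelta(seconds=i * period)
        yield window_start, window_start + datetime.timedelta(seconds=period)


# A filter starting at 10:28 with samples through 12:32 and a 2-hour
# period yields two windows (10:28-12:28 and 12:28-14:28); each window
# becomes one filtered aggregate query.
windows = list(period_windows(datetime.datetime(2012, 9, 25, 10, 28),
                              datetime.datetime(2012, 9, 25, 12, 32),
                              7200))
assert len(windows) == 2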
@@ -55,18 +55,16 @@ class TestComputeDurationByResource(FunctionalTest):
func)

def _set_interval(self, start, end):
-def get_interval(ignore_self, event_filter):
+def get_interval(ignore_self, event_filter, period):
assert event_filter.start
assert event_filter.end
-return {'count': 0,
-'min': None,
-'max': None,
-'avg': None,
-'qty': None,
-'duration': None,
-'duration_start': start,
-'duration_end': end,
-}
+if (event_filter.start > end
+or event_filter.end < start):
+return []
+return [{'count': 0,
+# ...
+'duration_start': max(event_filter.start, start),
+'duration_end': min(event_filter.end, end)}]
self._stub_interval_func(get_interval)

def _invoke_api(self):

@@ -83,12 +81,9 @@ class TestComputeDurationByResource(FunctionalTest):
def test_before_range(self):
self._set_interval(self.early1, self.early2)
data = self._invoke_api()
-assert data['duration_start'] is None
-assert data['duration_end'] is None
-assert data['duration'] is None
+self.assertEqual(data, [])

def _assert_times_match(self, actual, expected):
#import pdb; pdb.set_trace()
if actual:
actual = timeutils.parse_isotime(actual)
actual = actual.replace(tzinfo=None)

@@ -97,49 +92,46 @@ class TestComputeDurationByResource(FunctionalTest):
def test_overlap_range_start(self):
self._set_interval(self.early1, self.middle1)
data = self._invoke_api()
-self._assert_times_match(data['duration_start'], self.start)
-self._assert_times_match(data['duration_end'], self.middle1)
-self.assertEqual(data['duration'], 8 * 60 * 60)
+self._assert_times_match(data[0]['duration_start'], self.start)
+self._assert_times_match(data[0]['duration_end'], self.middle1)
+self.assertEqual(data[0]['duration'], 8 * 60 * 60)

def test_within_range(self):
self._set_interval(self.middle1, self.middle2)
data = self._invoke_api()
-self._assert_times_match(data['duration_start'], self.middle1)
-self._assert_times_match(data['duration_end'], self.middle2)
-self.assertEqual(data['duration'], 10 * 60 * 60)
+self._assert_times_match(data[0]['duration_start'], self.middle1)
+self._assert_times_match(data[0]['duration_end'], self.middle2)
+self.assertEqual(data[0]['duration'], 10 * 60 * 60)

def test_within_range_zero_duration(self):
self._set_interval(self.middle1, self.middle1)
data = self._invoke_api()
-self._assert_times_match(data['duration_start'], self.middle1)
-self._assert_times_match(data['duration_end'], self.middle1)
-assert data['duration'] == 0
+self._assert_times_match(data[0]['duration_start'], self.middle1)
+self._assert_times_match(data[0]['duration_end'], self.middle1)
+self.assertEqual(data[0]['duration'], 0)

def test_overlap_range_end(self):
self._set_interval(self.middle2, self.late1)
data = self._invoke_api()
-self._assert_times_match(data['duration_start'], self.middle2)
-self._assert_times_match(data['duration_end'], self.end)
-self.assertEqual(data['duration'], ((6 * 60) - 1) * 60)
+self._assert_times_match(data[0]['duration_start'], self.middle2)
+self._assert_times_match(data[0]['duration_end'], self.end)
+self.assertEqual(data[0]['duration'], ((6 * 60) - 1) * 60)

def test_after_range(self):
self._set_interval(self.late1, self.late2)
data = self._invoke_api()
-assert data['duration_start'] is None
-assert data['duration_end'] is None
-assert data['duration'] is None
+self.assertEqual(data, [])

def test_without_end_timestamp(self):
-def get_interval(ignore_self, event_filter):
-return {'count': 0,
-'min': None,
-'max': None,
-'avg': None,
-'qty': None,
-'duration': None,
-'duration_start': self.late1,
-'duration_end': self.late2,
-}
+def get_interval(ignore_self, event_filter, period):
+return [{'count': 0,
+'min': None,
+'max': None,
+'avg': None,
+'qty': None,
+'duration': None,
+'duration_start': self.late1,
+'duration_end': self.late2}]
self._stub_interval_func(get_interval)
data = self.get_json('/meters/instance:m1.tiny/statistics',
q=[{'field': 'timestamp',

@@ -149,20 +141,19 @@ class TestComputeDurationByResource(FunctionalTest):
'value': 'resource-id'},
{'field': 'search_offset',
'value': 10}])
-self._assert_times_match(data['duration_start'], self.late1)
-self._assert_times_match(data['duration_end'], self.late2)
+self._assert_times_match(data[0]['duration_start'], self.late1)
+self._assert_times_match(data[0]['duration_end'], self.late2)

def test_without_start_timestamp(self):
-def get_interval(ignore_self, event_filter):
-return {'count': 0,
-'min': None,
-'max': None,
-'avg': None,
-'qty': None,
-'duration': None,
-'duration_start': self.early1,
-'duration_end': self.early2,
-}
+def get_interval(ignore_self, event_filter, period):
+return [{'count': 0,
+'min': None,
+'max': None,
+'avg': None,
+'qty': None,
+'duration': None,
+'duration_start': self.early1,
+'duration_end': self.early2}]
return (self.early1, self.early2)
self._stub_interval_func(get_interval)
data = self.get_json('/meters/instance:m1.tiny/statistics',

@@ -173,5 +164,5 @@ class TestComputeDurationByResource(FunctionalTest):
'value': 'resource-id'},
{'field': 'search_offset',
'value': 10}])
-self._assert_times_match(data['duration_start'], self.early1)
-self._assert_times_match(data['duration_end'], self.early2)
+self._assert_times_match(data[0]['duration_start'], self.early1)
+self._assert_times_match(data[0]['duration_end'], self.early2)
@@ -63,8 +63,8 @@ class TestMaxProjectVolume(FunctionalTest):
data = self.get_json(self.PATH, q=[{'field': 'project_id',
'value': 'project1',
}])
-self.assertEqual(data['max'], 7)
-self.assertEqual(data['count'], 3)
+self.assertEqual(data[0]['max'], 7)
+self.assertEqual(data[0]['count'], 3)

def test_start_timestamp(self):
data = self.get_json(self.PATH, q=[{'field': 'project_id',

@@ -75,8 +75,8 @@ class TestMaxProjectVolume(FunctionalTest):
'value': '2012-09-25T11:30:00',
},
])
-self.assertEqual(data['max'], 7)
-self.assertEqual(data['count'], 2)
+self.assertEqual(data[0]['max'], 7)
+self.assertEqual(data[0]['count'], 2)

def test_start_timestamp_after(self):
data = self.get_json(self.PATH, q=[{'field': 'project_id',

@@ -87,8 +87,7 @@ class TestMaxProjectVolume(FunctionalTest):
'value': '2012-09-25T12:34:00',
},
])
-self.assertEqual(data['max'], None)
-self.assertEqual(data['count'], 0)
+self.assertEqual(data, [])

def test_end_timestamp(self):
data = self.get_json(self.PATH, q=[{'field': 'project_id',

@@ -99,8 +98,8 @@ class TestMaxProjectVolume(FunctionalTest):
'value': '2012-09-25T11:30:00',
},
])
-self.assertEqual(data['max'], 5)
-self.assertEqual(data['count'], 1)
+self.assertEqual(data[0]['max'], 5)
+self.assertEqual(data[0]['count'], 1)

def test_end_timestamp_before(self):
data = self.get_json(self.PATH, q=[{'field': 'project_id',

@@ -111,8 +110,7 @@ class TestMaxProjectVolume(FunctionalTest):
'value': '2012-09-25T09:54:00',
},
])
-self.assertEqual(data['max'], None)
-self.assertEqual(data['count'], 0)
+self.assertEqual(data, [])

def test_start_end_timestamp(self):
data = self.get_json(self.PATH, q=[{'field': 'project_id',

@@ -127,5 +125,5 @@ class TestMaxProjectVolume(FunctionalTest):
'value': '2012-09-25T11:32:00',
},
])
-self.assertEqual(data['max'], 6)
-self.assertEqual(data['count'], 1)
+self.assertEqual(data[0]['max'], 6)
+self.assertEqual(data[0]['count'], 1)
@@ -62,8 +62,24 @@ class TestMaxResourceVolume(FunctionalTest):
data = self.get_json(self.PATH, q=[{'field': 'resource_id',
'value': 'resource-id',
}])
-assert data['max'] == 7
-assert data['count'] == 3
+self.assertEqual(data[0]['max'], 7)
+self.assertEqual(data[0]['count'], 3)
+
+def test_no_time_bounds_with_period(self):
+data = self.get_json(self.PATH,
+q=[{'field': 'resource_id',
+'value': 'resource-id'}],
+period=3600)
+self.assertEqual(len(data), 3)
+self.assertEqual(set(x['duration_start'] for x in data),
+set([u'2012-09-25T10:30:00',
+u'2012-09-25T12:32:00',
+u'2012-09-25T11:31:00']))
+self.assertEqual(data[0]['period'], 3600)
+self.assertEqual(set(x['period_start'] for x in data),
+set([u'2012-09-25T10:00:00',
+u'2012-09-25T11:00:00',
+u'2012-09-25T12:00:00']))

def test_start_timestamp(self):
data = self.get_json(self.PATH, q=[{'field': 'resource_id',

@@ -74,8 +90,8 @@ class TestMaxResourceVolume(FunctionalTest):
'value': '2012-09-25T11:30:00',
},
])
-assert data['max'] == 7
-assert data['count'] == 2
+self.assertEqual(data[0]['max'], 7)
+self.assertEqual(data[0]['count'], 2)

def test_start_timestamp_after(self):
data = self.get_json(self.PATH, q=[{'field': 'resource_id',

@@ -86,8 +102,7 @@ class TestMaxResourceVolume(FunctionalTest):
'value': '2012-09-25T12:34:00',
},
])
-assert data['max'] is None
-assert data['count'] == 0
+self.assertEqual(data, [])

def test_end_timestamp(self):
data = self.get_json(self.PATH, q=[{'field': 'resource_id',

@@ -98,8 +113,8 @@ class TestMaxResourceVolume(FunctionalTest):
'value': '2012-09-25T11:30:00',
},
])
-assert data['max'] == 5
-assert data['count'] == 1
+self.assertEqual(data[0]['max'], 5)
+self.assertEqual(data[0]['count'], 1)

def test_end_timestamp_before(self):
data = self.get_json(self.PATH, q=[{'field': 'resource_id',

@@ -110,8 +125,7 @@ class TestMaxResourceVolume(FunctionalTest):
'value': '2012-09-25T09:54:00',
},
])
-assert data['max'] is None
-assert data['count'] == 0
+self.assertEqual(data, [])

def test_start_end_timestamp(self):
data = self.get_json(self.PATH, q=[{'field': 'resource_id',

@@ -126,5 +140,5 @@ class TestMaxResourceVolume(FunctionalTest):
'value': '2012-09-25T11:32:00',
},
])
-assert data['max'] == 6
-assert data['count'] == 1
+self.assertEqual(data[0]['max'], 6)
+self.assertEqual(data[0]['count'], 1)
@@ -63,8 +63,8 @@ class TestSumProjectVolume(FunctionalTest):
'value': 'project1',
}])
expected = 5 + 6 + 7
-assert data['sum'] == expected
-assert data['count'] == 3
+self.assertEqual(data[0]['sum'], expected)
+self.assertEqual(data[0]['count'], 3)

def test_start_timestamp(self):
data = self.get_json(self.PATH, q=[{'field': 'project_id',

@@ -76,8 +76,8 @@ class TestSumProjectVolume(FunctionalTest):
},
])
expected = 6 + 7
-assert data['sum'] == expected
-assert data['count'] == 2
+self.assertEqual(data[0]['sum'], expected)
+self.assertEqual(data[0]['count'], 2)

def test_start_timestamp_after(self):
data = self.get_json(self.PATH, q=[{'field': 'project_id',

@@ -88,8 +88,7 @@ class TestSumProjectVolume(FunctionalTest):
'value': '2012-09-25T12:34:00',
},
])
-assert data['sum'] is None
-assert data['count'] == 0
+self.assertEqual(data, [])

def test_end_timestamp(self):
data = self.get_json(self.PATH, q=[{'field': 'project_id',

@@ -100,8 +99,8 @@ class TestSumProjectVolume(FunctionalTest):
'value': '2012-09-25T11:30:00',
},
])
-assert data['sum'] == 5
-assert data['count'] == 1
+self.assertEqual(data[0]['sum'], 5)
+self.assertEqual(data[0]['count'], 1)

def test_end_timestamp_before(self):
data = self.get_json(self.PATH, q=[{'field': 'project_id',

@@ -112,8 +111,7 @@ class TestSumProjectVolume(FunctionalTest):
'value': '2012-09-25T09:54:00',
},
])
-assert data['sum'] is None
-assert data['count'] == 0
+self.assertEqual(data, [])

def test_start_end_timestamp(self):
data = self.get_json(self.PATH, q=[{'field': 'project_id',

@@ -128,5 +126,5 @@ class TestSumProjectVolume(FunctionalTest):
'value': '2012-09-25T11:32:00',
},
])
-assert data['sum'] == 6
-assert data['count'] == 1
+self.assertEqual(data[0]['sum'], 6)
+self.assertEqual(data[0]['count'], 1)
@@ -62,8 +62,24 @@ class TestSumResourceVolume(FunctionalTest):
data = self.get_json(self.PATH, q=[{'field': 'resource_id',
'value': 'resource-id',
}])
-assert data['sum'] == 5 + 6 + 7
-assert data['count'] == 3
+self.assertEqual(data[0]['sum'], 5 + 6 + 7)
+self.assertEqual(data[0]['count'], 3)
+
+def test_no_time_bounds_with_period(self):
+data = self.get_json(self.PATH,
+q=[{'field': 'resource_id',
+'value': 'resource-id'}],
+period=1800)
+self.assertEqual(len(data), 3)
+self.assertEqual(set(x['duration_start'] for x in data),
+set([u'2012-09-25T10:30:00',
+u'2012-09-25T12:32:00',
+u'2012-09-25T11:31:00']))
+self.assertEqual(data[0]['period'], 1800)
+self.assertEqual(set(x['period_start'] for x in data),
+set([u'2012-09-25T10:30:00',
+u'2012-09-25T11:30:00',
+u'2012-09-25T12:30:00']))

def test_start_timestamp(self):
data = self.get_json(self.PATH, q=[{'field': 'resource_id',

@@ -73,8 +89,25 @@ class TestSumResourceVolume(FunctionalTest):
'op': 'ge',
'value': '2012-09-25T11:30:00',
}])
-assert data['sum'] == 6 + 7
-assert data['count'] == 2
+self.assertEqual(data[0]['sum'], 6 + 7)
+self.assertEqual(data[0]['count'], 2)
+
+def test_start_timestamp_with_period(self):
+data = self.get_json(self.PATH,
+q=[{'field': 'resource_id',
+'value': 'resource-id'},
+{'field': 'timestamp',
+'op': 'ge',
+'value': '2012-09-25T10:15:00'}],
+period=7200)
+self.assertEqual(len(data), 2)
+self.assertEqual(set(x['duration_start'] for x in data),
+set([u'2012-09-25T10:30:00',
+u'2012-09-25T12:32:00']))
+self.assertEqual(data[0]['period'], 7200)
+self.assertEqual(set(x['period_start'] for x in data),
+set([u'2012-09-25T10:15:00',
+u'2012-09-25T12:15:00']))

def test_start_timestamp_after(self):
data = self.get_json(self.PATH, q=[{'field': 'resource_id',

@@ -84,8 +117,7 @@ class TestSumResourceVolume(FunctionalTest):
'op': 'ge',
'value': '2012-09-25T12:34:00',
}])
-assert data['sum'] is None
-assert data['count'] == 0
+self.assertEqual(data, [])

def test_end_timestamp(self):
data = self.get_json(self.PATH, q=[{'field': 'resource_id',

@@ -95,8 +127,8 @@ class TestSumResourceVolume(FunctionalTest):
'op': 'le',
'value': '2012-09-25T11:30:00',
}])
-assert data['sum'] == 5
-assert data['count'] == 1
+self.assertEqual(data[0]['sum'], 5)
+self.assertEqual(data[0]['count'], 1)

def test_end_timestamp_before(self):
data = self.get_json(self.PATH, q=[{'field': 'resource_id',

@@ -106,8 +138,7 @@ class TestSumResourceVolume(FunctionalTest):
'op': 'le',
'value': '2012-09-25T09:54:00',
}])
-assert data['sum'] is None
-assert data['count'] == 0
+self.assertEqual(data, [])

def test_start_end_timestamp(self):
data = self.get_json(self.PATH, q=[{'field': 'resource_id',

@@ -121,5 +152,5 @@ class TestSumResourceVolume(FunctionalTest):
'op': 'lt',
'value': '2012-09-25T11:32:00',
}])
-assert data['sum'] == 6
-assert data['count'] == 1
+self.assertEqual(data[0]['sum'], 6)
+self.assertEqual(data[0]['count'], 1)
@@ -764,7 +764,7 @@ class StatisticsTest(DBTestBase):
user='user-5',
meter='volume.size',
)
-results = self.conn.get_meter_statistics(f)
+results = self.conn.get_meter_statistics(f)[0]
self.assertEqual(results['duration'],
(datetime.datetime(2012, 9, 25, 12, 32)
- datetime.datetime(2012, 9, 25, 10, 30)).seconds)

@@ -774,6 +774,37 @@ class StatisticsTest(DBTestBase):
assert results['sum'] == 27
assert results['avg'] == 9

+def test_by_user_period(self):
+f = storage.EventFilter(
+user='user-5',
+meter='volume.size',
+start='2012-09-25T10:28:00',
+)
+results = self.conn.get_meter_statistics(f, period=7200)
+self.assertEqual(len(results), 2)
+self.assertEqual(set(r['period_start'] for r in results),
+set([datetime.datetime(2012, 9, 25, 10, 28),
+datetime.datetime(2012, 9, 25, 12, 28)]))
+self.assertEqual(set(r['period_end'] for r in results),
+set([datetime.datetime(2012, 9, 25, 12, 28),
+datetime.datetime(2012, 9, 25, 14, 28)]))
+for r in results:
+if r['period_start'] == datetime.datetime(2012, 9, 25, 10, 0):
+self.assertEqual(r['count'], 2)
+self.assertEqual(r['avg'], 8.5)
+self.assertEqual(r['min'], 8)
+self.assertEqual(r['max'], 9)
+self.assertEqual(r['sum'], 17)
+self.assertEqual(r['period'], 7200)
+self.assertEqual(r['period_end'],
+r['period_start']
++ datetime.timedelta(seconds=7200))
+self.assertEqual(r['duration'], 3660)
+self.assertEqual(r['duration_start'],
+datetime.datetime(2012, 9, 25, 10, 30))
+self.assertEqual(r['duration_end'],
+datetime.datetime(2012, 9, 25, 11, 31))
+
def test_by_project(self):
f = storage.EventFilter(
meter='volume.size',

@@ -781,7 +812,7 @@ class StatisticsTest(DBTestBase):
start='2012-09-25T11:30:00',
end='2012-09-25T11:32:00',
)
-results = self.conn.get_meter_statistics(f)
+results = self.conn.get_meter_statistics(f)[0]
self.assertEqual(results['duration'], 0)
assert results['count'] == 1
assert results['min'] == 6

@@ -794,7 +825,7 @@ class StatisticsTest(DBTestBase):
user='user-id',
meter='volume.size',
)
-results = self.conn.get_meter_statistics(f)
+results = self.conn.get_meter_statistics(f)[0]
self.assertEqual(results['duration'],
(datetime.datetime(2012, 9, 25, 12, 32)
- datetime.datetime(2012, 9, 25, 10, 30)).seconds)