Merge "Add python multiple metrics for measurement and statistics"

commit d49e04515f
Jenkins, 2016-11-02 15:40:38 +00:00, committed by Gerrit Code Review
10 changed files with 282 additions and 149 deletions
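In short: this change threads a new group_by query parameter from the v2 API resources down through both the Java and Python InfluxDB repositories. With group_by=*, a measurement or statistics query returns each matching series separately (grouped by its tags) instead of requiring merge_metrics=true or failing with MultipleMetricsException, and the pagination helpers are generalized from a single merged element to a list of per-series elements.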

View File

@@ -91,37 +91,31 @@ public class InfluxV9MeasurementRepo implements MeasurementRepo {
       Boolean mergeMetricsFlag, String groupBy) throws Exception {
     String q;
-    if (Boolean.TRUE.equals(mergeMetricsFlag)) {
-      // The time column is automatically included in the results before all other columns.
-      q = String.format("select value, value_meta %1$s "
-                        + "where %2$s %3$s %4$s %5$s %6$s",
-                        this.influxV9Utils.namePart(name, true),
-                        this.influxV9Utils.privateTenantIdPart(tenantId),
-                        this.influxV9Utils.privateRegionPart(this.region),
-                        this.influxV9Utils.startTimePart(startTime),
-                        this.influxV9Utils.dimPart(dimensions),
-                        this.influxV9Utils.endTimePart(endTime));
+    String groupByStr = "";
+    if ("*".equals(groupBy)) {
+      groupByStr = " group by * ";
     } else {
-      if (!"*".equals(groupBy) &&
-          !this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
-        throw new MultipleMetricsException(name, dimensions);
+      if (Boolean.FALSE.equals(mergeMetricsFlag)) {
+        if (!this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
+          throw new MultipleMetricsException(name, dimensions);
+        }
+        groupByStr = this.influxV9Utils.groupByPart();
       }
-      // The time column is automatically included in the results before all other columns.
-      q = String.format("select value, value_meta %1$s "
-                        + "where %2$s %3$s %4$s %5$s %6$s %7$s", //slimit 1
-                        this.influxV9Utils.namePart(name, true),
-                        this.influxV9Utils.privateTenantIdPart(tenantId),
-                        this.influxV9Utils.privateRegionPart(this.region),
-                        this.influxV9Utils.startTimePart(startTime),
-                        this.influxV9Utils.dimPart(dimensions),
-                        this.influxV9Utils.endTimePart(endTime),
-                        this.influxV9Utils.groupByPart());
     }
+    // The time column is automatically included in the results before all other columns.
+    q = String.format("select value, value_meta %1$s "
+                      + "where %2$s %3$s %4$s %5$s %6$s %7$s",
+                      this.influxV9Utils.namePart(name, true),
+                      this.influxV9Utils.privateTenantIdPart(tenantId),
+                      this.influxV9Utils.privateRegionPart(this.region),
+                      this.influxV9Utils.startTimePart(startTime),
+                      this.influxV9Utils.dimPart(dimensions),
+                      this.influxV9Utils.endTimePart(endTime),
+                      groupByStr);

     logger.debug("Measurements query: {}", q);

View File

@@ -116,29 +116,7 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
     String q;
-    if (Boolean.TRUE.equals(mergeMetricsFlag)) {
-      q = String.format("select %1$s %2$s "
-                        + "where %3$s %4$s %5$s %6$s %7$s %8$s %9$s %10$s",
-                        funcPart(statistics),
-                        this.influxV9Utils.namePart(name, true),
-                        this.influxV9Utils.privateTenantIdPart(tenantId),
-                        this.influxV9Utils.privateRegionPart(this.region),
-                        this.influxV9Utils.startTimePart(startTime),
-                        this.influxV9Utils.dimPart(dimensions),
-                        this.influxV9Utils.endTimePart(endTime),
-                        this.influxV9Utils.timeOffsetPart(offsetTimePart),
-                        this.influxV9Utils.periodPart(period),
-                        this.influxV9Utils.limitPart(limit));
-    } else {
-      if (!"*".equals(groupBy) &&
-          !this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
-        throw new MultipleMetricsException(name, dimensions);
-      }
+    if ("*".equals(groupBy) ) {
       q = String.format("select %1$s %2$s "
                         + "where %3$s %4$s %5$s %6$s %7$s %8$s",

@@ -150,6 +128,29 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
                         this.influxV9Utils.dimPart(dimensions),
                         this.influxV9Utils.endTimePart(endTime),
                         this.influxV9Utils.periodPartWithGroupBy(period));
+    } else {
+      if (Boolean.FALSE.equals(mergeMetricsFlag) &&
+          !this.influxV9MetricDefinitionRepo.isAtMostOneSeries(tenantId, name, dimensions)) {
+        throw new MultipleMetricsException(name, dimensions);
+      }
+      q = String.format("select %1$s %2$s "
+                        + "where %3$s %4$s %5$s %6$s %7$s %8$s %9$s %10$s",
+                        funcPart(statistics),
+                        this.influxV9Utils.namePart(name, true),
+                        this.influxV9Utils.privateTenantIdPart(tenantId),
+                        this.influxV9Utils.privateRegionPart(this.region),
+                        this.influxV9Utils.startTimePart(startTime),
+                        this.influxV9Utils.dimPart(dimensions),
+                        this.influxV9Utils.endTimePart(endTime),
+                        this.influxV9Utils.timeOffsetPart(offsetTimePart),
+                        this.influxV9Utils.periodPart(period, mergeMetricsFlag),
+                        this.influxV9Utils.limitPart(limit));
     }

     logger.debug("Statistics query: {}", q);

View File

@@ -259,10 +259,12 @@ public class InfluxV9Utils {
                       : " group by time(300s), *";
   }

-  public String periodPart(int period) {
-    return period > 0 ? String.format(" group by time(%1$ds)", period)
-                      : " group by time(300s)";
+  public String periodPart(int period, Boolean mergeMetricsFlag) {
+    String periodStr = period > 0 ? String.format(" group by time(%1$ds)", period)
+                                  : " group by time(300s)";
+    periodStr += mergeMetricsFlag ? "" : ", *";
+    return periodStr;
   }

   Map<String, String> filterPrivateTags(Map<String, String> tagMap) {
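The new flag's effect reads off directly from the method body: merged queries keep the plain time bucket, unmerged queries also group by every tag. A Python transliteration with the input/output pairs that follow:

    def period_part(period, merge_metrics_flag):
        # Transliteration of InfluxV9Utils.periodPart for illustration.
        period_str = (" group by time(%ds)" % period) if period > 0 \
            else " group by time(300s)"
        return period_str if merge_metrics_flag else period_str + ", *"

    assert period_part(60, True) == " group by time(60s)"
    assert period_part(60, False) == " group by time(60s), *"
    assert period_part(0, False) == " group by time(300s), *"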

View File

@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
-# Copyright 2014 Hewlett-Packard
-# (C) Copyright 2015,2016 Hewlett Packard Enterprise Development LP
+# (C) Copyright 2014-2016 Hewlett Packard Enterprise Development LP
 # Copyright 2015 Cray Inc. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may

@@ -61,21 +60,27 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
     def _build_select_measurement_query(self, dimensions, name, tenant_id,
                                         region, start_timestamp, end_timestamp,
-                                        offset, limit):
+                                        offset, group_by, limit):

         from_clause = self._build_from_clause(dimensions, name, tenant_id,
                                               region, start_timestamp,
                                               end_timestamp)

-        offset_clause = self._build_offset_clause(offset, limit)
+        offset_clause = self._build_offset_clause(offset)
+        group_by_clause = self._build_group_by_clause(group_by)
+        limit_clause = self._build_limit_clause(limit)

-        query = 'select value, value_meta ' + from_clause + offset_clause
+        query = 'select value, value_meta '\
+                + from_clause + offset_clause\
+                + group_by_clause + limit_clause

         return query

     def _build_statistics_query(self, dimensions, name, tenant_id,
                                 region, start_timestamp, end_timestamp,
-                                statistics, period, offset, limit):
+                                statistics, period, offset, group_by, limit):

         from_clause = self._build_from_clause(dimensions, name, tenant_id,
                                               region, start_timestamp,

@@ -104,9 +109,9 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
         query = 'select ' + statistic_string + ' ' + from_clause

-        query += " group by time(" + period + "s)"
+        query += self._build_group_by_clause(group_by, period)

-        limit_clause = " limit {}".format(str(limit + 1))
+        limit_clause = self._build_limit_clause(limit)

         query += limit_clause

@@ -316,7 +321,7 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
     def measurement_list(self, tenant_id, region, name, dimensions,
                          start_timestamp, end_timestamp, offset,
-                         limit, merge_metrics_flag):
+                         limit, merge_metrics_flag, group_by):

         json_measurement_list = []

@@ -326,9 +331,10 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
                                                           region,
                                                           start_timestamp,
                                                           end_timestamp,
-                                                          offset, limit)
+                                                          offset, group_by,
+                                                          limit)

-            if not merge_metrics_flag:
+            if not group_by and not merge_metrics_flag:
                 dimensions = self._get_dimensions(tenant_id, region, name, dimensions)
                 query += " slimit 1"

@@ -352,11 +358,16 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
                     measurement = {u'name': serie['name'],
                                    u'id': measurements_list[-1][0],
-                                   u'dimensions': dimensions,
                                    u'columns': [u'timestamp', u'value',
                                                 u'value_meta'],
                                    u'measurements': measurements_list}

+                    if not group_by:
+                        measurement[u'dimensions'] = dimensions
+                    else:
+                        measurement[u'dimensions'] = {key: value for key, value in serie['tags'].iteritems()
+                                                      if not key.startswith('_')}
+
                     json_measurement_list.append(measurement)

             return json_measurement_list

@@ -407,20 +418,19 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
             raise exceptions.RepositoryException(ex)

     def metrics_statistics(self, tenant_id, region, name, dimensions,
-                           start_timestamp,
-                           end_timestamp, statistics, period, offset, limit,
-                           merge_metrics_flag):
+                           start_timestamp, end_timestamp, statistics,
+                           period, offset, limit, merge_metrics_flag,
+                           group_by):

         json_statistics_list = []

         try:
             query = self._build_statistics_query(dimensions, name, tenant_id,
-                                                 region,
-                                                 start_timestamp,
+                                                 region, start_timestamp,
                                                  end_timestamp, statistics,
-                                                 period, offset, limit)
+                                                 period, offset, group_by, limit)

-            if not merge_metrics_flag:
+            if not group_by and not merge_metrics_flag:
                 dimensions = self._get_dimensions(tenant_id, region, name, dimensions)
                 query += " slimit 1"

@@ -446,10 +456,15 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
                     statistic = {u'name': serie['name'],
                                  u'id': stats_list[-1][0],
-                                 u'dimensions': dimensions,
                                  u'columns': columns,
                                  u'statistics': stats_list}

+                    if not group_by:
+                        statistic[u'dimensions'] = dimensions
+                    else:
+                        statistic[u'dimensions'] = {key: value for key, value in serie['tags'].iteritems()
+                                                    if not key.startswith('_')}
+
                     json_statistics_list.append(statistic)

             return json_statistics_list

@@ -485,18 +500,31 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
             raise exceptions.RepositoryException(ex)

-    def _build_offset_clause(self, offset, limit):
+    def _build_offset_clause(self, offset):
         if offset:
-            offset_clause = (
-                " and time > '{}' limit {}".format(offset, str(limit + 1)))
+            offset_clause = " and time > '{}'".format(offset)
         else:
-            offset_clause = " limit {}".format(str(limit + 1))
+            offset_clause = ""

         return offset_clause

+    def _build_group_by_clause(self, group_by, period=None):
+        if group_by or period:
+            items = []
+            if period:
+                items.append("time(" + str(period) + "s)")
+            if group_by:
+                items.append('*')
+            clause = " group by " + ','.join(items)
+        else:
+            clause = ""
+
+        return clause
+
+    def _build_limit_clause(self, limit):
+        return " limit {} ".format(str(limit + 1))
+
     def _has_measurements(self, tenant_id, region, name, dimensions,
                           start_timestamp, end_timestamp):

@@ -521,7 +549,8 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
                                              end_timestamp,
                                              0,
                                              1,
-                                             False)
+                                             False,
+                                             None)

         if len(measurements) == 0:
             has_measurements = False

@@ -571,9 +600,11 @@ class MetricsRepository(metrics_repository.AbstractMetricsRepository):
             time_clause += " and time <= " + str(int(end_timestamp *
                                                      1000000)) + "u "

-        offset_clause = self._build_offset_clause(offset, limit)
+        offset_clause = self._build_offset_clause(offset)
+        limit_clause = self._build_limit_clause(limit)

-        query += where_clause + time_clause + offset_clause
+        query += where_clause + time_clause + offset_clause + limit_clause

         result = self.influxdb_client.query(query)
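The three small builders compose the tail of every query, so their outputs are worth pinning down. A standalone copy of the new _build_group_by_clause (self dropped) with the outputs that follow directly from its body:

    def _build_group_by_clause(group_by, period=None):
        if group_by or period:
            items = []
            if period:
                items.append("time(" + str(period) + "s)")
            if group_by:
                items.append('*')
            clause = " group by " + ','.join(items)
        else:
            clause = ""
        return clause

    assert _build_group_by_clause(None) == ""
    assert _build_group_by_clause('*') == " group by *"
    assert _build_group_by_clause(None, 300) == " group by time(300s)"
    assert _build_group_by_clause('*', 300) == " group by time(300s),*"

Note that _build_limit_clause asks InfluxDB for limit + 1 rows; the extra row is how the caller detects that another page exists.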

View File

@@ -1,4 +1,4 @@
-# Copyright 2014 Hewlett-Packard
+# (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain

@@ -41,13 +41,15 @@ class AbstractMetricsRepository(object):
     @abc.abstractmethod
     def measurement_list(self, tenant_id, region, name, dimensions,
                          start_timestamp, end_timestamp, offset, limit,
-                         merge_metrics_flag):
+                         merge_metrics_flag,
+                         group_by):
         pass

     @abc.abstractmethod
     def metrics_statistics(self, tenant_id, region, name, dimensions,
                            start_timestamp, end_timestamp, statistics,
-                           period, offset, limit, merge_metrics_flag):
+                           period, offset, limit, merge_metrics_flag,
+                           group_by):
         pass

     @abc.abstractmethod

View File

@@ -63,7 +63,8 @@ class TestRepoMetricsInfluxDB(unittest.TestCase):
             end_timestamp=2,
             offset=None,
             limit=1,
-            merge_metrics_flag=True)
+            merge_metrics_flag=True,
+            group_by=None)

         self.assertEqual(len(result), 1)
         self.assertIsNone(result[0]['dimensions'])

View File

@@ -1,5 +1,5 @@
 # Copyright 2015 Cray Inc. All Rights Reserved.
-# Copyright 2014,2016 Hewlett Packard Enterprise Development LP
+# (C) Copyright 2014,2016 Hewlett Packard Enterprise Development LP
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain

@@ -463,7 +463,62 @@ def paginate_alarming(resource, uri, limit):
     return resource

-def paginate_measurement(measurement, uri, limit):
+def paginate_dimension_values(dimvals, uri, offset, limit):
+
+    parsed_uri = urlparse.urlparse(uri)
+    self_link = build_base_uri(parsed_uri)
+    old_query_params = _get_old_query_params(parsed_uri)
+
+    if old_query_params:
+        self_link += '?' + '&'.join(old_query_params)
+
+    if (dimvals and dimvals[u'values']):
+        have_more, truncated_values = _truncate_dimension_values(dimvals[u'values'],
+                                                                 limit,
+                                                                 offset)
+
+        links = [{u'rel': u'self', u'href': self_link.decode('utf8')}]
+        if have_more:
+            new_offset = truncated_values[limit - 1]
+
+            next_link = build_base_uri(parsed_uri)
+
+            new_query_params = [u'offset' + '=' + urlparse.quote(
+                new_offset.encode('utf8'), safe='')]
+
+            _get_old_query_params_except_offset(new_query_params, parsed_uri)
+
+            if new_query_params:
+                next_link += '?' + '&'.join(new_query_params)
+
+            links.append({u'rel': u'next', u'href': next_link.decode('utf8')})
+
+        truncated_dimvals = {u'id': dimvals[u'id'],
+                             u'dimension_name': dimvals[u'dimension_name'],
+                             u'values': truncated_values}
+
+        #
+        # Only return metric name if one was provided
+        #
+        if u'metric_name' in dimvals:
+            truncated_dimvals[u'metric_name'] = dimvals[u'metric_name']
+
+        resource = {u'links': links,
+                    u'elements': [truncated_dimvals]}
+
+    else:
+        resource = {u'links': ([{u'rel': u'self',
+                                 u'href': self_link.decode('utf8')}]),
+                    u'elements': [dimvals]}
+
+    return resource
+
+
+def _truncate_dimension_values(values, limit, offset):
+    if offset and offset in values:
+        next_value_pos = values.index(offset) + 1
+        values = values[next_value_pos:]
+    have_more = len(values) > limit
+    return have_more, values[:limit]
+
+
+def paginate_measurements(measurements, uri, limit):

     parsed_uri = urlparse.urlparse(uri)

     self_link = build_base_uri(parsed_uri)

@@ -473,12 +528,15 @@ def paginate_measurement(measurement, uri, limit):
     if old_query_params:
         self_link += '?' + '&'.join(old_query_params)

-    if (measurement
-            and measurement[0]
-            and measurement[0]['measurements']
-            and len(measurement[0]['measurements']) > limit):
+    if measurements:
+        measurement_elements = []
+        resource = {u'links': [{u'rel': u'self',
+                                u'href': self_link.decode('utf8')},
+                               ]}
+        for measurement in measurements:
+            if len(measurement['measurements']) >= limit:

-        new_offset = measurement[0]['measurements'][limit - 1][0]
+                new_offset = measurement['measurements'][limit - 1][0]

-        next_link = build_base_uri(parsed_uri)
+                next_link = build_base_uri(parsed_uri)

@@ -490,24 +548,28 @@
         if new_query_params:
             next_link += '?' + '&'.join(new_query_params)

-        truncated_measurement = [{u'dimensions': measurement[0]['dimensions'],
-                                  u'measurements': (measurement[0]
-                                                    ['measurements'][:limit]),
-                                  u'name': measurement[0]['name'],
-                                  u'columns': measurement[0]['columns'],
-                                  u'id': new_offset}]
-
-        resource = {u'links': ([{u'rel': u'self',
-                                 u'href': self_link.decode('utf8')},
-                                {u'rel': u'next',
-                                 u'href': next_link.decode('utf8')}]),
-                    u'elements': truncated_measurement}
+                resource[u'links'].append({u'rel': u'next',
+                                           u'href': next_link.decode('utf8')})
+
+                truncated_measurement = {u'dimensions': measurement['dimensions'],
+                                         u'measurements': (measurement
+                                                           ['measurements'][:limit]),
+                                         u'name': measurement['name'],
+                                         u'columns': measurement['columns'],
+                                         u'id': new_offset}
+
+                measurement_elements.append(truncated_measurement)
+                break
+            else:
+                limit -= len(measurement['measurements'])
+                measurement_elements.append(measurement)
+
+        resource[u'elements'] = measurement_elements
     else:
         resource = {u'links': ([{u'rel': u'self',
                                  u'href': self_link.decode('utf8')}]),
-                    u'elements': measurement}
+                    u'elements': []}

     return resource

@@ -541,7 +603,7 @@ def _get_old_query_params_except_offset(new_query_params, parsed_uri):
                 'utf8'), safe=''))

-def paginate_statistics(statistic, uri, limit):
+def paginate_statistics(statistics, uri, limit):

     parsed_uri = urlparse.urlparse(uri)

     self_link = build_base_uri(parsed_uri)

@@ -551,13 +613,16 @@ def paginate_statistics(statistic, uri, limit):
     if old_query_params:
         self_link += '?' + '&'.join(old_query_params)

-    if (statistic
-            and statistic[0]
-            and statistic[0]['statistics']
-            and len(statistic[0]['statistics']) > limit):
+    if statistics:
+        statistic_elements = []
+        resource = {u'links': [{u'rel': u'self',
+                                u'href': self_link.decode('utf8')}]}
+        for statistic in statistics:
+            if len(statistic['statistics']) >= limit:

-        new_offset = (
-            statistic[0]['statistics'][limit - 1][0])
+                new_offset = (
+                    statistic['statistics'][limit - 1][0])

         next_link = build_base_uri(parsed_uri)

@@ -569,23 +634,28 @@
         if new_query_params:
             next_link += '?' + '&'.join(new_query_params)

-        truncated_statistic = [{u'dimensions': statistic[0]['dimensions'],
-                                u'statistics': (statistic[0]['statistics'][:limit]),
-                                u'name': statistic[0]['name'],
-                                u'columns': statistic[0]['columns'],
-                                u'id': new_offset}]
-
-        resource = {u'links': ([{u'rel': u'self',
-                                 u'href': self_link.decode('utf8')},
-                                {u'rel': u'next',
-                                 u'href': next_link.decode('utf8')}]),
-                    u'elements': truncated_statistic}
+                resource[u'links'].append({u'rel': u'next',
+                                           u'href': next_link.decode('utf8')})
+
+                truncated_statistic = {u'dimensions': statistic['dimensions'],
+                                       u'statistics': (statistic['statistics'][:limit]),
+                                       u'name': statistic['name'],
+                                       u'columns': statistic['columns'],
+                                       u'id': new_offset}
+
+                statistic_elements.append(truncated_statistic)
+                break
+            else:
+                limit -= len(statistic['statistics'])
+                statistic_elements.append(statistic)
+
+        resource[u'elements'] = statistic_elements
     else:
         resource = {u'links': ([{u'rel': u'self',
                                  u'href': self_link.decode('utf8')}]),
-                    u'elements': statistic}
+                    u'elements': []}

     return resource
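The rewritten paginators treat limit as a budget spent across series: whole series pass through until one would meet or exceed the remaining budget, at which point it is truncated, its last included timestamp becomes the offset in the next link, and iteration stops. A self-contained sketch of that accounting (sample data; link building omitted):

    def pick_elements(series_list, limit):
        # Mirrors the loop in the new paginate_measurements.
        elements = []
        for series in series_list:
            if len(series['measurements']) >= limit:
                elements.append({'name': series['name'],
                                 'measurements': series['measurements'][:limit],
                                 'id': series['measurements'][limit - 1][0]})
                break  # the real helper appends a 'next' link here
            limit -= len(series['measurements'])
            elements.append(series)
        return elements

    series = [{'name': 'cpu.user_perc', 'measurements': [['t1', 1], ['t2', 2]]},
              {'name': 'cpu.user_perc', 'measurements': [['t3', 3], ['t4', 4]]}]
    # With limit=3: the first series passes through whole, the second is cut
    # to one row, and 't3' becomes the offset for the next page.
    print(pick_elements(series, 3))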

View File

@@ -182,11 +182,13 @@ class MetricsMeasurements(metrics_api_v2.MetricsMeasurementsV2API):
         offset = helpers.get_query_param(req, 'offset')
         limit = helpers.get_limit(req)
         merge_metrics_flag = get_merge_metrics_flag(req)
+        group_by = helpers.get_query_param(req, "group_by")

         result = self._measurement_list(tenant_id, name, dimensions,
                                         start_timestamp, end_timestamp,
                                         req.uri, offset,
-                                        limit, merge_metrics_flag)
+                                        limit, merge_metrics_flag,
+                                        group_by)

         res.body = helpers.dumpit_utf8(result)
         res.status = falcon.HTTP_200

@@ -194,7 +196,7 @@ class MetricsMeasurements(metrics_api_v2.MetricsMeasurementsV2API):
     @resource.resource_try_catch_block
     def _measurement_list(self, tenant_id, name, dimensions, start_timestamp,
                           end_timestamp, req_uri, offset,
-                          limit, merge_metrics_flag):
+                          limit, merge_metrics_flag, group_by):

         result = self._metrics_repo.measurement_list(tenant_id,
                                                      self._region,

@@ -204,9 +206,10 @@ class MetricsMeasurements(metrics_api_v2.MetricsMeasurementsV2API):
                                                      end_timestamp,
                                                      offset,
                                                      limit,
-                                                     merge_metrics_flag)
+                                                     merge_metrics_flag,
+                                                     group_by)

-        return helpers.paginate_measurement(result, req_uri, limit)
+        return helpers.paginate_measurements(result, req_uri, limit)

 class MetricsStatistics(metrics_api_v2.MetricsStatisticsV2API):

@@ -240,11 +243,13 @@ class MetricsStatistics(metrics_api_v2.MetricsStatisticsV2API):
         offset = helpers.get_query_param(req, 'offset')
         limit = helpers.get_limit(req)
         merge_metrics_flag = get_merge_metrics_flag(req)
+        group_by = helpers.get_query_param(req, "group_by")

         result = self._metric_statistics(tenant_id, name, dimensions,
                                          start_timestamp, end_timestamp,
                                          statistics, period, req.uri,
-                                         offset, limit, merge_metrics_flag)
+                                         offset, limit, merge_metrics_flag,
+                                         group_by)

         res.body = helpers.dumpit_utf8(result)
         res.status = falcon.HTTP_200

@@ -252,7 +257,7 @@ class MetricsStatistics(metrics_api_v2.MetricsStatisticsV2API):
     @resource.resource_try_catch_block
     def _metric_statistics(self, tenant_id, name, dimensions, start_timestamp,
                            end_timestamp, statistics, period, req_uri,
-                           offset, limit, merge_metrics_flag):
+                           offset, limit, merge_metrics_flag, group_by):

         result = self._metrics_repo.metrics_statistics(tenant_id,
                                                        self._region,

@@ -263,7 +268,8 @@ class MetricsStatistics(metrics_api_v2.MetricsStatisticsV2API):
                                                        statistics, period,
                                                        offset,
                                                        limit,
-                                                       merge_metrics_flag)
+                                                       merge_metrics_flag,
+                                                       group_by)

         return helpers.paginate_statistics(result, req_uri, limit)
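At the API surface, group_by is just another query-string parameter alongside merge_metrics; the tempest tests below exercise it as group_by=*. A request would look something like this (the metric name and timestamp are hypothetical):

    GET /v2.0/metrics/measurements?name=cpu.user_perc&group_by=*&start_time=2016-11-01T00:00:00Z

An absent parameter is falsy in the repository checks (if not group_by and not merge_metrics_flag), so existing clients keep the old merge/single-series semantics unchanged.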

View File

@@ -1,4 +1,4 @@
-# (C) Copyright 2015-2016 Hewlett Packard Enterprise Development Company LP
+# (C) Copyright 2015-2016 Hewlett Packard Enterprise Development LP
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain

@@ -258,6 +258,33 @@ class TestMeasurements(base.BaseMonascaTest):
                                                               query_parms)
         self.assertEqual(200, resp.status)

+    @test.attr(type="gate")
+    def test_list_measurements_with_group_by(self):
+        query_parms = '?name=' + str(self._names_list[1]) + \
+                      '&group_by=*' + \
+                      '&start_time=' + str(self._start_time) + \
+                      '&end_time=' + str(self._end_time)
+        resp, response_body = self.monasca_client.list_measurements(
+            query_parms)
+        self.assertEqual(200, resp.status)
+        elements = response_body['elements']
+        self.assertEqual(len(elements), 4)
+        self._verify_list_measurements_elements(elements, None, None)
+
+    @test.attr(type="gate")
+    def test_list_measurements_with_group_by_and_merge(self):
+        query_parms = '?name=' + str(self._names_list[1]) + \
+                      '&group_by=*' + \
+                      '&merge_metrics=true' + \
+                      '&start_time=' + str(self._start_time) + \
+                      '&end_time=' + str(self._end_time)
+        resp, response_body = self.monasca_client.list_measurements(
+            query_parms)
+        self.assertEqual(200, resp.status)
+        elements = response_body['elements']
+        self.assertEqual(len(elements), 4)
+        self._verify_list_measurements_elements(elements, None, None)
+
     @test.attr(type="gate")
     @test.attr(type=['negative'])
     def test_list_measurements_with_name_exceeds_max_length(self):

@@ -320,8 +347,13 @@ class TestMeasurements(base.BaseMonascaTest):
     def _verify_list_measurements_elements(self, elements, test_key,
                                            test_value):
-        if elements:
-            element = elements[0]
+        if not elements:
+            error_msg = "Failed: at least one element is needed. " \
+                        "Number of element = 0."
+            self.fail(error_msg)
+        for element in elements:
+            # element = elements[0]
             self.assertEqual(set(element),
                              set(['columns', 'dimensions', 'id',
                                   'measurements', 'name']))

@@ -335,10 +367,6 @@ class TestMeasurements(base.BaseMonascaTest):
             if test_key is not None and test_value is not None:
                 self.assertEqual(str(element['dimensions'][test_key]),
                                  test_value)
-        else:
-            error_msg = "Failed: at least one element is needed. " \
-                        "Number of element = 0."
-            self.fail(error_msg)

     def _verify_list_measurements_meas_len(self, measurements, test_len):
         if measurements:

View File

@@ -241,8 +241,7 @@ class TestStatistics(base.BaseMonascaTest):
                        ('start_time', str(start_time)),
                        ('end_time', str(end_time)),
                        ('period', 1),
-                       ('limit', limit)
-                       ]
+                       ('limit', limit)]
         offset = None
         while True:
             num_expected_elements = limit

@@ -271,7 +270,6 @@ class TestStatistics(base.BaseMonascaTest):
             # Get the next set
             offset = self._get_offset(response_body)

-
     @test.attr(type="gate")
     @test.attr(type=['negative'])
     def test_list_statistics_with_no_merge_metrics(self):