Merge "Remove 'fill(0)' and fix influx time offset"
commit 62f2380731
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2014, 2016 Hewlett-Packard Development Company, L.P.
+ * (C) Copyright 2014, 2016 Hewlett Packard Enterprise Development LP
  *
  * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
  * in compliance with the License. You may obtain a copy of the License at
@@ -1,5 +1,5 @@
 /*
- * (C) Copyright 2014, 2016 Hewlett-Packard Development LP
+ * (C) Copyright 2014, 2016 Hewlett Packard Enterprise Development LP
  *
  * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
  * in compliance with the License. You may obtain a copy of the License at
@@ -79,8 +79,9 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
     int indexOfUnderscore = offset.indexOf('_');
     if (indexOfUnderscore > -1) {
       offsetTimePart = offset.substring(indexOfUnderscore + 1);
-      // Add the period to the offset to ensure only the next group of points are returned
-      DateTime offsetDateTime = DateTime.parse(offsetTimePart).plusSeconds(period);
+      // Add the period minus one millisecond to the offset
+      // to ensure only the next group of points are returned
+      DateTime offsetDateTime = DateTime.parse(offsetTimePart).plusSeconds(period).minusMillis(1);
       // leave out any ID, as influx doesn't understand it
       offset = offsetDateTime.toString();
     }
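A minimal sketch of what the new offset arithmetic does, assuming Joda-Time's DateTime and a made-up timestamp and period (neither value comes from this commit): the old code advanced the paging offset exactly onto the boundary of the next statistics group, while the new code stops one millisecond short of it, so only the next group of points is returned, as the updated comment states.

import org.joda.time.DateTime;

public class OffsetAdjustmentSketch {
  public static void main(String[] args) {
    // Example inputs only; in InfluxV9StatisticRepo these come from the
    // paging offset string and the requested statistics period.
    String offsetTimePart = "2016-01-01T00:00:00.000Z";
    int period = 300; // seconds

    DateTime oldOffset = DateTime.parse(offsetTimePart).plusSeconds(period);
    DateTime newOffset = DateTime.parse(offsetTimePart).plusSeconds(period).minusMillis(1);

    System.out.println(oldOffset); // 2016-01-01T00:05:00.000Z, exactly on the next boundary
    System.out.println(newOffset); // 2016-01-01T00:04:59.999Z, just before it
  }
}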
@@ -196,6 +197,8 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
       }

       List<Object> values = buildValsList(valueObjects);
+      if (values == null)
+        continue;

       if (((String) values.get(0)).compareTo(offsetTimestamp) >= 0 || index > offsetId) {
         statistics.addMeasurement(values);
@@ -229,9 +232,13 @@ public class InfluxV9StatisticRepo implements StatisticRepo {
     else
       valObjArryList.add(timestamp);

-    // All other values are doubles.
+    // All other values are doubles or nulls.
     for (int i = 1; i < values.length; ++i) {
-      valObjArryList.add(Double.parseDouble((String) values[i]));
+      if (values[i] != null) {
+        valObjArryList.add(Double.parseDouble((String) values[i]));
+      } else {
+        return null;
+      }
     }

     return valObjArryList;
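The two hunks above work together: buildValsList now returns null as soon as it meets a null column, and the caller drops that measurement instead of parsing it. A simplified, self-contained sketch of that flow follows; the array-of-strings input and the class name are assumptions for illustration, not the actual monasca-api types.

import java.util.ArrayList;
import java.util.List;

public class NullSafeValsSketch {

  // Mirrors buildValsList: element 0 is the timestamp, the rest are doubles;
  // any null value (an empty group-by interval) aborts the whole row.
  static List<Object> buildValsList(String[] valueObjects) {
    List<Object> valObjArryList = new ArrayList<>();
    valObjArryList.add(valueObjects[0]);
    for (int i = 1; i < valueObjects.length; ++i) {
      if (valueObjects[i] != null) {
        valObjArryList.add(Double.parseDouble(valueObjects[i]));
      } else {
        return null;
      }
    }
    return valObjArryList;
  }

  public static void main(String[] args) {
    String[][] series = {
        {"2016-01-01T00:00:00Z", "1.0"},
        {"2016-01-01T00:05:00Z", null},  // interval with no data
        {"2016-01-01T00:10:00Z", "3.0"}
    };
    for (String[] valueObjects : series) {
      List<Object> values = buildValsList(valueObjects);
      if (values == null)
        continue;                        // same guard as the diff above
      System.out.println(values);        // only the non-empty intervals print
    }
  }
}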
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2015,2016 Hewlett Packard Enterprise Development Company, L.P.
+ * (C) Copyright 2015,2016 Hewlett Packard Enterprise Development LP
  *
  * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
  * in compliance with the License. You may obtain a copy of the License at
@@ -255,14 +255,14 @@ public class InfluxV9Utils {

   public String periodPartWithGroupBy(int period) {

-    return period > 0 ? String.format(" group by time(%1$ds), * fill(0)", period)
-                      : " group by time(300s), * fill(0)";
+    return period > 0 ? String.format(" group by time(%1$ds), *", period)
+                      : " group by time(300s), *";
   }

   public String periodPart(int period) {

-    return period > 0 ? String.format(" group by time(%1$ds) fill(0)", period)
-                      : " group by time(300s) fill(0)";
+    return period > 0 ? String.format(" group by time(%1$ds)", period)
+                      : " group by time(300s)";
   }

   Map<String, String> filterPrivateTags(Map<String, String> tagMap) {
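For context, a hedged sketch of the clause strings these helpers produce after the change; only the two method bodies come from the diff, while the wrapper class and main method are illustrative. Dropping fill(0) means InfluxDB no longer reports 0 for group-by intervals that contain no points; with the default fill those intervals come back with null values, which the buildValsList change earlier in this commit then filters out.

public class PeriodClauseSketch {

  public static String periodPartWithGroupBy(int period) {
    return period > 0 ? String.format(" group by time(%1$ds), *", period)
                      : " group by time(300s), *";
  }

  public static String periodPart(int period) {
    return period > 0 ? String.format(" group by time(%1$ds)", period)
                      : " group by time(300s)";
  }

  public static void main(String[] args) {
    System.out.println(periodPartWithGroupBy(60)); // " group by time(60s), *"
    System.out.println(periodPart(0));             // " group by time(300s)"
    // Before this change both clauses ended in " fill(0)".
  }
}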
@@ -181,19 +181,19 @@ class TestStatistics(base.BaseMonascaTest):
         start_timestamp = int(time.time() * 1000)
         name = data_utils.rand_name()
         metric = [
-            helpers.create_metric(name=name, timestamp=start_timestamp + 0,
+            helpers.create_metric(name=name, timestamp=start_timestamp + 1,
                                   dimensions={'key1': 'value-1',
                                               'key2': 'value-1'},
                                   value=1),
-            helpers.create_metric(name=name, timestamp=start_timestamp + 1000,
+            helpers.create_metric(name=name, timestamp=start_timestamp + 1001,
                                   dimensions={'key1': 'value-2',
                                               'key2': 'value-2'},
                                   value=2),
-            helpers.create_metric(name=name, timestamp=start_timestamp + 2000,
+            helpers.create_metric(name=name, timestamp=start_timestamp + 2001,
                                   dimensions={'key1': 'value-3',
                                               'key2': 'value-3'},
                                   value=3),
-            helpers.create_metric(name=name, timestamp=start_timestamp + 3000,
+            helpers.create_metric(name=name, timestamp=start_timestamp + 3001,
                                   dimensions={'key1': 'value-4',
                                               'key2': 'value-4'},
                                   value=4)
@@ -213,7 +213,7 @@ class TestStatistics(base.BaseMonascaTest):
             self._check_timeout(i, constants.MAX_RETRIES, elements, num_metrics)

         start_time = helpers.timestamp_to_iso(start_timestamp)
-        end_timestamp = start_timestamp + 4000
+        end_timestamp = start_timestamp + 4001
         end_time = helpers.timestamp_to_iso(end_timestamp)
         query_parms = '?name=' + name + '&merge_metrics=true&statistics=avg' \
                       + '&start_time=' + str(start_time) + '&end_time=' + \
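A small sketch of the adjusted test window arithmetic, written in Java for consistency with the sketches above (the actual test is Python and uses helpers.timestamp_to_iso); the epoch value is invented. Shifting the metrics to +1, +1001, +2001 and +3001 ms and the end bound to +4001 ms keeps the first point off the exact start time and leaves the last point a full second inside the queried range.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class TestWindowSketch {
  public static void main(String[] args) {
    long start = 1451606400000L;                   // 2016-01-01T00:00:00.000Z, example only
    long[] metricOffsets = {1, 1001, 2001, 3001};  // ms offsets from the updated test
    long end = start + 4001;

    for (long off : metricOffsets) {
      System.out.println(new DateTime(start + off, DateTimeZone.UTC));
    }
    // 2016-01-01T00:00:00.001Z ... 2016-01-01T00:00:03.001Z
    System.out.println(new DateTime(end, DateTimeZone.UTC));
    // 2016-01-01T00:00:04.001Z -> end_time stays 1000 ms past the last metric
  }
}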