Add 'merge' functionality to SLA
Merge data from several SLA instances into a single one so that the code sla1 = SLA() sla1.add_iteration(a) sla1.add_iteration(b) sla2 = SLA() sla2.add_iteration(c) sla2.add_iteration(d) sla1.merge(sla2) is equivalent to sla1 = SLA() sla1.add_iteration(a) sla1.add_iteration(b) sla1.add_iteration(c) sla1.add_iteration(d) It is needed for distribluted load generation. Change-Id: I1bf4b5a76a2729e800928ddd7306864de4612461
This commit is contained in:
parent
54ca5da93c
commit
de6808a850
@ -31,6 +31,10 @@ class StreamingAlgorithm(object):
|
||||
def add(self, value):
|
||||
"""Process a single value from the input stream."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def merge(self, other):
|
||||
"""Merge results processed by another instance."""
|
||||
|
||||
@abc.abstractmethod
|
||||
def result(self):
|
||||
"""Return the result based on the values processed so far."""
|
||||
@ -53,6 +57,10 @@ class MeanComputation(StreamingAlgorithm):
|
||||
self.count += 1
|
||||
self.total += value
|
||||
|
||||
def merge(self, other):
|
||||
self.count += other.count
|
||||
self.total += other.total
|
||||
|
||||
def result(self):
|
||||
if self.count == 0:
|
||||
message = _("Unable to calculate the mean: "
|
||||
@ -81,6 +89,23 @@ class StdDevComputation(StreamingAlgorithm):
|
||||
self.mean = self.mean_computation.result()
|
||||
self.dev_sum = self.dev_sum + (value - mean_prev) * (value - self.mean)
|
||||
|
||||
def merge(self, other):
|
||||
dev_sum1 = self.dev_sum
|
||||
count1 = self.count
|
||||
mean1 = self.mean
|
||||
|
||||
dev_sum2 = other.dev_sum
|
||||
count2 = other.count
|
||||
mean2 = other.mean
|
||||
|
||||
self.mean_computation.merge(other.mean_computation)
|
||||
self.mean = self.mean_computation.result()
|
||||
self.count += other.count
|
||||
|
||||
self.dev_sum = (dev_sum1 + count1 * mean1 ** 2 +
|
||||
dev_sum2 + count2 * mean2 ** 2 -
|
||||
self.count * self.mean ** 2)
|
||||
|
||||
def result(self):
|
||||
if self.count < 2:
|
||||
message = _("Unable to calculate the standard deviation: "
|
||||
@ -101,6 +126,10 @@ class MinComputation(StreamingAlgorithm):
|
||||
if self._value is None or value < self._value:
|
||||
self._value = value
|
||||
|
||||
def merge(self, other):
|
||||
if other._value is not None:
|
||||
self.add(other._value)
|
||||
|
||||
def result(self):
|
||||
if self._value is None:
|
||||
raise ValueError("No values have been processed")
|
||||
@ -119,6 +148,10 @@ class MaxComputation(StreamingAlgorithm):
|
||||
if self._value is None or value > self._value:
|
||||
self._value = value
|
||||
|
||||
def merge(self, other):
|
||||
if other._value is not None:
|
||||
self.add(other._value)
|
||||
|
||||
def result(self):
|
||||
if self._value is None:
|
||||
raise ValueError("No values have been processed")
|
||||
@ -143,6 +176,10 @@ class PercentileComputation(StreamingAlgorithm):
|
||||
def add(self, value):
|
||||
self._graph_zipper.add_point(value)
|
||||
|
||||
def merge(self, other):
|
||||
# TODO(ikhudoshyn): Implement me
|
||||
raise NotImplementedError()
|
||||
|
||||
def result(self):
|
||||
results = list(
|
||||
map(lambda x: x[1], self._graph_zipper.get_zipped_graph()))
|
||||
@ -160,5 +197,8 @@ class IncrementComputation(StreamingAlgorithm):
|
||||
def add(self, *args):
|
||||
self._count += 1
|
||||
|
||||
def merge(self, other):
|
||||
self._count += other._count
|
||||
|
||||
def result(self):
|
||||
return self._count
|
||||
|
@ -52,6 +52,14 @@ class FailureRate(sla.SLA):
|
||||
self.success = self.min_percent <= self.error_rate <= self.max_percent
|
||||
return self.success
|
||||
|
||||
def merge(self, other):
|
||||
self.total += other.total
|
||||
self.errors += other.errors
|
||||
if self.total:
|
||||
self.error_rate = self.errors * 100.0 / self.total
|
||||
self.success = self.min_percent <= self.error_rate <= self.max_percent
|
||||
return self.success
|
||||
|
||||
def details(self):
|
||||
return (_("Failure rate criteria %.2f%% <= %.2f%% <= %.2f%% - %s") %
|
||||
(self.min_percent, self.error_rate, self.max_percent,
|
||||
|
@ -39,6 +39,12 @@ class IterationTime(sla.SLA):
|
||||
self.success = self.max_iteration_time <= self.criterion_value
|
||||
return self.success
|
||||
|
||||
def merge(self, other):
|
||||
if other.max_iteration_time > self.max_iteration_time:
|
||||
self.max_iteration_time = other.max_iteration_time
|
||||
self.success = self.max_iteration_time <= self.criterion_value
|
||||
return self.success
|
||||
|
||||
def details(self):
|
||||
return (_("Maximum seconds per iteration %.2fs <= %.2fs - %s") %
|
||||
(self.max_iteration_time, self.criterion_value, self.status()))
|
@ -42,6 +42,12 @@ class MaxAverageDuration(sla.SLA):
|
||||
self.success = self.avg <= self.criterion_value
|
||||
return self.success
|
||||
|
||||
def merge(self, other):
|
||||
self.avg_comp.merge(other.avg_comp)
|
||||
self.avg = self.avg_comp.result()
|
||||
self.success = self.avg <= self.criterion_value
|
||||
return self.success
|
||||
|
||||
def details(self):
|
||||
return (_("Average duration of one iteration %.2fs <= %.2fs - %s") %
|
||||
(self.avg, self.criterion_value, self.status()))
|
||||
|
@ -56,6 +56,14 @@ class Outliers(sla.SLA):
|
||||
self.std_comp = streaming_algorithms.StdDevComputation()
|
||||
|
||||
def add_iteration(self, iteration):
|
||||
# NOTE(ikhudoshyn): This method can not be implemented properly.
|
||||
# After adding a new iteration, both mean and standard deviation
|
||||
# may change. Hence threshold will change as well. In this case we
|
||||
# should again compare durations of all accounted iterations
|
||||
# to the threshold. Unfortunately we can not do it since
|
||||
# we do not store durations.
|
||||
# Implementation provided here only gives rough approximation
|
||||
# of outliers number.
|
||||
if not iteration.get("error"):
|
||||
duration = iteration["duration"]
|
||||
self.iterations += 1
|
||||
@ -76,6 +84,28 @@ class Outliers(sla.SLA):
|
||||
self.success = self.outliers <= self.max_outliers
|
||||
return self.success
|
||||
|
||||
def merge(self, other):
|
||||
# NOTE(ikhudoshyn): This method can not be implemented properly.
|
||||
# After merge, both mean and standard deviation may change.
|
||||
# Hence threshold will change as well. In this case we
|
||||
# should again compare durations of all accounted iterations
|
||||
# to the threshold. Unfortunately we can not do it since
|
||||
# we do not store durations.
|
||||
# Implementation provided here only gives rough approximation
|
||||
# of outliers number.
|
||||
self.iterations += other.iterations
|
||||
self.outliers += other.outliers
|
||||
self.mean_comp.merge(other.mean_comp)
|
||||
self.std_comp.merge(other.std_comp)
|
||||
|
||||
if self.iterations >= 2:
|
||||
mean = self.mean_comp.result()
|
||||
std = self.std_comp.result()
|
||||
self.threshold = mean + self.sigmas * std
|
||||
|
||||
self.success = self.outliers <= self.max_outliers
|
||||
return self.success
|
||||
|
||||
def details(self):
|
||||
return (_("Maximum number of outliers %i <= %i - %s") %
|
||||
(self.outliers, self.max_outliers, self.status()))
|
||||
|
@ -57,6 +57,30 @@ class SLAChecker(object):
|
||||
"""
|
||||
return all([sla.add_iteration(iteration) for sla in self.sla_criteria])
|
||||
|
||||
def merge(self, other):
|
||||
self._validate_config(other)
|
||||
self._validate_sla_types(other)
|
||||
|
||||
return all([self_sla.merge(other_sla)
|
||||
for self_sla, other_sla
|
||||
in six.moves.zip(
|
||||
self.sla_criteria, other.sla_criteria)])
|
||||
|
||||
def _validate_sla_types(self, other):
|
||||
for self_sla, other_sla in six.moves.zip_longest(
|
||||
self.sla_criteria, other.sla_criteria):
|
||||
self_sla.validate_type(other_sla)
|
||||
|
||||
def _validate_config(self, other):
|
||||
self_config = self.config.get("sla", {})
|
||||
other_config = other.config.get("sla", {})
|
||||
if self_config != other_config:
|
||||
message = _(
|
||||
"Error merging SLACheckers with configs %s, %s. "
|
||||
"Only SLACheckers with the same config could be merged."
|
||||
) % (self_config, other_config)
|
||||
raise TypeError(message)
|
||||
|
||||
def results(self):
|
||||
results = [sla.result() for sla in self.sla_criteria]
|
||||
if self.aborted_on_sla:
|
||||
@ -132,3 +156,44 @@ class SLA(plugin.Plugin):
|
||||
def status(self):
|
||||
"""Return "Passed" or "Failed" depending on the current SLA status."""
|
||||
return "Passed" if self.success else "Failed"
|
||||
|
||||
@abc.abstractmethod
|
||||
def merge(self, other):
|
||||
"""Merge aggregated data from another SLA instance into self.
|
||||
|
||||
Process the results of several iterations aggregated in another
|
||||
instance of SLA together with ones stored in self so that the
|
||||
code
|
||||
|
||||
sla1 = SLA()
|
||||
sla1.add_iteration(a)
|
||||
sla1.add_iteration(b)
|
||||
|
||||
sla2 = SLA()
|
||||
sla2.add_iteration(c)
|
||||
sla2.add_iteration(d)
|
||||
|
||||
sla1.merge(sla2)
|
||||
|
||||
is equivalent to
|
||||
|
||||
sla1 = SLA()
|
||||
sla1.add_iteration(a)
|
||||
sla1.add_iteration(b)
|
||||
sla1.add_iteration(c)
|
||||
sla1.add_iteration(d)
|
||||
|
||||
The call to merge() will return True if the SLA check
|
||||
passed, and False otherwise.
|
||||
|
||||
:param other: another SLA object
|
||||
:returns: True if the SLA check passed, False otherwise
|
||||
"""
|
||||
|
||||
def validate_type(self, other):
|
||||
if type(self) != type(other):
|
||||
message = _(
|
||||
"Error merging SLAs of types %s, %s. "
|
||||
"Only SLAs of the same type could be merged."
|
||||
) % (type(self), type(other))
|
||||
raise TypeError(message)
|
||||
|
@ -13,10 +13,11 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import ddt
|
||||
|
||||
import math
|
||||
|
||||
import ddt
|
||||
import six
|
||||
|
||||
from rally.common import streaming_algorithms as algo
|
||||
from rally import exceptions
|
||||
from tests.unit import test
|
||||
@ -41,6 +42,27 @@ class MeanComputationTestCase(test.TestCase):
|
||||
excepted_mean = float(sum(stream)) / len(stream)
|
||||
self.assertEqual(excepted_mean, mean_computation.result())
|
||||
|
||||
def test_merge(self):
|
||||
single_mean = algo.MeanComputation()
|
||||
|
||||
for val in six.moves.range(100):
|
||||
single_mean.add(val)
|
||||
|
||||
means = [algo.MeanComputation()
|
||||
for _ in six.moves.range(10)]
|
||||
|
||||
for idx, mean in enumerate(means):
|
||||
for val in six.moves.range(idx * 10, (idx + 1) * 10):
|
||||
mean.add(val)
|
||||
|
||||
merged_mean = means[0]
|
||||
for mean in means[1:]:
|
||||
merged_mean.merge(mean)
|
||||
|
||||
self.assertEqual(single_mean.count, merged_mean.count)
|
||||
self.assertEqual(single_mean.total, merged_mean.total)
|
||||
self.assertEqual(single_mean.result(), merged_mean.result())
|
||||
|
||||
|
||||
class StdDevComputationTestCase(test.TestCase):
|
||||
|
||||
@ -69,6 +91,28 @@ class StdDevComputationTestCase(test.TestCase):
|
||||
(len(stream) - 1))
|
||||
self.assertEqual(excepted_std, std_computation.result())
|
||||
|
||||
def test_merge(self):
|
||||
single_std = algo.StdDevComputation()
|
||||
|
||||
for val in six.moves.range(100):
|
||||
single_std.add(val)
|
||||
|
||||
stds = [algo.StdDevComputation()
|
||||
for _ in six.moves.range(10)]
|
||||
|
||||
for idx, std in enumerate(stds):
|
||||
for val in six.moves.range(idx * 10, (idx + 1) * 10):
|
||||
std.add(val)
|
||||
|
||||
merged_std = stds[0]
|
||||
for std in stds[1:]:
|
||||
merged_std.merge(std)
|
||||
|
||||
self.assertEqual(single_std.count, merged_std.count)
|
||||
self.assertEqual(single_std.mean, merged_std.mean)
|
||||
self.assertEqual(single_std.dev_sum, merged_std.dev_sum)
|
||||
self.assertEqual(single_std.result(), merged_std.result())
|
||||
|
||||
|
||||
class MinComputationTestCase(test.TestCase):
|
||||
|
||||
@ -88,6 +132,26 @@ class MinComputationTestCase(test.TestCase):
|
||||
self.assertRaises(TypeError, comp.result, 1)
|
||||
self.assertRaises(ValueError, comp.result)
|
||||
|
||||
def test_merge(self):
|
||||
single_min_algo = algo.MinComputation()
|
||||
|
||||
for val in six.moves.range(100):
|
||||
single_min_algo.add(val)
|
||||
|
||||
algos = [algo.MinComputation()
|
||||
for _ in six.moves.range(10)]
|
||||
|
||||
for idx, min_algo in enumerate(algos):
|
||||
for val in six.moves.range(idx * 10, (idx + 1) * 10):
|
||||
min_algo.add(val)
|
||||
|
||||
merged_min_algo = algos[0]
|
||||
for min_algo in algos[1:]:
|
||||
merged_min_algo.merge(min_algo)
|
||||
|
||||
self.assertEqual(single_min_algo._value, merged_min_algo._value)
|
||||
self.assertEqual(single_min_algo.result(), merged_min_algo.result())
|
||||
|
||||
|
||||
class MaxComputationTestCase(test.TestCase):
|
||||
|
||||
@ -107,6 +171,26 @@ class MaxComputationTestCase(test.TestCase):
|
||||
self.assertRaises(TypeError, comp.result, 1)
|
||||
self.assertRaises(ValueError, comp.result)
|
||||
|
||||
def test_merge(self):
|
||||
single_max_algo = algo.MaxComputation()
|
||||
|
||||
for val in six.moves.range(100):
|
||||
single_max_algo.add(val)
|
||||
|
||||
algos = [algo.MaxComputation()
|
||||
for _ in six.moves.range(10)]
|
||||
|
||||
for idx, max_algo in enumerate(algos):
|
||||
for val in six.moves.range(idx * 10, (idx + 1) * 10):
|
||||
max_algo.add(val)
|
||||
|
||||
merged_max_algo = algos[0]
|
||||
for max_algo in algos[1:]:
|
||||
merged_max_algo.merge(max_algo)
|
||||
|
||||
self.assertEqual(single_max_algo._value, merged_max_algo._value)
|
||||
self.assertEqual(single_max_algo.result(), merged_max_algo.result())
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class PercentileComputationTestCase(test.TestCase):
|
||||
@ -171,3 +255,23 @@ class IncrementComputationTestCase(test.TestCase):
|
||||
self.assertEqual(i - 1, comp.result())
|
||||
comp.add(42)
|
||||
self.assertEqual(i, comp.result())
|
||||
|
||||
def test_merge(self):
|
||||
single_inc = algo.IncrementComputation()
|
||||
|
||||
for val in six.moves.range(100):
|
||||
single_inc.add(val)
|
||||
|
||||
incs = [algo.IncrementComputation()
|
||||
for _ in six.moves.range(10)]
|
||||
|
||||
for idx, inc in enumerate(incs):
|
||||
for val in six.moves.range(idx * 10, (idx + 1) * 10):
|
||||
inc.add(val)
|
||||
|
||||
merged_inc = incs[0]
|
||||
for inc in incs[1:]:
|
||||
merged_inc.merge(inc)
|
||||
|
||||
self.assertEqual(single_inc._count, merged_inc._count)
|
||||
self.assertEqual(single_inc.result(), merged_inc.result())
|
||||
|
@ -14,6 +14,7 @@
|
||||
# under the License.
|
||||
|
||||
|
||||
import ddt
|
||||
import jsonschema
|
||||
|
||||
from rally.plugins.common.sla import failure_rate
|
||||
@ -37,6 +38,7 @@ class SLAPluginTestCase(test.TestCase):
|
||||
{"test_criterion": 42.0})
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class FailureRateTestCase(test.TestCase):
|
||||
|
||||
def test_config_schema(self):
|
||||
@ -107,3 +109,28 @@ class FailureRateTestCase(test.TestCase):
|
||||
self.assertTrue(sla.add_iteration({"error": []}))
|
||||
self.assertTrue(sla.add_iteration({"error": ["error"]})) # 33%
|
||||
self.assertFalse(sla.add_iteration({"error": ["error"]})) # 40%
|
||||
|
||||
@ddt.data([[0, 1, 0, 0],
|
||||
[0, 1, 1, 1, 0, 0, 0, 0],
|
||||
[0, 0, 1, 0, 0, 1]])
|
||||
def test_merge(self, errors):
|
||||
|
||||
single_sla = failure_rate.FailureRate({"max": 25})
|
||||
|
||||
for ee in errors:
|
||||
for e in ee:
|
||||
single_sla.add_iteration({"error": ["error"] if e else []})
|
||||
|
||||
slas = [failure_rate.FailureRate({"max": 25}) for _ in errors]
|
||||
|
||||
for idx, sla in enumerate(slas):
|
||||
for e in errors[idx]:
|
||||
sla.add_iteration({"error": ["error"] if e else []})
|
||||
|
||||
merged_sla = slas[0]
|
||||
for sla in slas[1:]:
|
||||
merged_sla.merge(sla)
|
||||
|
||||
self.assertEqual(single_sla.success, merged_sla.success)
|
||||
self.assertEqual(single_sla.errors, merged_sla.errors)
|
||||
self.assertEqual(single_sla.total, merged_sla.total)
|
||||
|
@ -14,23 +14,25 @@
|
||||
# under the License.
|
||||
|
||||
|
||||
import ddt
|
||||
import jsonschema
|
||||
|
||||
from rally.plugins.common.sla import iteraion_time
|
||||
from rally.plugins.common.sla import iteration_time
|
||||
from tests.unit import test
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class IterationTimeTestCase(test.TestCase):
|
||||
def test_config_schema(self):
|
||||
properties = {
|
||||
"max_seconds_per_iteration": 0
|
||||
}
|
||||
self.assertRaises(jsonschema.ValidationError,
|
||||
iteraion_time.IterationTime.validate, properties)
|
||||
iteration_time.IterationTime.validate, properties)
|
||||
|
||||
def test_result(self):
|
||||
sla1 = iteraion_time.IterationTime(42)
|
||||
sla2 = iteraion_time.IterationTime(3.62)
|
||||
sla1 = iteration_time.IterationTime(42)
|
||||
sla2 = iteration_time.IterationTime(3.62)
|
||||
for sla in [sla1, sla2]:
|
||||
sla.add_iteration({"duration": 3.14})
|
||||
sla.add_iteration({"duration": 6.28})
|
||||
@ -40,13 +42,38 @@ class IterationTimeTestCase(test.TestCase):
|
||||
self.assertEqual("Failed", sla2.status())
|
||||
|
||||
def test_result_no_iterations(self):
|
||||
sla = iteraion_time.IterationTime(42)
|
||||
sla = iteration_time.IterationTime(42)
|
||||
self.assertTrue(sla.result()["success"])
|
||||
|
||||
def test_add_iteration(self):
|
||||
sla = iteraion_time.IterationTime(4.0)
|
||||
sla = iteration_time.IterationTime(4.0)
|
||||
self.assertTrue(sla.add_iteration({"duration": 3.14}))
|
||||
self.assertTrue(sla.add_iteration({"duration": 2.0}))
|
||||
self.assertTrue(sla.add_iteration({"duration": 3.99}))
|
||||
self.assertFalse(sla.add_iteration({"duration": 4.5}))
|
||||
self.assertFalse(sla.add_iteration({"duration": 3.8}))
|
||||
|
||||
@ddt.data([[1.0, 2.0, 1.5, 4.3],
|
||||
[2.1, 3.4, 1.2, 6.3, 7.2, 7.0, 1.],
|
||||
[1.1, 1.1, 2.2, 2.2, 3.3, 4.3]])
|
||||
def test_merge(self, durations):
|
||||
|
||||
single_sla = iteration_time.IterationTime(4.0)
|
||||
|
||||
for dd in durations:
|
||||
for d in dd:
|
||||
single_sla.add_iteration({"duration": d})
|
||||
|
||||
slas = [iteration_time.IterationTime(4.0) for _ in durations]
|
||||
|
||||
for idx, sla in enumerate(slas):
|
||||
for duration in durations[idx]:
|
||||
sla.add_iteration({"duration": duration})
|
||||
|
||||
merged_sla = slas[0]
|
||||
for sla in slas[1:]:
|
||||
merged_sla.merge(sla)
|
||||
|
||||
self.assertEqual(single_sla.success, merged_sla.success)
|
||||
self.assertEqual(single_sla.max_iteration_time,
|
||||
merged_sla.max_iteration_time)
|
||||
|
@ -14,12 +14,14 @@
|
||||
# under the License.
|
||||
|
||||
|
||||
import ddt
|
||||
import jsonschema
|
||||
|
||||
from rally.plugins.common.sla import max_average_duration
|
||||
from tests.unit import test
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class MaxAverageDurationTestCase(test.TestCase):
|
||||
def test_config_schema(self):
|
||||
properties = {
|
||||
@ -51,3 +53,28 @@ class MaxAverageDurationTestCase(test.TestCase):
|
||||
self.assertTrue(sla.add_iteration({"duration": 5.0})) # avg = 3.667
|
||||
self.assertFalse(sla.add_iteration({"duration": 7.0})) # avg = 4.5
|
||||
self.assertTrue(sla.add_iteration({"duration": 1.0})) # avg = 3.8
|
||||
|
||||
@ddt.data([[1.0, 2.0, 1.5, 4.3],
|
||||
[2.1, 3.4, 1.2, 6.3, 7.2, 7.0, 1.],
|
||||
[1.1, 1.1, 2.2, 2.2, 3.3, 4.3]])
|
||||
def test_merge(self, durations):
|
||||
|
||||
single_sla = max_average_duration.MaxAverageDuration(4.0)
|
||||
|
||||
for dd in durations:
|
||||
for d in dd:
|
||||
single_sla.add_iteration({"duration": d})
|
||||
|
||||
slas = [max_average_duration.MaxAverageDuration(4.0)
|
||||
for _ in durations]
|
||||
|
||||
for idx, sla in enumerate(slas):
|
||||
for duration in durations[idx]:
|
||||
sla.add_iteration({"duration": duration})
|
||||
|
||||
merged_sla = slas[0]
|
||||
for sla in slas[1:]:
|
||||
merged_sla.merge(sla)
|
||||
|
||||
self.assertEqual(single_sla.success, merged_sla.success)
|
||||
self.assertEqual(single_sla.avg, merged_sla.avg)
|
||||
|
@ -14,12 +14,14 @@
|
||||
# under the License.
|
||||
|
||||
|
||||
import ddt
|
||||
import jsonschema
|
||||
|
||||
from rally.plugins.common.sla import outliers
|
||||
from tests.unit import test
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class OutliersTestCase(test.TestCase):
|
||||
|
||||
def test_config_schema(self):
|
||||
@ -89,3 +91,39 @@ class OutliersTestCase(test.TestCase):
|
||||
# NOTE(msdubov): 12th iteration makes the SLA always failed
|
||||
self.assertFalse(sla.add_iteration({"duration": 11.2}))
|
||||
self.assertFalse(sla.add_iteration({"duration": 3.4}))
|
||||
|
||||
@ddt.data([[3.1, 4.2, 3.6, 4.5, 2.8, 3.3, 4.1, 3.8, 4.3, 2.9, 10.2],
|
||||
[3.1, 4.2, 3.6, 4.5, 2.8, 3.3, 20.1, 3.8, 4.3, 2.9, 24.2],
|
||||
[3.1, 4.2, 3.6, 4.5, 2.8, 3.3, 4.1, 30.8, 4.3, 49.9, 69.2]])
|
||||
def test_merge(self, durations):
|
||||
|
||||
single_sla = outliers.Outliers({"max": 1})
|
||||
|
||||
for dd in durations:
|
||||
for d in dd:
|
||||
single_sla.add_iteration({"duration": d})
|
||||
|
||||
slas = [outliers.Outliers({"max": 1})
|
||||
for _ in durations]
|
||||
|
||||
for idx, sla in enumerate(slas):
|
||||
for duration in durations[idx]:
|
||||
sla.add_iteration({"duration": duration})
|
||||
|
||||
merged_sla = slas[0]
|
||||
for sla in slas[1:]:
|
||||
merged_sla.merge(sla)
|
||||
|
||||
self.assertEqual(single_sla.success, merged_sla.success)
|
||||
self.assertEqual(single_sla.iterations, merged_sla.iterations)
|
||||
|
||||
# self.assertEqual(single_sla.threshold, merged_sla.threshold)
|
||||
|
||||
# NOTE(ikhudoshyn): We are unable to implement
|
||||
# rally.plugins.common.sla.outliers.Outliers.merge(..) correctly
|
||||
# (see my comment for the method)
|
||||
# The assert above will fail with the majority of data
|
||||
# The line below passes with this particular data
|
||||
# but may fail as well on another data
|
||||
|
||||
self.assertEqual(single_sla.outliers, merged_sla.outliers)
|
||||
|
@ -13,6 +13,8 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import ddt
|
||||
import mock
|
||||
|
||||
from rally.common.plugin import plugin
|
||||
from rally.task import sla
|
||||
@ -27,10 +29,14 @@ class TestCriterion(sla.SLA):
|
||||
self.success = self.criterion_value == iteration
|
||||
return self.success
|
||||
|
||||
def merge(self, other):
|
||||
raise NotImplementedError()
|
||||
|
||||
def details(self):
|
||||
return "detail"
|
||||
|
||||
|
||||
@ddt.ddt
|
||||
class SLACheckerTestCase(test.TestCase):
|
||||
|
||||
def test_add_iteration_and_results(self):
|
||||
@ -86,3 +92,78 @@ class SLACheckerTestCase(test.TestCase):
|
||||
"success": success,
|
||||
"detail": detail},
|
||||
sla._format_result(name, success, detail))
|
||||
|
||||
def test__validate_config_positive(self):
|
||||
sla_checker = sla.SLAChecker({"sla": {}})
|
||||
another_sla_checker = sla.SLAChecker({"sla": {}})
|
||||
sla_checker._validate_config(another_sla_checker)
|
||||
|
||||
def test__validate_config_negative(self):
|
||||
sla_checker = sla.SLAChecker({"sla": {}})
|
||||
another_sla_checker = sla.SLAChecker({"sla": {"test_criterion": 42}})
|
||||
self.assertRaises(TypeError, sla_checker._validate_config,
|
||||
another_sla_checker)
|
||||
|
||||
def test__validate_sla_types(self):
|
||||
sla_checker = sla.SLAChecker({"sla": {}})
|
||||
mock_sla1 = mock.MagicMock()
|
||||
mock_sla2 = mock.MagicMock()
|
||||
sla_checker.sla_criteria = [mock_sla1, mock_sla2]
|
||||
|
||||
another_sla_checker = sla.SLAChecker({"sla": {}})
|
||||
mock_sla3 = mock.MagicMock()
|
||||
mock_sla4 = mock.MagicMock()
|
||||
another_sla_checker.sla_criteria = [mock_sla3, mock_sla4]
|
||||
|
||||
sla_checker._validate_sla_types(another_sla_checker)
|
||||
|
||||
mock_sla1.assert_has_calls([
|
||||
mock.call.validate_type(mock_sla3)
|
||||
])
|
||||
|
||||
mock_sla1.validate_type.assert_called_once_with(mock_sla3)
|
||||
mock_sla2.validate_type.assert_called_once_with(mock_sla4)
|
||||
|
||||
@ddt.data({"merge_result1": True, "merge_result2": True,
|
||||
"result": True},
|
||||
{"merge_result1": True, "merge_result2": False,
|
||||
"result": False},
|
||||
{"merge_result1": False, "merge_result2": False,
|
||||
"result": False})
|
||||
@ddt.unpack
|
||||
def test_merge(self, merge_result1, merge_result2, result):
|
||||
sla_checker = sla.SLAChecker({"sla": {}})
|
||||
mock_sla1 = mock.MagicMock()
|
||||
mock_sla2 = mock.MagicMock()
|
||||
sla_checker.sla_criteria = [mock_sla1, mock_sla2]
|
||||
|
||||
mock_sla1.merge.return_value = merge_result1
|
||||
mock_sla2.merge.return_value = merge_result2
|
||||
|
||||
another_sla_checker = sla.SLAChecker({"sla": {}})
|
||||
mock_sla3 = mock.MagicMock()
|
||||
mock_sla4 = mock.MagicMock()
|
||||
another_sla_checker.sla_criteria = [mock_sla3, mock_sla4]
|
||||
|
||||
sla_checker._validate_config = mock.MagicMock()
|
||||
sla_checker._validate_sla_types = mock.MagicMock()
|
||||
|
||||
self.assertEqual(result, sla_checker.merge(another_sla_checker))
|
||||
mock_sla1.merge.assert_called_once_with(mock_sla3)
|
||||
mock_sla2.merge.assert_called_once_with(mock_sla4)
|
||||
|
||||
|
||||
class SLATestCase(test.TestCase):
|
||||
def test_validate_type_positive(self):
|
||||
sla1 = TestCriterion(0)
|
||||
sla2 = TestCriterion(0)
|
||||
sla1.validate_type(sla2)
|
||||
|
||||
def test_validate_type_negative(self):
|
||||
sla1 = TestCriterion(0)
|
||||
|
||||
class AnotherTestCriterion(TestCriterion):
|
||||
pass
|
||||
|
||||
sla2 = AnotherTestCriterion(0)
|
||||
self.assertRaises(TypeError, sla1.validate_type, sla2)
|
||||
|
Loading…
x
Reference in New Issue
Block a user