From 9be6554a9413512d7091ebdc0cb3d006f776ab35 Mon Sep 17 00:00:00 2001 From: Ilya Shakhat Date: Fri, 13 Mar 2015 19:39:17 +0300 Subject: [PATCH] Added netperf-wrapper test suite and aggregate report Change-Id: Iaaa504de674ada557ff3fbe1331f26e1d5de8650 --- scenarios/networking/l2.yaml | 12 ++- shaker/engine/aggregators/__init__.py | 1 + shaker/engine/aggregators/base.py | 3 + shaker/engine/aggregators/traffic.py | 95 ++++++++++++----- shaker/engine/executors/iperf.py | 2 +- shaker/engine/executors/netperf.py | 31 +++++- shaker/engine/report.py | 2 + shaker/resources/report_template.jinja2 | 112 ++++++++++++++++---- tests/test_iperf_graph_executor.py | 2 +- tests/test_netperf_wrapper_executor.py | 73 +++++++++++++ tests/test_traffic_aggregator.py | 131 ++++++++++++++++++++++++ 11 files changed, 407 insertions(+), 57 deletions(-) create mode 100644 tests/test_netperf_wrapper_executor.py create mode 100644 tests/test_traffic_aggregator.py diff --git a/scenarios/networking/l2.yaml b/scenarios/networking/l2.yaml index bc1f7aa..d740457 100644 --- a/scenarios/networking/l2.yaml +++ b/scenarios/networking/l2.yaml @@ -13,10 +13,18 @@ execution: class: iperf_graph time: 60 - - title: Iperf UDP 5 threads + title: TCP download + class: netperf_wrapper + method: tcp_download + - + title: TCP bi-directional + class: netperf_wrapper + method: tcp_bidirectional + - + title: Iperf UDP 8 threads class: iperf udp: 1 - threads: 5 + threads: 8 - title: Netperf TCP_STREAM class: netperf diff --git a/shaker/engine/aggregators/__init__.py b/shaker/engine/aggregators/__init__.py index bad0c03..11f6757 100644 --- a/shaker/engine/aggregators/__init__.py +++ b/shaker/engine/aggregators/__init__.py @@ -19,6 +19,7 @@ from shaker.engine.aggregators import traffic AGGREGATORS = { 'iperf_graph': traffic.TrafficAggregator, + 'netperf_wrapper': traffic.TrafficAggregator, '_default': base.BaseAggregator, } diff --git a/shaker/engine/aggregators/base.py b/shaker/engine/aggregators/base.py index 45afb4e..89aac73 100644 --- a/shaker/engine/aggregators/base.py +++ b/shaker/engine/aggregators/base.py @@ -18,6 +18,9 @@ class BaseAggregator(object): def __init__(self, test_definition): self.test_definition = test_definition + def test_summary(self, test_data): + pass + def iteration_summary(self, iteration_data): pass diff --git a/shaker/engine/aggregators/traffic.py b/shaker/engine/aggregators/traffic.py index ef9d1b2..856030f 100644 --- a/shaker/engine/aggregators/traffic.py +++ b/shaker/engine/aggregators/traffic.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import collections import uuid from oslo_log import log as logging @@ -26,39 +27,75 @@ LOG = logging.getLogger(__name__) def mean(array): if not array: return 0 + array = [x for x in array if x] return sum(array) / len(array) +def safe_max(array): + return max(x for x in array if x) + + +def safe_min(array): + return min(x for x in array if x) + + class TrafficAggregator(base.BaseAggregator): def __init__(self, test_definition): super(TrafficAggregator, self).__init__(test_definition) + def test_summary(self, test_data): + chart = [] + xs = [] + mean_v = collections.defaultdict(list) + + for iteration in test_data['results_per_iteration']: + xs.append(len(iteration['results_per_agent'])) + for k, v in iteration['stats'].items(): + mean_v[k].append(v['mean']) + + for k in mean_v.keys(): + chart.append(['Mean %s' % k] + mean_v[k]) + + chart.append(['x'] + xs) + test_data.update({ + 'chart': chart, + }) + def iteration_summary(self, iteration_data): - max_v = [] - min_v = [] - mean_v = [] + max_v = collections.defaultdict(list) + min_v = collections.defaultdict(list) + mean_v = collections.defaultdict(list) + unit_v = dict() + chart = [] + nodes = [] for one in iteration_data['results_per_agent']: nodes.append(one['agent']['node']) - max_v.append(one['stats']['max']) - min_v.append(one['stats']['min']) - mean_v.append(one['stats']['mean']) + chart += one['chart'] + + for k, v in one['stats'].items(): + max_v[k].append(v['max']) + min_v[k].append(v['min']) + mean_v[k].append(v['mean']) + unit_v[k] = v['unit'] + + stats = {} + node_chart = [['x'] + nodes] + + for k in max_v.keys(): + stats[k] = dict(max=max(max_v[k]), + min=min(min_v[k]), + mean=mean(mean_v[k]), + unit=unit_v[k]) + node_chart.append(['Mean %s' % k] + mean_v[k]) + node_chart.append(['Max %s' % k] + max_v[k]) + node_chart.append(['Min %s' % k] + min_v[k]) iteration_data.update({ - 'stats': { - 'max': max(max_v), - 'min': min(min_v), - 'mean': mean(mean_v), - }, - 'agent_chart': { - 'uuid': uuid.uuid4(), - 'data': [ - ['x'] + nodes, - ['min'] + min_v, - ['mean'] + mean_v, - ['max'] + max_v, - ] - } + 'uuid': uuid.uuid4(), + 'stats': stats, + 'x-chart': chart, + 'node_chart': node_chart, }) def agent_summary(self, agent_data): @@ -66,7 +103,8 @@ class TrafficAggregator(base.BaseAggregator): for idx, item_meta in enumerate(agent_data['meta']): if item_meta[1] == 'bps': for row in agent_data['samples']: - row[idx] = float(row[idx]) / 1024 / 1024 + if row[idx]: + row[idx] = float(row[idx]) / 1024 / 1024 item_meta[1] = 'Mbps' # calculate stats @@ -76,12 +114,15 @@ class TrafficAggregator(base.BaseAggregator): for idx, item_meta in enumerate(agent_data['meta']): column = [row[idx] for row in agent_data['samples']] - if item_meta[1] == 'Mbps': - agent_data['stats']['max'] = max(column) - agent_data['stats']['min'] = min(column) - agent_data['stats']['mean'] = mean(column) - - agent_data['chart'].append([item_meta[0]] + column) + item_title = item_meta[0] + if item_title != 'time': + agent_data['stats'][item_title] = { + 'max': safe_max(column), + 'min': safe_min(column), + 'mean': mean(column), + 'unit': item_meta[1], + } + agent_data['chart'].append([item_title] + column) # drop stdout del agent_data['stdout'] diff --git a/shaker/engine/executors/iperf.py b/shaker/engine/executors/iperf.py index a90657b..b769807 100644 --- a/shaker/engine/executors/iperf.py +++ b/shaker/engine/executors/iperf.py @@ -63,5 +63,5 @@ class IperfGraphExecutor(IperfExecutor): samples.pop() # the last line is summary, remove it result['samples'] = samples - 
result['meta'] = [['time', 'sec'], ['bandwidth', 'bps']] + result['meta'] = [['time', 's'], ['bandwidth', 'bps']] return result diff --git a/shaker/engine/executors/netperf.py b/shaker/engine/executors/netperf.py index 3827100..c92334f 100644 --- a/shaker/engine/executors/netperf.py +++ b/shaker/engine/executors/netperf.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import csv + from shaker.engine.executors import base @@ -27,7 +29,28 @@ class NetperfExecutor(base.BaseExecutor): class NetperfWrapperExecutor(base.BaseExecutor): def get_command(self): - target_ip = self.agent['slave']['ip'] - return ('netperf-wrapper -H %(ip)s -f stats %(method)s' % - dict(ip=target_ip, - method=self.test_definition['method'])) + cmd = base.CommandLine('netperf-wrapper') + cmd.add('-H', self.agent['slave']['ip']) + cmd.add('-l', self.test_definition.get('time') or 60) + cmd.add('-s', self.test_definition.get('interval') or 1) + cmd.add('-f', 'csv') + cmd.add(self.test_definition.get('method') or 'tcp_download') + return cmd.make() + + def process_reply(self, message): + result = super(NetperfWrapperExecutor, self).process_reply(message) + + data_stream = csv.reader(result['stdout'].split('\n')) + + header = next(data_stream) + meta = [['time', 's']] + for el in header[1:]: + if el.find('Ping') >= 0: + meta.append([el, 'ms']) + else: + meta.append([el, 'Mbps']) + result['meta'] = meta + + result['samples'] = [[(float(x) if x else None) for x in row] + for row in data_stream if row] + return result diff --git a/shaker/engine/report.py b/shaker/engine/report.py index b0e2aa8..b6f7631 100644 --- a/shaker/engine/report.py +++ b/shaker/engine/report.py @@ -40,6 +40,8 @@ def calculate_stats(data): aggregator.iteration_summary(iteration_result) + aggregator.test_summary(test_result) + def generate_report(report_template, report_filename, data): LOG.debug('Generating report, template: %s, output: %s', diff --git a/shaker/resources/report_template.jinja2 b/shaker/resources/report_template.jinja2 index 9033b97..f4b2fec 100644 --- a/shaker/resources/report_template.jinja2 +++ b/shaker/resources/report_template.jinja2 @@ -360,13 +360,14 @@ {% endif %} + {% set iterations = test.results_per_iteration|length %} {% for result_per_iteration in test.results_per_iteration %} {% set cnt = result_per_iteration.agents|length %}
  • - {% if cnt == 1 %} - full iteration + {% if iterations == 1 %} + Details {% else %} - {{ cnt }} threads + {{ cnt }} Threads {% endif %}
  • {% endfor %} @@ -413,6 +414,31 @@

    Test Case Specification

    {{ test.definition|yaml }}
    + + {# show summary only if number of iterations > 1 #} + {% set iteration_cnt = test.results_per_iteration|length %} + {% if iteration_cnt > 1 %} + + {% if test.chart %} +
    + + {% endif %} + {% endif %}
    {% for result_per_iteration in test.results_per_iteration %} @@ -421,26 +447,57 @@

    Iteration Summary

    + {# show summary only if number of agents > 1 #} + {% set agent_cnt = result_per_iteration.results_per_agent|length %} + {% if agent_cnt > 1 %} + {% if result_per_iteration.stats %} -
    Traffic stats
    -
    -
    Max bandwidth
    {{ result_per_iteration.stats.max|round(2) }} Mbits/s
    -
    Min bandwidth
    {{ result_per_iteration.stats.min|round(2) }} Mbits/s
    -
    Mean bandwidth
    {{ result_per_iteration.stats.mean|round(2) }} Mbits/s
    -
    +
    + {% for stat_title, stat_values in result_per_iteration.stats.items() %} +
    +
    Stats for {{ stat_title }}
    + +
    +
    Max
    {{ stat_values.max|round(2) }} {{ stat_values.unit }}
    +
    Min
    {{ stat_values.min|round(2) }} {{ stat_values.unit }}
    +
    Mean
    {{ stat_values.mean|round(2) }} {{ stat_values.unit }}
    +
    +
    + {% endfor %} +
    {% endif %} - {% if result_per_iteration.agent_chart %} -
    Agent Chart
    -
    + {% if result_per_iteration.chart %} +
    + + {% endif %} + + {% if result_per_iteration.node_chart %} +
    Per-node stats
    +
    {% endif %} + {% endif %} {#### PER-AGENT DATA ####} {% for result_per_agent in result_per_iteration.results_per_agent %}

    Agent {{ result_per_agent.agent.id }} - ({{ result_per_agent.agent.ip }})

    + ({{ result_per_agent.agent.ip }}, {{ result_per_agent.agent.node }}) {% if result_per_agent.samples %} -
    Traffic stats
    + {% if result_per_agent.stats %} +
    + {% for stat_title, stat_values in result_per_agent.stats.items() %} +
    +
    Stats for {{ stat_title }}
    -
    -
    Max bandwidth
    {{ result_per_agent.stats.max|round(2) }} Mbits/s
    -
    Min bandwidth
    {{ result_per_agent.stats.min|round(2) }} Mbits/s
    -
    Mean bandwidth
    {{ result_per_agent.stats.mean|round(2) }} Mbits/s
    -
    +
    +
    Max
    {{ stat_values.max|round(2) }} {{ stat_values.unit }}
    +
    Min
    {{ stat_values.min|round(2) }} {{ stat_values.unit }}
    +
    Mean
    {{ stat_values.mean|round(2) }} {{ stat_values.unit }}
    +
    +
    + {% endfor %} +
    + {% endif %} + {% if result_per_agent.chart %}
    + {% endif %} {% endif %} {% if result_per_agent.command %} diff --git a/tests/test_iperf_graph_executor.py b/tests/test_iperf_graph_executor.py index 8c269f5..239e328 100644 --- a/tests/test_iperf_graph_executor.py +++ b/tests/test_iperf_graph_executor.py @@ -58,7 +58,7 @@ class TestIperfGraphExecutor(testtools.TestCase): [3.0, 405798912], ], 'meta': [ - ['time', 'sec'], ['bandwidth', 'bps'] + ['time', 's'], ['bandwidth', 'bps'] ] } reply = executor.process_reply(message) diff --git a/tests/test_netperf_wrapper_executor.py b/tests/test_netperf_wrapper_executor.py new file mode 100644 index 0000000..ae6f7ae --- /dev/null +++ b/tests/test_netperf_wrapper_executor.py @@ -0,0 +1,73 @@ +# Copyright (c) 2015 Mirantis Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import testtools + +from shaker.engine.executors import netperf + + +IP = '10.0.0.10' +AGENT = {'slave': {'ip': IP}} + + +class TestNetperfWrapperExecutor(testtools.TestCase): + + def test_get_command(self): + executor = netperf.NetperfWrapperExecutor({}, AGENT) + + expected = 'netperf-wrapper -H %s -l 60 -s 1 -f csv tcp_download' % IP + self.assertEqual(expected, executor.get_command()) + + def test_get_command_with_params(self): + executor = netperf.NetperfWrapperExecutor( + dict(method='ping', time=10, interval=0.5), AGENT) + + expected = 'netperf-wrapper -H %s -l 10 -s 0.5 -f csv ping' % IP + self.assertEqual(expected, executor.get_command()) + + def test_process_reply(self): + executor = netperf.NetperfWrapperExecutor({}, AGENT) + message = { + 'stdout': """tcp_download,Ping ICMP,TCP download +0.0,0.09, +2.0,0.0800211283506, +4.0,0.0602545096056, +6.0,0.0502416561724,28555.9 +8.0,0.05,25341.9871721 +10.0,0.0500947171761,30486.4518264 +12.0,0.0603484557656, +14.0,0.0603987445198, +""" + } + expected = { + 'samples': [ + [0.0, 0.09, None], + [2.0, 0.0800211283506, None], + [4.0, 0.0602545096056, None], + [6.0, 0.0502416561724, 28555.9], + [8.0, 0.05, 25341.9871721], + [10.0, 0.0500947171761, 30486.4518264], + [12.0, 0.0603484557656, None], + [14.0, 0.0603987445198, None], + ], + 'meta': [ + ['time', 's'], ['Ping ICMP', 'ms'], ['TCP download', 'Mbps'], + ] + } + reply = executor.process_reply(message) + self.assertEqual(expected['samples'], reply['samples'], + message='Samples data') + self.assertEqual(expected['meta'], reply['meta'], + message='Metadata') diff --git a/tests/test_traffic_aggregator.py b/tests/test_traffic_aggregator.py new file mode 100644 index 0000000..1e3bc3e --- /dev/null +++ b/tests/test_traffic_aggregator.py @@ -0,0 +1,131 @@ +# Copyright (c) 2015 Mirantis Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy + +import testtools + +from shaker.engine.aggregators import traffic + + +class TestTrafficAggregator(testtools.TestCase): + def test_agent_summary(self): + aggregator = traffic.TrafficAggregator(None) + + original = { + "stderr": "", "stdout": '', + "meta": [["time", "s"], ["Ping ICMP", "ms"], + ["TCP download", "bps"]], + "samples": [[0, 1.9, None], + [1, 2.4, None], + [2, 2.6, 60 * 1024 * 1024], + [3, 2.2, 65 * 1024 * 1024], + [4, 2.2, 61 * 1024 * 1024], + [5, 1.9, None]], + } + processed = copy.deepcopy(original) + aggregator.agent_summary(processed) + + self.assertFalse('stdout' in processed) + + expected_stats = { + 'Ping ICMP': { + 'max': 2.6, + 'min': 1.9, + 'mean': 2.2, + 'unit': 'ms', + }, + 'TCP download': { + 'max': 65.0, + 'min': 60.0, + 'mean': 62.0, + 'unit': 'Mbps', + } + } + self.assertEqual(expected_stats, processed['stats']) + + expected_chart = [['time', 0, 1, 2, 3, 4, 5], + ['Ping ICMP', 1.9, 2.4, 2.6, 2.2, 2.2, 1.9], + ['TCP download', None, None, 60.0, 65.0, 61.0, None]] + self.assertEqual(expected_chart, processed['chart']) + + def test_iteration_summary(self): + aggregator = traffic.TrafficAggregator(None) + + original = { + 'results_per_agent': [ + { + 'agent': {'node': 'alpha'}, + 'stats': { + 'Ping ICMP': { + 'max': 2.6, + 'min': 1.9, + 'mean': 2.2, + 'unit': 'ms', + }, + 'TCP download': { + 'max': 65.0, + 'min': 60.0, + 'mean': 62.0, + 'unit': 'Mbps', + } + }, + 'chart': [['time', 0, 1, 2, 3, 4, 5], + ['Ping ICMP', 1.9, 2.4, 2.6, 2.2, 2.2, 1.9], + ['TCP download', None, None, 60.0, 65.0, 61.0, + None]] + }, + { + 'agent': {'node': 'beta'}, + 'stats': { + 'Ping ICMP': { + 'max': 3.6, + 'min': 2.9, + 'mean': 3.2, + 'unit': 'ms', + }, + 'TCP download': { + 'max': 75.0, + 'min': 70.0, + 'mean': 72.0, + 'unit': 'Mbps', + } + }, + 'chart': [['time', 0, 1, 2, 3, 4, 5], + ['Ping ICMP', 2.9, 3.4, 3.6, 3.2, 3.2, 2.9], + ['TCP download', None, None, 70.0, 75.0, 71.0, + None]] + }, + ] + } + processed = copy.deepcopy(original) + aggregator.iteration_summary(processed) + + expected_stats = { + 'Ping ICMP': { + 'max': 3.6, + 'min': 1.9, + 'mean': 2.7, + 'unit': 'ms', + }, + 'TCP download': { + 'max': 75.0, + 'min': 60.0, + 'mean': 67.0, + 'unit': 'Mbps', + } + } + + self.assertEqual(expected_stats, processed['stats'])
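
For reference, the CSV handling that the new NetperfWrapperExecutor.process_reply performs can be exercised on its own. The following is a minimal standalone sketch (not the executor itself), assuming only the standard library; the shortened stdout sample and the resulting meta/samples shapes follow tests/test_netperf_wrapper_executor.py.

import csv

# netperf-wrapper is invoked with "-f csv": the header row names the data
# series, series whose name contains "Ping" are reported in ms, the other
# series in Mbps, and an empty cell (a flow with no sample yet) becomes None.
stdout = """tcp_download,Ping ICMP,TCP download
0.0,0.09,
2.0,0.08,
4.0,0.06,28555.9
6.0,0.05,25341.9
"""

rows = csv.reader(stdout.splitlines())
header = next(rows)

meta = [['time', 's']]
for name in header[1:]:
    meta.append([name, 'ms' if 'Ping' in name else 'Mbps'])

samples = [[float(cell) if cell else None for cell in row]
           for row in rows if row]

print(meta)     # [['time', 's'], ['Ping ICMP', 'ms'], ['TCP download', 'Mbps']]
print(samples)  # [[0.0, 0.09, None], [2.0, 0.08, None], [4.0, 0.06, 28555.9], ...]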
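
The per-column statistics added in shaker/engine/aggregators/traffic.py reduce to max/min/mean over columns that may contain None gaps. The sketch below reproduces that arithmetic on the per-agent data used in tests/test_traffic_aggregator.py; the helpers mirror safe_max/safe_min/mean from the patch but are redefined here so the snippet runs standalone, and the empty-column guard in mean() is an extra check added for the sketch, not something the patch itself does.

# Column stats with None gaps skipped, mirroring the patched traffic aggregator.
def mean(array):
    values = [x for x in array if x]   # drop None/zero gaps, as the patch does
    if not values:                     # extra guard, absent from the patch
        return 0
    return sum(values) / float(len(values))

def safe_max(array):
    return max(x for x in array if x)

def safe_min(array):
    return min(x for x in array if x)

# Samples in the shape the executor produces: [time, ping (ms), download (Mbps)].
samples = [[0, 1.9, None], [1, 2.4, None], [2, 2.6, 60.0],
           [3, 2.2, 65.0], [4, 2.2, 61.0], [5, 1.9, None]]

download = [row[2] for row in samples]
print(safe_min(download), safe_max(download), mean(download))
# 60.0 65.0 62.0 -- matching the 'TCP download' agent stats asserted in the new test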