Added netperf-wrapper test suite and aggregate report

Change-Id: Iaaa504de674ada557ff3fbe1331f26e1d5de8650
Ilya Shakhat 2015-03-13 19:39:17 +03:00
parent db2076bed2
commit 9be6554a94
11 changed files with 407 additions and 57 deletions


@@ -13,10 +13,18 @@ execution:
class: iperf_graph
time: 60
-
title: Iperf UDP 5 threads
title: TCP download
class: netperf_wrapper
method: tcp_download
-
title: TCP bi-directional
class: netperf_wrapper
method: tcp_bidirectional
-
title: Iperf UDP 8 threads
class: iperf
udp: 1
threads: 5
threads: 8
-
title: Netperf TCP_STREAM
class: netperf
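
The two netperf_wrapper cases above only set a method; the executor introduced below also honours optional time and interval keys. A minimal sketch, assuming the dict form in which a test definition reaches the executor (the explicit time/interval values here are illustrative; the scenario above relies on the defaults):

# Illustrative only: a netperf_wrapper test definition as the executor
# receives it. The time/interval values are assumptions; when omitted the
# executor defaults to a 60 s run and a 1 s sampling interval.
test_definition = {
    'title': 'TCP download',
    'class': 'netperf_wrapper',
    'method': 'tcp_download',
    'time': 60,      # passed as -l to netperf-wrapper
    'interval': 1,   # passed as -s to netperf-wrapper
}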


@@ -19,6 +19,7 @@ from shaker.engine.aggregators import traffic
AGGREGATORS = {
'iperf_graph': traffic.TrafficAggregator,
'netperf_wrapper': traffic.TrafficAggregator,
'_default': base.BaseAggregator,
}
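
A minimal self-contained sketch of how such a registry is typically consumed (the function name get_aggregator is illustrative; the real lookup lives in the report code, not in this hunk):

# Illustrative sketch, not the project's actual lookup code.
class BaseAggregator(object):
    def __init__(self, test_definition):
        self.test_definition = test_definition

class TrafficAggregator(BaseAggregator):
    pass

AGGREGATORS = {
    'iperf_graph': TrafficAggregator,
    'netperf_wrapper': TrafficAggregator,
    '_default': BaseAggregator,
}

def get_aggregator(test_definition):
    # Unknown test classes fall back to the no-op BaseAggregator.
    klass = AGGREGATORS.get(test_definition.get('class'),
                            AGGREGATORS['_default'])
    return klass(test_definition)

print(type(get_aggregator({'class': 'netperf_wrapper'})).__name__)  # TrafficAggregator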


@@ -18,6 +18,9 @@ class BaseAggregator(object):
def __init__(self, test_definition):
self.test_definition = test_definition
def test_summary(self, test_data):
pass
def iteration_summary(self, iteration_data):
pass


@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import uuid
from oslo_log import log as logging
@@ -26,39 +27,75 @@ LOG = logging.getLogger(__name__)
def mean(array):
if not array:
return 0
array = [x for x in array if x]
return sum(array) / len(array)
def safe_max(array):
return max(x for x in array if x)
def safe_min(array):
return min(x for x in array if x)
class TrafficAggregator(base.BaseAggregator):
def __init__(self, test_definition):
super(TrafficAggregator, self).__init__(test_definition)
def test_summary(self, test_data):
chart = []
xs = []
mean_v = collections.defaultdict(list)
for iteration in test_data['results_per_iteration']:
xs.append(len(iteration['results_per_agent']))
for k, v in iteration['stats'].items():
mean_v[k].append(v['mean'])
for k in mean_v.keys():
chart.append(['Mean %s' % k] + mean_v[k])
chart.append(['x'] + xs)
test_data.update({
'chart': chart,
})
def iteration_summary(self, iteration_data):
max_v = []
min_v = []
mean_v = []
max_v = collections.defaultdict(list)
min_v = collections.defaultdict(list)
mean_v = collections.defaultdict(list)
unit_v = dict()
chart = []
nodes = []
for one in iteration_data['results_per_agent']:
nodes.append(one['agent']['node'])
max_v.append(one['stats']['max'])
min_v.append(one['stats']['min'])
mean_v.append(one['stats']['mean'])
chart += one['chart']
for k, v in one['stats'].items():
max_v[k].append(v['max'])
min_v[k].append(v['min'])
mean_v[k].append(v['mean'])
unit_v[k] = v['unit']
stats = {}
node_chart = [['x'] + nodes]
for k in max_v.keys():
stats[k] = dict(max=max(max_v[k]),
min=min(min_v[k]),
mean=mean(mean_v[k]),
unit=unit_v[k])
node_chart.append(['Mean %s' % k] + mean_v[k])
node_chart.append(['Max %s' % k] + max_v[k])
node_chart.append(['Min %s' % k] + min_v[k])
iteration_data.update({
'stats': {
'max': max(max_v),
'min': min(min_v),
'mean': mean(mean_v),
},
'agent_chart': {
'uuid': uuid.uuid4(),
'data': [
['x'] + nodes,
['min'] + min_v,
['mean'] + mean_v,
['max'] + max_v,
]
}
'uuid': uuid.uuid4(),
'stats': stats,
'x-chart': chart,
'node_chart': node_chart,
})
def agent_summary(self, agent_data):
@@ -66,7 +103,8 @@ class TrafficAggregator(base.BaseAggregator):
for idx, item_meta in enumerate(agent_data['meta']):
if item_meta[1] == 'bps':
for row in agent_data['samples']:
row[idx] = float(row[idx]) / 1024 / 1024
if row[idx]:
row[idx] = float(row[idx]) / 1024 / 1024
item_meta[1] = 'Mbps'
# calculate stats
@@ -76,12 +114,15 @@ class TrafficAggregator(base.BaseAggregator):
for idx, item_meta in enumerate(agent_data['meta']):
column = [row[idx] for row in agent_data['samples']]
if item_meta[1] == 'Mbps':
agent_data['stats']['max'] = max(column)
agent_data['stats']['min'] = min(column)
agent_data['stats']['mean'] = mean(column)
agent_data['chart'].append([item_meta[0]] + column)
item_title = item_meta[0]
if item_title != 'time':
agent_data['stats'][item_title] = {
'max': safe_max(column),
'min': safe_min(column),
'mean': mean(column),
'unit': item_meta[1],
}
agent_data['chart'].append([item_title] + column)
# drop stdout
del agent_data['stdout']
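
The mean/safe_max/safe_min helpers above skip empty cells, since netperf-wrapper reports nothing for a flow that is idle during a sample. A standalone sketch of that behaviour, with an extra guard (an assumption, not in the committed helpers) for the case where every sample is missing:

# Standalone sketch of the None-skipping stats used by TrafficAggregator.
# The "if not values" guards are assumptions for the all-empty edge case;
# the committed helpers expect at least one real sample per column.
def mean(array):
    values = [x for x in array if x]
    if not values:
        return 0
    return sum(values) / len(values)

def safe_max(array):
    values = [x for x in array if x]
    return max(values) if values else None

def safe_min(array):
    values = [x for x in array if x]
    return min(values) if values else None

column = [None, 60.0, 65.0, 61.0, None]   # Mbps samples with idle gaps
print(mean(column), safe_min(column), safe_max(column))   # 62.0 60.0 65.0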


@@ -63,5 +63,5 @@ class IperfGraphExecutor(IperfExecutor):
samples.pop() # the last line is summary, remove it
result['samples'] = samples
result['meta'] = [['time', 'sec'], ['bandwidth', 'bps']]
result['meta'] = [['time', 's'], ['bandwidth', 'bps']]
return result


@@ -13,6 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
from shaker.engine.executors import base
@@ -27,7 +29,28 @@ class NetperfExecutor(base.BaseExecutor):
class NetperfWrapperExecutor(base.BaseExecutor):
def get_command(self):
target_ip = self.agent['slave']['ip']
return ('netperf-wrapper -H %(ip)s -f stats %(method)s' %
dict(ip=target_ip,
method=self.test_definition['method']))
cmd = base.CommandLine('netperf-wrapper')
cmd.add('-H', self.agent['slave']['ip'])
cmd.add('-l', self.test_definition.get('time') or 60)
cmd.add('-s', self.test_definition.get('interval') or 1)
cmd.add('-f', 'csv')
cmd.add(self.test_definition.get('method') or 'tcp_download')
return cmd.make()
def process_reply(self, message):
result = super(NetperfWrapperExecutor, self).process_reply(message)
data_stream = csv.reader(result['stdout'].split('\n'))
header = next(data_stream)
meta = [['time', 's']]
for el in header[1:]:
if el.find('Ping') >= 0:
meta.append([el, 'ms'])
else:
meta.append([el, 'Mbps'])
result['meta'] = meta
result['samples'] = [[(float(x) if x else None) for x in row]
for row in data_stream if row]
return result
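
With the defaults this builds a command like netperf-wrapper -H 10.0.0.10 -l 60 -s 1 -f csv tcp_download (the exact string asserted in the unit test further down). A standalone sketch of the CSV handling, equivalent in spirit to process_reply above and fed with rows taken from that same test:

# Standalone sketch mirroring the CSV parsing in process_reply; the helper
# name parse_netperf_wrapper_csv is illustrative.
import csv

def parse_netperf_wrapper_csv(stdout):
    data_stream = csv.reader(stdout.split('\n'))
    header = next(data_stream)
    # First column is elapsed time; ping columns are in ms, the rest in Mbps.
    meta = [['time', 's']]
    for el in header[1:]:
        meta.append([el, 'ms' if 'Ping' in el else 'Mbps'])
    # Empty cells (idle flows) become None, empty rows are dropped.
    samples = [[(float(x) if x else None) for x in row]
               for row in data_stream if row]
    return meta, samples

meta, samples = parse_netperf_wrapper_csv(
    'tcp_download,Ping ICMP,TCP download\n'
    '0.0,0.09,\n'
    '6.0,0.0502416561724,28555.9\n')
print(meta)     # [['time', 's'], ['Ping ICMP', 'ms'], ['TCP download', 'Mbps']]
print(samples)  # [[0.0, 0.09, None], [6.0, 0.0502416561724, 28555.9]]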


@@ -40,6 +40,8 @@ def calculate_stats(data):
aggregator.iteration_summary(iteration_result)
aggregator.test_summary(test_result)
def generate_report(report_template, report_filename, data):
LOG.debug('Generating report, template: %s, output: %s',


@@ -360,13 +360,14 @@
{% endif %}
</a></li>
{% set iterations = test.results_per_iteration|length %}
{% for result_per_iteration in test.results_per_iteration %}
{% set cnt = result_per_iteration.agents|length %}
<li class="col-md-offset-1"><a href="#test-{{ test.definition.uuid }}-{{ cnt }}" data-toggle="tab">
{% if cnt == 1 %}
full iteration
{% if iterations == 1 %}
Details
{% else %}
{{ cnt }} threads
{{ cnt }} Threads
{% endif %}
</a></li>
{% endfor %}
@@ -413,6 +414,31 @@
<div id="test-{{ test.definition.uuid }}" class="tab-pane">
<h3>Test Case Specification</h3>
<pre>{{ test.definition|yaml }}</pre>
{# show summary only if number of iterations > 1 #}
{% set iteration_cnt = test.results_per_iteration|length %}
{% if iteration_cnt > 1 %}
{% if test.chart %}
<div id="chart-{{ test.definition.uuid }}"></div>
<script type="application/javascript">
$(document).ready(function () {
c3.generate({
bindto: '#chart-{{ test.definition.uuid }}',
data: {
x: 'x',
columns: {{ test.chart|json }},
type: 'step'
},
axis: {
x: { label: '# threads' },
y: { label: 'Bandwidth, Mbits/s', min: 0 }
}
});
});
</script>
{% endif %}
{% endif %}
</div>
{% for result_per_iteration in test.results_per_iteration %}
@@ -421,26 +447,57 @@
<h3>Iteration Summary</h3>
{# show summary only if number of agents > 1 #}
{% set agent_cnt = result_per_iteration.results_per_agent|length %}
{% if agent_cnt > 1 %}
{% if result_per_iteration.stats %}
<h5>Traffic stats</h5>
<dl class="dl-horizontal">
<dt>Max bandwidth</dt><dd>{{ result_per_iteration.stats.max|round(2) }} Mbits/s</dd>
<dt>Min bandwidth</dt><dd>{{ result_per_iteration.stats.min|round(2) }} Mbits/s</dd>
<dt>Mean bandwidth</dt><dd>{{ result_per_iteration.stats.mean|round(2) }} Mbits/s</dd>
</dl>
<div class="row">
{% for stat_title, stat_values in result_per_iteration.stats.items() %}
<div class="col-md-4">
<h5>Stats for {{ stat_title }}</h5>
<dl class="dl-horizontal">
<dt>Max</dt><dd>{{ stat_values.max|round(2) }} {{ stat_values.unit }}</dd>
<dt>Min</dt><dd>{{ stat_values.min|round(2) }} {{ stat_values.unit }}</dd>
<dt>Mean</dt><dd>{{ stat_values.mean|round(2) }} {{ stat_values.unit }}</dd>
</dl>
</div>
{% endfor %}
</div>
{% endif %}
{% if result_per_iteration.agent_chart %}
<h5>Agent Chart</h5>
<div id="chart-{{ result_per_iteration.agent_chart.uuid }}"></div>
{% if result_per_iteration.chart %}
<div id="chart-{{ result_per_iteration.uuid }}"></div>
<script type="application/javascript">
$(document).ready(function () {
c3.generate({
bindto: '#chart-{{ result_per_iteration.uuid }}',
data: {
x: 'time',
columns: {{ result_per_iteration.chart|json }},
types: { bandwidth: 'area' }
},
axis: {
x: { label: 'time' },
y: { label: 'Bandwidth, Mbits/s', min: 0 }
}
});
});
</script>
{% endif %}
{% if result_per_iteration.node_chart %}
<h5>Per-node stats</h5>
<div id="chart-{{ result_per_iteration.uuid }}-node"></div>
<script type="application/javascript">
$(document).ready(function () {
c3.generate({
bindto: '#chart-{{ result_per_iteration.agent_chart.uuid }}',
bindto: '#chart-{{ result_per_iteration.uuid }}-node',
data: {
x: 'x',
columns: {{ result_per_iteration.agent_chart.data|json }},
type: 'area-step',
columns: {{ result_per_iteration.node_chart|json }},
type: 'step',
order: null
},
axis: {
@@ -450,21 +507,31 @@
});
</script>
{% endif %}
{% endif %}
{#### PER-AGENT DATA ####}
{% for result_per_agent in result_per_iteration.results_per_agent %}
<h4>Agent {{ result_per_agent.agent.id }}
({{ result_per_agent.agent.ip }})</h4>
({{ result_per_agent.agent.ip }}, {{ result_per_agent.agent.node }})</h4>
{% if result_per_agent.samples %}
<h5>Traffic stats</h5>
{% if result_per_agent.stats %}
<div class="row">
{% for stat_title, stat_values in result_per_agent.stats.items() %}
<div class="col-md-4">
<h5>Stats for {{ stat_title }}</h5>
<dl class="dl-horizontal">
<dt>Max bandwidth</dt><dd>{{ result_per_agent.stats.max|round(2) }} Mbits/s</dd>
<dt>Min bandwidth</dt><dd>{{ result_per_agent.stats.min|round(2) }} Mbits/s</dd>
<dt>Mean bandwidth</dt><dd>{{ result_per_agent.stats.mean|round(2) }} Mbits/s</dd>
</dl>
<dl class="dl-horizontal">
<dt>Max</dt><dd>{{ stat_values.max|round(2) }} {{ stat_values.unit }}</dd>
<dt>Min</dt><dd>{{ stat_values.min|round(2) }} {{ stat_values.unit }}</dd>
<dt>Mean</dt><dd>{{ stat_values.mean|round(2) }} {{ stat_values.unit }}</dd>
</dl>
</div>
{% endfor %}
</div>
{% endif %}
{% if result_per_agent.chart %}
<div id="chart-{{ result_per_agent.uuid }}"></div>
<script type="application/javascript">
$(document).ready(function () {
@@ -482,6 +549,7 @@
});
});
</script>
{% endif %}
{% endif %}
{% if result_per_agent.command %}


@@ -58,7 +58,7 @@ class TestIperfGraphExecutor(testtools.TestCase):
[3.0, 405798912],
],
'meta': [
['time', 'sec'], ['bandwidth', 'bps']
['time', 's'], ['bandwidth', 'bps']
]
}
reply = executor.process_reply(message)


@@ -0,0 +1,73 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import testtools
from shaker.engine.executors import netperf
IP = '10.0.0.10'
AGENT = {'slave': {'ip': IP}}
class TestNetperfWrapperExecutor(testtools.TestCase):
def test_get_command(self):
executor = netperf.NetperfWrapperExecutor({}, AGENT)
expected = 'netperf-wrapper -H %s -l 60 -s 1 -f csv tcp_download' % IP
self.assertEqual(expected, executor.get_command())
def test_get_command_with_params(self):
executor = netperf.NetperfWrapperExecutor(
dict(method='ping', time=10, interval=0.5), AGENT)
expected = 'netperf-wrapper -H %s -l 10 -s 0.5 -f csv ping' % IP
self.assertEqual(expected, executor.get_command())
def test_process_reply(self):
executor = netperf.NetperfWrapperExecutor({}, AGENT)
message = {
'stdout': """tcp_download,Ping ICMP,TCP download
0.0,0.09,
2.0,0.0800211283506,
4.0,0.0602545096056,
6.0,0.0502416561724,28555.9
8.0,0.05,25341.9871721
10.0,0.0500947171761,30486.4518264
12.0,0.0603484557656,
14.0,0.0603987445198,
"""
}
expected = {
'samples': [
[0.0, 0.09, None],
[2.0, 0.0800211283506, None],
[4.0, 0.0602545096056, None],
[6.0, 0.0502416561724, 28555.9],
[8.0, 0.05, 25341.9871721],
[10.0, 0.0500947171761, 30486.4518264],
[12.0, 0.0603484557656, None],
[14.0, 0.0603987445198, None],
],
'meta': [
['time', 's'], ['Ping ICMP', 'ms'], ['TCP download', 'Mbps'],
]
}
reply = executor.process_reply(message)
self.assertEqual(expected['samples'], reply['samples'],
message='Samples data')
self.assertEqual(expected['meta'], reply['meta'],
message='Metadata')


@@ -0,0 +1,131 @@
# Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import testtools
from shaker.engine.aggregators import traffic
class TestTrafficAggregator(testtools.TestCase):
def test_agent_summary(self):
aggregator = traffic.TrafficAggregator(None)
original = {
"stderr": "", "stdout": '',
"meta": [["time", "s"], ["Ping ICMP", "ms"],
["TCP download", "bps"]],
"samples": [[0, 1.9, None],
[1, 2.4, None],
[2, 2.6, 60 * 1024 * 1024],
[3, 2.2, 65 * 1024 * 1024],
[4, 2.2, 61 * 1024 * 1024],
[5, 1.9, None]],
}
processed = copy.deepcopy(original)
aggregator.agent_summary(processed)
self.assertFalse('stdout' in processed)
expected_stats = {
'Ping ICMP': {
'max': 2.6,
'min': 1.9,
'mean': 2.2,
'unit': 'ms',
},
'TCP download': {
'max': 65.0,
'min': 60.0,
'mean': 62.0,
'unit': 'Mbps',
}
}
self.assertEqual(expected_stats, processed['stats'])
expected_chart = [['time', 0, 1, 2, 3, 4, 5],
['Ping ICMP', 1.9, 2.4, 2.6, 2.2, 2.2, 1.9],
['TCP download', None, None, 60.0, 65.0, 61.0, None]]
self.assertEqual(expected_chart, processed['chart'])
def test_iteration_summary(self):
aggregator = traffic.TrafficAggregator(None)
original = {
'results_per_agent': [
{
'agent': {'node': 'alpha'},
'stats': {
'Ping ICMP': {
'max': 2.6,
'min': 1.9,
'mean': 2.2,
'unit': 'ms',
},
'TCP download': {
'max': 65.0,
'min': 60.0,
'mean': 62.0,
'unit': 'Mbps',
}
},
'chart': [['time', 0, 1, 2, 3, 4, 5],
['Ping ICMP', 1.9, 2.4, 2.6, 2.2, 2.2, 1.9],
['TCP download', None, None, 60.0, 65.0, 61.0,
None]]
},
{
'agent': {'node': 'beta'},
'stats': {
'Ping ICMP': {
'max': 3.6,
'min': 2.9,
'mean': 3.2,
'unit': 'ms',
},
'TCP download': {
'max': 75.0,
'min': 70.0,
'mean': 72.0,
'unit': 'Mbps',
}
},
'chart': [['time', 0, 1, 2, 3, 4, 5],
['Ping ICMP', 2.9, 3.4, 3.6, 3.2, 3.2, 2.9],
['TCP download', None, None, 70.0, 75.0, 71.0,
None]]
},
]
}
processed = copy.deepcopy(original)
aggregator.iteration_summary(processed)
expected_stats = {
'Ping ICMP': {
'max': 3.6,
'min': 1.9,
'mean': 2.7,
'unit': 'ms',
},
'TCP download': {
'max': 75.0,
'min': 60.0,
'mean': 67.0,
'unit': 'Mbps',
}
}
self.assertEqual(expected_stats, processed['stats'])
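
The expected iteration stats follow directly from the two per-agent blocks above: the max of the per-agent maxima, the min of the minima, and the mean of the means. A quick standalone check of those numbers:

# Sanity check of the expected iteration-level stats above.
per_agent = {
    'Ping ICMP': [{'max': 2.6, 'min': 1.9, 'mean': 2.2},
                  {'max': 3.6, 'min': 2.9, 'mean': 3.2}],
    'TCP download': [{'max': 65.0, 'min': 60.0, 'mean': 62.0},
                     {'max': 75.0, 'min': 70.0, 'mean': 72.0}],
}
for name, stats in sorted(per_agent.items()):
    print(name,
          max(s['max'] for s in stats),                  # 3.6 / 75.0
          min(s['min'] for s in stats),                  # 1.9 / 60.0
          sum(s['mean'] for s in stats) / len(stats))    # 2.7 / 67.0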