Add subunit2sql CLI option to use run wall time
This commit adds a new CLI/config option for the subunit2sql CLI command to use the run wall time when populating the run_time column of the runs table. This is often desirable because it gives a better sense of the actual duration of the run. However, since using the sum of the individual tests executed during a run has been the default behavior for so long, we can't simply switch it, for backwards-compatibility reasons. This commit therefore adds an option to enable the wall-time behavior when that is what you need.

Change-Id: I08fe16604454f5888d56573e5e919e40ac82efd1
parent: fd6c9ec3d3    commit: 08da4131ed
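To make the distinction concrete, here is a minimal, hypothetical sketch (not part of this change) of how the two run_time values can differ when tests overlap in time:

    # Hypothetical example: two overlapping tests. Summing their durations
    # counts the overlap twice; wall time measures first start to last stop.
    import datetime

    results = {
        'test_a': {'start_time': datetime.datetime(2015, 1, 1, 12, 0, 0),
                   'end_time': datetime.datetime(2015, 1, 1, 12, 0, 30)},
        'test_b': {'start_time': datetime.datetime(2015, 1, 1, 12, 0, 10),
                   'end_time': datetime.datetime(2015, 1, 1, 12, 0, 40)},
    }

    summed = sum((d['end_time'] - d['start_time']).total_seconds()
                 for d in results.values())
    wall = (max(d['end_time'] for d in results.values()) -
            min(d['start_time'] for d in results.values())).total_seconds()
    print(summed, wall)  # 60.0 vs. 40.0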
@@ -17,13 +17,16 @@ Properties:
 * **passes**: The total number of successful tests in the run.
 * **fails**: The total number of failed tests during the run.
 * **skips**: The total number of skipped tests during the run.
-* **run_time**: The sum of the duration of executed tests during the run. Note,
-  this is not the time it necessarily took for the run to finish. For
+* **run_time**: The run_time for the run. By default the subunit2sql CLI
+  command will use the sum of the duration of executed tests during the run.
+  Note, this is not the time it necessarily took for the run to finish. For
   example, the time for setUpClass and tearDownClass (assuming the
   stream is from a python unittest run) would not be factored in. (as
   they aren't stored in the subunit stream) Also, if the tests are
   being run in parallel since this is just a raw sum this is not
-  factored in.
+  factored in. However, there is an option for the subunit2sql CLI to specify
+  using the run wall time, which will take the duration from the first test's
+  start time to the last test's end time.
 * **artifacts**: An optional link to where the logs or any other artifacts from
   the run are stored.
 * **run_at**: The time at which the run was stored in the DB.
@@ -0,0 +1,6 @@
+---
+features:
+  - A new option is added to the subunit2sql CLI command,
+    --use_run_wall_time/-w, that is used to populate the run_time column with
+    the wall time of the run instead of the default behavior which uses the
+    sum of the individual test execution times
@@ -34,11 +34,12 @@ def get_duration(start, end):
 class ReadSubunit(object):
 
     def __init__(self, stream_file, attachments=False, attr_regex=None,
-                 targets=None):
+                 targets=None, use_wall_time=False):
         if targets is None:
             targets = []
         else:
             targets = targets[:]
+        self.use_wall_time = use_wall_time
         self.stream_file = stream_file
         self.stream = subunit.ByteStreamToStreamResult(self.stream_file)
         starts = testtools.StreamResult()
@@ -137,6 +138,16 @@ class ReadSubunit(object):
 
     def run_time(self):
         runtime = 0.0
-        for name, data in self.results.items():
-            runtime += get_duration(data['start_time'], data['end_time'])
+        if self.use_wall_time:
+            start_time = None
+            stop_time = None
+            for name, data in self.results.items():
+                if not start_time or data['start_time'] < start_time:
+                    start_time = data['start_time']
+                if not stop_time or data['end_time'] > stop_time:
+                    stop_time = data['end_time']
+            runtime = get_duration(start_time, stop_time)
+        else:
+            for name, data in self.results.items():
+                runtime += get_duration(data['start_time'], data['end_time'])
         return runtime
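With the pieces above in place, using the new flag programmatically looks roughly like the following. This is a hedged sketch, not code from this change: the file name is illustrative, and opening the stream with 'r' simply mirrors how the shell command opens its streams in the main() hunk further down.

    # Hedged usage sketch: read a subunit stream and compute run_time as wall
    # time (first test start to last test stop) instead of a sum of durations.
    from subunit2sql import read_subunit

    with open('results.subunit', 'r') as stream_file:
        reader = read_subunit.ReadSubunit(stream_file, use_wall_time=True)
        results = reader.get_results()  # run_time is now based on wall time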
@@ -55,7 +55,11 @@ SHELL_OPTS = [
     cfg.StrOpt('run_at', default=None,
                help="The optional datetime string for the run was started, "
                     "If one isn't provided the date and time of when "
-                    "subunit2sql is called will be used")
+                    "subunit2sql is called will be used"),
+    cfg.BoolOpt('use_run_wall_time', default=False, short='w',
+                help="When True the wall time of a run will be used for the "
+                     "run_time column in the runs table. By default the sum of"
+                     " the test execution times is used instead."),
 ]
 
 _version_ = version.VersionInfo('subunit2sql').version_string()
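Because the new option is registered as an oslo.config BoolOpt with short='w', it appears on the command line as --use_run_wall_time (or -w) and can typically also be set in the config file. A minimal, standalone oslo.config sketch (not subunit2sql code; the ConfigOpts instance and argument list are assumptions for the demo) of how such an option is registered and parsed:

    # Standalone illustration of the oslo.config pattern used above.
    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_cli_opt(cfg.BoolOpt('use_run_wall_time', default=False,
                                      short='w',
                                      help='Use the run wall time'))
    conf(args=['--use_run_wall_time'])
    print(conf.use_run_wall_time)  # True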
@@ -227,13 +231,15 @@ def main():
         streams = [subunit.ReadSubunit(open(s, 'r'),
                                        attachments=CONF.store_attachments,
                                        attr_regex=CONF.attr_regex,
-                                       targets=targets)
+                                       targets=targets,
+                                       use_wall_time=CONF.use_run_wall_time)
                    for s in CONF.subunit_files]
     else:
         streams = [subunit.ReadSubunit(sys.stdin,
                                        attachments=CONF.store_attachments,
                                        attr_regex=CONF.attr_regex,
-                                       targets=targets)]
+                                       targets=targets,
+                                       use_wall_time=CONF.use_run_wall_time)]
     for stream in streams:
         process_results(stream.get_results())
@@ -86,6 +86,31 @@ class TestReadSubunit(base.TestCase):
         runtime = fake_subunit.run_time()
         self.assertEqual(runtime, 5000.0)
 
+    def test_wall_run_time(self):
+        fake_subunit = subunit.ReadSubunit(mock.MagicMock(),
+                                           use_wall_time=True)
+        fake_results = {}
+        start_time = datetime.datetime(1914, 6, 28, 10, 45, 0)
+        stop_time = datetime.datetime(1914, 6, 28, 10, 45, 50)
+        fifty_sec_run_result = {
+            'start_time': start_time,
+            'end_time': stop_time,
+        }
+        fake_results['first'] = fifty_sec_run_result
+        for num in range(100):
+            test_name = 'test_fake_' + str(num)
+            start_time = start_time + datetime.timedelta(minutes=1)
+            stop_time = stop_time + datetime.timedelta(minutes=1)
+            fake_result = {
+                'start_time': start_time,
+                'end_time': stop_time,
+            }
+            fake_results[test_name] = fake_result
+        fake_subunit.results = fake_results
+        runtime = fake_subunit.run_time()
+        # Wall time should be (60 * 100) + 50
+        self.assertEqual(runtime, 6050.0)
+
     def test_parse_outcome(self):
         fake_subunit = subunit.ReadSubunit(mock.MagicMock())
@@ -159,7 +159,8 @@ class TestMain(base.TestCase):
         read_subunit_mock.assert_called_once_with(sys.stdin,
                                                   attachments=False,
                                                   attr_regex='\[(.*)\]',
-                                                  targets=[])
+                                                  targets=[],
+                                                  use_wall_time=False)
         process_results_mock.assert_called_once_with(fake_get_results)
 
     @mock.patch('subunit2sql.read_subunit.ReadSubunit')
@@ -184,7 +185,8 @@ class TestMain(base.TestCase):
         read_subunit_mock.assert_called_with(mock.ANY,
                                              attachments=False,
                                              attr_regex='\[(.*)\]',
-                                             targets=[])
+                                             targets=[],
+                                             use_wall_time=False)
         self.assertEqual(2, len(read_subunit_mock.call_args_list))
         file_1 = read_subunit_mock.call_args_list[0][0][0]
         file_1.seek(0)
@@ -214,7 +216,8 @@ class TestMain(base.TestCase):
         shell.main()
         read_subunit_mock.assert_called_once_with(
             sys.stdin, attachments=False, attr_regex='\[(.*)\]',
-            targets=[mock.sentinel.extension])
+            targets=[mock.sentinel.extension],
+            use_wall_time=False)
         process_results_mock.assert_called_once_with(fake_get_results)
@@ -715,4 +718,4 @@ class TestProcessResults(base.TestCase):
         self.db_api_mock.add_test_run_metadata.assert_has_calls([
             mock.call(fake_results['test1']['metadata'], fake_db_test_run_id,
                       self.fake_session)])
-        self.fake_session.close.assert_called_once()
+        self.fake_session.close.assert_called_once()