Improve running performance unit tests
* added a decorator which runs each test multiple times and evaluates the results
* added helper functions to read and write statistics from tests
* modified node performance tests to work faster

Change-Id: I09c11ceeed42a747129239bcd8d90159d17c22ba
parent 4b325a95b0
commit f0b788eae0
@@ -802,10 +802,14 @@ DUMP:
 # performance tests settings
 PERFORMANCE_PROFILING_TESTS: 0
+PERFORMANCE_TESTS_RUN_NUMBER: 10
+PERFORMANCE_TESTS_TOLERANCE: 0.1
 LOAD_TESTS_PATHS:
   load_tests_base: "/tmp/nailgun_performance_tests/tests/"
   last_performance_test: "/tmp/nailgun_performance_tests/tests/last/"
+  last_performance_test_run: "/tmp/nailgun_performance_tests/tests/last/run/"
   load_tests_results: "/tmp/nailgun_performance_tests/results/"
+  load_previous_tests_results: "/tmp/nailgun_performance_tests/previous_results.json"

 DNS_UPSTREAM: "8.8.8.8, 8.8.4.4"
 NTP_UPSTREAM: "0.pool.ntp.org, 1.pool.ntp.org"
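The two new settings work as a pair: PERFORMANCE_TESTS_RUN_NUMBER repeats each performance test, and PERFORMANCE_TESTS_TOLERANCE relaxes the previously recorded expected time before the new average is compared against it. A minimal sketch of that arithmetic (illustrative values, not part of the diff):

run_number = 10        # PERFORMANCE_TESTS_RUN_NUMBER
tolerance = 0.1        # PERFORMANCE_TESTS_TOLERANCE

prev_expect_time = 2.0                          # read from previous_results.json
allowed = prev_expect_time * (1.0 + tolerance)  # 2.2 s: 10% headroom
avg_time = 2.1                                  # average over run_number repeats
assert avg_time <= allowed                      # the decorator's pass condition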
@@ -18,6 +18,7 @@ import functools
 from nose import SkipTest
 import os.path
 import shutil
+import six
 import tarfile
 import time
 from timeit import Timer
@@ -60,6 +61,9 @@ class BaseLoadTestCase(BaseTestCase):
         cls.app = app.TestApp(build_app(db_driver=test_db_driver).
                               wsgifunc(ProfilerMiddleware))
         syncdb()
+        cls.tests_results = defaultdict(
+            lambda: defaultdict(lambda: defaultdict(list)))
+        cls.tests_stats = defaultdict(lambda: defaultdict(dict))

     @classmethod
     def tearDownClass(cls):
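The triple-nested defaultdict lets the test machinery append timings without pre-creating any keys; a quick sketch of the shape it builds up (hypothetical test name and timing):

from collections import defaultdict

tests_results = defaultdict(
    lambda: defaultdict(lambda: defaultdict(list)))
tests_results['test_get_cluster (...)']['1']['results'].append(0.42)
# -> {'test_get_cluster (...)': {'1': {'results': [0.42]}}}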
@@ -87,10 +91,12 @@ class BaseLoadTestCase(BaseTestCase):
         tar.add(settings.LOAD_TESTS_PATHS['load_tests_base'])
         tar.close()
         shutil.rmtree(settings.LOAD_TESTS_PATHS['load_tests_base'])
+        write_results(str(cls.__name__), cls.tests_stats)

     def setUp(self):
         super(BaseLoadTestCase, self).setUp()
         self.start_time = time.time()
+        self.call_number = 1

     def tearDown(self):
         """Copy all files from profiling from last test to separate folder.
@@ -103,7 +109,7 @@ class BaseLoadTestCase(BaseTestCase):
             settings.LOAD_TESTS_PATHS['load_tests_base'],
             '{exec_time}_{test_name}'.format(
                 exec_time=exec_time,
-                test_name=self.__str__().split()[0]))
+                test_name=str(self).split()[0]))
         shutil.copytree(settings.LOAD_TESTS_PATHS['last_performance_test'],
                         test_path)
         shutil.rmtree(settings.LOAD_TESTS_PATHS['last_performance_test'])
@@ -116,22 +122,23 @@ class BaseLoadTestCase(BaseTestCase):
         to_add = len(self.slowest_calls) < self.TOP_SLOWEST
         fastest = (sorted(self.slowest_calls.keys())[0]
                    if len(self.slowest_calls) else None)
+        test_name = str(self)
+        request_name = str(func.args[0])
+        name = ':'.join([test_name, request_name])
         if not to_add:
             if fastest < exec_time:
                 del self.slowest_calls[fastest]
                 to_add = True

         if to_add:
-            name = ':'.join([self.__str__(), str(func.args[0])])
             self.slowest_calls[exec_time].append(name)

         self.assertGreater(
             max_exec_time,
             exec_time,
             "Execution time: {0} is greater than expected: {1}".format(
                 exec_time, max_exec_time
             )
         )
+        test_results_d = self.tests_results[test_name][str(self.call_number)]
+        test_results_d['results'].append(exec_time)
+        if self.call_number == 1:
+            test_results_d['request_name'] = request_name
+            test_results_d['expect_time'] = max_exec_time
+        self.call_number += 1

     def get_handler(self, handler_name, handler_kwargs={}):
         resp = self.app.get(
@@ -223,6 +230,7 @@ class BaseUnitLoadTestCase(BaseLoadTestCase):

     def setUp(self):
         self.start_time = time.time()
+        self.call_number = 1


 class BaseIntegrationLoadTestCase(BaseLoadTestCase):
@@ -242,3 +250,114 @@ class BaseIntegrationLoadTestCase(BaseLoadTestCase):
                 exec_time=exec_time,
                 max_exec_time=self.total_time))
         self.db.remove()
+
+
+def copy_test_results(run_number):
+    """Copy test results from a separate run to a new directory.
+
+    :parameter run_number: run number, used in naming the new directory
+    """
+    path_to_write = os.path.join(
+        settings.LOAD_TESTS_PATHS['last_performance_test'],
+        'run{0}'.format(run_number))
+    shutil.copytree(settings.LOAD_TESTS_PATHS['last_performance_test_run'],
+                    path_to_write)
+    shutil.rmtree(settings.LOAD_TESTS_PATHS['last_performance_test_run'])
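Each repeat's profiling output is rotated out of the shared run directory into a numbered one, so the profiles from all repeats survive side by side. Roughly, using the paths from the settings hunk above:

# before: /tmp/nailgun_performance_tests/tests/last/run/   (just-finished repeat)
# after:  /tmp/nailgun_performance_tests/tests/last/run0/  (archived per repeat)
copy_test_results(0)   # moves .../last/run/ into .../last/run0/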
+
+
+def normalize(N, percentile):
+    """Sort N and trim the first and last percentile of values.
+
+    :parameter N: a list of values.
+    :parameter percentile: a float value from 0.0 to 1.0.
+
+    :return: the sorted values with the outlying percentiles removed
+    """
+    if not N:
+        return None
+    k = (len(N) - 1) * percentile
+    floor = int(k)
+    return sorted(N)[floor:len(N) - floor - 1]
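With the values the decorator passes (ten runs, percentile=0.025), k = 9 * 0.025 = 0.225, so floor is 0 and the slice drops only the final (slowest) sample. A worked example with hypothetical timings:

samples = [1.2, 1.0, 1.1, 1.3, 1.2, 1.1, 9.9, 1.2, 1.0, 1.1]
trimmed = normalize(samples, 0.025)
# k = (10 - 1) * 0.025 = 0.225 -> floor = 0
# sorted(samples)[0:9] drops the 9.9 outlier
avg_time = sum(trimmed) / len(trimmed)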
+
+
+def read_previous_results():
+    """Read the results of the previous run.
+
+    :return: dictionary of results, if they exist
+    """
+    if os.path.exists(
+            settings.LOAD_TESTS_PATHS['load_previous_tests_results']):
+        with open(settings.LOAD_TESTS_PATHS['load_previous_tests_results'],
+                  'r') as results_file:
+            results = jsonutils.load(results_file)
+        return results
+    return {}
+
+
+def write_results(test_class_name, results):
+    """Write test results to the file defined in settings."""
+    prev_results = read_previous_results()
+    if test_class_name in prev_results:
+        prev_results[test_class_name].update(results)
+    else:
+        prev_results[test_class_name] = results
+    with open(settings.LOAD_TESTS_PATHS['load_previous_tests_results'],
+              'w') as results_file:
+        results_file.write(jsonutils.dumps(prev_results))
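previous_results.json thus ends up keyed by test-class name, then by the test's string form, then by call number; a sketch of the stored document (hypothetical names and numbers):

# written by write_results('NodeOperationsLoadTest', cls.tests_stats)
# {
#     "NodeOperationsLoadTest": {
#         "test_get_nodes (...)": {
#             "1": {"request_name": "NodeCollectionHandler",
#                   "expect_time": 4.8}
#         }
#     }
# }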
+
+
+def evaluate_unit_performance(f):
+    @functools.wraps(f)
+    def wrapper(*args, **kwargs):
+        test = args[0]
+        number_of_runs = settings.PERFORMANCE_TESTS_RUN_NUMBER
+
+        # run the test multiple times to get more precise results
+        for run in six.moves.range(number_of_runs):
+            f(*args, **kwargs)
+            copy_test_results(run)
+            # reset the call number for unit tests
+            test.call_number = 1
+
+        # read the results of the previous correct run
+        compare_with_previous = False
+        class_name = test.__class__.__name__
+        previous_test_results = read_previous_results().get(class_name, {}).\
+            get(str(test), {})
+        current_test_results = test.tests_results[str(test)]
+        if len(previous_test_results) == len(current_test_results):
+            compare_with_previous = True
+
+        for call_number, results in six.iteritems(
+                test.tests_results[str(test)]):
+            request_name = results['request_name']
+
+            # normalize the results and compute the average
+            normalized = normalize(results['results'], 0.025)
+            avg_time = sum(normalized) / len(normalized)
+
+            # check whether previous results exist
+            prev_time = None
+            if compare_with_previous:
+                if request_name in \
+                        previous_test_results[call_number]['request_name']:
+                    # we give some % (default 10%) of tolerance over the
+                    # previously expected time
+                    prev_time = (
+                        previous_test_results[call_number]['expect_time'] *
+                        (1.0 + settings.PERFORMANCE_TESTS_TOLERANCE))
+            expect_time = prev_time or results['expect_time']
+            test.assertTrue(
+                avg_time <= expect_time,
+                "Average execution time: {exec_time} is greater "
+                "than expected: {max_exec_time}".format(
+                    exec_time=avg_time,
+                    max_exec_time=expect_time))
+            test.tests_stats[str(test)][call_number]['request_name'] =\
+                request_name
+            test.tests_stats[str(test)][call_number]['expect_time'] =\
+                avg_time
+
+    return wrapper
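Taken together, the decorator's contract is: run the wrapped test PERFORMANCE_TESTS_RUN_NUMBER times, average the trimmed timings per call position, and assert against either the tolerated previous expectation or the test's own hard limit. A condensed usage sketch with a hypothetical handler name:

class MyLoadTest(BaseUnitLoadTestCase):

    @evaluate_unit_performance
    def test_get_something(self):
        # one timed call per run; the decorator repeats the whole test
        # and averages the recorded timings
        func = functools.partial(self.get_handler, 'SomeHandler')
        self.check_time_exec(func)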
@@ -58,8 +58,8 @@ class Profiler(object):
         self.method = method
         self.handler_name = handler_name
         if not os.path.exists(settings.
-                              LOAD_TESTS_PATHS['last_performance_test']):
-            os.makedirs(settings.LOAD_TESTS_PATHS['last_performance_test'])
+                              LOAD_TESTS_PATHS['last_performance_test_run']):
+            os.makedirs(settings.LOAD_TESTS_PATHS['last_performance_test_run'])
         self.profiler = cProfile.Profile()
         self.profiler.enable()
         self.start = time.time()
@@ -67,7 +67,7 @@ class Profiler(object):
     def save_data(self):
         elapsed = time.time() - self.start
         pref_filename = os.path.join(
-            settings.LOAD_TESTS_PATHS['last_performance_test'],
+            settings.LOAD_TESTS_PATHS['last_performance_test_run'],
             '{method:s}.{handler_name:s}.{elapsed_time:.0f}ms.{t_time}.'.
             format(
                 method=self.method,
@@ -21,6 +21,7 @@ from random import randint
 from nailgun.openstack.common import jsonutils
 from nailgun.test.base import fake_tasks
 from nailgun.test.performance.base import BaseUnitLoadTestCase
+from nailgun.test.performance.base import evaluate_unit_performance


 class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
@@ -30,6 +31,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         super(ClusterOperationsLoadTest, cls).setUpClass()
         cls.env.create_nodes(cls.NODES_NUM, cluster_id=cls.cluster['id'])

+    @evaluate_unit_performance
     def test_get_cluster(self):
         func = functools.partial(
             self.get_handler,
@@ -38,6 +40,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_put_cluster(self):
         func = functools.partial(
             self.put_handler,
@@ -47,6 +50,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_get_default_deployment_info(self):
         func = functools.partial(
             self.get_handler,
@@ -55,6 +59,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func, 70)

+    @evaluate_unit_performance
     def test_get_generated_data(self):
         func = functools.partial(
             self.get_handler,
@@ -63,6 +68,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_get_default_provisioning_info(self):
         func = functools.partial(
             self.get_handler,
@@ -71,6 +77,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_get_deployment_info(self):
         func = functools.partial(
             self.get_handler,
@@ -79,6 +86,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_put_deployment_info(self):
         func = functools.partial(
             self.put_handler,
@@ -88,6 +96,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_delete_deployment_info(self):
         func = functools.partial(
             self.delete_handler,
@@ -96,6 +105,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_get_provisioning_info(self):
         func = functools.partial(
             self.get_handler,
@@ -104,6 +114,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_put_provisioning_info(self):
         func = functools.partial(
             self.put_handler,
@@ -113,6 +124,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_delete_provisioning_info(self):
         func = functools.partial(
             self.delete_handler,
@@ -121,6 +133,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_get_clusters(self):
         func = functools.partial(
             self.get_handler,
@@ -138,11 +151,13 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
             cluster_data
         )

+    @evaluate_unit_performance
     def test_post_cluster(self):
         release = self.env.create_release()
         func = functools.partial(self.post_cluster, release.id)
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_get_attributes(self):
         func = functools.partial(
             self.get_handler,
@@ -151,6 +166,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_put_attributes(self):
         func = functools.partial(
             self.put_handler,
@@ -160,6 +176,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_patch_attributes(self):
         func = functools.partial(
             self.patch_handler,
@@ -169,6 +186,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_get_default_attributes(self):
         func = functools.partial(
             self.get_handler,
@@ -177,6 +195,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_put_default_attributes(self):
         func = functools.partial(
             self.put_handler,
@@ -187,6 +206,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         self.check_time_exec(func)

     @fake_tasks()
+    @evaluate_unit_performance
     def test_put_provision_selected_nodes(self):
         func = functools.partial(
             self.put_handler,
@@ -197,6 +217,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         self.check_time_exec(func)

     @fake_tasks()
+    @evaluate_unit_performance
     def test_put_deploy_selected_nodes(self):
         func = functools.partial(
             self.put_handler,
@@ -207,6 +228,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         self.check_time_exec(func, 10)

     @fake_tasks()
+    @evaluate_unit_performance
     def test_put_stop_deployment(self):
         self.put_handler(
             'DeploySelectedNodes',
@@ -233,6 +255,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_get_nova_network_configuration(self):
         func = functools.partial(
             self.get_handler,
@@ -241,6 +264,7 @@ class ClusterOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_put_nova_network_configuration(self):
         resp = self.get_handler(
             'NovaNetworkConfigurationHandler',
@@ -269,6 +293,7 @@ class ClusterNeutronOperationsLoadTest(BaseUnitLoadTestCase):
                                          mode='ha_compact')
         cls.env.create_nodes(cls.NODES_NUM, cluster_id=cls.cluster['id'])

+    @evaluate_unit_performance
     def test_get_neutron_network_configuration(self):
         func = functools.partial(
             self.get_handler,
@@ -277,6 +302,7 @@ class ClusterNeutronOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_put_neutron_network_configuration(self):
         resp = self.get_handler(
             'NeutronNetworkConfigurationHandler',
@@ -303,6 +329,7 @@ class ClusterNodeOperationsLoadTest(BaseUnitLoadTestCase):
             handler_kwargs={'cluster_id': cluster_id}
         )

+    @evaluate_unit_performance
     def test_post_node_assingment(self):
         cluster_id = self.env.create_cluster()['id']
         func = functools.partial(self.assign_node, cluster_id)
@@ -316,6 +343,7 @@ class ClusterNodeOperationsLoadTest(BaseUnitLoadTestCase):
             handler_kwargs={'cluster_id': cluster_id}
         )

+    @evaluate_unit_performance
     def test_post_node_unassingment(self):
         cluster_id = self.env.create_cluster()['id']
         func = functools.partial(self.unassign_node, cluster_id)
@@ -14,10 +14,14 @@
 # under the License.

 import functools
+import random

 from nailgun import consts
 from nailgun.openstack.common import jsonutils
+from nailgun.test.utils import random_string
+
 from nailgun.test.performance.base import BaseUnitLoadTestCase
+from nailgun.test.performance.base import evaluate_unit_performance


 class NodeOperationsLoadTest(BaseUnitLoadTestCase):
@@ -27,51 +31,54 @@ class NodeOperationsLoadTest(BaseUnitLoadTestCase):
         super(NodeOperationsLoadTest, cls).setUpClass()
         cls.env.create_nodes(cls.NODES_NUM, cluster_id=cls.cluster['id'])

+    @evaluate_unit_performance
     def test_put_node(self):
-        for node in self.env.nodes:
-            func = functools.partial(
-                self.put_handler,
-                'NodeHandler',
-                {'status': consts.NODE_STATUSES.ready},
-                handler_kwargs={'obj_id': node.id}
-            )
-            self.check_time_exec(func)
+        func = functools.partial(
+            self.put_handler,
+            'NodeHandler',
+            {'status': consts.NODE_STATUSES.ready,
+             'name': random_string(20)},
+            handler_kwargs={'obj_id': random.choice(self.env.nodes).id}
+        )
+        self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_get_nodes(self):
         func = functools.partial(
             self.get_handler,
             'NodeCollectionHandler',
             handler_kwargs={'cluster_id': self.cluster['id']}
         )
-        self.check_time_exec(func, 5)
+        self.check_time_exec(func, 6)

+    @evaluate_unit_performance
     def test_get_defaults_disk(self):
-        for node in self.env.nodes:
-            func = functools.partial(
-                self.get_handler,
-                'NodeDefaultsDisksHandler',
-                handler_kwargs={'node_id': node.id}
-            )
-            self.check_time_exec(func)
+        func = functools.partial(
+            self.get_handler,
+            'NodeDefaultsDisksHandler',
+            handler_kwargs={'node_id': random.choice(self.env.nodes).id}
+        )
+        self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_get_volumes_info(self):
-        for node in self.env.nodes:
-            func = functools.partial(
-                self.get_handler,
-                'NodeVolumesInformationHandler',
-                handler_kwargs={'node_id': node.id}
-            )
-            self.check_time_exec(func)
+        func = functools.partial(
+            self.get_handler,
+            'NodeVolumesInformationHandler',
+            handler_kwargs={'node_id': random.choice(self.env.nodes).id}
+        )
+        self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_get_node_nic(self):
-        for node in self.env.nodes:
-            func = functools.partial(
-                self.get_handler,
-                'NodeNICsHandler',
-                handler_kwargs={'node_id': node.id}
-            )
-            self.check_time_exec(func)
+        func = functools.partial(
+            self.get_handler,
+            'NodeNICsHandler',
+            handler_kwargs={'node_id': random.choice(self.env.nodes).id}
+        )
+        self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_put_nodes_nics(self):
         nodes_list = []
         for node in self.env.nodes:
@@ -89,6 +96,7 @@ class NodeOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func, 14)

+    @evaluate_unit_performance
     def test_get_allocation_stats(self):
         func = functools.partial(
             self.get_handler,
@@ -96,6 +104,7 @@ class NodeOperationsLoadTest(BaseUnitLoadTestCase):
         )
         self.check_time_exec(func)

+    @evaluate_unit_performance
     def test_add_delete_nodes(self):
         nodes_delete_list = []
         nodes_add_list = []
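The node-test rewrite above is where most of the speed-up comes from: instead of timing the handler once per node, each test now times a single call against a randomly chosen node, so the repeat loop added by the decorator stays cheap. The cost difference, sketched with hypothetical numbers:

NODES_NUM = 100
RUN_NUMBER = 10
old_timed_calls = NODES_NUM * RUN_NUMBER   # 1000 handler calls per test
new_timed_calls = 1 * RUN_NUMBER           # 10 handler calls per test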
nailgun/nailgun/test/utils.py (new file, 35 lines)
@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2015 Mirantis, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import random
+import six
+import string
+
+
+def random_string(length, charset=None):
+    """Returns a random string of the specified length.
+
+    :param length: The length of the resulting string.
+    :type length: int.
+    :param charset: A set of characters to use for building random strings.
+    :type charset: Iterable object. Default: ASCII letters and digits.
+    :return: str
+
+    """
+    charset = charset or string.ascii_letters + string.digits
+
+    return ''.join([str(random.choice(charset))
+                    for i in six.moves.range(length)])
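A quick usage example of the new helper (outputs are random; these are hypothetical):

random_string(20)
# -> 'fK3Lm9Qw2ZxYb7Rt0Ahs'  (20 characters from ASCII letters and digits)
random_string(6, charset='abc123')
# -> '3ab1c2'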