Added base classes for Load Tests and profiler

* added nailgun profiler
* added base classes for load test
* added integration tests

Tests are run only when the settings flag
(PERFORMANCE_PROFILING_TESTS) is set to True; the profiler also runs
only when this variable is set. All artifacts from the profiler are
written to /tmp/nailgun_load_tests/results/; the directory can be
changed in settings.yaml.

DocImpact
Change-Id: I2be46c2a0a51544b12a1d9f2a840d4edc3ede2c4
Blueprint: 100-nodes-support
This commit is contained in:
Kamil Sambor 2014-09-25 09:35:50 +02:00
parent 1ca63c355a
commit cabab8ab7b
8 changed files with 416 additions and 6 deletions

View File

@ -16,19 +16,15 @@
from datetime import datetime from datetime import datetime
from decorator import decorator from decorator import decorator
from sqlalchemy import exc as sa_exc from sqlalchemy import exc as sa_exc
import web import web
from nailgun.api.v1.validators.base import BasicValidator from nailgun.api.v1.validators.base import BasicValidator
from nailgun.db import db from nailgun.db import db
from nailgun.objects.serializers.base import BasicSerializer
from nailgun.errors import errors from nailgun.errors import errors
from nailgun.logger import logger from nailgun.logger import logger
from nailgun import objects from nailgun import objects
from nailgun.objects.serializers.base import BasicSerializer
from nailgun.openstack.common import jsonutils from nailgun.openstack.common import jsonutils
@ -89,6 +85,7 @@ def load_db_driver(handler):
@decorator @decorator
def content_json(func, *args, **kwargs): def content_json(func, *args, **kwargs):
try: try:
data = func(*args, **kwargs) data = func(*args, **kwargs)
except web.notmodified: except web.notmodified:
@ -99,6 +96,7 @@ def content_json(func, *args, **kwargs):
http_error.data = build_json_response(http_error.data) http_error.data = build_json_response(http_error.data)
raise raise
web.header('Content-Type', 'application/json') web.header('Content-Type', 'application/json')
return build_json_response(data) return build_json_response(data)
@ -129,7 +127,7 @@ class BaseHandler(object):
:param status_code: the HTTP status code as an integer :param status_code: the HTTP status code as an integer
:param message: the message to send along, as a string :param message: the message to send along, as a string
:param headers: the headeers to send along, as a dictionary :param headers: the headers to send along, as a dictionary
""" """
class _nocontent(web.HTTPError): class _nocontent(web.HTTPError):
message = 'No Content' message = 'No Content'

View File

@ -762,3 +762,10 @@ DUMP:
- type: command - type: command
command: ceph osd tree command: ceph osd tree
to_file: ceph_osd_tree.txt to_file: ceph_osd_tree.txt
# performance tests settings
PERFORMANCE_PROFILING_TESTS: 0
LOAD_TESTS_PATHS:
load_tests_base: "/tmp/nailgun_performance_tests/tests/"
last_performance_test: "/tmp/nailgun_performance_tests/tests/last/"
load_tests_results: "/tmp/nailgun_performance_tests/results/"

View File

@ -0,0 +1,243 @@
# -*- coding: utf-8 -*-
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import defaultdict
import functools
from nose import SkipTest
import os.path
import shutil
import tarfile
import time
from timeit import Timer
from webtest import app
from nailgun.app import build_app
from nailgun.db import db
from nailgun.db import flush
from nailgun.db import syncdb
from nailgun.openstack.common import jsonutils
from nailgun.settings import settings
from nailgun.test.base import BaseTestCase
from nailgun.test.base import Environment
from nailgun.test.base import reverse
from nailgun.test.base import test_db_driver
from nailgun.test.performance.profiler import ProfilerMiddleware
class BaseLoadTestCase(BaseTestCase):
    """Base class for load tests.

    All load tests are long, so the suite should be run only on purpose:
    every test is skipped unless PERFORMANCE_PROFILING_TESTS is enabled
    in settings.yaml.
    """

    # Number of nodes that will be added during the test
    NODES_NUM = 100
    # Maximal allowed execution time of a tested handler call (seconds)
    MAX_EXEC_TIME = 8
    # Maximal number of slowest calls tracked per TestCase
    TOP_SLOWEST = 10
    # exec time -> list of call names; shared by all tests of the class
    # and dumped to 'slowest_calls.txt' in tearDownClass()
    slowest_calls = defaultdict(list)

    @classmethod
    def setUpClass(cls):
        """Skip unless profiling is enabled; prepare a clean work dir."""
        if not settings.PERFORMANCE_PROFILING_TESTS:
            # bug fix: the two literals used to concatenate without a
            # space ("settings.yamlis not set")
            raise SkipTest("PERFORMANCE_PROFILING_TESTS in settings.yaml "
                           "is not set")
        if os.path.exists(settings.LOAD_TESTS_PATHS['load_tests_base']):
            shutil.rmtree(settings.LOAD_TESTS_PATHS['load_tests_base'])
        os.makedirs(settings.LOAD_TESTS_PATHS['load_tests_base'])
        # Wrap the app in the profiling middleware so every request
        # produces profiler artifacts
        cls.app = app.TestApp(build_app(db_driver=test_db_driver).
                              wsgifunc(ProfilerMiddleware))
        syncdb()

    @classmethod
    def tearDownClass(cls):
        """Pack all the files from the profiling into a tar.gz archive."""
        if not os.path.exists(settings.LOAD_TESTS_PATHS['load_tests_results']):
            os.makedirs(settings.LOAD_TESTS_PATHS['load_tests_results'])
        if os.path.exists(settings.LOAD_TESTS_PATHS['load_tests_base']):
            # Write the report of the slowest calls, slowest first
            file_path = (settings.LOAD_TESTS_PATHS['load_tests_base'] +
                         'slowest_calls.txt')
            # open() instead of the py2-only file() builtin
            with open(file_path, 'w') as file_o:
                exec_times = sorted(cls.slowest_calls.keys(), reverse=True)
                for exec_time in exec_times:
                    line = '\t'.join([str(exec_time),
                                      '|'.join(cls.slowest_calls[exec_time]),
                                      '\n'])
                    file_o.write(line)
            test_result_name = os.path.join(
                settings.LOAD_TESTS_PATHS['load_tests_results'],
                '{name:s}_{timestamp}.tar.gz'.format(name=cls.__name__,
                                                     timestamp=time.time()))
            tar = tarfile.open(test_result_name, "w:gz")
            tar.add(settings.LOAD_TESTS_PATHS['load_tests_base'])
            tar.close()
            shutil.rmtree(settings.LOAD_TESTS_PATHS['load_tests_base'])

    def setUp(self):
        super(BaseLoadTestCase, self).setUp()
        self.start_time = time.time()

    def tearDown(self):
        """Copy profiling files of the last test to a separate folder.

        The folder name starts with the execution time of the test; this
        helps to find data from the tests that hit bottlenecks.
        """
        self.stop_time = time.time()
        exec_time = self.stop_time - self.start_time
        test_path = os.path.join(
            settings.LOAD_TESTS_PATHS['load_tests_base'],
            '{exec_time}_{test_name}'.format(
                exec_time=exec_time,
                test_name=self.__str__().split()[0]))
        shutil.copytree(settings.LOAD_TESTS_PATHS['last_performance_test'],
                        test_path)
        shutil.rmtree(settings.LOAD_TESTS_PATHS['last_performance_test'])

    def check_time_exec(self, func, max_exec_time=None):
        """Run *func* once, record it in the slowest-call table and assert
        it finished within *max_exec_time* (MAX_EXEC_TIME by default).
        """
        max_exec_time = max_exec_time or self.MAX_EXEC_TIME
        exec_time = Timer(func).timeit(number=1)
        # Decide whether this call belongs in the top-slowest table
        to_add = len(self.slowest_calls) < self.TOP_SLOWEST
        fastest = min(self.slowest_calls) if self.slowest_calls else None
        if not to_add:
            if fastest < exec_time:
                # evict the fastest of the recorded slow calls
                del self.slowest_calls[fastest]
                to_add = True
        if to_add:
            name = ':'.join([self.__str__(), str(func.args[0])])
            self.slowest_calls[exec_time].append(name)
        self.assertGreater(
            max_exec_time,
            exec_time,
            "Execution time: {0} is greater, than expected: {1}".format(
                exec_time, max_exec_time
            )
        )

    def get_handler(self, handler_name, handler_kwargs=None):
        """GET the named handler; assert HTTP 200 and return the response."""
        # default changed from a shared mutable {} to None
        resp = self.app.get(
            reverse(handler_name, kwargs=handler_kwargs or {}),
            headers=self.default_headers
        )
        self.assertEqual(200, resp.status_code)
        return resp

    def put_handler(self, handler_name, data, handler_kwargs=None):
        """PUT *data* (JSON-encoded) to the named handler; assert 200/202."""
        resp = self.app.put(
            reverse(handler_name, kwargs=handler_kwargs or {}),
            jsonutils.dumps(data),
            headers=self.default_headers
        )
        self.assertIn(resp.status_code, (200, 202))
        return resp

    def patch_handler(self, handler_name, request_params, handler_kwargs=None):
        """PATCH the named handler with *request_params*; assert 200/202."""
        resp = self.app.patch(
            reverse(handler_name, kwargs=handler_kwargs or {}),
            params=jsonutils.dumps(request_params),
            headers=self.default_headers
        )
        self.assertIn(resp.status_code, (200, 202))
        return resp

    def post_handler(self, handler_name, obj_data, handler_kwargs=None):
        """POST *obj_data* (JSON-encoded) to the named handler; assert 200/201."""
        resp = self.app.post(
            reverse(handler_name, kwargs=handler_kwargs or {}),
            jsonutils.dumps(obj_data),
            headers=self.default_headers
        )
        self.assertIn(resp.status_code, (200, 201))
        return resp

    def delete_handler(self, handler_name, handler_kwargs=None):
        """DELETE on the named handler; assert 200/202/204."""
        resp = self.app.delete(
            reverse(handler_name, kwargs=handler_kwargs or {}),
            headers=self.default_headers
        )
        self.assertIn(resp.status_code, (200, 202, 204))
        return resp

    def provision(self, cluster_id, nodes_ids):
        """Start provisioning of the given nodes; must finish within 90s."""
        url = reverse(
            'ProvisionSelectedNodes',
            kwargs={'cluster_id': cluster_id}) + \
            '?nodes={0}'.format(','.join(nodes_ids))
        func = functools.partial(self.app.put,
                                 url,
                                 '',
                                 headers=self.default_headers,
                                 expect_errors=True)
        self.check_time_exec(func, 90)

    def deployment(self, cluster_id, nodes_ids):
        """Start deployment of the given nodes; must finish within 90s."""
        url = reverse(
            'DeploySelectedNodes',
            kwargs={'cluster_id': cluster_id}) + \
            '?nodes={0}'.format(','.join(nodes_ids))
        func = functools.partial(self.app.put,
                                 url,
                                 '',
                                 headers=self.default_headers,
                                 expect_errors=True)
        self.check_time_exec(func, 90)
class BaseUnitLoadTestCase(BaseLoadTestCase):
    # Load-test base for unit-style tests: the app, DB and one cluster
    # are created once per class so per-test overhead stays out of the
    # measured handler times.

    @classmethod
    def setUpClass(cls):
        # Parent handles the PERFORMANCE_PROFILING_TESTS skip and work dir
        super(BaseUnitLoadTestCase, cls).setUpClass()
        # Rebuild the test app with the profiling middleware enabled
        cls.app = app.TestApp(
            build_app(db_driver=test_db_driver).wsgifunc(ProfilerMiddleware)
        )
        syncdb()
        cls.db = db
        flush()
        # Shared environment and cluster reused by every test in the class
        cls.env = Environment(app=cls.app, session=cls.db)
        cls.env.upload_fixtures(cls.fixtures)
        cls.cluster = cls.env.create_cluster(api=False)

    @classmethod
    def tearDownClass(cls):
        # Archive profiling artifacts (parent), then drop the DB session
        super(BaseUnitLoadTestCase, cls).tearDownClass()
        cls.db.remove()

    def setUp(self):
        # NOTE(review): deliberately does NOT call super().setUp(), so the
        # per-test setup from BaseTestCase is skipped; only the timer read
        # by BaseLoadTestCase.tearDown() is started -- confirm intended.
        self.start_time = time.time()
class BaseIntegrationLoadTestCase(BaseLoadTestCase):
    """Load-test base that also bounds the total run time of each test."""

    # Upper bound for a whole test's execution time (seconds)
    MAX_TOTAL_EXEC_TIME = 230

    def setUp(self):
        super(BaseIntegrationLoadTestCase, self).setUp()
        # Per-test copy so individual tests could tighten the limit
        self.total_time = self.MAX_TOTAL_EXEC_TIME

    def tearDown(self):
        # Parent records stop_time and archives the profiling artifacts
        super(BaseIntegrationLoadTestCase, self).tearDown()
        elapsed = self.stop_time - self.start_time
        message = ("Execution time: {exec_time} is greater, "
                   "than expected: {max_exec_time}".format(
                       exec_time=elapsed,
                       max_exec_time=self.total_time))
        self.assertTrue(elapsed <= self.total_time, message)

View File

@ -0,0 +1,52 @@
# -*- coding: utf-8 -*-
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mock import patch
from nailgun.test.base import fake_tasks
from nailgun.test.performance.base import BaseIntegrationLoadTestCase
class IntegrationClusterTests(BaseIntegrationLoadTestCase):
    """Integration load test: provision and deploy a large cluster."""

    MAX_EXEC_TIME = 60

    def setUp(self):
        """Create NODES_NUM nodes, assign 3 controllers, rest computes."""
        super(IntegrationClusterTests, self).setUp()
        self.env.create_nodes(self.NODES_NUM, api=True)
        self.cluster = self.env.create_cluster(api=False)
        controllers_wanted = 3
        node_updates = []
        self.nodes_ids = []
        for index, node in enumerate(self.env.nodes):
            is_controller = index < controllers_wanted
            node_updates.append({
                'id': node.id,
                'role': ['controller'] if is_controller else ['compute'],
                'cluster': self.cluster['id'],
                'pending_addition': True,
            })
            if not is_controller:
                # NOTE(review): only compute node ids are collected, so
                # controllers are excluded from the provision/deploy calls
                # in test_deploy -- confirm this is intended.
                self.nodes_ids.append(str(node.id))
        self.put_handler('NodeCollectionHandler', node_updates)

    @fake_tasks(fake_rpc=False, mock_rpc=False)
    @patch('nailgun.rpc.cast')
    def test_deploy(self, mock_rpc):
        # Both calls are timed by check_time_exec inside the helpers
        self.provision(self.cluster['id'], self.nodes_ids)
        self.deployment(self.cluster['id'], self.nodes_ids)

View File

@ -0,0 +1,108 @@
# -*- coding: utf-8 -*-
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import time
import cProfile
import gprof2dot
from pstats import Stats
import pyprof2calltree
from nailgun.settings import settings
class ProfilerMiddleware(object):
    """WSGI middleware that runs a Profiler around every request and
    saves its artifacts after the response body is produced.
    """

    def __init__(self, app):
        self._app = app

    def __call__(self, environ, start_response):
        collected = []

        def catching_start_response(status, headers, exc_info=None):
            # Forward to the real start_response, then collect the body
            start_response(status, headers, exc_info)
            return collected.append

        def run_wrapped_app():
            result = self._app(environ, catching_start_response)
            collected.extend(result)
            if hasattr(result, 'close'):
                result.close()

        # Derive an artifact name from the request path, e.g. /a/b -> a.b
        path = environ.get('PATH_INFO').strip('/').replace('/', '.')
        handler_name = path or 'root'
        profiler = Profiler(environ['REQUEST_METHOD'], handler_name)
        profiler.profiler.runcall(run_wrapped_app)
        body = b''.join(collected)
        profiler.save_data()
        return [body]
class Profiler(object):
    """Run cProfile for a single request and save the collected data.

    Artifacts (pstats text, dot callgraph, calltree) are written into
    the 'last_performance_test' directory configured in settings.
    Profiling starts in __init__ and is dumped by save_data().
    """

    def __init__(self, method='', handler_name=''):
        self.method = method
        self.handler_name = handler_name
        if not os.path.exists(settings.
                              LOAD_TESTS_PATHS['last_performance_test']):
            os.makedirs(settings.LOAD_TESTS_PATHS['last_performance_test'])
        self.profiler = cProfile.Profile()
        self.profiler.enable()
        self.start = time.time()

    def save_data(self):
        """Dump profiling results as pstats text, dot graph and calltree."""
        elapsed = time.time() - self.start
        # Common prefix: <METHOD>.<handler>.<elapsed>ms.<timestamp>.
        pref_filename = os.path.join(
            settings.LOAD_TESTS_PATHS['last_performance_test'],
            '{method:s}.{handler_name:s}.{elapsed_time:.0f}ms.{t_time}.'.
            format(
                method=self.method,
                handler_name=self.handler_name or 'root',
                elapsed_time=elapsed * 1000.0,
                t_time=time.time()))
        tree_file = pref_filename + 'prof'
        stats_file = pref_filename + 'txt'
        callgraph_file = pref_filename + 'dot'
        # write pstats (open() instead of the py2-only file() builtin)
        with open(stats_file, 'w') as file_o:
            stats = Stats(self.profiler, stream=file_o)
            stats.sort_stats('time', 'cumulative').print_stats()
        # write callgraph in dot format
        parser = gprof2dot.PstatsParser(self.profiler)

        def get_function_name(args):
            # unpack inside the body: tuple parameters in the signature
            # are py2-only syntax (removed by PEP 3113)
            filename, line, name = args
            module = os.path.splitext(filename)[0]
            module_pieces = module.split(os.path.sep)
            # shorten the module path to its last four components
            return "{module:s}:{line:d}:{name:s}".format(
                module="/".join(module_pieces[-4:]),
                line=line,
                name=name)

        parser.get_function_name = get_function_name
        gprof = parser.parse()
        with open(callgraph_file, 'w') as file_o:
            dot = gprof2dot.DotWriter(file_o)
            theme = gprof2dot.TEMPERATURE_COLORMAP
            dot.graph(gprof, theme)
        # write calltree (binary mode, as pyprof2calltree expects)
        call_tree = pyprof2calltree.CalltreeConverter(stats)
        with open(tree_file, 'wb') as file_o:
            call_tree.output(file_o)

View File

@ -14,3 +14,5 @@ sphinxcontrib-seqdiag==0.6.0
sphinxcontrib-nwdiag==0.7.0 sphinxcontrib-nwdiag==0.7.0
tox==1.7.1 tox==1.7.1
webtest==2.0.14 webtest==2.0.14
pyprof2calltree==1.3.2
gprof2dot==2014.09.29