Cleaned up the integration tests and made them work

* the tests can be run with: tox -epy26 -- fuel_plugin/testing/tests/integration
* removed the code that mimics Nailgun
* used requests_mock to mock HTTP requests
* added a BaseIntegrationTest class that sets up the DB
* made it possible to pass your own session when creating the
  Pecan app for OSTF (sketched below)
* added WebTest to the test requirements

Change-Id: Id73208c3b0a000c017ec44388e3cecc78d2228a5
Partial-Bug: #1404892
Sebastian Kalinowski 2015-01-07 10:02:20 +01:00 committed by Sebastian Kalinowski
parent 47b82d5130
commit 98d0287158
12 changed files with 210 additions and 618 deletions
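
The core mechanic behind the cleanup is that setup_app() now accepts an externally
created session (see the fuel_plugin/ostf_adapter/wsgi/app.py hunk below), so a test
can bind the OSTF app to a session it controls and drive it over WSGI with WebTest
instead of instantiating controllers directly. A minimal sketch of that wiring,
assuming an illustrative in-memory SQLite URL rather than the configured adapter
dbpath:

    # Sketch only, not part of the commit; the SQLite URL is illustrative.
    import webtest

    from fuel_plugin.ostf_adapter.storage import engine
    from fuel_plugin.ostf_adapter.wsgi import app

    session = engine.get_session('sqlite://')      # test-controlled session
    wsgi_app = webtest.TestApp(app.setup_app(session=session))

    resp = wsgi_app.get('/v1/testruns')             # goes through the real WSGI stack
    assert resp.status_int == 200

This is the shape BaseWSGITest.setUp() takes further down: it binds the session to a
transaction it can roll back, then wraps setup_app(session=...) in webtest.TestApp.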

View File

@@ -38,3 +38,11 @@ def contexted_session(dbpath):
         raise
     finally:
         session.close()
+
+
+def get_session(dbpath):
+    """Returns SQLAlchemy scoped session for given DB configuration string."""
+    engine = create_engine(dbpath)
+    session = orm.scoped_session(orm.sessionmaker())
+    session.configure(bind=engine)
+    return session

View File

@@ -17,6 +17,7 @@ import pecan

 from fuel_plugin.ostf_adapter.wsgi import access_control
 from fuel_plugin.ostf_adapter.wsgi import hooks
+from fuel_plugin.ostf_adapter.storage import engine

 CONF = cfg.CONF

@@ -49,13 +50,17 @@ def setup_config(custom_pecan_config):
     pecan.conf.update(config_to_use)


-def setup_app(config=None):
+def setup_app(config=None, session=None):
     setup_config(config or {})

+    session = session or engine.get_session(pecan.conf.dbpath)
+    app_hooks = [
+        hooks.CustomTransactionalHook(session),
+        hooks.AddTokenHook()
+    ]
+
     app = pecan.make_app(
         pecan.conf.app.root,
         debug=pecan.conf.debug,
         force_canonical=True,
-        hooks=[hooks.CustomTransactionalHook(dbpath=pecan.conf.dbpath),
-               hooks.AddTokenHook()]
+        hooks=app_hooks,
     )

     return access_control.setup(app)

View File

@@ -13,7 +13,6 @@
 # under the License.

 import logging

-from sqlalchemy import create_engine, orm
 from pecan import hooks

@@ -22,10 +21,8 @@ LOG = logging.getLogger(__name__)

 class CustomTransactionalHook(hooks.TransactionHook):
-    def __init__(self, dbpath):
-        engine = create_engine(dbpath)
-        self.session = orm.scoped_session(orm.sessionmaker())
-        self.session.configure(bind=engine)
+    def __init__(self, session):
+        self.session = session

         def start():
             pass

View File

@@ -1,13 +0,0 @@
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

View File

@@ -1,119 +0,0 @@
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from bottle import route, run


cluster_fixture = {
    1: {
        'cluster_meta': {
            'release_id': 1,
            'mode': 'ha'
        },
        'release_data': {
            'operating_system': 'rhel'
        },
        'cluster_attributes': {
            'editable': {
                'additional_components': {},
                'common': {}
            }
        }
    },
    2: {
        'cluster_meta': {
            'release_id': 2,
            'mode': 'multinode',
        },
        'release_data': {
            'operating_system': 'ubuntu'
        },
        'cluster_attributes': {
            'editable': {
                'additional_components': {},
                'common': {}
            }
        }
    },
    3: {
        'cluster_meta': {
            'release_id': 3,
            'mode': 'ha'
        },
        'release_data': {
            'operating_system': 'rhel'
        },
        'cluster_attributes': {
            'editable': {
                'additional_components': {
                    'murano': {
                        'value': True
                    },
                    'sahara': {
                        'value': False
                    }
                },
                'common': {}
            }
        }
    },
    4: {
        'cluster_meta': {
            'release_id': 4,
            'mode': 'test_error'
        },
        'release_data': {
            'operating_system': 'none'
        },
        'cluster_attributes': {
            'editable': {
                'additional_components': {},
                'common': {}
            }
        }
    },
    5: {
        'cluster_meta': {
            'release_id': 5,
            'mode': 'dependent_tests'
        },
        'release_data': {
            'operating_system': 'none'
        },
        'cluster_attributes': {
            'editable': {
                'additional_components': {},
                'common': {}
            }
        }
    }
}


@route('/api/clusters/<id:int>')
def serve_cluster_meta(id):
    return cluster_fixture[id]['cluster_meta']


@route('/api/releases/<id:int>')
def serve_cluster_release_info(id):
    return cluster_fixture[id]['release_data']


@route('/api/clusters/<id:int>/attributes')
def serve_cluster_attributes(id):
    return cluster_fixture[id]['cluster_attributes']


run(host='localhost', port=8000, debug=True)

View File

@@ -1,236 +0,0 @@
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Profiled execution.
profile=no
# Add <file or directory> to the black list. It should be a base name, not a
# path. You may set this option multiple times.
ignore=CVS
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
[MESSAGES CONTROL]
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifier separated by comma (,) or put this option
# multiple time.
disable=F0401,R0201,W0311,C0111
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html
output-format=parseable
# Include message's id in output
include-ids=yes
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no
# Tells whether to display a full report or only the messages
reports=yes
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (R0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Add a comment according to your evaluation note. This is used by the global
# evaluation report (R0004).
comment=no
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=120
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching names used for dummy variables (i.e. not used).
dummy-variables-rgx=_|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=SQLObject
# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
zope=no
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed.
generated-members=REQUEST,acl_users,aq_parent
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[BASIC]
# Required attributes for module, separated by a comma
required-attributes=
# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,apply,input
# Regular expression which should only match correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression which should only match correct module level names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression which should only match correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Regular expression which should only match correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct instance attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct list comprehension /
# generator expression variable names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Good variable names which should always be accepted, separated by a comma
good-names=app,uwsgi,e,i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Regular expression which should only match functions or classes name which do
# not require a docstring
no-docstring-rgx=__.*__|[Tt]est.*
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=20
# Maximum number of return / yield for function / method body
max-returns=10
# Maximum number of branch for function / method body
max-branchs=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=10
# Minimum number of public methods for a class (see R0903).
min-public-methods=0
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
[CLASSES]
# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defines in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp

View File

@@ -1,55 +0,0 @@
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import signal
import subprocess
import time

processes_pool = None


def setup():
    global processes_pool

    with open('/dev/null', 'w') as devnull:
        processes_pool = tuple(
            [
                subprocess.Popen(
                    [
                        'python',
                        'fuel_plugin/testing/test_utils/nailgun_mimic.py'
                    ],
                    stdout=devnull,
                    stderr=devnull
                ),
                subprocess.Popen(
                    [
                        'ostf-server',
                        '--debug',
                        ('--debug_tests=fuel_plugin/testing/'
                         'fixture/dummy_tests')
                    ],
                    stdout=devnull,
                    stderr=devnull
                )
            ]
        )

    time.sleep(5)


def teardown():
    for process in processes_pool:
        process.send_signal(signal.SIGINT)
        process.wait()

View File

@@ -15,15 +15,18 @@
 # under the License.

-import mock
+import requests_mock
 from sqlalchemy import create_engine
-from sqlalchemy.orm import sessionmaker
+from sqlalchemy import event
+from sqlalchemy.orm import sessionmaker, scoped_session
 import unittest2
+import webtest

 from fuel_plugin.ostf_adapter import config
 from fuel_plugin.ostf_adapter import mixins
-from fuel_plugin.ostf_adapter.nose_plugin.nose_discovery import discovery
+from fuel_plugin.ostf_adapter.nose_plugin import nose_discovery
 from fuel_plugin.ostf_adapter.storage import models
+from fuel_plugin.ostf_adapter.wsgi import app

 TEST_PATH = 'fuel_plugin/testing/fixture/dummy_tests'

@@ -119,16 +122,77 @@ class BaseUnitTest(unittest2.TestCase):
     """Base class for all unit tests."""


-class BaseWSGITest(unittest2.TestCase):
+class BaseIntegrationTest(BaseUnitTest):
+    """Base class for all integration tests."""

     @classmethod
     def setUpClass(cls):
-        cls.dbpath = 'postgresql+psycopg2://ostf:ostf@localhost/ostf'
-        cls.Session = sessionmaker()
+        config.init_config([])
+
+        # db connection
+        cls.dbpath = config.cfg.CONF.adapter.dbpath
         cls.engine = create_engine(cls.dbpath)

-        cls.ext_id = 'fuel_plugin.testing.fixture.dummy_tests.'
-        cls.expected = {
+        # mock http requests
+        cls.requests_mock = requests_mock.Mocker()
+        cls.requests_mock.start()
+
+    @classmethod
+    def tearDownClass(cls):
+        # stop https requests mocking
+        cls.requests_mock.stop()
+
+    def setUp(self):
+        self.connection = self.engine.connect()
+        self.trans = self.connection.begin()
+
+        self.session = scoped_session(sessionmaker())
+        self.session.configure(bind=self.connection)
+
+        # supprot tests with rollbacks
+        # start the session in a SAVEPOINT...
+        self.session.begin_nested()
+
+        # # then each time that SAVEPOINT ends, reopen it
+        @event.listens_for(self.session, "after_transaction_end")
+        def restart_savepoint(session, transaction):
+            if transaction.nested and not transaction._parent.nested:
+                session.begin_nested()
+
+    def tearDown(self):
+        # rollback changes to database
+        # made by tests
+        self.trans.rollback()
+        self.session.close()
+        self.connection.close()
+
+    def mock_api_for_cluster(self, cluster_id):
+        """Mock requests to Nailgun to mimic behavior of
+        Nailgun's API
+        """
+        cluster = CLUSTERS[cluster_id]
+        release_id = cluster['cluster_meta']['release_id']
+
+        self.requests_mock.register_uri(
+            'GET',
+            '/api/clusters/{0}'.format(cluster_id),
+            json=cluster['cluster_meta'])
+
+        self.requests_mock.register_uri(
+            'GET',
+            '/api/releases/{0}'.format(release_id),
+            json=cluster['release_data'])
+
+        self.requests_mock.register_uri(
+            'GET',
+            '/api/clusters/{0}/attributes'.format(cluster_id),
+            json=cluster['cluster_attributes'])
+
+
+class BaseWSGITest(BaseIntegrationTest):
+
+    def setUp(self):
+        super(BaseWSGITest, self).setUp()
+
+        self.ext_id = 'fuel_plugin.testing.fixture.dummy_tests.'
+        self.expected = {
             'cluster': {
                 'id': 1,
                 'deployment_tags': set(['ha', 'rhel', 'nova_network',

@@ -137,7 +201,7 @@ class BaseWSGITest(unittest2.TestCase):
             'test_sets': ['general_test',
                           'stopped_test', 'ha_deployment_test',
                           'environment_variables'],
-            'tests': [cls.ext_id + test for test in [
+            'tests': [self.ext_id + test for test in [
                 ('deployment_types_tests.ha_deployment_test.'
                  'HATest.test_ha_depl'),
                 ('deployment_types_tests.ha_deployment_test.'

@@ -157,51 +221,17 @@ class BaseWSGITest(unittest2.TestCase):
             ]]
         }

-    def setUp(self):
-        # orm session wrapping
-        config.init_config([])
-
-        self.connection = self.engine.connect()
-        self.trans = self.connection.begin()
-
-        self.Session.configure(
-            bind=self.connection
-        )
-        self.session = self.Session()
-
-        test_sets = self.session.query(models.TestSet).all()
-
-        # need this if start unit tests in conjuction with integration
-        if not test_sets:
-            discovery(path=TEST_PATH, session=self.session)
-
-        mixins.TEST_REPOSITORY = []
-        mixins.cache_test_repository(self.session)
-
-        # mocking
-        # request mocking
-        self.request_mock = mock.MagicMock()
-
-        self.request_patcher = mock.patch(
-            'fuel_plugin.ostf_adapter.wsgi.controllers.request',
-            self.request_mock
-        )
-        self.request_patcher.start()
-
-        # engine.get_session mocking
-        self.request_mock.session = self.session
-
-    def tearDown(self):
-        # rollback changes to database
-        # made by tests
-        self.trans.rollback()
-        self.session.close()
-        self.connection.close()
-
-        # end of test_case patching
-        self.request_patcher.stop()
+        self.discovery()
+
+        self.app = webtest.TestApp(app.setup_app(session=self.session))
+
+    def discovery(self):
+        """Discover dummy tests used for testsing."""
+        mixins.TEST_REPOSITORY = []
+        nose_discovery.discovery(path=TEST_PATH, session=self.session)
+        mixins.cache_test_repository(self.session)
+        self.session.flush()

+    @property
     def is_background_working(self):
         is_working = True
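
The setUp()/tearDown() pair added above follows SQLAlchemy's "join the session into
an external transaction" recipe: every test runs inside a SAVEPOINT, the
after_transaction_end listener reopens that SAVEPOINT whenever test code commits,
and tearDown() rolls back the outer transaction so the database is untouched between
tests. Distilled to a standalone sketch (the connection URL is the old hard-coded
test dbpath, reused here purely for illustration):

    from sqlalchemy import create_engine, event
    from sqlalchemy.orm import scoped_session, sessionmaker

    engine = create_engine('postgresql+psycopg2://ostf:ostf@localhost/ostf')
    connection = engine.connect()
    outer = connection.begin()              # outer transaction, never committed

    session = scoped_session(sessionmaker(bind=connection))
    session.begin_nested()                  # test writes land in a SAVEPOINT

    @event.listens_for(session, "after_transaction_end")
    def restart_savepoint(sess, transaction):
        # a commit in test code only ends the SAVEPOINT; reopen it so later
        # writes stay confined to the outer transaction
        if transaction.nested and not transaction._parent.nested:
            sess.begin_nested()

    # ... run a test that inserts rows and calls session.commit() ...

    session.close()
    outer.rollback()                        # discard everything the test wrote
    connection.close()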

View File

@@ -18,7 +18,7 @@ from sqlalchemy import create_engine

 from fuel_plugin.testing.tests.functional.base import \
     BaseAdapterTest, Response
-from fuel_plugin.ostf_client.client import TestingAdapterClient as adapter
+from fuel_plugin.ostf_client import client


 class AdapterTests(BaseAdapterTest):

@@ -74,7 +74,7 @@ class AdapterTests(BaseAdapterTest):
             ]
         }

-        cls.adapter = adapter(url)
+        cls.adapter = client.TestingAdapterClient(url)
         cls.client = cls.init_client(url)

     @classmethod

View File

@@ -12,39 +12,32 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-import json
-
-from mock import patch, Mock
+import mock

-from fuel_plugin.ostf_adapter.wsgi import controllers
 from fuel_plugin.ostf_adapter.storage import models
 from fuel_plugin.testing.tests import base


 class TestTestsController(base.BaseWSGITest):

-    def setUp(self):
-        super(TestTestsController, self).setUp()
-        self.controller = controllers.TestsController()
-
     def test_get(self):
-        res = self.controller.get(self.expected['cluster']['id'])
+        cluster_id = self.expected['cluster']['id']
+        self.mock_api_for_cluster(cluster_id)
+
+        resp = self.app.get(
+            '/v1/tests/{0}'.format(cluster_id)
+        )
+        resp_tests = [test['id'] for test in resp.json]

         self.assertTrue(self.is_background_working)
-        self.assertTrue(len(res) == len(self.expected['tests']))
-        self.assertTrue(
-            sorted([test['id'] for test in res]),
-            sorted(self.expected['tests'])
+        self.assertItemsEqual(
+            resp_tests,
+            self.expected['tests']
         )


 class TestTestSetsController(base.BaseWSGITest):

-    def setUp(self):
-        super(TestTestSetsController, self).setUp()
-        self.controller = controllers.TestsetsController()
-
     def test_get(self):
         self.expected['test_set_description'] = [
             'General fake tests',

@@ -52,64 +45,56 @@ class TestTestSetsController(base.BaseWSGITest):
             'Fake tests for HA deployment',
             'Test for presence of env variables inside of testrun subprocess'
         ]
-        res = self.controller.get(self.expected['cluster']['id'])
+
+        cluster_id = self.expected['cluster']['id']
+        self.mock_api_for_cluster(cluster_id)
+
+        resp = self.app.get(
+            '/v1/testsets/{0}'.format(cluster_id)
+        )
+        resp_testsets_ids = [testset['id'] for testset in resp.json]

         self.assertTrue(self.is_background_working)
-        self.assertTrue(
-            sorted([testset['id'] for testset in res]) ==
-            sorted(self.expected['test_sets'])
-        )
-        self.assertTrue(
-            sorted([testset['name'] for testset in res]) ==
-            sorted(self.expected['test_set_description'])
-        )
-
-        test_set_order = {
-            'general_test': 0,
-            'stopped_test': 1,
-            'ha_deployment_test': 2,
-            'environment_variables': 3
-        }
-
-        resp_elements = [testset['id'] for testset in res]
-        for test_set in resp_elements:
-            self.assertTrue(
-                test_set_order[test_set] == resp_elements.index(test_set)
-            )
+        self.assertItemsEqual(
+            resp_testsets_ids,
+            self.expected['test_sets']
+        )
+        self.assertItemsEqual(
+            [testset['name'] for testset in resp.json],
+            self.expected['test_set_description']
+        )
+
+        test_sets_order = (
+            'general_test',
+            'stopped_test',
+            'ha_deployment_test',
+            'environment_variables',
+        )
+        self.assertSequenceEqual(resp_testsets_ids, test_sets_order)


 class TestTestRunsController(base.BaseWSGITest):

     def setUp(self):
         super(TestTestRunsController, self).setUp()
-        self.request_mock.body = json.dumps([
-            {
-                'testset': 'ha_deployment_test',
-                'metadata': {'cluster_id': 1}
-            }]
-        )
-
-        self.controller = controllers.TestrunsController()
-
-        self.plugin_mock = Mock()
+        self.plugin_mock = mock.Mock()
         self.plugin_mock.kill.return_value = True

-        self.nose_plugin_patcher = patch(
+        self.nose_plugin_patcher = mock.patch(
             'fuel_plugin.ostf_adapter.storage.models.nose_plugin.get_plugin',
             lambda *args: self.plugin_mock
         )
         self.nose_plugin_patcher.start()

+        self.cluster_id = self.expected['cluster']['id']
+        self.mock_api_for_cluster(self.cluster_id)
+
     def tearDown(self):
         super(TestTestRunsController, self).tearDown()
         self.nose_plugin_patcher.stop()


-class TestTestRunsPostController(TestTestRunsController):
-
     def test_post(self):
         self.expected['testrun_post'] = {
             'testset': 'ha_deployment_test',

@@ -127,17 +112,25 @@ class TestTestRunsPostController(TestTestRunsController):
             }
         }

-        res = self.controller.post()[0]
+        resp = self.app.post_json('/v1/testruns/', (
+            {
+                'testset': 'ha_deployment_test',
+                'metadata': {'cluster_id': self.cluster_id}
+            },
+        ))
+        resp_testrun = resp.json[0]

-        for key in self.expected['testrun_post'].keys():
+        for key in self.expected['testrun_post']:
             if key == 'tests':
-                self.assertTrue(
-                    sorted(self.expected['testrun_post'][key]['names']) ==
-                    sorted([test['id'] for test in res[key]])
+                self.assertItemsEqual(
+                    self.expected['testrun_post'][key]['names'],
+                    [test['id'] for test in resp_testrun[key]]
                 )
             else:
-                self.assertTrue(
-                    self.expected['testrun_post'][key] == res[key]
+                self.assertEqual(
+                    self.expected['testrun_post'][key],
+                    resp_testrun[key]
                 )

         self.session.query(models.TestRun)\

@@ -152,35 +145,29 @@ class TestTestRunsPostController(TestTestRunsController):
         tests_names = [
             test.name for test in testrun_tests
         ]
-        self.assertTrue(
-            sorted(tests_names) ==
-            sorted(self.expected['testrun_post']['tests']['names'])
+        self.assertItemsEqual(
+            tests_names,
+            self.expected['testrun_post']['tests']['names']
         )

-
-class TestTestRunsPutController(TestTestRunsController):
-
-    def setUp(self):
-        super(TestTestRunsPutController, self).setUp()
-        self.test_run = self.controller.post()[0]
+    def test_put_stopped(self):
+        resp = self.app.post_json('/v1/testruns/', (
+            {
+                'testset': 'ha_deployment_test',
+                'metadata': {'cluster_id': self.cluster_id}
+            },
+        ))
+        resp_testrun = resp.json[0]

         self.session.query(models.Test)\
-            .filter_by(test_run_id=int(self.test_run['id']))\
+            .filter_by(test_run_id=resp_testrun['id'])\
             .update({'status': 'running'})

         # flush data which test is depend on into db
         self.session.commit()

-        self.request_mock.body = json.dumps(
-            [{
-                'status': 'stopped',
-                'id': self.test_run['id']
-            }]
-        )
-
-    def test_put_stopped(self):
         self.expected['testrun_put'] = {
-            'id': int(self.test_run['id']),
+            'id': resp_testrun['id'],
             'testset': 'ha_deployment_test',
             'cluster_id': 1,
             'tests': {

@@ -195,17 +182,23 @@ class TestTestRunsPutController(TestTestRunsController):
             }
         }

-        res = self.controller.put()[0]
+        resp = self.app.put_json('/v1/testruns/', (
+            {
+                'status': 'stopped',
+                'id': resp_testrun['id']
+            },
+        ))
+        resp_testrun = resp.json[0]

         for key in self.expected['testrun_put'].keys():
             if key == 'tests':
-                self.assertTrue(
-                    sorted(self.expected['testrun_put'][key]['names']) ==
-                    sorted([test['id'] for test in res[key]])
+                self.assertItemsEqual(
+                    self.expected['testrun_put'][key]['names'],
+                    [test['id'] for test in resp_testrun[key]]
                 )
             else:
-                self.assertTrue(
-                    self.expected['testrun_put'][key] == res[key]
+                self.assertEqual(
+                    self.expected['testrun_put'][key], resp_testrun[key]
                 )

         testrun_tests = self.session.query(models.Test)\

@@ -215,9 +208,9 @@ class TestTestRunsPutController(TestTestRunsController):
         tests_names = [
             test.name for test in testrun_tests
         ]
-        self.assertTrue(
-            sorted(tests_names) ==
-            sorted(self.expected['testrun_put']['tests']['names'])
+        self.assertItemsEqual(
+            tests_names,
+            self.expected['testrun_put']['tests']['names']
         )

         self.assertTrue(

@@ -229,12 +222,14 @@ class TestTestRunsPutController(TestTestRunsController):

 class TestClusterRedeployment(base.BaseWSGITest):

-    def setUp(self):
-        super(TestClusterRedeployment, self).setUp()
-        self.controller = controllers.TestsetsController()
-        self.controller.get(self.expected['cluster']['id'])
-
-    def test_cluster_redeployment_with_different_tags(self):
+    @mock.patch('fuel_plugin.ostf_adapter.mixins._get_cluster_depl_tags')
+    def test_cluster_redeployment_with_different_tags(self, m_get_depl_tags):
+        m_get_depl_tags.return_value = set(
+            ['multinode', 'centos']
+        )
+        cluster_id = self.expected['cluster']['id']
+        self.app.get('/v1/testsets/{0}'.format(cluster_id))
+
         self.expected = {
             'cluster': {
                 'id': 1,

@@ -265,14 +260,10 @@ class TestClusterRedeployment(base.BaseWSGITest):

         # patch request_to_nailgun function in orded to emulate
         # redeployment of cluster
-        cluster_data = set(
+        m_get_depl_tags.return_value = set(
             ['multinode', 'ubuntu', 'nova_network']
         )
-        with patch(
-            ('fuel_plugin.ostf_adapter.mixins._get_cluster_depl_tags'),
-            lambda *args, **kwargs: cluster_data
-        ):
-            self.controller.get(self.expected['cluster']['id'])
+        self.app.get('/v1/testsets/{0}'.format(cluster_id))

         self.assertTrue(self.is_background_working)

View File

@@ -12,39 +12,23 @@
 # License for the specific language governing permissions and limitations
 # under the License.

-import json
-
-from mock import patch, MagicMock
-from webtest import TestApp
+import mock

-from fuel_plugin.ostf_adapter.wsgi import app
+from fuel_plugin.ostf_adapter.storage import models
 from fuel_plugin.testing.tests import base


-class WsgiInterfaceTests(base.BaseWSGITest):
-
-    @classmethod
-    def setUpClass(cls):
-        super(WsgiInterfaceTests, cls).setUpClass()
-
-    def setUp(self):
-        super(WsgiInterfaceTests, self).setUp()
-        self.app = TestApp(app.setup_app())
-        self.fixture = {
-            'cluster_id': 1
-        }
-
-    def tearDown(self):
-        super(WsgiInterfaceTests, self).tearDown()
+class WsgiInterfaceTest(base.BaseWSGITest):

     def test_get_all_tests(self):
-        self.app.get('/v1/tests/{0}'
-                     .format(self.fixture['cluster_id']))
+        cluster_id = 1
+        self.mock_api_for_cluster(cluster_id)
+        self.app.get('/v1/tests/{0}'.format(cluster_id))

     def test_get_all_testsets(self):
-        self.app.get('/v1/testsets/{0}'
-                     .format(self.fixture['cluster_id']))
+        cluster_id = 1
+        self.mock_api_for_cluster(cluster_id)
+        self.app.get('/v1/testsets/{0}'.format(cluster_id))

     def test_get_one_testruns(self):
         self.app.get('/v1/testruns/1')

@@ -52,21 +36,23 @@ class WsgiInterfaceTests(base.BaseWSGITest):
     def test_get_all_testruns(self):
         self.app.get('/v1/testruns')

-    @patch('fuel_plugin.ostf_adapter.wsgi.controllers.models')
-    def test_post_testruns(self, models):
+    @mock.patch.object(models.TestRun, 'start')
+    def test_post_testruns(self, mstart):
+        self.mock_api_for_cluster(3)
+        self.mock_api_for_cluster(4)
+
         testruns = [
             {
-                'testset': 'test_simple',
+                'testset': 'general_test',
                 'metadata': {'cluster_id': 3}
             },
             {
-                'testset': 'test_simple',
+                'testset': 'general_test',
                 'metadata': {'cluster_id': 4}
             }
         ]

-        self.request_mock.body = json.dumps(testruns)
-        models.TestRun.start.return_value = {}
+        mstart.return_value = {}
         self.app.post_json('/v1/testruns', testruns)

     def test_put_testruns(self):

@@ -83,11 +69,9 @@ class WsgiInterfaceTests(base.BaseWSGITest):
             }
         ]

-        self.request_mock.body = json.dumps(testruns)
-        self.request_mock.storage.get_test_run.return_value = \
-            MagicMock(frontend={})
         self.app.put_json('/v1/testruns', testruns)

     def test_get_last_testruns(self):
-        self.app.get('/v1/testruns/last/{0}'
-                     .format(self.fixture['cluster_id']))
+        cluster_id = 1
+        self.mock_api_for_cluster(cluster_id)
+        self.app.get('/v1/testruns/last/{0}'.format(cluster_id))

View File

@@ -1,8 +1,8 @@
 -r requirements.txt
+WebTest>=2.0.17
 mock==1.0.1
 requests-mock>=0.5.1
 flake8
 tox>=1.7.1
 coverage==3.6
 fabric
-bottle
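
For reference, the requests-mock dependency kept above is what backs the new
mock_api_for_cluster() helper: a started Mocker() patches the requests library
process-wide (as BaseIntegrationTest.setUpClass() does), and register_uri() answers
matching URLs with canned JSON. A small self-contained illustration; the absolute URL
below is arbitrary for the example, even though the payload mirrors the old cluster 1
fixture:

    import requests
    import requests_mock

    mocker = requests_mock.Mocker()
    mocker.start()                          # patch requests globally
    try:
        # illustrative Nailgun-style endpoint
        mocker.register_uri(
            'GET',
            'http://127.0.0.1:8000/api/clusters/1',
            json={'release_id': 1, 'mode': 'ha'})

        resp = requests.get('http://127.0.0.1:8000/api/clusters/1')
        assert resp.json() == {'release_id': 1, 'mode': 'ha'}
    finally:
        mocker.stop()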