Revert "Add versioning for fuel_health tests"

This reverts commit 6c250763b4.
This commit broke the BVT (build verification) tests.

Change-Id: I8d4dd55b65aa8ee17049a9a005109508b654597b
Closes-Bug: #1420882
This commit is contained in:
tatyana-leontovich 2015-02-11 17:53:57 +00:00
parent 6c250763b4
commit f9c37d0876
19 changed files with 73 additions and 424 deletions

View File

@ -229,8 +229,7 @@ class CeilometerApiPlatformTests(ceilometermanager.CeilometerBaseTest):
3. Check keystone role notifications.
4. Check keystone group notifications.
Duration: 5 s.
Available since release: 2014.2-6.0
Deployment tags: Ceilometer
Deployment tags: Ceilometer, 2014.2-6.0, 2014.2-6.1
"""
tenant, user, role, group, trust = self.identity_helper()

View File

@ -124,8 +124,7 @@ class VanillaTwoClusterTest(SaharaClusterTest):
7. Delete the cluster template
Duration: 3600 s.
Available since release: 2014.2-6.1
Deployment tags: Sahara
Deployment tags: Sahara, 2014.2-6.1
"""
fail_msg = 'Failed to create cluster template.'

View File

@ -53,8 +53,7 @@ class NetworksTest(nmanager.SanityChecksTest):
2. Confirm that a response is received.
Duration: 20 s.
Available since release: 2014.2-6.0
Deployment tags: neutron
Deployment tags: neutron, 2014.2-6.0, 2014.2-6.1
"""
fail_msg = "Networks list is unavailable. "
networks = self.verify(20, self._list_networks, 1,

View File

@ -71,8 +71,7 @@ class VanillaTwoTemplatesTest(SaharaTemplatesTest):
8. Delete the cluster template
Duration: 80 s.
Available since release: 2014.2-6.1
Deployment tags: Sahara
Deployment tags: Sahara, 2014.2-6.1
"""
fail_msg = 'Failed to create node group template.'
@ -134,8 +133,7 @@ class HDPTwoTemplatesTest(SaharaTemplatesTest):
8. Delete the cluster template
Duration: 80 s.
Available since release: 2014.2-6.1
Deployment tags: Sahara
Deployment tags: Sahara, 2014.2-6.1
"""
fail_msg = 'Failed to create node group template.'

View File

@ -53,7 +53,6 @@ adapter_opts = [
cli_opts = [
cfg.BoolOpt('debug', default=False),
cfg.BoolOpt('clear-db', default=False),
cfg.BoolOpt('after-initialization-environment-hook', default=False),
cfg.StrOpt('debug_tests')
]

View File

@ -28,7 +28,7 @@ LOG = logging.getLogger(__name__)
TEST_REPOSITORY = []
def delete_db_data(session):
def clean_db(session):
LOG.info('Starting clean db action.')
session.query(models.ClusterTestingPattern).delete()
session.query(models.ClusterState).delete()
@ -42,14 +42,12 @@ def cache_test_repository(session):
.options(joinedload('tests'))\
.all()
crucial_tests_attrs = ['name', 'deployment_tags',
'available_since_release']
crucial_tests_attrs = ['name', 'deployment_tags']
for test_set in test_repository:
data_elem = dict()
data_elem['test_set_id'] = test_set.id
data_elem['deployment_tags'] = test_set.deployment_tags
data_elem['available_since_release'] = test_set.available_since_release
data_elem['tests'] = []
for test in test_set.tests:
@ -61,24 +59,26 @@ def cache_test_repository(session):
def discovery_check(session, cluster, token=None):
cluster_attrs = _get_cluster_attrs(cluster, token=token)
cluster_deployment_args = _get_cluster_depl_tags(cluster, token=token)
cluster_data = {
'id': cluster,
'deployment_tags': cluster_attrs['deployment_tags'],
'release_version': cluster_attrs['release_version'],
'cluster_id': cluster,
'deployment_tags': cluster_deployment_args
}
cluster_state = session.query(models.ClusterState)\
.filter_by(id=cluster_data['id'])\
.filter_by(id=cluster_data['cluster_id'])\
.first()
if not cluster_state:
session.add(
models.ClusterState(**cluster_data)
models.ClusterState(
id=cluster_data['cluster_id'],
deployment_tags=list(cluster_data['deployment_tags'])
)
)
# flush data to db, because _add_cluster_testing_pattern
# flush data to db, cuz _add_cluster_testing_pattern
# is dependent on it
session.flush()
@ -100,9 +100,7 @@ def discovery_check(session, cluster, token=None):
session.merge(cluster_state)
def _get_cluster_attrs(cluster_id, token=None):
cluster_attrs = {}
def _get_cluster_depl_tags(cluster_id, token=None):
REQ_SES = requests.Session()
REQ_SES.trust_env = False
@ -132,15 +130,13 @@ def _get_cluster_attrs(cluster_id, token=None):
release_data = REQ_SES.get(release_url).json()
if 'version' in release_data:
cluster_attrs['release_version'] = release_data['version']
# info about deployment type and operating system
mode = 'ha' if 'ha' in response['mode'].lower() else response['mode']
deployment_tags.add(mode)
deployment_tags.add(release_data.get(
'operating_system', 'failed to get os'))
if 'version' in release_data:
deployment_tags.add(release_data['version'])
# networks manager
network_type = response.get('net_provider', 'nova_network')
deployment_tags.add(network_type)
@ -179,11 +175,7 @@ def _get_cluster_attrs(cluster_id, token=None):
if libvrt_data and libvrt_data.get('value'):
deployment_tags.add(libvrt_data['value'])
cluster_attrs['deployment_tags'] = set(
[tag.lower() for tag in deployment_tags]
)
return cluster_attrs
return set([tag.lower() for tag in deployment_tags])
def _add_cluster_testing_pattern(session, cluster_data):
@ -196,14 +188,22 @@ def _add_cluster_testing_pattern(session, cluster_data):
cache_test_repository(session)
for test_set in TEST_REPOSITORY:
if nose_utils.tests_availability_cond(cluster_data, test_set):
testing_pattern = {}
testing_pattern['cluster_id'] = cluster_data['id']
if nose_utils.process_deployment_tags(
cluster_data['deployment_tags'],
test_set['deployment_tags']
):
testing_pattern = dict()
testing_pattern['cluster_id'] = cluster_data['cluster_id']
testing_pattern['test_set_id'] = test_set['test_set_id']
testing_pattern['tests'] = []
for test in test_set['tests']:
if nose_utils.tests_availability_cond(cluster_data, test):
if nose_utils.process_deployment_tags(
cluster_data['deployment_tags'],
test['deployment_tags']
):
testing_pattern['tests'].append(test['name'])
to_database.append(

View File

@ -14,76 +14,11 @@
import logging
from sqlalchemy import create_engine
from sqlalchemy.engine import reflection
from sqlalchemy import MetaData
from sqlalchemy import schema
from fuel_plugin.ostf_adapter.storage import alembic_cli
LOG = logging.getLogger(__name__)
def clear_db(dbpath):
"""Clean database (to prevent issue with changed
head revision script) and upgrade to head revision.
Expect 0 on success by nailgun
Exception is good enough signal that something goes wrong
"""
db_engine = create_engine(dbpath)
conn = db_engine.connect()
trans = conn.begin()
meta = MetaData()
meta.reflect(bind=db_engine)
inspector = reflection.Inspector.from_engine(db_engine)
tbs = []
all_fks = []
for table_name in inspector.get_table_names():
fks = []
for fk in inspector.get_foreign_keys(table_name):
if not fk['name']:
continue
fks.append(
schema.ForeignKeyConstraint((), (), name=fk['name'])
)
t = schema.Table(
table_name,
meta,
*fks,
extend_existing=True
)
tbs.append(t)
all_fks.extend(fks)
for fkc in all_fks:
conn.execute(schema.DropConstraint(fkc))
for table in tbs:
conn.execute(schema.DropTable(table))
custom_types = conn.execute(
"SELECT n.nspname as schema, t.typname as type "
"FROM pg_type t LEFT JOIN pg_catalog.pg_namespace n "
"ON n.oid = t.typnamespace "
"WHERE (t.typrelid = 0 OR (SELECT c.relkind = 'c' "
"FROM pg_catalog.pg_class c WHERE c.oid = t.typrelid)) "
"AND NOT EXISTS(SELECT 1 FROM pg_catalog.pg_type el "
"WHERE el.oid = t.typelem AND el.typarray = t.oid) "
"AND n.nspname NOT IN ('pg_catalog', 'information_schema')"
)
for tp in custom_types:
conn.execute("DROP TYPE {0}".format(tp[1]))
trans.commit()
alembic_cli.drop_migration_meta(db_engine)
conn.close()
db_engine.dispose()
return 0
def after_initialization_environment_hook():
"""Expect 0 on success by nailgun
Exception is good enough signal that something goes wrong

View File

@ -81,17 +81,13 @@ class DiscoveryPlugin(plugins.Plugin):
test_id = test.id()
for test_set_id in self.test_sets.keys():
if self.test_belongs_to_testset(test_id, test_set_id):
test_kwargs = {
"title": "",
"description": "",
"duration": "",
"deployment_tags": [],
"available_since_release": "",
}
data = dict()
test_kwargs.update(nose_utils.get_description(test))
(data['title'], data['description'],
data['duration'], data['deployment_tags']) = \
nose_utils.get_description(test)
test_kwargs.update(
data.update(
{
'test_set_id': test_set_id,
'name': test_id
@ -99,7 +95,7 @@ class DiscoveryPlugin(plugins.Plugin):
)
try:
test_obj = models.Test(**test_kwargs)
test_obj = models.Test(**data)
self.session.merge(test_obj)
# flush tests data into db

View File

@ -20,8 +20,6 @@ import os
import re
import traceback
from distutils import version
from nose import case
from nose.suite import ContextSuite
@ -71,7 +69,6 @@ def get_description(test_obj):
if isinstance(test_obj, case.Test):
docstring = test_obj.test._testMethodDoc
test_data = {}
if docstring:
deployment_tags_pattern = r'Deployment tags:.?(?P<tags>.+)?'
docstring, deployment_tags = _process_docstring(
@ -86,31 +83,21 @@ def get_description(test_obj):
deployment_tags = [
tag.strip().lower() for tag in deployment_tags.split(',')
]
test_data["deployment_tags"] = deployment_tags
rel_vers_pattern = "Available since release:.?(?P<rel_vers>.+)"
docstring, rel_vers = _process_docstring(
docstring,
rel_vers_pattern
)
if rel_vers:
test_data["available_since_release"] = rel_vers
else:
deployment_tags = []
duration_pattern = r'Duration:.?(?P<duration>.+)'
docstring, duration = _process_docstring(
docstring,
duration_pattern
)
if duration:
test_data["duration"] = duration
docstring = docstring.split('\n')
test_data["title"] = docstring.pop(0)
test_data["description"] = \
u'\n'.join(docstring) if docstring else u""
name = docstring.pop(0)
description = u'\n'.join(docstring) if docstring else u""
return test_data
return name, description, duration, deployment_tags
return u"", u"", u"", []
def modify_test_name_for_nose(test_path):
@ -176,7 +163,7 @@ def get_tests_to_update(test):
return tests
def _process_deployment_tags(cluster_depl_tags, test_depl_tags):
def process_deployment_tags(cluster_depl_tags, test_depl_tags):
"""Process alternative deployment tags for testsets and tests
and determines whether current test entity (testset or test)
is appropriate for cluster.
@ -192,46 +179,3 @@ def _process_deployment_tags(cluster_depl_tags, test_depl_tags):
return True
return False
def _compare_release_versions(cluster_release_version, test_release_version):
cl_openstack_ver, cl_fuel_ver = cluster_release_version.split('-')
test_openstack_ver, test_fuel_ver = test_release_version.split('-')
cond = (
(version.StrictVersion(cl_openstack_ver) >=
version.StrictVersion(test_openstack_ver))
and
(version.StrictVersion(cl_fuel_ver) >=
version.StrictVersion(test_fuel_ver))
)
return cond
def tests_availability_cond(cluster_data, test_entity_data):
is_test_available = False
is_rel_ver_suitable = False
# if 'available_since_release' attritube of test entity
# is empty then this test entity is available for cluster
# in other case execute release comparator logic
if not test_entity_data['available_since_release']:
is_rel_ver_suitable = True
else:
is_rel_ver_suitable = _compare_release_versions(
cluster_data['release_version'],
test_entity_data['available_since_release']
)
# if release version of test entity is suitable for cluster
# then check test entity compatibility with cluster
# by deployment tags
if is_rel_ver_suitable:
is_depl_tags_suitable = _process_deployment_tags(
cluster_data['deployment_tags'],
test_entity_data['deployment_tags']
)
if is_depl_tags_suitable:
is_test_available = True
return is_test_available

View File

@ -44,18 +44,12 @@ def main():
root = app.setup_app({})
# completely clean db (drop tables, constraints and types)
# plus drop alembic_version table (needed if, for example, head migration
# script was changed after applying)
if CONF.clear_db:
return nailgun_hooks.clear_db(CONF.adapter.dbpath)
if CONF.after_initialization_environment_hook:
return nailgun_hooks.after_initialization_environment_hook()
with engine.contexted_session(CONF.adapter.dbpath) as session:
# performing cleaning of expired data (if any) in db
mixins.delete_db_data(session)
mixins.clean_db(session)
log.info('Cleaned up database.')
# discover testsets and their tests
CORE_PATH = CONF.debug_tests or 'fuel_health'

View File

@ -33,7 +33,3 @@ def do_apply_migrations():
# apply initial migration
command.upgrade(alembic_conf, 'head')
def drop_migration_meta(engine):
engine.execute("DROP TABLE IF EXISTS alembic_version")

View File

@ -1,46 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""release_version
Revision ID: 495b4125ae83
Revises: 54904076d82d
Create Date: 2015-01-22 17:24:12.963260
"""
# revision identifiers, used by Alembic.
revision = '495b4125ae83'
down_revision = '54904076d82d'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('test_sets', sa.Column('available_since_release',
sa.String(64),
default=""))
op.add_column('tests', sa.Column('available_since_release',
sa.String(64),
default=""))
op.add_column('cluster_state', sa.Column('release_version', sa.String(64)))
def downgrade():
op.drop_column('test_sets', 'available_since_release')
op.drop_column('tests', 'available_since_release')
op.drop_column('cluster_state', 'release_version')

View File

@ -48,7 +48,6 @@ class ClusterState(BASE):
id = sa.Column(sa.Integer, primary_key=True, autoincrement=False)
deployment_tags = sa.Column(ARRAY(sa.String(64)))
release_version = sa.Column(sa.String(64))
class ClusterTestingPattern(BASE):
@ -91,8 +90,6 @@ class TestSet(BASE):
# with current test set
exclusive_testsets = sa.Column(ARRAY(sa.String(128)))
available_since_release = sa.Column(sa.String(64), default="")
tests = relationship(
'Test',
backref='test_set',
@ -138,7 +135,6 @@ class Test(BASE):
time_taken = sa.Column(sa.Float())
meta = sa.Column(fields.JsonField())
deployment_tags = sa.Column(ARRAY(sa.String(64)))
available_since_release = sa.Column(sa.String(64), default="")
test_run_id = sa.Column(
sa.Integer(),

View File

@ -1,48 +0,0 @@
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__profile__ = {
"id": "test_versioning",
"driver": "nose",
"test_path": "fuel_plugin/tests/functional/dummy_tests/test_versioning.py",
"description": "Test suite that contains fake tests for versioning check",
"deployment_tags": ["releases_comparison"],
"test_runs_ordering_priority": 13,
"exclusive_testsets": [],
"available_since_release": "2015.2-6.0",
}
import unittest2
class TestVersioning(unittest2.TestCase):
def test_simple_fake_first(self):
"""This is simple fake test
for versioning checking.
It should be discovered for
releases == of >= 2015.2-6.0
Available since release: 2015.2-6.0
Deployment tags: releases_comparison
"""
self.assertTrue(True)
def test_simple_fake_second(self):
"""This is simple fake test
for versioning checking.
It should be discovered for
releases == of >= 2015.2-6.1
Available since release: 2015.2-6.1
Deployment tags: releases_comparison
"""
self.assertTrue(True)

View File

@ -39,8 +39,7 @@ CLUSTERS = {
'mode': 'ha'
},
'release_data': {
'operating_system': 'rhel',
'version': '2015.2-1.0',
'operating_system': 'rhel'
},
'cluster_attributes': {
'editable': {
@ -55,8 +54,7 @@ CLUSTERS = {
'mode': 'multinode',
},
'release_data': {
'operating_system': 'ubuntu',
'version': '2015.2-1.0',
'operating_system': 'ubuntu'
},
'cluster_attributes': {
'editable': {
@ -71,8 +69,7 @@ CLUSTERS = {
'mode': 'ha'
},
'release_data': {
'operating_system': 'rhel',
'version': '2015.2-1.0',
'operating_system': 'rhel'
},
'cluster_attributes': {
'editable': {
@ -94,8 +91,7 @@ CLUSTERS = {
'mode': 'test_error'
},
'release_data': {
'operating_system': 'none',
'version': '2015.2-1.0',
'operating_system': 'none'
},
'cluster_attributes': {
'editable': {
@ -110,24 +106,7 @@ CLUSTERS = {
'mode': 'dependent_tests'
},
'release_data': {
'operating_system': 'none',
'version': '2015.2-1.0',
},
'cluster_attributes': {
'editable': {
'additional_components': {},
'common': {}
}
}
},
6: {
'cluster_meta': {
'release_id': 6,
'mode': 'releases_comparison'
},
'release_data': {
'operating_system': '',
'version': '2015.2-6.0',
'operating_system': 'none'
},
'cluster_attributes': {
'editable': {

View File

@ -222,13 +222,11 @@ class TestTestRunsController(base.BaseWSGITest):
class TestClusterRedeployment(base.BaseWSGITest):
@mock.patch('fuel_plugin.ostf_adapter.mixins._get_cluster_attrs')
def test_cluster_redeployment_with_different_tags(self,
m_get_cluster_attrs):
m_get_cluster_attrs.return_value = {
'deployment_tags': set(['multinode', 'centos']),
'release_version': '2015.2-1.0'
}
@mock.patch('fuel_plugin.ostf_adapter.mixins._get_cluster_depl_tags')
def test_cluster_redeployment_with_different_tags(self, m_get_depl_tags):
m_get_depl_tags.return_value = set(
['multinode', 'centos']
)
cluster_id = self.expected['cluster']['id']
self.app.get('/v1/testsets/{0}'.format(cluster_id))
@ -262,44 +260,10 @@ class TestClusterRedeployment(base.BaseWSGITest):
# patch request_to_nailgun function in orded to emulate
# redeployment of cluster
m_get_cluster_attrs.return_value = {
'deployment_tags': set(['multinode', 'ubuntu', 'nova_network']),
'release_version': '2015.2-1.0'
}
m_get_depl_tags.return_value = set(
['multinode', 'ubuntu', 'nova_network']
)
self.app.get('/v1/testsets/{0}'.format(cluster_id))
self.assertTrue(self.is_background_working)
class TestVersioning(base.BaseWSGITest):
def test_discover_tests_with_versions(self):
cluster_id = 6
self.mock_api_for_cluster(cluster_id)
self.app.get('/v1/testsets/{0}'.format(cluster_id))
self.expected = {
'cluster': {
'id': 6,
'deployment_tags': set(['releases_comparison'])
},
'test_sets': ['general_test', 'stopped_test', 'test_versioning',
'environment_variables'],
'tests': [self.ext_id + test for test in [
'general_test.Dummy_test.test_fast_pass',
'general_test.Dummy_test.test_long_pass',
'general_test.Dummy_test.test_fast_fail',
'general_test.Dummy_test.test_fast_error',
'general_test.Dummy_test.test_fail_with_step',
'general_test.Dummy_test.test_skip',
'general_test.Dummy_test.test_skip_directly',
'stopped_test.dummy_tests_stopped.test_really_long',
'stopped_test.dummy_tests_stopped.test_one_no_so_long',
'stopped_test.dummy_tests_stopped.test_not_long_at_all',
('test_environment_variables.TestEnvVariables.'
'test_os_credentials_env_variables'),
'test_versioning.TestVersioning.test_simple_fake_first',
]]
}
self.assertTrue(self.is_background_working)

View File

@ -54,16 +54,10 @@ class TestNoseDiscovery(base.BaseUnitTest):
if isinstance(el[0][0], models.Test)
]
def _find_needed_test(self, test_name):
return next(t for t in self.tests if t.name == test_name)
def _find_needed_test_set(self, test_set_id):
return next(t for t in self.test_sets if t.id == test_set_id)
def test_discovery(self):
expected = {
'test_sets_count': 10,
'tests_count': 29
'test_sets_count': 9,
'tests_count': 27
}
self.assertTrue(
@ -121,9 +115,12 @@ class TestNoseDiscovery(base.BaseUnitTest):
'deployment_tags': ['one_tag| another_tag', 'other_tag']
}
}
needed_testset = self._find_needed_test_set(expected['testset']['id'])
needed_test = self._find_needed_test(expected['test']['name'])
needed_testset = [testset for testset in self.test_sets
if testset.id == expected['testset']['id']][0]
needed_test = [test for test in self.tests
if test.name == expected['test']['name']][0]
self.assertEqual(
needed_testset.deployment_tags,
@ -160,40 +157,3 @@ class TestNoseDiscovery(base.BaseUnitTest):
nose_discovery.DiscoveryPlugin.test_belongs_to_testset(
test_id, test_set_id)
)
def test_release_version_attribute(self):
for test_entity in (self.tests, self.test_sets):
self.assertTrue(
all(
[hasattr(t, 'available_since_release')
for t in test_entity]
)
)
expected = {
'test_set': {
'id': 'test_versioning',
'available_since_release': '2015.2-6.0',
},
'tests': [
{'name': ('fuel_plugin.testing.fixture.dummy_tests.'
'test_versioning.TestVersioning.'
'test_simple_fake_first'),
'available_since_release': '2015.2-6.0', },
{'name': ('fuel_plugin.testing.fixture.dummy_tests.'
'test_versioning.TestVersioning.'
'test_simple_fake_second'),
'available_since_release': '2015.2-6.1', },
]
}
needed_test_set = self._find_needed_test_set(
expected['test_set']['id']
)
self.assertEqual(needed_test_set.available_since_release,
expected['test_set']['available_since_release'])
for test in expected['tests']:
needed_test = self._find_needed_test(test['name'])
self.assertEqual(needed_test.available_since_release,
test['available_since_release'])

View File

@ -24,15 +24,13 @@ class TestDeplTagsGetter(base.BaseUnitTest):
def setUp(self):
config.init_config([])
def test_get_cluster_attrs(self):
def test_get_cluster_depl_tags(self):
expected = {
'cluster_id': 3,
'attrs': {
'deployment_tags': set(
['ha', 'rhel', 'additional_components',
'murano', 'nova_network', 'public_on_all_nodes']),
'release_version': '2015.2-1.0'
}
'depl_tags': set(
['ha', 'rhel', 'additional_components',
'murano', 'nova_network', 'public_on_all_nodes']
)
}
with requests_mock.Mocker() as m:
@ -43,6 +41,6 @@ class TestDeplTagsGetter(base.BaseUnitTest):
json=cluster['cluster_attributes'])
m.register_uri('GET', '/api/releases/3',
json=cluster['release_data'])
res = mixins._get_cluster_attrs(expected['cluster_id'])
res = mixins._get_cluster_depl_tags(expected['cluster_id'])
self.assertEqual(res, expected['attrs'])
self.assertEqual(res, expected['depl_tags'])

View File

@ -213,18 +213,6 @@ function syncdb {
}
function cleardb {
local SERVER_SETTINGS=$1
local RUN_CLEARDB="\
ostf-server \
--debug
--clear-db
--config-file $SERVER_SETTINGS"
tox -evenv -- $RUN_CLEARDB > /dev/null
}
function run_integration_tests {
echo "Starting integration tests"
@ -241,7 +229,6 @@ function run_integration_tests {
create_ostf_conf $config $artifacts
cleardb $config
syncdb $config
# run tests