Add unit tests and remove oslo_db
This commit adds unit tests and removes the oslo_db dependency.

Change-Id: I735044fbebefcea2f73b6431e55b9c67f5508c17
parent 93d3324c9f
commit e22975ab47
@@ -1,3 +1,5 @@
include coverage2sql/migrations/alembic.ini
include AUTHORS
include ChangeLog
exclude .gitignore
@@ -38,9 +38,12 @@ def setup():
    pool_size = CONF.database.max_pool_size
    pool_recycle = CONF.database.idle_timeout
    engine = create_engine(db_uri,
                           pool_size=pool_size,
                           pool_recycle=pool_recycle)
    if not pool_size and not pool_recycle:
        engine = create_engine(db_uri)
    else:
        engine = create_engine(db_uri,
                               pool_size=pool_size,
                               pool_recycle=pool_recycle)
    global Session
    Session = sessionmaker(bind=engine)
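Read as a whole, this hunk replaces the unconditional create_engine() call with one that only passes pool options when they are actually set (the SQLite test fixtures added later in this commit register them with default=None). A minimal sketch of the resulting function, assuming db_uri is taken from CONF.database.connection earlier in setup():

from oslo_config import cfg
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

CONF = cfg.CONF
Session = None


def setup():
    # How db_uri is obtained before this point is assumed; the hunk only
    # shows the engine/session creation part of setup().
    db_uri = CONF.database.connection
    pool_size = CONF.database.max_pool_size
    pool_recycle = CONF.database.idle_timeout
    if not pool_size and not pool_recycle:
        # e.g. the sqlite fixture, which leaves both options unset
        engine = create_engine(db_uri)
    else:
        engine = create_engine(db_uri,
                               pool_size=pool_size,
                               pool_recycle=pool_recycle)
    global Session
    Session = sessionmaker(bind=engine)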
coverage2sql/migrations/alembic.ini (new file, 68 lines)
@@ -0,0 +1,68 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = migrations

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# max length of characters to apply to the
# "slug" field
#truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to migrations/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat migrations/versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = mysql://coverage:coverage@127.0.0.1/coverage


# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
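With this file in place, migrations can be driven through the alembic Python API; a rough sketch that mirrors what db_test_utils.run_migration() does later in this commit. The new env.py reads database settings from a coverage2sql_config attribute on the config object, and the mysql URL in the ini is only a placeholder fallback.

import os

from alembic import command
from alembic import config as alembic_config
from oslo_config import cfg

# CONF is assumed to already have the [database] options registered
# (see the DATABASE_OPTS group added to coverage2sql/shell.py below).
CONF = cfg.CONF

# assumed checkout layout: alembic.ini lives next to the migrations package
ini_path = os.path.join('coverage2sql', 'migrations', 'alembic.ini')
config = alembic_config.Config(ini_path)
config.set_main_option('script_location', 'coverage2sql:migrations')
# env.py falls back to sqlalchemy.url from the ini only when no
# connection is configured on this attribute
config.coverage2sql_config = CONF
command.upgrade(config, 'head')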
@@ -15,12 +15,15 @@
from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig

from coverage2sql.db import api as db_api


# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
coverage2sql_config = config.coverage2sql_config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
@@ -50,9 +53,15 @@ def run_migrations_offline():
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url, target_metadata=target_metadata, literal_binds=True)
    kwargs = dict()
    if coverage2sql_config.database.connection:
        kwargs['url'] = coverage2sql_config.database.connection
    elif coverage2sql_config.database.engine:
        kwargs['dialect_name'] = coverage2sql_config.database.engine
    else:
        kwargs['url'] = config.get_main_option("sqlalchemy.url")

    context.configure(**kwargs)

    with context.begin_transaction():
        context.run_migrations()
@@ -65,10 +74,7 @@ def run_migrations_online():
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)
    connectable = db_api.get_session().get_bind()

    with connectable.connect() as connection:
        context.configure(
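Putting the offline and online hunks together: offline migrations now pick a URL or dialect from the coverage2sql configuration, and online migrations reuse the engine behind the application's session instead of building one with engine_from_config(). A rough sketch of the resulting functions as they would sit in env.py (this module only runs under alembic, so context.config is available; target_metadata and the configure() arguments beyond the hunk cut-off are assumptions):

from alembic import context

from coverage2sql.db import api as db_api

config = context.config
coverage2sql_config = config.coverage2sql_config
target_metadata = None  # assumed; the real env.py defines this elsewhere


def run_migrations_offline():
    # Prefer an explicit connection string, then a bare dialect name,
    # and only fall back to sqlalchemy.url from alembic.ini.
    kwargs = dict()
    if coverage2sql_config.database.connection:
        kwargs['url'] = coverage2sql_config.database.connection
    elif coverage2sql_config.database.engine:
        kwargs['dialect_name'] = coverage2sql_config.database.engine
    else:
        kwargs['url'] = config.get_main_option("sqlalchemy.url")
    context.configure(**kwargs)
    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    # Bind to the engine that coverage2sql already configured instead of
    # creating a throwaway NullPool engine from the ini file.
    connectable = db_api.get_session().get_bind()
    with connectable.connect() as connection:
        context.configure(connection=connection,
                          target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()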
@@ -27,13 +27,21 @@ down_revision = None
branch_labels = None
depends_on = None

from alembic import context
from alembic import op
import sqlalchemy as sa


def upgrade():
    migration_context = context.get_context()
    if migration_context.dialect.name == 'sqlite':
        id_type = sa.Integer
    else:
        id_type = sa.BigInteger

    op.create_table('coverages',
                    sa.Column('id', sa.BigInteger(), primary_key=True),
                    sa.Column('id', id_type, autoincrement=True,
                              primary_key=True),
                    sa.Column('project_name', sa.String(256), nullable=False),
                    sa.Column('coverage_rate', sa.Float()),
                    sa.Column('report_time', sa.DateTime()),
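The old revision created the id column as a plain BigInteger primary key; the new one picks Integer on SQLite, which only auto-increments INTEGER primary keys, and keeps BigInteger elsewhere. A sketch of the resulting upgrade() as it would appear in the revision script, with the columns past the hunk cut-off assumed unchanged:

from alembic import context
from alembic import op
import sqlalchemy as sa


def upgrade():
    # SQLite only auto-increments INTEGER primary keys, so use Integer
    # there and BigInteger on the other backends.
    migration_context = context.get_context()
    if migration_context.dialect.name == 'sqlite':
        id_type = sa.Integer
    else:
        id_type = sa.BigInteger

    op.create_table('coverages',
                    sa.Column('id', id_type, autoincrement=True,
                              primary_key=True),
                    sa.Column('project_name', sa.String(256), nullable=False),
                    sa.Column('coverage_rate', sa.Float()),
                    sa.Column('report_time', sa.DateTime()))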
@@ -17,7 +17,6 @@ import copy
import sys

from oslo_config import cfg
from oslo_db import options
from pbr import version

from coverage2sql.db import api
@@ -31,15 +30,20 @@ SHELL_OPTS = [
               help='project name of the coverage files'),
    cfg.StrOpt('coverage_file', positional=True,
               help='A coverage file to put into the database'),
    cfg.StrOpt('connection'),
]

DATABASE_OPTS = [
    cfg.StrOpt('connection', default=None),
    cfg.IntOpt('max_pool_size', default=20),
    cfg.IntOpt('idle_timeout', default=3600),
]

_version_ = version.VersionInfo('coverage2sql').version_string()


def cli_opts():
    for opt in SHELL_OPTS:
        CONF.register_cli_opt(opt)
    CONF.register_cli_opts(SHELL_OPTS)
    CONF.register_cli_opts(DATABASE_OPTS, 'database')


def list_opts():
@@ -51,10 +55,9 @@ def list_opts():
    return [('DEFAULT', copy.deepcopy(SHELL_OPTS))]


def parse_args(argv, default_config_files=None):
    cfg.CONF.register_cli_opts(options.database_opts, group='database')
    cfg.CONF(argv[1:], project='coverage2sql', version=_version_,
             default_config_files=default_config_files)
def parse_args(argv):
    CONF(argv[1:], project='coverage2sql', version=_version_)
    CONF(default_config_files=[CONF.config_file])


def process_results(project_name=".", coverage_rate=0.0):
@@ -1,23 +1,37 @@
# -*- coding: utf-8 -*-

# Copyright 2010-2011 OpenStack Foundation
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from oslotest import base
import os

import fixtures
import testtools


class TestCase(base.BaseTestCase):
class TestCase(testtools.TestCase):

    """Test case base class for all unit tests."""
    true = ('True', 'true', '1', 'yes')

    def setUp(self):
        super(TestCase, self).setUp()
        if os.environ.get('OS_STDOUT_CAPTURE') in self.true:
            stdout = self.useFixture(fixtures.StringStream('stdout')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
        if os.environ.get('OS_STDERR_CAPTURE') in self.true:
            stderr = self.useFixture(fixtures.StringStream('stderr')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))
        if (os.environ.get('OS_LOG_CAPTURE') != 'False' and
                os.environ.get('OS_LOG_CAPTURE') != '0'):
            self.useFixture(fixtures.LoggerFixture(nuke_handlers=False,
                                                   level=None))
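The base class switches from oslotest's BaseTestCase to a plain testtools.TestCase that wires up stdout, stderr and log capture itself, driven by the OS_STDOUT_CAPTURE, OS_STDERR_CAPTURE and OS_LOG_CAPTURE environment variables. A minimal, hypothetical test module built on it would look like:

# hypothetical example module, not part of the commit
from coverage2sql.tests import base


class TestExample(base.TestCase):

    def test_capture_is_optional(self):
        # With OS_STDOUT_CAPTURE unset, output goes to the real stream;
        # set it to '1', 'True', 'true' or 'yes' to have the fixture
        # swallow it instead.
        print('only captured when OS_STDOUT_CAPTURE is truthy')
        self.assertTrue(True)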
coverage2sql/tests/coverage2sql_fixtures.py (new file, 196 lines)
@@ -0,0 +1,196 @@
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
import shutil
import subprocess

import fixtures as fix
from oslo_concurrency.fixture import lockutils as lock_fixture
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from six.moves.urllib import parse as urlparse

from coverage2sql.db import api as db_api
from coverage2sql.migrations import cli
from coverage2sql.tests import db_test_utils

DB_SCHEMA = ""


def execute_cmd(cmd=None):
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT, shell=True)
    output = proc.communicate()[0]
    if proc.returncode != 0:
        raise Exception('Command failed with output:\n%s' % output)
    return output


class Database(fix.Fixture):
    def _cache_schema(self):
        global DB_SCHEMA
        if not DB_SCHEMA:
            db_test_utils.run_migration("head")

    def cleanup(self):
        engine = db_api.get_session().get_bind()
        engine.dispose()
        pass

    def reset(self):
        self._cache_schema()
        engine = db_api.get_session().get_bind()
        engine.dispose()
        engine.connect()

    def setUp(self):
        super(Database, self).setUp()
        self.reset()
        self.addCleanup(self.cleanup)


class MySQLConfFixture(config_fixture.Config):
    """Fixture to manage global conf settings."""
    def _drop_db(self):
        addr = urlparse.urlparse(self.url)
        database = addr.path.strip('/')
        loc_pieces = addr.netloc.split('@')
        host = loc_pieces[1]
        auth_pieces = loc_pieces[0].split(':')
        user = auth_pieces[0]
        password = ""
        if len(auth_pieces) > 1:
            if auth_pieces[1].strip():
                password = "-p\"%s\"" % auth_pieces[1]
        sql = ("drop database if exists %(database)s; create "
               "database %(database)s;") % {'database': database}
        cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s "
               "-e \"%(sql)s\"") % {'user': user, 'password': password,
                                    'host': host, 'sql': sql}
        execute_cmd(cmd)

    def setUp(self):
        super(MySQLConfFixture, self).setUp()
        self.register_opt(cfg.IntOpt('max_pool_size', default=20),
                          group='database')
        self.register_opt(cfg.IntOpt('idle_timeout', default=3600),
                          group='database')
        self.register_opt(cfg.StrOpt('connection', default=''),
                          group='database')
        self.url = db_test_utils.get_connect_string("mysql")
        self.set_default('connection', self.url, group='database')
        lockutils.set_defaults(lock_path='/tmp')
        self._drop_db()


class PostgresConfFixture(config_fixture.Config):
    """Fixture to manage global conf settings."""
    def _drop_db(self):
        addr = urlparse.urlparse(self.url)
        database = addr.path.strip('/')
        loc_pieces = addr.netloc.split('@')
        host = loc_pieces[1]

        auth_pieces = loc_pieces[0].split(':')
        user = auth_pieces[0]
        password = ""
        if len(auth_pieces) > 1:
            password = auth_pieces[1].strip()
        pg_file = os.path.join(os.path.expanduser('~'), '.pgpass')
        if os.path.isfile(pg_file):
            tmp_path = os.path.join('/tmp', 'pgpass')
            shutil.move(pg_file, tmp_path)
            self.addCleanup(shutil.move, tmp_path, pg_file)

        pg_pass = '*:*:*:%(user)s:%(password)s' % {
            'user': user, 'password': password}
        with open(pg_file, 'w') as fd:
            fd.write(pg_pass)
        os.chmod(pg_file, 384)
        # note(boris-42): We must create and drop database, we can't
        # drop database which we have connected to, so for such
        # operations there is a special database template1.
        sqlcmd = ('psql -w -U %(user)s -h %(host)s -c'
                  ' "%(sql)s" -d template1')

        # NOTE(masayukig): We terminate sessions because some closed
        # sessions are remaining until here
        sql = ("select pg_terminate_backend(pg_stat_activity.pid) "
               "from pg_stat_activity "
               "where pg_stat_activity.datname = '%(database)s';")
        sql = sql % {'database': database}
        term_session = sqlcmd % {'user': user, 'host': host,
                                 'sql': sql}
        execute_cmd(term_session)

        sql = ("drop database if exists %(database)s;")
        sql = sql % {'database': database}
        droptable = sqlcmd % {'user': user, 'host': host,
                              'sql': sql}
        execute_cmd(droptable)
        sql = ("create database %(database)s;")
        sql = sql % {'database': database}
        createtable = sqlcmd % {'user': user, 'host': host,
                                'sql': sql}
        execute_cmd(createtable)

    def setUp(self):
        super(PostgresConfFixture, self).setUp()
        self.register_opt(cfg.StrOpt('connection', default=''),
                          group='database')
        self.register_opt(cfg.IntOpt('max_pool_size', default=20),
                          group='database')
        self.register_opt(cfg.IntOpt('idle_timeout', default=3600),
                          group='database')
        self.register_opts(cli.MIGRATION_OPTS)
        self.url = db_test_utils.get_connect_string("postgres")
        self.set_default('connection', self.url, group='database')
        self.set_default('disable_microsecond_data_migration', False)
        lockutils.set_defaults(lock_path='/tmp')
        self._drop_db()


class SqliteConfFixture(config_fixture.Config):
    """Fixture to manage global conf settings."""
    def _drop_db(self):
        if os.path.exists(db_test_utils.SQLITE_TEST_DATABASE_PATH):
            os.remove(db_test_utils.SQLITE_TEST_DATABASE_PATH)

    def setUp(self):
        super(SqliteConfFixture, self).setUp()

        self.register_opt(cfg.StrOpt('connection', default=''),
                          group='database')
        self.register_opt(cfg.IntOpt('max_pool_size', default=None),
                          group='database')
        self.register_opt(cfg.IntOpt('idle_timeout', default=None),
                          group='database')
        self.register_opts(cli.MIGRATION_OPTS)
        self.url = db_test_utils.get_connect_string("sqlite")
        self.set_default('connection', self.url, group='database')
        self.set_default('disable_microsecond_data_migration', False)
        lockutils.set_defaults(lock_path='/tmp')
        self._drop_db()
        self.addCleanup(self.cleanup)

    def cleanup(self):
        self._drop_db()


class LockFixture(lock_fixture.LockFixture):
    def __init__(self, name):
        lockutils.set_defaults(lock_path='/tmp')
        super(LockFixture, self).__init__(name, 'coverage-db-lock-')
coverage2sql/tests/db/__init__.py (new file, empty)
coverage2sql/tests/db/test_api.py (new file, 49 lines)
@@ -0,0 +1,49 @@
# Copyright 2016 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import testscenarios

from coverage2sql.db import api
from coverage2sql.tests import base
from coverage2sql.tests import coverage2sql_fixtures as fixtures
from coverage2sql.tests import db_test_utils

load_tests = testscenarios.load_tests_apply_scenarios


class TestDatabaseAPI(base.TestCase):

    scenarios = [
        ('mysql', {'dialect': 'mysql'}),
        ('postgresql', {'dialect': 'postgres'}),
        ('sqlite', {'dialect': 'sqlite'})
    ]

    def setUp(self):
        super(TestDatabaseAPI, self).setUp()
        self.useFixture(fixtures.LockFixture(self.dialect))
        if not db_test_utils.is_backend_avail(self.dialect):
            raise self.skipTest('%s is not available' % self.dialect)
        if self.dialect == 'mysql':
            self.useFixture(fixtures.MySQLConfFixture())
        elif self.dialect == 'postgres':
            self.useFixture(fixtures.PostgresConfFixture())
        elif self.dialect == 'sqlite':
            self.useFixture(fixtures.SqliteConfFixture())
        self.useFixture(fixtures.Database())

    def test_create_coverage(self):
        cov = api.create_coverage('foo_project')
        self.assertTrue(cov is not None)
        self.assertEqual(cov.project_name, 'foo_project')
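These scenario tests expand into one copy per backend through testscenarios' load_tests hook and skip any backend that is_backend_avail() cannot reach. They are normally run through testr/tox, but a quick way to exercise just this module (a sketch, assuming the package is importable in the current environment) is:

# run only the new DB API tests with the stdlib runner; the module-level
# load_tests hook applies the mysql/postgres/sqlite scenarios automatically
import unittest

from coverage2sql.tests.db import test_api

loader = unittest.TestLoader()
suite = loader.loadTestsFromModule(test_api)
unittest.TextTestRunner(verbosity=2).run(suite)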
coverage2sql/tests/db_test_utils.py (new file, 89 lines)
@@ -0,0 +1,89 @@
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
import tempfile

from alembic import command
from alembic import config as alembic_config
from oslo_config import cfg
import sqlalchemy

from coverage2sql.db import api as db_api

CONF = cfg.CONF
SQLITE_TEST_DATABASE_PATH = tempfile.mkstemp('coverage2sql.db')[1]

script_location = os.path.join(os.path.dirname(os.path.dirname(
    os.path.abspath(__file__))), 'migrations')


def get_connect_string(backend,
                       user="openstack_citest",
                       passwd="openstack_citest",
                       database="openstack_citest"):
    """Generate a db uri for testing locally.

    Try to get a connection with a very specific set of values, if we get
    these then we'll run the tests, otherwise they are skipped
    """
    if backend == "mysql":
        backend = "mysql+pymysql"
    elif backend == "postgres":
        backend = "postgresql+psycopg2"

    if backend == "sqlite":
        return "sqlite:///" + SQLITE_TEST_DATABASE_PATH

    return ("%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s"
            % {'backend': backend, 'user': user, 'passwd': passwd,
               'database': database})


def is_backend_avail(backend,
                     user="openstack_citest",
                     passwd="openstack_citest",
                     database="openstack_citest"):
    try:
        if backend == "mysql":
            connect_uri = get_connect_string("mysql", user=user,
                                             passwd=passwd, database=database)
        elif backend == "postgres":
            connect_uri = get_connect_string("postgres", user=user,
                                             passwd=passwd, database=database)
        elif backend == "sqlite":
            return True
        engine = sqlalchemy.create_engine(connect_uri)
        connection = engine.connect()
    except Exception:
        # intentionally catch all to handle exceptions even if we don't
        # have any backend code loaded.
        return False
    else:
        connection.close()
        engine.dispose()
        return True


def run_migration(target, engine=None):
    engine = engine or db_api.get_session().get_bind()
    engine.connect()
    config = alembic_config.Config(os.path.join(script_location,
                                                'alembic.ini'))
    config.set_main_option('script_location', 'coverage2sql:migrations')
    config.coverage2sql_config = CONF
    with engine.begin() as connection:
        config.attributes['connection'] = connection
        command.upgrade(config, target)
    engine.dispose()
coverage2sql/tests/migrations/__init__.py (new file, empty)
coverage2sql/tests/migrations/test_migrations.conf (new file, 17 lines)
@@ -0,0 +1,17 @@
[unit_tests]
# Set up any number of databases to test concurrently.
# The "name" used in the test is the config variable key.

#mysql=mysql+mysqldb://openstack_citest:openstack_citest@localhost/openstack_citest
#postgresql=postgresql+psycopg2://openstack_citest:openstack_citest@localhost/openstack_citest

[migration_dbs]
# Migration DB details are listed separately as they can't be connected to
# concurrently. These databases can't be the same as above

#mysql=mysql+mysqldb://user:pass@localhost/test_migrations
#postgresql=postgresql+psycopg2://user:pass@localhost/test_migrations

[walk_style]
snake_walk=yes
downgrade=yes
@@ -1,28 +0,0 @@
# -*- coding: utf-8 -*-

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
test_coverage2sql
----------------------------------

Tests for `coverage2sql` module.
"""

from coverage2sql.tests import base


class TestCoverage2sql(base.TestCase):

    def test_something(self):
        pass
etc/coverage2sql.conf.sample (new file, 121 lines)
@@ -0,0 +1,121 @@
[DEFAULT]

#
# From subunit2sql.shell
#

# Location of run artifacts (string value)
#artifacts = <None>

# Dict of metadata about the run(s) (dict value)
#run_meta = <None>

# list of subunit files to put into the database (multi valued)
#subunit_files =


[database]

#
# From oslo.db
#

# The back end to use for the database. (string value)
# Deprecated group/name - [DEFAULT]/db_backend
#backend = sqlalchemy

# The SQLAlchemy connection string to use to connect to the database.
# (string value)
# Deprecated group/name - [DEFAULT]/sql_connection
# Deprecated group/name - [DATABASE]/sql_connection
# Deprecated group/name - [sql]/connection
#connection = <None>
connection = mysql://coverage:coverage@127.0.0.1/coverage
#connection = mysql://query:query@logstash.openstack.org/subunit2sql

# Verbosity of SQL debugging information: 0=None, 100=Everything.
# (integer value)
# Deprecated group/name - [DEFAULT]/sql_connection_debug
#connection_debug = 0

# Add Python stack traces to SQL as comment strings. (boolean value)
# Deprecated group/name - [DEFAULT]/sql_connection_trace
#connection_trace = false

# If True, increases the interval between database connection retries
# up to db_max_retry_interval. (boolean value)
#db_inc_retry_interval = true

# Maximum database connection retries before error is raised. Set to
# -1 to specify an infinite retry count. (integer value)
#db_max_retries = 20

# If db_inc_retry_interval is set, the maximum seconds between
# database connection retries. (integer value)
#db_max_retry_interval = 10

# Seconds between database connection retries. (integer value)
#db_retry_interval = 1

# Timeout before idle SQL connections are reaped. (integer value)
# Deprecated group/name - [DEFAULT]/sql_idle_timeout
# Deprecated group/name - [DATABASE]/sql_idle_timeout
# Deprecated group/name - [sql]/idle_timeout
#idle_timeout = 3600

# If set, use this value for max_overflow with SQLAlchemy. (integer
# value)
# Deprecated group/name - [DEFAULT]/sql_max_overflow
# Deprecated group/name - [DATABASE]/sqlalchemy_max_overflow
#max_overflow = <None>

# Maximum number of SQL connections to keep open in a pool. (integer
# value)
# Deprecated group/name - [DEFAULT]/sql_max_pool_size
# Deprecated group/name - [DATABASE]/sql_max_pool_size
#max_pool_size = <None>

# Maximum db connection retries during startup. Set to -1 to specify
# an infinite retry count. (integer value)
# Deprecated group/name - [DEFAULT]/sql_max_retries
# Deprecated group/name - [DATABASE]/sql_max_retries
#max_retries = 10

# Minimum number of SQL connections to keep open in a pool. (integer
# value)
# Deprecated group/name - [DEFAULT]/sql_min_pool_size
# Deprecated group/name - [DATABASE]/sql_min_pool_size
#min_pool_size = 1

# The SQL mode to be used for MySQL sessions. This option, including
# the default, overrides any server-set SQL mode. To use whatever SQL
# mode is set by the server configuration, set this to no value.
# Example: mysql_sql_mode= (string value)
#mysql_sql_mode = TRADITIONAL

# If set, use this value for pool_timeout with SQLAlchemy. (integer
# value)
# Deprecated group/name - [DATABASE]/sqlalchemy_pool_timeout
#pool_timeout = <None>

# Interval between retries of opening a SQL connection. (integer
# value)
# Deprecated group/name - [DEFAULT]/sql_retry_interval
# Deprecated group/name - [DATABASE]/reconnect_interval
#retry_interval = 10

# The SQLAlchemy connection string to use to connect to the slave
# database. (string value)
#slave_connection = <None>

# The file name to use with SQLite. (string value)
# Deprecated group/name - [DEFAULT]/sqlite_db
#sqlite_db = oslo.sqlite

# If True, SQLite uses synchronous mode. (boolean value)
# Deprecated group/name - [DEFAULT]/sqlite_synchronous
#sqlite_synchronous = true

# Enable the experimental use of database reconnect on connection
# lost. (boolean value)
#use_db_reconnect = false
@@ -7,3 +7,5 @@ SQLAlchemy>=0.8.2
alembic>=0.4.1
oslo.config>=1.4.0.0a3
Babel>=1.3,!=2.3.0,!=2.3.1,!=2.3.2,!=2.3.3 # BSD
stevedore>=1.3.0
six>=1.5.2
@@ -5,13 +5,16 @@
hacking<0.11,>=0.10.0

coverage>=3.6
fixtures>=0.3.14
python-subunit>=0.0.18
sphinx!=1.2.0,!=1.3b1,<1.3,>=1.1.2
oslosphinx>=2.5.0 # Apache-2.0
PyMySql
oslotest>=1.10.0 # Apache-2.0
testrepository>=0.0.18
testscenarios>=0.4
testtools>=1.4.0
PyMySql
psycopg2
os-testr
reno>=0.1.1 # Apache2
oslo.concurrency>=3.5.0
tox.ini
@@ -35,9 +35,14 @@ commands = oslo_debug_helper {posargs}
commands = sphinx-build -a -E -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html

[flake8]
# E123, E125 skipped as they are invalid PEP-8.
# E123 skipped because it is ignored by default in the default pep8
# E125 is deliberately excluded. See https://github.com/jcrocholl/pep8/issues/126
# E129 skipped because it is too limiting when combined with other rules
# H305 skipped because it is inconsistent between python versions
# H402 skipped because some docstrings aren't sentences
# E711 skipped because sqlalchemy filter() requires using == instead of is

show-source = True
ignore = E123,E125
ignore = E123,E125,E129,H305,H402,E711
builtins = _
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,releasenotes