Use oslo.db to generate db engine

Barbican has historically used its own SQLAlchemy-based implementation to
connect to the database, but this causes feature gaps compared with the
other services, which use oslo.db to generate the database engine.

This replaces that implementation with oslo.db's so that Barbican can also
leverage the features implemented in the shared library.

With this change the deprecated database options are removed, because
these deprecated options were already removed from oslo.db.

Change-Id: I10fe4ab04996885e8aff7fab8ace78a6fe7eb6e7
Takashi Kajinami 2024-04-12 12:32:35 +09:00
parent b6edfda344
commit 2d405a8bfd
6 changed files with 31 additions and 165 deletions
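
For reference, the pattern this change moves Barbican onto is the standard oslo.db one: register the shared [database] options and let enginefacade build the SQLAlchemy engine. The sketch below is illustrative only, not part of the patch; the connection URL is a placeholder, while db_options.set_defaults() and enginefacade.writer.get_engine() are the oslo.db calls that appear in the diffs further down.

# Illustrative sketch (not part of the patch) of the oslo.db pattern
# Barbican adopts here; the connection URL is a placeholder default.
from oslo_config import cfg
from oslo_db import options as db_options
from oslo_db.sqlalchemy import enginefacade

CONF = cfg.CONF

# Register the shared [database] options (connection, max_pool_size,
# max_retries, ...) instead of maintaining Barbican-specific copies.
db_options.set_defaults(CONF, connection='sqlite:///barbican.sqlite')
CONF([], project='barbican')  # normally done by the service's startup code

# enginefacade reads those options and builds the SQLAlchemy engine,
# including the reconnect/retry handling the removed helpers reimplemented.
engine = enginefacade.writer.get_engine()
with engine.connect() as connection:
    pass  # use the connection as before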


@@ -21,6 +21,7 @@ import logging
import os
from oslo_config import cfg
from oslo_db import options as db_options
from oslo_log import log
from oslo_middleware import cors
from oslo_policy import opts as policy_opts
@@ -63,68 +64,6 @@ host_opts = [
"'http://localhost:9311'")),
]
core_db_opts = [
cfg.StrOpt('connection',
default="sqlite:///barbican.sqlite",
secret=True,
deprecated_opts=[cfg.DeprecatedOpt('sql_connection',
group='DEFAULT')],
help=u._("SQLAlchemy connection string for the reference "
"implementation registry server. Any valid "
"SQLAlchemy connection string is fine. See: "
"http://www.sqlalchemy.org/docs/05/reference/"
"sqlalchemy/connections.html#sqlalchemy."
"create_engine. Note: For absolute addresses, use "
"'////' slashes after 'sqlite:'.")),
cfg.IntOpt('connection_recycle_time', default=3600,
deprecated_opts=[cfg.DeprecatedOpt('sql_idle_timeout',
group='DEFAULT')],
help=u._("Period in seconds after which SQLAlchemy should "
"reestablish its connection to the database. MySQL "
"uses a default `wait_timeout` of 8 hours, after "
"which it will drop idle connections. This can result "
"in 'MySQL Gone Away' exceptions. If you notice this, "
"you can lower this value to ensure that SQLAlchemy "
"reconnects before MySQL can drop the connection.")),
cfg.IntOpt('max_retries', default=60,
deprecated_opts=[cfg.DeprecatedOpt('sql_max_retries',
group='DEFAULT')],
help=u._("Maximum number of database connection retries "
"during startup. Set to -1 to specify an infinite "
"retry count.")),
cfg.IntOpt('retry_interval', default=1,
deprecated_opts=[cfg.DeprecatedOpt('sql_retry_interval',
group='DEFAULT')],
help=u._("Interval between retries of opening a SQL "
"connection.")),
cfg.IntOpt('max_pool_size', default=5,
deprecated_opts=[cfg.DeprecatedOpt('sql_pool_size',
group='DEFAULT')],
help=u._("Size of pool used by SQLAlchemy. This is the largest "
"number of connections that will be kept persistently "
"in the pool. Can be set to 0 to indicate no size "
"limit. To disable pooling, use a NullPool with "
"sql_pool_class instead. Comment out to allow "
"SQLAlchemy to select the default.")),
cfg.IntOpt('max_overflow', default=10,
deprecated_opts=[cfg.DeprecatedOpt('sql_pool_max_overflow',
group='DEFAULT')],
help=u._("# The maximum overflow size of the pool used by "
"SQLAlchemy. When the number of checked-out "
"connections reaches the size set in max_pool_size, "
"additional connections will be returned up to this "
"limit. It follows then that the total number of "
"simultaneous connections the pool will allow is "
"max_pool_size + max_overflow. Can be set to -1 to "
"indicate no overflow limit, so no limit will be "
"placed on the total number of concurrent "
"connections. Comment out to allow SQLAlchemy to "
"select the default.")),
]
db_opt_group = cfg.OptGroup(name='database',
title='Database Options')
db_opts = [
cfg.BoolOpt('db_auto_create', default=False,
help=u._("Create the Barbican database on service startup.")),
@@ -255,7 +194,6 @@ def list_opts():
yield None, common_opts
yield None, host_opts
yield None, db_opts
yield db_opt_group, core_db_opts
yield retry_opt_group, retry_opts
yield queue_opt_group, queue_opts
yield ks_queue_opt_group, ks_queue_opts
@@ -288,14 +226,13 @@ def parse_args(conf, args=None, usage=None, default_config_files=None):
def new_config():
conf = cfg.ConfigOpts()
log.register_options(conf)
db_options.set_defaults(conf, connection='sqlite:///barbican.sqlite')
conf.register_opts(context_opts)
conf.register_opts(common_opts)
conf.register_opts(host_opts)
conf.register_opts(db_opts)
conf.register_group(db_opt_group)
conf.register_opts(core_db_opts, group=db_opt_group)
conf.register_group(retry_opt_group)
conf.register_opts(retry_opts, group=retry_opt_group)
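
The config.py hunk above is the heart of the change: the hand-rolled core_db_opts in the [database] group are deleted, and db_options.set_defaults() registers oslo.db's standard options in their place, so the same option names keep working but are now owned by the library. A rough sketch of what that buys, not taken from the patch (the comments note which removed deprecated options each one supersedes):

# Rough sketch, not part of the patch: after set_defaults() the standard
# oslo.db options are available under [database] without Barbican
# declaring them itself.
from oslo_config import cfg
from oslo_db import options as db_options

conf = cfg.ConfigOpts()
db_options.set_defaults(conf, connection='sqlite:///barbican.sqlite')
conf([])  # normally parse_args() does this with the real CLI/config files

print(conf.database.connection)               # replaces [DEFAULT] sql_connection
print(conf.database.connection_recycle_time)  # replaces [DEFAULT] sql_idle_timeout
print(conf.database.max_retries)              # replaces [DEFAULT] sql_max_retries
print(conf.database.max_pool_size)            # replaces [DEFAULT] sql_pool_size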


@@ -26,7 +26,7 @@ import sys
import time
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import session
from oslo_db.sqlalchemy import enginefacade
from oslo_utils import timeutils
from oslo_utils import uuidutils
import sqlalchemy
@@ -167,25 +167,9 @@ def get_session():
def _get_engine(engine):
if not engine:
connection = CONF.database.connection
if not connection:
raise exception.BarbicanException(
u._('No SQL connection configured'))
# TODO(jfwood):
# connection_dict = sqlalchemy.engine.url.make_url(_CONNECTION)
engine_args = {
'connection_recycle_time': CONF.database.connection_recycle_time
}
if CONF.database.max_pool_size:
engine_args['max_pool_size'] = CONF.database.max_pool_size
if CONF.database.max_overflow:
engine_args['max_overflow'] = CONF.database.max_overflow
db_connection = None
try:
engine = _create_engine(connection, **engine_args)
engine = _create_engine()
db_connection = engine.connect()
except Exception as err:
msg = u._(
@@ -209,6 +193,10 @@ def _get_engine(engine):
return engine
def _create_engine():
return enginefacade.writer.get_engine()
def model_query(model, *args, **kwargs):
"""Query helper for simpler session usage."""
session = kwargs.get('session')
@@ -239,22 +227,6 @@ def is_db_connection_error(args):
return False
def _create_engine(connection, **engine_args):
LOG.debug('Sql connection: please check "sql_connection" property in '
'barbican configuration file; Args: %s', engine_args)
engine = session.create_engine(connection, **engine_args)
# TODO(jfwood): if 'mysql' in connection_dict.drivername:
# TODO(jfwood): sqlalchemy.event.listen(_ENGINE, 'checkout',
# TODO(jfwood): ping_listener)
# Wrap the engine's connect method with a retry decorator.
engine.connect = wrap_db_error(engine.connect)
return engine
def _auto_generate_tables(engine, tables):
if tables and 'alembic_version' in tables:
# Upgrade the database to the latest version.
@@ -269,35 +241,6 @@ def _auto_generate_tables(engine, tables):
commands.stamp()
def wrap_db_error(f):
"""Retry DB connection. Copied from nova and modified."""
def _wrap(*args, **kwargs):
try:
return f(*args, **kwargs)
except sqlalchemy.exc.OperationalError as e:
if not is_db_connection_error(e.args[0]):
raise
remaining_attempts = CONF.database.max_retries
while True:
LOG.warning('SQL connection failed. %d attempts left.',
remaining_attempts)
remaining_attempts -= 1
time.sleep(CONF.database.retry_interval)
try:
return f(*args, **kwargs)
except sqlalchemy.exc.OperationalError as e:
if (remaining_attempts <= 0 or not
is_db_connection_error(e.args[0])):
raise
except sqlalchemy.exc.DBAPIError:
raise
except sqlalchemy.exc.DBAPIError:
raise
_wrap.__name__ = f.__name__
return _wrap
def clean_paging_values(offset_arg=0, limit_arg=CONF.default_limit_paging):
"""Cleans and safely limits raw paging offset/limit values."""
offset_arg = offset_arg or 0
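
With _create_engine() reduced to enginefacade.writer.get_engine(), the deleted wrap_db_error()/is_db_connection_error() retry loop is no longer needed: oslo.db applies its own [database] max_retries and retry_interval handling when it builds the engine. Below is a hedged sketch of idiomatic enginefacade usage, not Barbican code; the context class, the in-memory SQLite URL, and the query are placeholders.

# Hedged sketch of typical enginefacade usage; not part of the patch.
import sqlalchemy
from oslo_config import cfg
from oslo_db import options as db_options
from oslo_db.sqlalchemy import enginefacade

# Configure the global facade from oslo.config, as the service would at
# startup; the in-memory SQLite URL is only for this sketch.
db_options.set_defaults(cfg.CONF, connection='sqlite://')
cfg.CONF([], project='barbican')


@enginefacade.transaction_context_provider
class RequestContext(object):
    """Placeholder context carrying enginefacade transaction state."""


context = RequestContext()

# The writer facade lazily builds the engine from [database]; startup
# connection retries are governed by oslo.db's max_retries/retry_interval
# rather than the removed wrap_db_error() decorator.
with enginefacade.writer.using(context) as session:
    session.execute(sqlalchemy.text('SELECT 1'))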


@@ -14,7 +14,6 @@ from unittest import mock
from alembic import script as alembic_script
from oslo_config import cfg
import sqlalchemy
from barbican.common import config
from barbican.common import exception
@@ -194,28 +193,6 @@ class WhenTestingBaseRepository(database_utils.RepositoryTestCase):
str(exception_result))
class WhenTestingWrapDbError(utils.BaseTestCase):
def setUp(self):
super(WhenTestingWrapDbError, self).setUp()
repositories.CONF.set_override("max_retries", 0, "database")
repositories.CONF.set_override("retry_interval", 0, "database")
@mock.patch('barbican.model.repositories.is_db_connection_error')
def test_should_raise_operational_error_is_connection_error(
self, mock_is_db_error):
mock_is_db_error.return_value = True
@repositories.wrap_db_error
def test_function():
raise sqlalchemy.exc.OperationalError(
'statement', 'params', 'orig')
self.assertRaises(
sqlalchemy.exc.OperationalError,
test_function)
class WhenTestingGetEnginePrivate(utils.BaseTestCase):
def setUp(self):
@@ -240,6 +217,7 @@ class WhenTestingGetEnginePrivate(utils.BaseTestCase):
'Error configuring registry database with supplied '
'database connection. Got error: Abort!',
str(exception_result))
engine.connect.assert_called_once_with()
@mock.patch('barbican.model.repositories._create_engine')
def test_should_complete_with_no_alembic_create_default_configs(
@@ -253,12 +231,7 @@ class WhenTestingGetEnginePrivate(utils.BaseTestCase):
repositories._get_engine(None)
engine.connect.assert_called_once_with()
mock_create_engine.assert_called_once_with(
'connection',
connection_recycle_time=3600,
max_pool_size=repositories.CONF.database.max_pool_size,
max_overflow=repositories.CONF.database.max_overflow
)
mock_create_engine.assert_called_once_with()
@mock.patch('barbican.model.repositories._create_engine')
def test_should_complete_with_no_alembic_create_pool_configs(
@@ -277,12 +250,7 @@ class WhenTestingGetEnginePrivate(utils.BaseTestCase):
repositories._get_engine(None)
engine.connect.assert_called_once_with()
mock_create_engine.assert_called_once_with(
'connection',
connection_recycle_time=3600,
max_pool_size=22,
max_overflow=11
)
mock_create_engine.assert_called_once_with()
class WhenTestingAutoGenerateTables(utils.BaseTestCase):


@@ -154,7 +154,7 @@ function configure_barbican {
fi
# Set the database connection url
iniset $BARBICAN_CONF DEFAULT sql_connection `database_connection_url barbican`
iniset $BARBICAN_CONF database connection `database_connection_url barbican`
# Disable auto-migration when deploying Barbican
iniset $BARBICAN_CONF DEFAULT db_auto_create False


@@ -10,6 +10,7 @@ namespace = barbican.plugin.secret_store.kmip
namespace = barbican.plugin.secret_store.vault
namespace = keystonemiddleware.audit
namespace = keystonemiddleware.auth_token
namespace = oslo.db
namespace = oslo.log
namespace = oslo.messaging
namespace = oslo.middleware.cors


@@ -0,0 +1,17 @@
---
features:
- |
Barbican now uses oslo.db for its database connections. The features
implemented in oslo.db can now be leveraged in Barbican.
upgrade:
- |
The following deprecated database options were effectively removed. Use
the equivalent oslo.db library options instead.
- ``[DEFAULT] sql_connection``
- ``[DEFAULT] sql_idle_timeout``
- ``[DEFAULT] sql_max_retries``
- ``[DEFAULT] sql_retry_interval``
- ``[DEFAULT] sql_pool_size``
- ``[DEFAULT] sql_pool_max_overflow``