Browse Source

Move files out of the namespace package

Move the public API out of oslo.db to oslo_db. Retain the ability to
import from the old namespace package for backwards compatibility for
this release cycle.

Blueprint: drop-namespace-packages

Change-Id: Ie96b482b9fbcb1d85203ad35bb65c1f43e912a44
changes/87/138787/12
Doug Hellmann 8 years ago committed by Roman Podoliaka
parent
commit
7063585c60
  1. 2
      .testr.conf
  2. 4
      doc/source/api/api.rst
  3. 4
      doc/source/api/concurrency.rst
  4. 4
      doc/source/api/exception.rst
  5. 4
      doc/source/api/options.rst
  6. 2
      doc/source/api/sqlalchemy/index.rst
  7. 4
      doc/source/api/sqlalchemy/migration.rst
  8. 4
      doc/source/api/sqlalchemy/models.rst
  9. 4
      doc/source/api/sqlalchemy/provision.rst
  10. 4
      doc/source/api/sqlalchemy/session.rst
  11. 4
      doc/source/api/sqlalchemy/test_base.rst
  12. 4
      doc/source/api/sqlalchemy/test_migrations.rst
  13. 4
      doc/source/api/sqlalchemy/utils.rst
  14. 26
      oslo/db/__init__.py
  15. 216
      oslo/db/api.py
  16. 68
      oslo/db/concurrency.py
  17. 160
      oslo/db/exception.py
  18. 229
      oslo/db/options.py
  19. 23
      oslo/db/sqlalchemy/compat/__init__.py
  20. 17
      oslo/db/sqlalchemy/compat/utils.py
  21. 349
      oslo/db/sqlalchemy/exc_filters.py
  22. 165
      oslo/db/sqlalchemy/migration.py
  23. 18
      oslo/db/sqlalchemy/migration_cli/__init__.py
  24. 115
      oslo/db/sqlalchemy/models.py
  25. 494
      oslo/db/sqlalchemy/provision.py
  26. 834
      oslo/db/sqlalchemy/session.py
  27. 132
      oslo/db/sqlalchemy/test_base.py
  28. 600
      oslo/db/sqlalchemy/test_migrations.py
  29. 999
      oslo/db/sqlalchemy/utils.py
  30. 0
      oslo_db/__init__.py
  31. 0
      oslo_db/_i18n.py
  32. 229
      oslo_db/api.py
  33. 81
      oslo_db/concurrency.py
  34. 173
      oslo_db/exception.py
  35. 220
      oslo_db/options.py
  36. 0
      oslo_db/sqlalchemy/__init__.py
  37. 30
      oslo_db/sqlalchemy/compat/__init__.py
  38. 2
      oslo_db/sqlalchemy/compat/engine_connect.py
  39. 2
      oslo_db/sqlalchemy/compat/handle_error.py
  40. 26
      oslo_db/sqlalchemy/compat/utils.py
  41. 358
      oslo_db/sqlalchemy/exc_filters.py
  42. 160
      oslo_db/sqlalchemy/migration.py
  43. 0
      oslo_db/sqlalchemy/migration_cli/README.rst
  44. 0
      oslo_db/sqlalchemy/migration_cli/__init__.py
  45. 4
      oslo_db/sqlalchemy/migration_cli/ext_alembic.py
  46. 0
      oslo_db/sqlalchemy/migration_cli/ext_base.py
  47. 8
      oslo_db/sqlalchemy/migration_cli/ext_migrate.py
  48. 0
      oslo_db/sqlalchemy/migration_cli/manager.py
  49. 128
      oslo_db/sqlalchemy/models.py
  50. 507
      oslo_db/sqlalchemy/provision.py
  51. 847
      oslo_db/sqlalchemy/session.py
  52. 127
      oslo_db/sqlalchemy/test_base.py
  53. 613
      oslo_db/sqlalchemy/test_migrations.py
  54. 1012
      oslo_db/sqlalchemy/utils.py
  55. 0
      oslo_db/tests/__init__.py
  56. 0
      oslo_db/tests/base.py
  57. 0
      oslo_db/tests/old_import_api/__init__.py
  58. 53
      oslo_db/tests/old_import_api/base.py
  59. 0
      oslo_db/tests/old_import_api/sqlalchemy/__init__.py
  60. 68
      oslo_db/tests/old_import_api/sqlalchemy/test_engine_connect.py
  61. 833
      oslo_db/tests/old_import_api/sqlalchemy/test_exc_filters.py
  62. 194
      oslo_db/tests/old_import_api/sqlalchemy/test_handle_error.py
  63. 2
      oslo_db/tests/old_import_api/sqlalchemy/test_migrate_cli.py
  64. 174
      oslo_db/tests/old_import_api/sqlalchemy/test_migration_common.py
  65. 0
      oslo_db/tests/old_import_api/sqlalchemy/test_migrations.py
  66. 0
      oslo_db/tests/old_import_api/sqlalchemy/test_models.py
  67. 2
      oslo_db/tests/old_import_api/sqlalchemy/test_options.py
  68. 554
      oslo_db/tests/old_import_api/sqlalchemy/test_sqlalchemy.py
  69. 1093
      oslo_db/tests/old_import_api/sqlalchemy/test_utils.py
  70. 6
      oslo_db/tests/old_import_api/test_api.py
  71. 8
      oslo_db/tests/old_import_api/test_concurrency.py
  72. 61
      oslo_db/tests/old_import_api/test_warning.py
  73. 0
      oslo_db/tests/old_import_api/utils.py
  74. 0
      oslo_db/tests/sqlalchemy/__init__.py
  75. 4
      oslo_db/tests/sqlalchemy/test_engine_connect.py
  76. 12
      oslo_db/tests/sqlalchemy/test_exc_filters.py
  77. 8
      oslo_db/tests/sqlalchemy/test_handle_error.py
  78. 222
      oslo_db/tests/sqlalchemy/test_migrate_cli.py
  79. 8
      oslo_db/tests/sqlalchemy/test_migration_common.py
  80. 309
      oslo_db/tests/sqlalchemy/test_migrations.py
  81. 146
      oslo_db/tests/sqlalchemy/test_models.py
  82. 127
      oslo_db/tests/sqlalchemy/test_options.py
  83. 40
      oslo_db/tests/sqlalchemy/test_sqlalchemy.py
  84. 18
      oslo_db/tests/sqlalchemy/test_utils.py
  85. 177
      oslo_db/tests/test_api.py
  86. 108
      oslo_db/tests/test_concurrency.py
  87. 40
      oslo_db/tests/utils.py
  88. 9
      setup.cfg
  89. 5
      tools/run_cross_tests.sh
  90. 2
      tox.ini

2
.testr.conf

@ -2,6 +2,6 @@
test_command=OS_STDOUT_CAPTURE=${OS_STDOUT_CAPTURE:-1} \
OS_STDERR_CAPTURE=${OS_STDERR_CAPTURE:-1} \
OS_TEST_TIMEOUT=${OS_TEST_TIMEOUT:-60} \
${PYTHON:-python} -m subunit.run discover -t ./ ./tests $LISTOPT $IDOPTION
${PYTHON:-python} -m subunit.run discover -t ./ ./oslo_db/tests $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list

4
doc/source/api/api.rst

@ -1,8 +1,8 @@
=============
oslo.db.api
oslo_db.api
=============
.. automodule:: oslo.db.api
.. automodule:: oslo_db.api
:members:
:undoc-members:
:show-inheritance:

4
doc/source/api/concurrency.rst

@ -1,8 +1,8 @@
=====================
oslo.db.concurrency
oslo_db.concurrency
=====================
.. automodule:: oslo.db.concurrency
.. automodule:: oslo_db.concurrency
:members:
:undoc-members:
:show-inheritance:

4
doc/source/api/exception.rst

@ -1,8 +1,8 @@
===================
oslo.db.exception
oslo_db.exception
===================
.. automodule:: oslo.db.exception
.. automodule:: oslo_db.exception
:members:
:undoc-members:
:show-inheritance:

4
doc/source/api/options.rst

@ -1,8 +1,8 @@
=================
oslo.db.options
oslo_db.options
=================
.. automodule:: oslo.db.options
.. automodule:: oslo_db.options
:members:
:undoc-members:
:show-inheritance:

2
doc/source/api/sqlalchemy/index.rst

@ -1,5 +1,5 @@
====================
oslo.db.sqlalchemy
oslo_db.sqlalchemy
====================
.. toctree::

4
doc/source/api/sqlalchemy/migration.rst

@ -1,8 +1,8 @@
==============================
oslo.db.sqlalchemy.migration
oslo_db.sqlalchemy.migration
==============================
.. automodule:: oslo.db.sqlalchemy.migration
.. automodule:: oslo_db.sqlalchemy.migration
:members:
:undoc-members:
:show-inheritance:

4
doc/source/api/sqlalchemy/models.rst

@ -1,8 +1,8 @@
===========================
oslo.db.sqlalchemy.models
oslo_db.sqlalchemy.models
===========================
.. automodule:: oslo.db.sqlalchemy.models
.. automodule:: oslo_db.sqlalchemy.models
:members:
:undoc-members:
:show-inheritance:

4
doc/source/api/sqlalchemy/provision.rst

@ -1,8 +1,8 @@
==============================
oslo.db.sqlalchemy.provision
oslo_db.sqlalchemy.provision
==============================
.. automodule:: oslo.db.sqlalchemy.provision
.. automodule:: oslo_db.sqlalchemy.provision
:members:
:undoc-members:
:show-inheritance:

4
doc/source/api/sqlalchemy/session.rst

@ -1,8 +1,8 @@
============================
oslo.db.sqlalchemy.session
oslo_db.sqlalchemy.session
============================
.. automodule:: oslo.db.sqlalchemy.session
.. automodule:: oslo_db.sqlalchemy.session
:members:
:undoc-members:
:show-inheritance:

4
doc/source/api/sqlalchemy/test_base.rst

@ -1,8 +1,8 @@
==============================
oslo.db.sqlalchemy.test_base
oslo_db.sqlalchemy.test_base
==============================
.. automodule:: oslo.db.sqlalchemy.test_base
.. automodule:: oslo_db.sqlalchemy.test_base
:members:
:undoc-members:
:show-inheritance:

4
doc/source/api/sqlalchemy/test_migrations.rst

@ -1,8 +1,8 @@
====================================
oslo.db.sqlalchemy.test_migrations
oslo_db.sqlalchemy.test_migrations
====================================
.. automodule:: oslo.db.sqlalchemy.test_migrations
.. automodule:: oslo_db.sqlalchemy.test_migrations
:members:
:undoc-members:
:show-inheritance:

4
doc/source/api/sqlalchemy/utils.rst

@ -1,8 +1,8 @@
==========================
oslo.db.sqlalchemy.utils
oslo_db.sqlalchemy.utils
==========================
.. automodule:: oslo.db.sqlalchemy.utils
.. automodule:: oslo_db.sqlalchemy.utils
:members:
:undoc-members:
:show-inheritance:

26
oslo/db/__init__.py

@ -0,0 +1,26 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import warnings
def deprecated():
    """Warn that the old ``oslo`` namespace package is deprecated.

    Derives the replacement package name from the current module name
    (dots become underscores, e.g. ``oslo.db`` -> ``oslo_db``) and emits
    a ``DeprecationWarning`` attributed to the importing caller.
    """
    replacement = __name__.replace('.', '_')
    # stacklevel=3 points the warning at the code importing this package,
    # not at this helper or the module body that invokes it.
    message = ('The oslo namespace package is deprecated. '
               'Please use %s instead.' % replacement)
    warnings.warn(message, DeprecationWarning, stacklevel=3)

216
oslo/db/api.py

@ -1,4 +1,3 @@
# Copyright (c) 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@ -13,217 +12,4 @@
# License for the specific language governing permissions and limitations
# under the License.
"""
=================================
Multiple DB API backend support.
=================================
A DB backend module should implement a method named 'get_backend' which
takes no arguments. The method can return any object that implements DB
API methods.
"""
import logging
import threading
import time
from oslo.utils import importutils
import six
from oslo.db._i18n import _LE
from oslo.db import exception
from oslo.db import options
LOG = logging.getLogger(__name__)
def safe_for_db_retry(f):
"""Indicate api method as safe for re-connection to database.
Database connection retries will be enabled for the decorated api method.
Database connection failure can have many causes, which can be temporary.
In such cases retry may increase the likelihood of connection.
Usage::
@safe_for_db_retry
def api_method(self):
self.engine.connect()
:param f: database api method.
:type f: function.
"""
f.__dict__['enable_retry'] = True
return f
class wrap_db_retry(object):
"""Decorator class. Retry db.api methods, if DBConnectionError() raised.
Retry decorated db.api methods. If we enabled `use_db_reconnect`
in config, this decorator will be applied to all db.api functions,
marked with @safe_for_db_retry decorator.
Decorator catches DBConnectionError() and retries function in a
loop until it succeeds, or until maximum retries count will be reached.
Keyword arguments:
:param retry_interval: seconds between transaction retries
:type retry_interval: int
:param max_retries: max number of retries before an error is raised
:type max_retries: int
:param inc_retry_interval: determine increase retry interval or not
:type inc_retry_interval: bool
:param max_retry_interval: max interval value between retries
:type max_retry_interval: int
"""
def __init__(self, retry_interval, max_retries, inc_retry_interval,
max_retry_interval):
super(wrap_db_retry, self).__init__()
self.retry_interval = retry_interval
self.max_retries = max_retries
self.inc_retry_interval = inc_retry_interval
self.max_retry_interval = max_retry_interval
def __call__(self, f):
@six.wraps(f)
def wrapper(*args, **kwargs):
next_interval = self.retry_interval
remaining = self.max_retries
while True:
try:
return f(*args, **kwargs)
except exception.DBConnectionError as e:
if remaining == 0:
LOG.exception(_LE('DB exceeded retry limit.'))
raise exception.DBError(e)
if remaining != -1:
remaining -= 1
LOG.exception(_LE('DB connection error.'))
# NOTE(vsergeyev): We are using patched time module, so
# this effectively yields the execution
# context to another green thread.
time.sleep(next_interval)
if self.inc_retry_interval:
next_interval = min(
next_interval * 2,
self.max_retry_interval
)
return wrapper
class DBAPI(object):
"""Initialize the chosen DB API backend.
After initialization API methods is available as normal attributes of
``DBAPI`` subclass. Database API methods are supposed to be called as
DBAPI instance methods.
:param backend_name: name of the backend to load
:type backend_name: str
:param backend_mapping: backend name -> module/class to load mapping
:type backend_mapping: dict
:default backend_mapping: None
:param lazy: load the DB backend lazily on the first DB API method call
:type lazy: bool
:default lazy: False
:keyword use_db_reconnect: retry DB transactions on disconnect or not
:type use_db_reconnect: bool
:keyword retry_interval: seconds between transaction retries
:type retry_interval: int
:keyword inc_retry_interval: increase retry interval or not
:type inc_retry_interval: bool
:keyword max_retry_interval: max interval value between retries
:type max_retry_interval: int
:keyword max_retries: max number of retries before an error is raised
:type max_retries: int
"""
def __init__(self, backend_name, backend_mapping=None, lazy=False,
**kwargs):
self._backend = None
self._backend_name = backend_name
self._backend_mapping = backend_mapping or {}
self._lock = threading.Lock()
if not lazy:
self._load_backend()
self.use_db_reconnect = kwargs.get('use_db_reconnect', False)
self.retry_interval = kwargs.get('retry_interval', 1)
self.inc_retry_interval = kwargs.get('inc_retry_interval', True)
self.max_retry_interval = kwargs.get('max_retry_interval', 10)
self.max_retries = kwargs.get('max_retries', 20)
def _load_backend(self):
with self._lock:
if not self._backend:
# Import the untranslated name if we don't have a mapping
backend_path = self._backend_mapping.get(self._backend_name,
self._backend_name)
LOG.debug('Loading backend %(name)r from %(path)r',
{'name': self._backend_name,
'path': backend_path})
backend_mod = importutils.import_module(backend_path)
self._backend = backend_mod.get_backend()
def __getattr__(self, key):
if not self._backend:
self._load_backend()
attr = getattr(self._backend, key)
if not hasattr(attr, '__call__'):
return attr
# NOTE(vsergeyev): If `use_db_reconnect` option is set to True, retry
# DB API methods, decorated with @safe_for_db_retry
# on disconnect.
if self.use_db_reconnect and hasattr(attr, 'enable_retry'):
attr = wrap_db_retry(
retry_interval=self.retry_interval,
max_retries=self.max_retries,
inc_retry_interval=self.inc_retry_interval,
max_retry_interval=self.max_retry_interval)(attr)
return attr
@classmethod
def from_config(cls, conf, backend_mapping=None, lazy=False):
"""Initialize DBAPI instance given a config instance.
:param conf: oslo.config config instance
:type conf: oslo.config.cfg.ConfigOpts
:param backend_mapping: backend name -> module/class to load mapping
:type backend_mapping: dict
:param lazy: load the DB backend lazily on the first DB API method call
:type lazy: bool
"""
conf.register_opts(options.database_opts, 'database')
return cls(backend_name=conf.database.backend,
backend_mapping=backend_mapping,
lazy=lazy,
use_db_reconnect=conf.database.use_db_reconnect,
retry_interval=conf.database.db_retry_interval,
inc_retry_interval=conf.database.db_inc_retry_interval,
max_retry_interval=conf.database.db_max_retry_interval,
max_retries=conf.database.db_max_retries)
from oslo_db.api import * # noqa

68
oslo/db/concurrency.py

@ -1,4 +1,3 @@
# Copyright 2014 Mirantis.inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@ -13,69 +12,4 @@
# License for the specific language governing permissions and limitations
# under the License.
import copy
import logging
import threading
from oslo.config import cfg
from oslo.db._i18n import _LE
from oslo.db import api
LOG = logging.getLogger(__name__)
tpool_opts = [
cfg.BoolOpt('use_tpool',
default=False,
deprecated_name='dbapi_use_tpool',
deprecated_group='DEFAULT',
help='Enable the experimental use of thread pooling for '
'all DB API calls'),
]
class TpoolDbapiWrapper(object):
"""DB API wrapper class.
This wraps the oslo DB API with an option to be able to use eventlet's
thread pooling. Since the CONF variable may not be loaded at the time
this class is instantiated, we must look at it on the first DB API call.
"""
def __init__(self, conf, backend_mapping):
self._db_api = None
self._backend_mapping = backend_mapping
self._conf = conf
self._conf.register_opts(tpool_opts, 'database')
self._lock = threading.Lock()
@property
def _api(self):
if not self._db_api:
with self._lock:
if not self._db_api:
db_api = api.DBAPI.from_config(
conf=self._conf, backend_mapping=self._backend_mapping)
if self._conf.database.use_tpool:
try:
from eventlet import tpool
except ImportError:
LOG.exception(_LE("'eventlet' is required for "
"TpoolDbapiWrapper."))
raise
self._db_api = tpool.Proxy(db_api)
else:
self._db_api = db_api
return self._db_api
def __getattr__(self, key):
return getattr(self._api, key)
def list_opts():
"""Returns a list of oslo.config options available in this module.
:returns: a list of (group_name, opts) tuples
"""
return [('database', copy.deepcopy(tpool_opts))]
from oslo_db.concurrency import * # noqa

160
oslo/db/exception.py

@ -1,5 +1,3 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
@ -14,160 +12,4 @@
# License for the specific language governing permissions and limitations
# under the License.
"""DB related custom exceptions.
Custom exceptions intended to determine the causes of specific database
errors. This module provides more generic exceptions than the database-specific
driver libraries, and so users of oslo.db can catch these no matter which
database the application is using. Most of the exceptions are wrappers. Wrapper
exceptions take an original exception as positional argument and keep it for
purposes of deeper debug.
Example::
try:
statement(arg)
except sqlalchemy.exc.OperationalError as e:
raise DBDuplicateEntry(e)
This is useful to determine more specific error cases further at execution,
when you need to add some extra information to an error message. Wrapper
exceptions take care to display the original error message so as not to lose
the low-level cause of an error. All the database API exceptions are wrapped
into the specific exceptions provided below.
Please use only database related custom exceptions with database manipulations
with `try/except` statement. This is required for consistent handling of
database errors.
"""
import six
from oslo.db._i18n import _
class DBError(Exception):
"""Base exception for all custom database exceptions.
:kwarg inner_exception: an original exception which was wrapped with
DBError or its subclasses.
"""
def __init__(self, inner_exception=None):
self.inner_exception = inner_exception
super(DBError, self).__init__(six.text_type(inner_exception))
class DBDuplicateEntry(DBError):
"""Duplicate entry at unique column error.
Raised when made an attempt to write to a unique column the same entry as
existing one. :attr: `columns` available on an instance of the exception
and could be used at error handling::
try:
instance_type_ref.save()
except DBDuplicateEntry as e:
if 'colname' in e.columns:
# Handle error.
:kwarg columns: a list of unique columns have been attempted to write a
duplicate entry.
:type columns: list
:kwarg value: a value which has been attempted to write. The value will
be None, if we can't extract it for a particular database backend. Only
MySQL and PostgreSQL 9.x are supported right now.
"""
def __init__(self, columns=None, inner_exception=None, value=None):
self.columns = columns or []
self.value = value
super(DBDuplicateEntry, self).__init__(inner_exception)
class DBReferenceError(DBError):
"""Foreign key violation error.
:param table: a table name in which the reference is directed.
:type table: str
:param constraint: a problematic constraint name.
:type constraint: str
:param key: a broken reference key name.
:type key: str
:param key_table: a table name which contains the key.
:type key_table: str
"""
def __init__(self, table, constraint, key, key_table,
inner_exception=None):
self.table = table
self.constraint = constraint
self.key = key
self.key_table = key_table
super(DBReferenceError, self).__init__(inner_exception)
class DBDeadlock(DBError):
"""Database dead lock error.
Deadlock is a situation that occurs when two or more different database
sessions have some data locked, and each database session requests a lock
on the data that another, different, session has already locked.
"""
def __init__(self, inner_exception=None):
super(DBDeadlock, self).__init__(inner_exception)
class DBInvalidUnicodeParameter(Exception):
"""Database unicode error.
Raised when unicode parameter is passed to a database
without encoding directive.
"""
message = _("Invalid Parameter: "
"Encoding directive wasn't provided.")
class DbMigrationError(DBError):
"""Wrapped migration specific exception.
Raised when migrations couldn't be completed successfully.
"""
def __init__(self, message=None):
super(DbMigrationError, self).__init__(message)
class DBConnectionError(DBError):
"""Wrapped connection specific exception.
Raised when database connection is failed.
"""
pass
class InvalidSortKey(Exception):
"""A sort key destined for database query usage is invalid."""
message = _("Sort key supplied was not valid.")
class ColumnError(Exception):
"""Error raised when no column or an invalid column is found."""
class BackendNotAvailable(Exception):
"""Error raised when a particular database backend is not available
within a test suite.
"""
from oslo_db.exception import * # noqa

229
oslo/db/options.py

@ -1,220 +1,15 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
# All Rights Reserved.
#
# http://www.apache.org/licenses/LICENSE-2.0
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from oslo.config import cfg
database_opts = [
cfg.StrOpt('sqlite_db',
deprecated_group='DEFAULT',
default='oslo.sqlite',
help='The file name to use with SQLite.'),
cfg.BoolOpt('sqlite_synchronous',
deprecated_group='DEFAULT',
default=True,
help='If True, SQLite uses synchronous mode.'),
cfg.StrOpt('backend',
default='sqlalchemy',
deprecated_name='db_backend',
deprecated_group='DEFAULT',
help='The back end to use for the database.'),
cfg.StrOpt('connection',
help='The SQLAlchemy connection string to use to connect to '
'the database.',
secret=True,
deprecated_opts=[cfg.DeprecatedOpt('sql_connection',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_connection',
group='DATABASE'),
cfg.DeprecatedOpt('connection',
group='sql'), ]),
cfg.StrOpt('slave_connection',
secret=True,
help='The SQLAlchemy connection string to use to connect to the'
' slave database.'),
cfg.StrOpt('mysql_sql_mode',
default='TRADITIONAL',
help='The SQL mode to be used for MySQL sessions. '
'This option, including the default, overrides any '
'server-set SQL mode. To use whatever SQL mode '
'is set by the server configuration, '
'set this to no value. Example: mysql_sql_mode='),
cfg.IntOpt('idle_timeout',
default=3600,
deprecated_opts=[cfg.DeprecatedOpt('sql_idle_timeout',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_idle_timeout',
group='DATABASE'),
cfg.DeprecatedOpt('idle_timeout',
group='sql')],
help='Timeout before idle SQL connections are reaped.'),
cfg.IntOpt('min_pool_size',
default=1,
deprecated_opts=[cfg.DeprecatedOpt('sql_min_pool_size',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_min_pool_size',
group='DATABASE')],
help='Minimum number of SQL connections to keep open in a '
'pool.'),
cfg.IntOpt('max_pool_size',
deprecated_opts=[cfg.DeprecatedOpt('sql_max_pool_size',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_max_pool_size',
group='DATABASE')],
help='Maximum number of SQL connections to keep open in a '
'pool.'),
cfg.IntOpt('max_retries',
default=10,
deprecated_opts=[cfg.DeprecatedOpt('sql_max_retries',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_max_retries',
group='DATABASE')],
help='Maximum number of database connection retries '
'during startup. Set to -1 to specify an infinite '
'retry count.'),
cfg.IntOpt('retry_interval',
default=10,
deprecated_opts=[cfg.DeprecatedOpt('sql_retry_interval',
group='DEFAULT'),
cfg.DeprecatedOpt('reconnect_interval',
group='DATABASE')],
help='Interval between retries of opening a SQL connection.'),
cfg.IntOpt('max_overflow',
deprecated_opts=[cfg.DeprecatedOpt('sql_max_overflow',
group='DEFAULT'),
cfg.DeprecatedOpt('sqlalchemy_max_overflow',
group='DATABASE')],
help='If set, use this value for max_overflow with '
'SQLAlchemy.'),
cfg.IntOpt('connection_debug',
default=0,
deprecated_opts=[cfg.DeprecatedOpt('sql_connection_debug',
group='DEFAULT')],
help='Verbosity of SQL debugging information: 0=None, '
'100=Everything.'),
cfg.BoolOpt('connection_trace',
default=False,
deprecated_opts=[cfg.DeprecatedOpt('sql_connection_trace',
group='DEFAULT')],
help='Add Python stack traces to SQL as comment strings.'),
cfg.IntOpt('pool_timeout',
deprecated_opts=[cfg.DeprecatedOpt('sqlalchemy_pool_timeout',
group='DATABASE')],
help='If set, use this value for pool_timeout with '
'SQLAlchemy.'),
cfg.BoolOpt('use_db_reconnect',
default=False,
help='Enable the experimental use of database reconnect '
'on connection lost.'),
cfg.IntOpt('db_retry_interval',
default=1,
help='Seconds between database connection retries.'),
cfg.BoolOpt('db_inc_retry_interval',
default=True,
help='If True, increases the interval between database '
'connection retries up to db_max_retry_interval.'),
cfg.IntOpt('db_max_retry_interval',
default=10,
help='If db_inc_retry_interval is set, the '
'maximum seconds between database connection retries.'),
cfg.IntOpt('db_max_retries',
default=20,
help='Maximum database connection retries before error is '
'raised. Set to -1 to specify an infinite retry '
'count.'),
]
def set_defaults(conf, connection=None, sqlite_db=None,
max_pool_size=None, max_overflow=None,
pool_timeout=None):
"""Set defaults for configuration variables.
Overrides default options values.
:param conf: Config instance specified to set default options in it. Using
of instances instead of a global config object prevents conflicts between
options declaration.
:type conf: oslo.config.cfg.ConfigOpts instance.
:keyword connection: SQL connection string.
Valid SQLite URL forms are:
* sqlite:///:memory: (or, sqlite://)
* sqlite:///relative/path/to/file.db
* sqlite:////absolute/path/to/file.db
:type connection: str
:keyword sqlite_db: path to SQLite database file.
:type sqlite_db: str
:keyword max_pool_size: maximum connections pool size. The size of the pool
to be maintained, defaults to 5, will be used if value of the parameter is
`None`. This is the largest number of connections that will be kept
persistently in the pool. Note that the pool begins with no connections;
once this number of connections is requested, that number of connections
will remain.
:type max_pool_size: int
:default max_pool_size: None
:keyword max_overflow: The maximum overflow size of the pool. When the
number of checked-out connections reaches the size set in pool_size,
additional connections will be returned up to this limit. When those
additional connections are returned to the pool, they are disconnected and
discarded. It follows then that the total number of simultaneous
connections the pool will allow is pool_size + max_overflow, and the total
number of "sleeping" connections the pool will allow is pool_size.
max_overflow can be set to -1 to indicate no overflow limit; no limit will
be placed on the total number of concurrent connections. Defaults to 10,
will be used if value of the parameter in `None`.
:type max_overflow: int
:default max_overflow: None
:keyword pool_timeout: The number of seconds to wait before giving up on
returning a connection. Defaults to 30, will be used if value of the
parameter is `None`.
:type pool_timeout: int
:default pool_timeout: None
"""
conf.register_opts(database_opts, group='database')
if connection is not None:
conf.set_default('connection', connection, group='database')
if sqlite_db is not None:
conf.set_default('sqlite_db', sqlite_db, group='database')
if max_pool_size is not None:
conf.set_default('max_pool_size', max_pool_size, group='database')
if max_overflow is not None:
conf.set_default('max_overflow', max_overflow, group='database')
if pool_timeout is not None:
conf.set_default('pool_timeout', pool_timeout, group='database')
def list_opts():
"""Returns a list of oslo.config options available in the library.
The returned list includes all oslo.config options which may be registered
at runtime by the library.
Each element of the list is a tuple. The first element is the name of the
group under which the list of elements in the second element will be
registered. A group name of None corresponds to the [DEFAULT] group in
config files.
The purpose of this is to allow tools like the Oslo sample config file
generator to discover the options exposed to users by this library.
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
:returns: a list of (group_name, opts) tuples
"""
return [('database', copy.deepcopy(database_opts))]
from oslo_db.options import * # noqa

23
oslo/db/sqlalchemy/compat/__init__.py

@ -1,3 +1,5 @@
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@ -9,22 +11,7 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""compatiblity extensions for SQLAlchemy versions.
Elements within this module provide SQLAlchemy features that have been
added at some point but for which oslo.db provides a compatible versions
for previous SQLAlchemy versions.
"""
from oslo.db.sqlalchemy.compat import engine_connect as _e_conn
from oslo.db.sqlalchemy.compat import handle_error as _h_err
# trying to get: "from oslo.db.sqlalchemy import compat; compat.handle_error"
# flake8 won't let me import handle_error directly
engine_connect = _e_conn.engine_connect
handle_error = _h_err.handle_error
handle_connect_context = _h_err.handle_connect_context
__all__ = [
'engine_connect', 'handle_error',
'handle_connect_context']
from oslo_db.sqlalchemy.compat import engine_connect # noqa
from oslo_db.sqlalchemy.compat import handle_error # noqa
from oslo_db.sqlalchemy.compat import utils # noqa

17
oslo/db/sqlalchemy/compat/utils.py

@ -1,3 +1,5 @@
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@ -9,18 +11,5 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import sqlalchemy
# Parse sqlalchemy.__version__ into a comparable tuple: purely numeric
# components become ints, anything else (e.g. a beta tag) stays a string.
_NUMERIC_PART = re.compile(r'^\d+$')
_SQLA_VERSION = tuple(
    int(part) if _NUMERIC_PART.match(part) else part
    for part in sqlalchemy.__version__.split(".")
)

# Feature flags: True when the installed SQLAlchemy is at least the
# indicated version.
sqla_100 = _SQLA_VERSION >= (1, 0, 0)
sqla_097 = _SQLA_VERSION >= (0, 9, 7)
sqla_094 = _SQLA_VERSION >= (0, 9, 4)
sqla_090 = _SQLA_VERSION >= (0, 9, 0)
sqla_08 = _SQLA_VERSION >= (0, 8)
from oslo_db.sqlalchemy.compat.utils import * # noqa

349
oslo/db/sqlalchemy/exc_filters.py

@ -1,3 +1,5 @@
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
@ -9,350 +11,5 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Define exception redefinitions for SQLAlchemy DBAPI exceptions."""
import collections
import logging
import re
from sqlalchemy import exc as sqla_exc
from oslo.db._i18n import _LE
from oslo.db import exception
from oslo.db.sqlalchemy import compat
LOG = logging.getLogger(__name__)
_registry = collections.defaultdict(
lambda: collections.defaultdict(
list
)
)
def filters(dbname, exception_type, regex):
"""Mark a function as receiving a filtered exception.
:param dbname: string database name, e.g. 'mysql'
:param exception_type: a SQLAlchemy database exception class, which
extends from :class:`sqlalchemy.exc.DBAPIError`.
:param regex: a string, or a tuple of strings, that will be processed
as matching regular expressions.
"""
def _receive(fn):
_registry[dbname][exception_type].extend(
(fn, re.compile(reg))
for reg in
((regex,) if not isinstance(regex, tuple) else regex)
)
return fn
return _receive
# NOTE(zzzeek) - for Postgresql, catch both OperationalError, as the
# actual error is
# psycopg2.extensions.TransactionRollbackError(OperationalError),
# as well as sqlalchemy.exc.DBAPIError, as SQLAlchemy will reraise it
# as this until issue #3075 is fixed.
@filters("mysql", sqla_exc.OperationalError, r"^.*\b1213\b.*Deadlock found.*")
@filters("mysql", sqla_exc.OperationalError,
         r"^.*\b1205\b.*Lock wait timeout exceeded.*")
@filters("mysql", sqla_exc.InternalError, r"^.*\b1213\b.*Deadlock found.*")
@filters("postgresql", sqla_exc.OperationalError, r"^.*deadlock detected.*")
@filters("postgresql", sqla_exc.DBAPIError, r"^.*deadlock detected.*")
@filters("ibm_db_sa", sqla_exc.DBAPIError, r"^.*SQL0911N.*")
def _deadlock_error(operational_error, match, engine_name, is_disconnect):
    """Filter for MySQL or Postgresql deadlock error.

    NOTE(comstud): In current versions of DB backends, Deadlock violation
    messages follow the structure:

    mysql+mysqldb:
    (OperationalError) (1213, 'Deadlock found when trying to get lock; try '
    'restarting transaction') <query_str> <query_args>

    mysql+mysqlconnector:
    (InternalError) 1213 (40001): Deadlock found when trying to get lock; try
    restarting transaction

    postgresql:
    (TransactionRollbackError) deadlock detected <deadlock_details>

    ibm_db_sa:
    SQL0911N The current transaction has been rolled back because of a
    deadlock or timeout <deadlock details>

    :raises exception.DBDeadlock: always, wrapping the original error.
    """
    raise exception.DBDeadlock(operational_error)
@filters("mysql", sqla_exc.IntegrityError,
r"^.*\b1062\b.*Duplicate entry '(?P<value>[^']+)'"
r" for key '(?P<columns>[^']+)'.*$")
# NOTE(pkholkin): the first regex is suitable only for PostgreSQL 9.x versions
# the second regex is suitable for PostgreSQL 8.x versions
@filters("postgresql", sqla_exc.IntegrityError,
(r'^.*duplicate\s+key.*"(?P<columns>[^"]+)"\s*\n.*'
r'Key\s+\((?P<key>.*)\)=\((?P<value>.*)\)\s+already\s+exists.*$',
r"^.*duplicate\s+key.*\"(?P<columns>[^\"]+)\"\s*\n.*$"))
def _default_dupe_key_error(integrity_error, match, engine_name,
is_disconnect):
"""Filter for MySQL or Postgresql duplicate key error.
note(boris-42): In current versions of DB backends unique constraint
violation messages follow the structure:
postgres:
1 column - (IntegrityError) duplicate key value violates unique
constraint "users_c1_key"
N columns - (IntegrityError) duplicate key value violates unique
constraint "name_of_our_constraint"
mysql+mysqldb:
1 column - (IntegrityError) (1062, "Duplicate entry 'value_of_c1' for key
'c1'")
N columns - (IntegrityError) (1062, "Duplicate entry 'values joined
with -' for key 'name_of_our_constraint'")
mysql+mysqlconnector:
1 column - (IntegrityError) 1062 (23000): Duplicate entry 'value_of_c1' for
key 'c1'
N columns - (IntegrityError) 1062 (23000): Duplicate entry 'values
joined with -' for key 'name_of_our_constraint'
"""
columns = match.group('columns')
# note(vsergeyev): UniqueConstraint name convention: "uniq_t0c10c2"
# where `t` it is table name and columns `c1`, `c2`
# are in UniqueConstraint.
uniqbase = "uniq_"
if not columns.startswith(uniqbase):
if engine_name == "postgresql":
columns = [columns[columns.index("_") + 1:columns.rindex("_")]]
else:
columns = [columns]
else:
columns = columns[len(uniqbase):].split("0")[1:]
value = match.groupdict().get('value')
raise exception.DBDuplicateEntry(columns, integrity_error, value)
@filters("sqlite", sqla_exc.IntegrityError,
(r"^.*columns?(?P<columns>[^)]+)(is|are)\s+not\s+unique$",
r"^.*UNIQUE\s+constraint\s+failed:\s+(?P<columns>.+)$",
r"^.*PRIMARY\s+KEY\s+must\s+be\s+unique.*$"))
def _sqlite_dupe_key_error(integrity_error, match, engine_name, is_disconnect):
"""Filter for SQLite duplicate key error.
note(boris-42): In current versions of DB backends unique constraint
violation messages follow the structure:
sqlite:
1 column - (IntegrityError) column c1 is not unique
N columns - (IntegrityError) column c1, c2, ..., N are not unique
sqlite since 3.7.16:
1 column - (IntegrityError) UNIQUE constraint failed: tbl.k1
N columns - (IntegrityError) UNIQUE constraint failed: tbl.k1, tbl.k2
sqlite since 3.8.2:
(IntegrityError) PRIMARY KEY must be unique
"""
columns = []
# NOTE(ochuprykov): We can get here by last filter in which there are no
# groups. Trying to access the substring that matched by
# the group will lead to IndexError. In this case just
# pass empty list to exception.DBDuplicateEntry
try:
columns = match.group('columns')
columns = [c.split('.')[-1] for c in columns.strip().split(", ")]
except IndexError:
pass
raise exception.DBDuplicateEntry(columns, integrity_error)
@filters("sqlite", sqla_exc.IntegrityError,
r"(?i).*foreign key constraint failed")
@filters("postgresql", sqla_exc.IntegrityError,
r".*on table \"(?P<table>[^\"]+)\" violates "
"foreign key constraint \"(?P<constraint>[^\"]+)\"\s*\n"
"DETAIL: Key \((?P<key>.+)\)=\(.+\) "
"is not present in table "
"\"(?P<key_table>[^\"]+)\".")
@filters("mysql", sqla_exc.IntegrityError,
r".* 'Cannot add or update a child row: "
'a foreign key constraint fails \([`"].+[`"]\.[`"](?P<table>.+)[`"], '
'CONSTRAINT [`"](?P<constraint>.+)[`"] FOREIGN KEY '
'\([`"](?P<key>.+)[`"]\) REFERENCES [`"](?P<key_table>.+)[`"] ')
def _foreign_key_error(integrity_error, match, engine_name, is_disconnect):
"""Filter for foreign key errors."""
try:
table = match.group("table")
except IndexError:
table = None
try:
constraint = match.group("constraint")
except IndexError:
constraint = None
try:
key = match.group("key")
except IndexError:
key = None
try:
key_table = match.group("key_table")
except IndexError:
key_table = None
raise exception.DBReferenceError(table, constraint, key, key_table,
integrity_error)
@filters("ibm_db_sa", sqla_exc.IntegrityError, r"^.*SQL0803N.*$")
def _db2_dupe_key_error(integrity_error, match, engine_name, is_disconnect):
"""Filter for DB2 duplicate key errors.
N columns - (IntegrityError) SQL0803N One or more values in the INSERT
statement, UPDATE statement, or foreign key update caused by a
DELETE statement are not valid because the primary key, unique
constraint or unique index identified by "2" constrains table
"NOVA.KEY_PAIRS" from having duplicate values for the index
key.
"""
# NOTE(mriedem): The ibm_db_sa integrity error message doesn't provide the
# columns so we have to omit that from the DBDuplicateEntry error.
raise exception.DBDuplicateEntry([], integrity_error)
@filters("mysql", sqla_exc.DBAPIError, r".*\b1146\b")
def _raise_mysql_table_doesnt_exist_asis(
error, match, engine_name, is_disconnect):
"""Raise MySQL error 1146 as is.
Raise MySQL error 1146 as is, so that it does not conflict with
the MySQL dialect's checking a table not existing.
"""
raise error
@filters("*", sqla_exc.OperationalError, r".*")
def _raise_operational_errors_directly_filter(operational_error,
match, engine_name,
is_disconnect):
"""Filter for all remaining OperationalError classes and apply.
Filter for all remaining OperationalError classes and apply
special rules.
"""
if is_disconnect:
# operational errors that represent disconnect
# should be wrapped
raise exception.DBConnectionError(operational_error)
else:
# NOTE(comstud): A lot of code is checking for OperationalError
# so let's not wrap it for now.
raise operational_error
@filters("mysql", sqla_exc.OperationalError, r".*\(.*(?:2002|2003|2006|2013)")
@filters("ibm_db_sa", sqla_exc.OperationalError, r".*(?:30081)")
def _is_db_connection_error(operational_error, match, engine_name,
is_disconnect):
"""Detect the exception as indicating a recoverable error on connect."""
raise exception.DBConnectionError(operational_error)
@filters("*", sqla_exc.DBAPIError, r".*")
def _raise_for_remaining_DBAPIError(error, match, engine_name, is_disconnect):
"""Filter for remaining DBAPIErrors.
Filter for remaining DBAPIErrors and wrap if they represent
a disconnect error.
"""
if is_disconnect:
raise exception.DBConnectionError(error)
else:
LOG.exception(
_LE('DBAPIError exception wrapped from %s') % error)
raise exception.DBError(error)
@filters('*', UnicodeEncodeError, r".*")
def _raise_for_unicode_encode(error, match, engine_name, is_disconnect):
    """Wrap any UnicodeEncodeError as a DBInvalidUnicodeParameter.

    :raises exception.DBInvalidUnicodeParameter: always.
    """
    # NOTE(review): the original UnicodeEncodeError is discarded rather
    # than attached to the raised exception — confirm this is intended.
    raise exception.DBInvalidUnicodeParameter()
@filters("*", Exception, r".*")
def _raise_for_all_others(error, match, engine_name, is_disconnect):
LOG.exception(_LE('DB exception wrapped.'))
raise exception.DBError(error)
def handler(context):
    """Iterate through available filters and invoke those which match.

    The first one which raises wins. The order in which the filters
    are attempted is sorted by specificity - dialect name or "*",
    exception class per method resolution order (``__mro__``).

    Method resolution order is used so that filter rules indicating a
    more specific exception class are attempted first.
    """
    dialect_name = context.engine.dialect.name

    # Dialect-specific filters are consulted before the "*" fallbacks.
    dialect_registries = [
        _registry[name]
        for name in (dialect_name, '*')
        if name in _registry
    ]

    for per_dialect in dialect_registries:
        for exc in (context.sqlalchemy_exception,
                    context.original_exception):
            for exc_class in exc.__class__.__mro__:
                if exc_class not in per_dialect:
                    continue
                for fn, regexp in per_dialect[exc_class]:
                    match = regexp.match(exc.args[0])
                    if not match:
                        continue
                    try:
                        fn(
                            exc,
                            match,
                            dialect_name,
                            context.is_disconnect)
                    except exception.DBConnectionError:
                        # A matched filter classified this as a
                        # disconnect; record that on the context before
                        # propagating.
                        context.is_disconnect = True
                        raise
def register_engine(engine):
    """Hook the exception-filtering :func:`.handler` onto an engine.

    :param engine: the engine whose DBAPI errors should be routed through
        the filters via the compat ``handle_error`` event.
    """
    compat.handle_error(engine, handler)
def handle_connect_error(engine):
    """Handle connect error.

    Provide a special context that will allow on-connect errors
    to be treated within the filtering context.

    This routine is dependent on SQLAlchemy version, as version 1.0.0
    provides this functionality natively.

    :param engine: the engine to connect with.
    :returns: the connection produced by ``engine.connect()``.
    """
    with compat.handle_connect_context(handler, engine):
        return engine.connect()
from oslo_db.sqlalchemy.exc_filters import * # noqa

165
oslo/db/sqlalchemy/migration.py

@ -1,160 +1,15 @@
# coding=utf-8
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the