Get test suite to full pass with SQLAlchemy 2.0
Remaining issues encountered when running with SQLAlchemy 2.0 for real:

* Never call str() on a URL and expect it to be meaningful anymore. The
  password is aggressively obfuscated now (users absolutely wouldn't let
  us leave it as is).
* More utilities and fixtures were calling begin() within a block that
  had already begun.
* isnot is now called is_not; mocking "isnot" leads into too many weird
  compat layers.
* ORM InstrumentedAttribute and internals use __slots__ now; mock seems
  to not be able to patch methods. Ideally these tests would use a
  comparator subclass or something.
* Connection.connection.connection is now called driver_connection.
  SQLAlchemy keeps the old name available, however the oslo.db test suite
  does not appear to tolerate the deprecation warning emitted, so add a
  compat layer.
* mapper() is fully removed from 2.0; not sure if there is another
  not-yet-committed gerrit that removes mapper().

[1] https://docs.sqlalchemy.org/en/20/core/engines.html#sqlalchemy.create_engine.params.pool_pre_ping
[2] https://docs.sqlalchemy.org/en/20/changelog/changelog_20.html#change-2fe37eaf2295cebd3bb4ee8e5b8c575c
[3] https://github.com/sqlalchemy/sqlalchemy/issues/5648

Change-Id: Ifaca67c07f008d8bc0febeecd3e200cc7ee7a4b0
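
As a minimal sketch of the URL point above (illustration only, not part of
this change; assumes SQLAlchemy 2.0, where render_as_string() is the
documented way to get the password back):

    from sqlalchemy.engine import make_url

    url = make_url("postgresql+psycopg2://dude:secret@localhost/test")

    # str() now masks the password, so this is no longer a usable DSN:
    str(url)  # 'postgresql+psycopg2://dude:***@localhost/test'

    # render the real connection string explicitly instead:
    url.render_as_string(hide_password=False)
    # 'postgresql+psycopg2://dude:secret@localhost/test'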
oslo_db/sqlalchemy/compat/__init__.py (new file, +36)
@@ -0,0 +1,36 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from oslo_utils import versionutils
+
+from sqlalchemy import __version__
+
+
+_vers = versionutils.convert_version_to_tuple(__version__)
+sqla_2 = _vers >= (2, )
+
+
+def dialect_from_exception_context(ctx):
+    if sqla_2:
+        # SQLAlchemy 2.0 still has context.engine, however if the
+        # exception context is called in the context of a ping handler,
+        # engine is not present. need to use dialect instead
+        return ctx.dialect
+    else:
+        return ctx.engine.dialect
+
+
+def driver_connection(connection):
+    if sqla_2:
+        return connection.connection.driver_connection
+    else:
+        return connection.connection.connection
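
A quick usage sketch for these helpers (hypothetical caller code, not part
of this diff; assumes an already-created Engine named `engine`):

    from oslo_db.sqlalchemy import compat

    with engine.connect() as conn:
        # resolves to the raw DBAPI connection on both 1.x and 2.0,
        # without tripping the 2.0 deprecation warning emitted for
        # Connection.connection.connection
        dbapi_conn = compat.driver_connection(conn)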
@@ -164,7 +164,6 @@ class _TransactionFactory(object):
         }
         self._maker_cfg = {
             'expire_on_commit': _Default(False),
-            '__autocommit': False,
         }
         self._transaction_ctx_cfg = {
             'rollback_reader_sessions': False,
@@ -468,7 +467,6 @@ class _TransactionFactory(object):
 
     def _maker_args_for_conf(self, conf):
         maker_args = self._args_for_conf(self._maker_cfg, conf)
-        maker_args['autocommit'] = maker_args.pop('__autocommit')
         return maker_args
 
     def dispose_pool(self):
@@ -1238,9 +1236,6 @@ class LegacyEngineFacade(object):
     :param sqlite_fk: enable foreign keys in SQLite
     :type sqlite_fk: bool
 
-    :param autocommit: use autocommit mode for created Session instances
-    :type autocommit: bool
-
     :param expire_on_commit: expire session objects on commit
     :type expire_on_commit: bool
 
@@ -1282,22 +1277,14 @@ class LegacyEngineFacade(object):
 
     """
     def __init__(self, sql_connection, slave_connection=None,
-                 sqlite_fk=False, autocommit=False,
-                 expire_on_commit=False, _conf=None, _factory=None, **kwargs):
+                 sqlite_fk=False, expire_on_commit=False, _conf=None,
+                 _factory=None, **kwargs):
         warnings.warn(
             "EngineFacade is deprecated; please use "
             "oslo_db.sqlalchemy.enginefacade",
             warning.OsloDBDeprecationWarning,
             stacklevel=2)
 
-        if autocommit is True:
-            warnings.warn(
-                'autocommit support will be removed in SQLAlchemy 2.0 and '
-                'should not be relied on; please rework your code to remove '
-                'reliance on this feature',
-                warning.OsloDBDeprecationWarning,
-                stacklevel=2)
-
         if _factory:
             self._factory = _factory
         else:
@@ -1305,7 +1292,6 @@ class LegacyEngineFacade(object):
 
         self._factory.configure(
             sqlite_fk=sqlite_fk,
-            __autocommit=autocommit,
             expire_on_commit=expire_on_commit,
             **kwargs
         )
@@ -1371,7 +1357,7 @@ class LegacyEngineFacade(object):
 
     @classmethod
     def from_config(cls, conf,
-                    sqlite_fk=False, autocommit=False, expire_on_commit=False):
+                    sqlite_fk=False, expire_on_commit=False):
         """Initialize EngineFacade using oslo.config config instance options.
 
         :param conf: oslo.config config instance
@@ -1380,9 +1366,6 @@ class LegacyEngineFacade(object):
         :param sqlite_fk: enable foreign keys in SQLite
         :type sqlite_fk: bool
 
-        :param autocommit: use autocommit mode for created Session instances
-        :type autocommit: bool
-
         :param expire_on_commit: expire session objects on commit
         :type expire_on_commit: bool
 
@@ -1391,5 +1374,4 @@ class LegacyEngineFacade(object):
         return cls(
             None,
             sqlite_fk=sqlite_fk,
-            autocommit=autocommit,
             expire_on_commit=expire_on_commit, _conf=conf)
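
For consumers the visible change is just the dropped flag; a hedged
before/after sketch (hypothetical caller code, mirroring the test updates
further down; `url` is assumed):

    from oslo_db.sqlalchemy import session

    # before: session.EngineFacade(url, autocommit=False,
    #                              expire_on_commit=True)
    facade = session.EngineFacade(url, expire_on_commit=True)
    ses = facade.get_session(expire_on_commit=True)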
@@ -16,6 +16,7 @@
 """Core SQLAlchemy connectivity routines.
 """
 
+import functools
 import itertools
 import logging
 import os
@@ -29,10 +30,11 @@ import sqlalchemy
 from sqlalchemy import event
 from sqlalchemy import exc
 from sqlalchemy import pool
-from sqlalchemy.sql.expression import select
+from sqlalchemy import select
 
 from oslo_db import exception
 
+from oslo_db.sqlalchemy import compat
 from oslo_db.sqlalchemy import exc_filters
 from oslo_db.sqlalchemy import ndb
 from oslo_db.sqlalchemy import utils
@@ -57,6 +59,7 @@ def _connect_ping_listener(connection, branch):
 
     Ping the server at transaction begin and transparently reconnect
     if a disconnect exception occurs.
+
     """
     if branch:
         return
@@ -94,6 +97,14 @@ def _connect_ping_listener(connection, branch):
             connection.rollback()
 
 
+# SQLAlchemy 2.0 is compatible here, however oslo.db's test suite
+# raises for all deprecation errors, so we have to check for 2.0
+# and wrap out a parameter that is deprecated
+if compat.sqla_2:
+    _connect_ping_listener = functools.partial(
+        _connect_ping_listener, branch=False)
+
+
 def _setup_logging(connection_debug=0):
     """setup_logging function maps SQL debug level to Python log level.
 
@@ -189,7 +200,7 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
     _vet_url(url)
 
     engine_args = {
-        "pool_recycle": connection_recycle_time,
+        'pool_recycle': connection_recycle_time,
         'connect_args': {},
         'logging_name': logging_name
     }
@@ -198,11 +209,13 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
 
     _init_connection_args(
         url, engine_args,
-        max_pool_size=max_pool_size,
-        max_overflow=max_overflow,
-        pool_timeout=pool_timeout,
-        json_serializer=json_serializer,
-        json_deserializer=json_deserializer,
+        dict(
+            max_pool_size=max_pool_size,
+            max_overflow=max_overflow,
+            pool_timeout=pool_timeout,
+            json_serializer=json_serializer,
+            json_deserializer=json_deserializer,
+        )
     )
 
     engine = sqlalchemy.create_engine(url, **engine_args)
@@ -224,6 +237,7 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
     exc_filters.register_engine(engine)
 
+    # register engine connect handler
 
     event.listen(engine, "engine_connect", _connect_ping_listener)
 
     # initial connect + test
@@ -237,9 +251,16 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
 
 
 @utils.dispatch_for_dialect('*', multiple=True)
-def _init_connection_args(
-    url, engine_args,
-    max_pool_size=None, max_overflow=None, pool_timeout=None, **kw):
+def _init_connection_args(url, engine_args, kw):
+
+    # (zzzeek) kw is passed by reference rather than as **kw so that the
+    # init_connection_args routines can modify the contents of what
+    # will be passed to create_engine, including removing arguments that
+    # don't apply. This allows things such as replacing QueuePool with
+    # NUllPool, for example, as the latter pool would reject these parameters.
+    max_pool_size = kw.get("max_pool_size", None)
+    max_overflow = kw.get("max_overflow", None)
+    pool_timeout = kw.get("pool_timeout", None)
 
     pool_class = url.get_dialect().get_pool_class(url)
     if issubclass(pool_class, pool.QueuePool):
||||
@ -252,17 +273,25 @@ def _init_connection_args(
|
||||
|
||||
|
||||
@_init_connection_args.dispatch_for("sqlite")
|
||||
def _init_connection_args(url, engine_args, **kw):
|
||||
def _init_connection_args(url, engine_args, kw):
|
||||
pool_class = url.get_dialect().get_pool_class(url)
|
||||
# singletonthreadpool is used for :memory: connections;
|
||||
# replace it with StaticPool.
|
||||
if issubclass(pool_class, pool.SingletonThreadPool):
|
||||
# singletonthreadpool is used for :memory: connections;
|
||||
# replace it with StaticPool.
|
||||
engine_args["poolclass"] = pool.StaticPool
|
||||
engine_args['connect_args']['check_same_thread'] = False
|
||||
elif issubclass(pool_class, pool.QueuePool):
|
||||
# SQLAlchemy 2.0 uses QueuePool for sqlite file DBs; put NullPool
|
||||
# back to avoid compatibility issues
|
||||
kw.pop("max_pool_size", None)
|
||||
kw.pop("max_overflow", None)
|
||||
engine_args.pop("max_pool_size", None)
|
||||
engine_args.pop("max_overflow", None)
|
||||
engine_args["poolclass"] = pool.NullPool
|
||||
|
||||
|
||||
@_init_connection_args.dispatch_for("postgresql")
|
||||
def _init_connection_args(url, engine_args, **kw):
|
||||
def _init_connection_args(url, engine_args, kw):
|
||||
if 'client_encoding' not in url.query:
|
||||
# Set encoding using engine_args instead of connect_args since
|
||||
# it's supported for PostgreSQL 8.*. More details at:
|
||||
@@ -273,13 +302,13 @@ def _init_connection_args(url, engine_args, **kw):
 
 
 @_init_connection_args.dispatch_for("mysql")
-def _init_connection_args(url, engine_args, **kw):
+def _init_connection_args(url, engine_args, kw):
     if 'charset' not in url.query:
         engine_args['connect_args']['charset'] = 'utf8'
 
 
 @_init_connection_args.dispatch_for("mysql+mysqlconnector")
-def _init_connection_args(url, engine_args, **kw):
+def _init_connection_args(url, engine_args, kw):
     # mysqlconnector engine (<1.0) incorrectly defaults to
     # raise_on_warnings=True
     # https://bitbucket.org/zzzeek/sqlalchemy/issue/2515
@@ -288,8 +317,7 @@ def _init_connection_args(url, engine_args, **kw):
 
 
 @_init_connection_args.dispatch_for("mysql+mysqldb")
-@_init_connection_args.dispatch_for("mysql+oursql")
-def _init_connection_args(url, engine_args, **kw):
+def _init_connection_args(url, engine_args, kw):
     # Those drivers require use_unicode=0 to avoid performance drop due
     # to internal usage of Python unicode objects in the driver
     # http://docs.sqlalchemy.org/en/rel_0_9/dialects/mysql.html
@@ -444,7 +472,6 @@ def _add_process_guards(engine):
                 "database connection, "
                 "which is being discarded and recreated.",
                 {"newproc": pid, "orig": connection_record.info['pid']})
-            connection_record.connection = connection_proxy.connection = None
             raise exc.DisconnectionError(
                 "Connection record belongs to pid %s, "
                 "attempting to check out in pid %s" %
@@ -241,7 +241,6 @@ class Backend(object):
         :raises: ``BackendNotAvailable`` if the backend is not available.
 
         """
-
         if not self.verified:
             try:
                 eng = self._ensure_backend_available(self.url)
@@ -493,7 +492,6 @@ class BackendImpl(object, metaclass=abc.ABCMeta):
        then emit a command to switch to the named database.
 
        """
-
        url = utils.make_url(base_url)
 
        # TODO(zzzeek): remove hasattr() conditional in favor of "url.set()"
@@ -515,16 +513,14 @@ class MySQLBackendImpl(BackendImpl):
         return "mysql+pymysql://openstack_citest:openstack_citest@localhost/"
 
     def create_named_database(self, engine, ident, conditional=False):
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             if not conditional or not self.database_exists(conn, ident):
-                with conn.begin():
-                    conn.exec_driver_sql("CREATE DATABASE %s" % ident)
+                conn.exec_driver_sql("CREATE DATABASE %s" % ident)
 
     def drop_named_database(self, engine, ident, conditional=False):
-        with engine.connect() as conn:
+        with engine.begin() as conn:
             if not conditional or self.database_exists(conn, ident):
-                with conn.begin():
-                    conn.exec_driver_sql("DROP DATABASE %s" % ident)
+                conn.exec_driver_sql("DROP DATABASE %s" % ident)
 
     def database_exists(self, engine, ident):
         s = sql.text("SHOW DATABASES LIKE :ident")
@@ -585,19 +581,17 @@ class PostgresqlBackendImpl(BackendImpl):
             isolation_level="AUTOCOMMIT",
         ) as conn:
             if not conditional or not self.database_exists(conn, ident):
-                with conn.begin():
-                    conn.exec_driver_sql("CREATE DATABASE %s" % ident)
+                conn.exec_driver_sql("CREATE DATABASE %s" % ident)
 
     def drop_named_database(self, engine, ident, conditional=False):
         with engine.connect().execution_options(
             isolation_level="AUTOCOMMIT",
         ) as conn:
             self._close_out_database_users(conn, ident)
-            with conn.begin():
-                if conditional:
-                    conn.exec_driver_sql("DROP DATABASE IF EXISTS %s" % ident)
-                else:
-                    conn.exec_driver_sql("DROP DATABASE %s" % ident)
+            if conditional:
+                conn.exec_driver_sql("DROP DATABASE IF EXISTS %s" % ident)
+            else:
+                conn.exec_driver_sql("DROP DATABASE %s" % ident)
 
     def drop_additional_objects(self, conn):
         enums = [e['name'] for e in sqlalchemy.inspect(conn).get_enums()]
@@ -393,8 +393,8 @@ def update_returning_pk(query, values, surrogate_key):
     mapper = inspect(entity).mapper
     session = query.session
 
-    bind = session.connection(mapper=mapper)
-    if bind.dialect.implicit_returning:
+    bind = session.connection(bind_arguments=dict(mapper=mapper))
+    if bind.dialect.name == "postgresql":
         pk_strategy = _pk_strategy_returning
     elif bind.dialect.name == 'mysql' and \
             len(mapper.primary_key) == 1 and \
@@ -213,7 +213,7 @@ def paginate_query(query, model, limit, sort_keys, marker=None,
                 null_order_by_stmt = {
                     "": None,
                     "nullsfirst": sort_key_attr.is_(None),
-                    "nullslast": sort_key_attr.isnot(None),
+                    "nullslast": sort_key_attr.is_not(None),
                 }[null_sort_dir]
             except KeyError:
                 raise ValueError(_("Unknown sort direction, "
@@ -1016,26 +1016,29 @@ def suspend_fk_constraints_for_col_alter(
         yield
     else:
         with engine.connect() as conn:
-            insp = inspect(conn)
-            fks = []
-            for ref_table_name in referents:
-                for fk in insp.get_foreign_keys(ref_table_name):
-                    if not fk.get('name'):
-                        raise AssertionError("foreign key hasn't a name.")
-                    if fk['referred_table'] == table_name and \
-                            column_name in fk['referred_columns']:
-                        fk['source_table'] = ref_table_name
-                        if 'options' not in fk:
-                            fk['options'] = {}
-                        fks.append(fk)
-
-            ctx = MigrationContext.configure(conn)
-            op = Operations(ctx)
-
-            for fk in fks:
-                op.drop_constraint(
-                    fk['name'], fk['source_table'], type_="foreignkey")
+            with conn.begin():
+                insp = inspect(conn)
+                fks = []
+                for ref_table_name in referents:
+                    for fk in insp.get_foreign_keys(ref_table_name):
+                        if not fk.get('name'):
+                            raise AssertionError("foreign key hasn't a name.")
+                        if fk['referred_table'] == table_name and \
+                                column_name in fk['referred_columns']:
+                            fk['source_table'] = ref_table_name
+                            if 'options' not in fk:
+                                fk['options'] = {}
+                            fks.append(fk)
+
+                ctx = MigrationContext.configure(conn)
+                op = Operations(ctx)
+
+                for fk in fks:
+                    op.drop_constraint(
+                        fk['name'],
+                        fk['source_table'],
+                        type_="foreignkey",
+                    )
 
         yield
@@ -357,11 +357,11 @@ class MockFacadeTest(test_base.BaseTestCase):
             maker_factories = mock.Mock(side_effect=get_maker)
 
             maker_factories(
-                autocommit=False, engine=engines.writer,
+                engine=engines.writer,
                 expire_on_commit=False)
             if self.slave_uri:
                 maker_factories(
-                    autocommit=False, engine=engines.async_reader,
+                    engine=engines.async_reader,
                     expire_on_commit=False)
 
             yield makers
@@ -26,6 +26,7 @@ from sqlalchemy.orm import registry
 from sqlalchemy import sql
 
 from oslo_db import exception
+from oslo_db.sqlalchemy import compat
 from oslo_db.sqlalchemy import engines
 from oslo_db.sqlalchemy import exc_filters
 from oslo_db.sqlalchemy import utils
@@ -139,18 +140,29 @@ class TestsExceptionFilter(_SQLAExceptionMatcher, test_base.BaseTestCase):
         # statement
         self.engine.connect().close()
 
-        with test_utils.nested(
+        patches = [
             mock.patch.object(engine.dialect, "do_execute", do_execute),
             # replace the whole DBAPI rather than patching "Error"
             # as some DBAPIs might not be patchable (?)
             mock.patch.object(engine.dialect,
                               "dbapi",
                               mock.Mock(Error=self.Error)),
 
             mock.patch.object(engine.dialect, "name", dialect_name),
             mock.patch.object(engine.dialect,
                               "is_disconnect",
                               lambda *args: is_disconnect)
-        ):
+        ]
+        if compat.sqla_2:
+            patches.append(
+                mock.patch.object(
+                    engine.dialect,
+                    "loaded_dbapi",
+                    mock.Mock(Error=self.Error),
+                )
+            )
+
+        with test_utils.nested(*patches):
             yield
 
     def _run_test(self, dialect_name, statement, raises, expected,
@@ -754,7 +766,7 @@ class TestExceptionCauseMySQLSavepoint(
             session.execute(sql.text("select 1"))
 
             # close underying DB connection
-            session.connection().connection.connection.close()
+            compat.driver_connection(session.connection()).close()
 
             # alternate approach, but same idea:
             # conn_id = session.scalar("select connection_id()")
@@ -779,7 +791,7 @@ class TestExceptionCauseMySQLSavepoint(
             session.execute(sql.text("select 1"))
 
             # close underying DB connection
-            session.connection().connection.connection.close()
+            compat.driver_connection(session.connection()).close()
 
             # alternate approach, but same idea:
             # conn_id = session.scalar("select connection_id()")
@@ -947,8 +959,8 @@ class TestDuplicate(TestsExceptionFilter):
 class TestDeadlock(TestsExceptionFilter):
     statement = ('SELECT quota_usages.created_at AS '
                  'quota_usages_created_at FROM quota_usages '
-                 'WHERE quota_usages.project_id = %(project_id_1)s '
-                 'AND quota_usages.deleted = %(deleted_1)s FOR UPDATE')
+                 'WHERE quota_usages.project_id = :project_id_1 '
+                 'AND quota_usages.deleted = :deleted_1 FOR UPDATE')
     params = {
         'project_id_1': '8891d4478bbf48ad992f050cdf55e9b5',
         'deleted_1': 0
@@ -13,6 +13,7 @@
 import os
 from unittest import mock
 
+from sqlalchemy.engine import url as sqla_url
 from sqlalchemy import exc as sa_exc
 from sqlalchemy import inspect
 from sqlalchemy import schema
@@ -156,8 +157,8 @@ class AdHocURLTest(test_base.BaseTestCase):
         fixture.setUp()
 
         self.assertEqual(
-            str(enginefacade._context_manager._factory._writer_engine.url),
-            "sqlite:///foo.db"
+            enginefacade._context_manager._factory._writer_engine.url,
+            sqla_url.make_url("sqlite:///foo.db")
         )
 
         self.assertTrue(os.path.exists("foo.db"))
@@ -176,14 +177,14 @@
         self.addCleanup(
             mysql_backend.drop_named_database, "adhoc_test"
         )
-        url = str(mysql_backend.provisioned_database_url("adhoc_test"))
+        url = mysql_backend.provisioned_database_url("adhoc_test")
 
         fixture = test_fixtures.AdHocDbFixture(url)
 
         fixture.setUp()
 
         self.assertEqual(
-            str(enginefacade._context_manager._factory._writer_engine.url),
+            enginefacade._context_manager._factory._writer_engine.url,
             url
         )
@@ -23,10 +23,10 @@ from unittest import mock
 
 import fixtures
 from oslo_config import cfg
-from oslo_utils import versionutils
 import sqlalchemy
 from sqlalchemy.engine import base as base_engine
 from sqlalchemy import exc
+from sqlalchemy.pool import NullPool
 from sqlalchemy import sql
 from sqlalchemy import Column, MetaData, Table
 from sqlalchemy import Integer, String
@@ -34,6 +34,7 @@ from sqlalchemy.orm import declarative_base
 
 from oslo_db import exception
 from oslo_db import options as db_options
+from oslo_db.sqlalchemy import compat
 from oslo_db.sqlalchemy import enginefacade
 from oslo_db.sqlalchemy import engines
 from oslo_db.sqlalchemy import models
@@ -388,9 +389,8 @@ class EngineFacadeTestCase(test_base.BaseTestCase):
         self.assertIsNot(ses1, ses2)
 
     def test_get_session_arguments_override_default_settings(self):
-        ses = self.facade.get_session(autocommit=False, expire_on_commit=True)
+        ses = self.facade.get_session(expire_on_commit=True)
 
-        self.assertFalse(ses.autocommit)
         self.assertTrue(ses.expire_on_commit)
 
     @mock.patch('oslo_db.sqlalchemy.orm.get_maker')
@@ -410,7 +410,6 @@ class EngineFacadeTestCase(test_base.BaseTestCase):
             conf.set_override(optname, optvalue, group='database')
 
         session.EngineFacade.from_config(conf,
-                                         autocommit=False,
                                          expire_on_commit=True)
 
         create_engine.assert_called_once_with(
@@ -435,7 +434,6 @@ class EngineFacadeTestCase(test_base.BaseTestCase):
             logging_name=mock.ANY,
         )
         get_maker.assert_called_once_with(engine=create_engine(),
-                                          autocommit=False,
                                           expire_on_commit=True)
 
     def test_slave_connection(self):
@@ -696,22 +694,24 @@ class CreateEngineTest(test_base.BaseTestCase):
 
     def test_queuepool_args(self):
         engines._init_connection_args(
-            utils.make_url("mysql+pymysql://u:p@host/test"), self.args,
-            max_pool_size=10, max_overflow=10)
+            utils.make_url("mysql+pymysql://u:p@host/test"),
+            self.args,
+            {'max_pool_size': 10, 'max_overflow': 10},
+        )
         self.assertEqual(10, self.args['pool_size'])
         self.assertEqual(10, self.args['max_overflow'])
 
     def test_sqlite_memory_pool_args(self):
         for _url in ("sqlite://", "sqlite:///:memory:"):
             engines._init_connection_args(
-                utils.make_url(_url), self.args,
-                max_pool_size=10, max_overflow=10)
+                utils.make_url(_url),
+                self.args,
+                {'max_pool_size': 10, 'max_overflow': 10},
+            )
 
-            # queuepool arguments are not peresnet
-            self.assertNotIn(
-                'pool_size', self.args)
-            self.assertNotIn(
-                'max_overflow', self.args)
+            # queuepool arguments are not present
+            self.assertNotIn('pool_size', self.args)
+            self.assertNotIn('max_overflow', self.args)
 
             self.assertEqual(False,
                              self.args['connect_args']['check_same_thread'])
@@ -721,8 +721,10 @@ class CreateEngineTest(test_base.BaseTestCase):
 
     def test_sqlite_file_pool_args(self):
         engines._init_connection_args(
-            utils.make_url("sqlite:///somefile.db"), self.args,
-            max_pool_size=10, max_overflow=10)
+            utils.make_url("sqlite:///somefile.db"),
+            self.args,
+            {'max_pool_size': 10, 'max_overflow': 10},
+        )
 
         # queuepool arguments are not peresnet
         self.assertNotIn('pool_size', self.args)
@@ -731,9 +733,12 @@ class CreateEngineTest(test_base.BaseTestCase):
 
         self.assertFalse(self.args['connect_args'])
 
-        # NullPool is the default for file based connections,
-        # no need to specify this
-        self.assertNotIn('poolclass', self.args)
+        if not compat.sqla_2:
+            # NullPool is the default for file based connections,
+            # no need to specify this
+            self.assertNotIn('poolclass', self.args)
+        else:
+            self.assertIs(self.args["poolclass"], NullPool)
 
     def _test_mysql_connect_args_default(self, connect_args):
         self.assertEqual({'charset': 'utf8', 'use_unicode': 1},
@@ -741,34 +746,29 @@ class CreateEngineTest(test_base.BaseTestCase):
 
     def test_mysql_connect_args_default(self):
         engines._init_connection_args(
-            utils.make_url("mysql://u:p@host/test"), self.args)
-        self._test_mysql_connect_args_default(self.args['connect_args'])
-
-    def test_mysql_oursql_connect_args_default(self):
-        engines._init_connection_args(
-            utils.make_url("mysql+oursql://u:p@host/test"), self.args)
+            utils.make_url("mysql://u:p@host/test"), self.args, {})
         self._test_mysql_connect_args_default(self.args['connect_args'])
 
     def test_mysql_pymysql_connect_args_default(self):
         engines._init_connection_args(
-            utils.make_url("mysql+pymysql://u:p@host/test"), self.args)
+            utils.make_url("mysql+pymysql://u:p@host/test"), self.args, {})
         self.assertEqual({'charset': 'utf8'}, self.args['connect_args'])
 
     def test_mysql_mysqldb_connect_args_default(self):
         engines._init_connection_args(
-            utils.make_url("mysql+mysqldb://u:p@host/test"), self.args)
+            utils.make_url("mysql+mysqldb://u:p@host/test"), self.args, {})
         self._test_mysql_connect_args_default(self.args['connect_args'])
 
     def test_postgresql_connect_args_default(self):
         engines._init_connection_args(
-            utils.make_url("postgresql://u:p@host/test"), self.args)
+            utils.make_url("postgresql://u:p@host/test"), self.args, {})
         self.assertEqual('utf8', self.args['client_encoding'])
         self.assertFalse(self.args['connect_args'])
 
     def test_mysqlconnector_raise_on_warnings_default(self):
         engines._init_connection_args(
             utils.make_url("mysql+mysqlconnector://u:p@host/test"),
-            self.args)
+            self.args, {})
         self.assertEqual(False, self.args['connect_args']['raise_on_warnings'])
 
     def test_mysqlconnector_raise_on_warnings_override(self):
@@ -776,7 +776,7 @@ class CreateEngineTest(test_base.BaseTestCase):
             utils.make_url(
                 "mysql+mysqlconnector://u:p@host/test"
                 "?raise_on_warnings=true"),
-            self.args
+            self.args, {}
         )
 
         self.assertNotIn('raise_on_warnings', self.args['connect_args'])
@@ -851,18 +851,18 @@ class ProcessGuardTest(db_test_base._DbTestCase):
 
         with mock.patch("os.getpid", get_parent_pid):
             with self.engine.connect() as conn:
-                dbapi_id = id(conn.connection.connection)
+                dbapi_id = id(compat.driver_connection(conn))
 
         with mock.patch("os.getpid", get_child_pid):
             with self.engine.connect() as conn:
-                new_dbapi_id = id(conn.connection.connection)
+                new_dbapi_id = id(compat.driver_connection(conn))
 
         self.assertNotEqual(dbapi_id, new_dbapi_id)
 
         # ensure it doesn't trip again
         with mock.patch("os.getpid", get_child_pid):
             with self.engine.connect() as conn:
-                newer_dbapi_id = id(conn.connection.connection)
+                newer_dbapi_id = id(compat.driver_connection(conn))
 
         self.assertEqual(new_dbapi_id, newer_dbapi_id)
@@ -906,13 +906,12 @@ class MySQLConnectPingListenerTest(db_test_base._MySQLOpportunisticTestCase):
         with self.engine.begin() as conn:
             self.assertTrue(isinstance(conn._transaction,
                                        base_engine.RootTransaction))
-            engines._connect_ping_listener(conn, False)
             # TODO(ralonsoh): drop this check once SQLAlchemy minimum
             # version is 2.0.
-            sqla_version = versionutils.convert_version_to_tuple(
-                sqlalchemy.__version__)
-            if sqla_version[0] >= 2:
+            if compat.sqla_2:
+                engines._connect_ping_listener(conn)
                 self.assertIsNone(conn._transaction)
             else:
+                engines._connect_ping_listener(conn, False)
                 self.assertTrue(isinstance(conn._transaction,
                                            base_engine.RootTransaction))
@@ -26,7 +26,7 @@ from sqlalchemy.dialects.postgresql import psycopg2
 from sqlalchemy.exc import OperationalError
 from sqlalchemy.ext.hybrid import hybrid_property
 from sqlalchemy.orm import declarative_base
-from sqlalchemy.orm import mapper
+from sqlalchemy.orm import registry
 from sqlalchemy.orm import Session
 from sqlalchemy import PrimaryKeyConstraint
 from sqlalchemy import sql
@@ -131,7 +131,8 @@ fake_table = Table(
     Column('key', String(50))
 )
 
-mapper(FakeTableClassicalyMapped, fake_table)
+reg = registry()
+reg.map_imperatively(FakeTableClassicalyMapped, fake_table)
 
 
 class FakeModel(object):
@@ -311,10 +312,16 @@ class TestPaginateQuery(test_base.BaseTestCase):
             'another_crit',
         ]
 
-        with mock.patch.object(self.model.user_id, 'isnot') as mock_isnot, \
-                mock.patch.object(self.model.user_id, 'is_') as mock_is_a, \
-                mock.patch.object(self.model.project_id, 'is_') as mock_is_b:
-            mock_isnot.return_value = 'asc_null_1'
+        with mock.patch.object(
+            self.model.user_id.comparator.expression, 'is_not'
+        ) as mock_is_not, \
+                mock.patch.object(
+                    self.model.user_id.comparator.expression, 'is_'
+                ) as mock_is_a, \
+                mock.patch.object(
+                    self.model.project_id.comparator.expression, 'is_'
+                ) as mock_is_b:
+            mock_is_not.return_value = 'asc_null_1'
             mock_is_a.side_effect = [
                 'desc_null_filter_1',
                 'desc_null_filter_2',
@@ -330,7 +337,7 @@ class TestPaginateQuery(test_base.BaseTestCase):
                 sort_dirs=[
                     'asc-nullslast', 'desc-nullsfirst'])
 
-            mock_isnot.assert_called_once_with(None)
+            mock_is_not.assert_called_once_with(None)
             mock_is_a.assert_has_calls([
                 mock.call(None),
                 mock.call(None),
@@ -384,11 +391,17 @@ class TestPaginateQuery(test_base.BaseTestCase):
         mock_and.return_value = 'some_crit'
         mock_or.side_effect = ['or_1', 'some_f']
 
-        with mock.patch.object(self.model.user_id, 'isnot') as mock_isnot, \
-                mock.patch.object(self.model.updated_at, 'is_') as mock_is_a, \
-                mock.patch.object(self.model.user_id, 'is_') as mock_is_b:
+        with mock.patch.object(
+            self.model.user_id.comparator.expression, 'is_not'
+        ) as mock_is_not, \
+                mock.patch.object(
+                    self.model.updated_at.comparator.expression, 'is_'
+                ) as mock_is_a, \
+                mock.patch.object(
+                    self.model.user_id.comparator.expression, 'is_'
+                ) as mock_is_b:
 
-            mock_isnot.return_value = 'asc_null_1'
+            mock_is_not.return_value = 'asc_null_1'
             mock_is_a.return_value = 'desc_null_1'
             mock_is_b.side_effect = ['asc_null_filter_1', 'asc_null_filter_2']
 
@@ -397,7 +410,7 @@ class TestPaginateQuery(test_base.BaseTestCase):
                                      marker=self.marker,
                                      sort_dirs=[
                                          'asc-nullslast', 'desc-nullsfirst'])
-            mock_isnot.assert_called_once_with(None)
+            mock_is_not.assert_called_once_with(None)
             mock_is_a.assert_called_once_with(None)
             mock_is_b.assert_has_calls([mock.call(None), mock.call(None)])
 
@@ -445,12 +458,20 @@ class TestPaginateQuery(test_base.BaseTestCase):
         ]
         self.query.filter.return_value = self.query
 
-        with mock.patch.object(self.model.user_id, 'isnot') as mock_isnot, \
-                mock.patch.object(self.model.updated_at, 'is_') as mock_is_a, \
-                mock.patch.object(self.model.user_id, 'is_') as mock_is_b, \
-                mock.patch.object(self.model.project_id, 'is_') as mock_is_c:
+        with mock.patch.object(
+            self.model.user_id.comparator.expression, 'is_not'
+        ) as mock_is_not, \
+                mock.patch.object(
+                    self.model.updated_at.comparator.expression, 'is_'
+                ) as mock_is_a, \
+                mock.patch.object(
+                    self.model.user_id.comparator.expression, 'is_'
+                ) as mock_is_b, \
+                mock.patch.object(
+                    self.model.project_id.comparator.expression, 'is_'
+                ) as mock_is_c:
 
-            mock_isnot.return_value = 'asc_null_1'
+            mock_is_not.return_value = 'asc_null_1'
             mock_is_a.return_value = 'desc_null_1'
             mock_is_b.side_effect = ['asc_null_filter_1', 'asc_null_filter_2']
             mock_is_c.side_effect = ['desc_null_3', 'desc_null_filter_3']
@@ -461,7 +482,7 @@ class TestPaginateQuery(test_base.BaseTestCase):
                 sort_dirs=['asc-nullslast', 'desc-nullsfirst',
                            'desc-nullsfirst'])
 
-            mock_isnot.assert_called_once_with(None)
+            mock_is_not.assert_called_once_with(None)
             mock_is_a.assert_called_once_with(None)
             mock_is_b.assert_has_calls([mock.call(None), mock.call(None)])
             mock_is_c.assert_has_calls([mock.call(None), mock.call(None)])
@@ -932,12 +953,12 @@ class TestConnectionUtils(test_base.BaseTestCase):
     def setUp(self):
         super(TestConnectionUtils, self).setUp()
 
-        self.full_credentials = {'backend': 'postgresql',
+        self.full_credentials = {'backend': 'postgresql+psycopg2',
                                  'database': 'test',
                                  'user': 'dude',
                                  'passwd': 'pass'}
 
-        self.connect_string = 'postgresql://dude:pass@localhost/test'
+        self.connect_string = 'postgresql+psycopg2://dude:pass@localhost/test'
 
         # NOTE(rpodolyaka): mock the dialect parts, so that we don't depend
        # on psycopg2 (or any other DBAPI implementation) in these tests
@@ -945,8 +966,20 @@
         @classmethod
         def fake_dbapi(cls):
             return mock.MagicMock()
-        patch_dbapi = mock.patch.object(psycopg2.PGDialect_psycopg2, 'dbapi',
-                                        new=fake_dbapi)
+
+        class OurDialect(psycopg2.PGDialect_psycopg2):
+            def dbapi(self):
+                return fake_dbapi
+
+            def import_dbapi(self):
+                return fake_dbapi
+
+        patch_dbapi = mock.patch.object(
+            psycopg2,
+            "PGDialect_psycopg2",
+            new=OurDialect,
+        )
 
         patch_dbapi.start()
         self.addCleanup(patch_dbapi.stop)
@@ -965,7 +998,7 @@
             self.connect_string)
 
         self.assertIsInstance(eng, sqlalchemy.engine.base.Engine)
-        self.assertEqual(self.connect_string, str(eng.url))
+        self.assertEqual(utils.make_url(self.connect_string), eng.url)
 
         mock_connect.assert_called_once()
         fake_connection.close.assert_called_once()
@@ -982,10 +1015,10 @@
             provision.Backend._ensure_backend_available,
             self.connect_string)
         self.assertEqual(
-            "Backend 'postgresql' is unavailable: "
+            "Backend 'postgresql+psycopg2' is unavailable: "
             "Could not connect", str(exc))
         self.assertEqual(
-            "The postgresql backend is unavailable: %s" % err,
+            "The postgresql+psycopg2 backend is unavailable: %s" % err,
             log.output.strip())
 
     def test_ensure_backend_available_no_dbapi_raises(self):
@@ -1003,10 +1036,10 @@
             utils.make_url(self.connect_string))
 
         self.assertEqual(
-            "Backend 'postgresql' is unavailable: "
+            "Backend 'postgresql+psycopg2' is unavailable: "
             "No DBAPI installed", str(exc))
         self.assertEqual(
-            "The postgresql backend is unavailable: Can't import "
+            "The postgresql+psycopg2 backend is unavailable: Can't import "
             "DBAPI module foobar", log.output.strip())
 
     def test_get_db_connection_info(self):
releasenotes/notes/sqlalchemy-20-0a193a01c70f805a.yaml (new file, +11)
@@ -0,0 +1,11 @@
+---
+features:
+  - |
+    oslo.db now supports SQLAlchemy 2.0.
+  - |
+    A new ``oslo_db.compat`` module has been added. This provides a number of
+    shims for handling differences between SQLAlchemy 1.x and 2.x.
+upgrade:
+  - |
+    The ability to create engine facades that used autocommit, first deprecated
+    in 12.1.0, has now been removed. This is not supported in SQLAlchemy 2.x.