Remove MySQL NDB Cluster Support
This has been deprecated for some time now.

Change-Id: Ia8b4ed8cd755d283bb773e55293457190b34c482
Signed-off-by: Stephen Finucane <stephenfin@redhat.com>
parent c42eb7ec07 · commit 5363ca11c9
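For context, a minimal before/after sketch of the call pattern this commit deletes. The `engines.create_engine()` helper and its `mysql_enable_ndb` keyword are taken from the diff below; the connection URL is a hypothetical placeholder.

    # Before (deprecated since 12.1.0): ask oslo.db to patch the engine for
    # MySQL Cluster (NDB) support.
    from oslo_db.sqlalchemy import engines

    engine = engines.create_engine(
        'mysql+pymysql://user:secret@localhost/example',  # hypothetical URL
        mysql_enable_ndb=True,  # keyword removed by this commit
    )

    # After: the keyword no longer exists, so create the engine without it.
    engine = engines.create_engine(
        'mysql+pymysql://user:secret@localhost/example',
    )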
@@ -68,20 +68,6 @@ database_opts = [
            'configure any setting.'
        ),
    ),
    cfg.BoolOpt(
        'mysql_enable_ndb',
        default=False,
        deprecated_for_removal=True,
        deprecated_since='12.1.0',
        deprecated_reason=(
            'Support for the MySQL NDB Cluster storage engine has been '
            'deprecated and will be removed in a future release.'
        ),
        help=(
            'If True, transparently enables support for handling '
            'MySQL Cluster (NDB).'
        ),
    ),
    cfg.IntOpt(
        'connection_recycle_time',
        default=3600,
@@ -159,7 +159,6 @@ class _TransactionFactory:
            'sqlite_fk': _Default(False),
            'mysql_sql_mode': _Default('TRADITIONAL'),
            'mysql_wsrep_sync_wait': _Default(),
            'mysql_enable_ndb': _Default(),
            'connection_recycle_time': _Default(3600),
            'connection_debug': _Default(0),
            'max_pool_size': _Default(),
@@ -231,7 +230,6 @@ class _TransactionFactory:
        :param mysql_sql_mode: MySQL SQL mode, defaults to TRADITIONAL
        :param mysql_wsrep_sync_wait: MySQL wsrep_sync_wait, defaults to None,
         which indicates no setting will be passed
        :param mysql_enable_ndb: enable MySQL Cluster (NDB) support
        :param connection_recycle_time: connection pool recycle time,
         defaults to 3600. Note the connection does not actually have to be
         "idle" to be recycled.
@@ -311,18 +309,11 @@ class _TransactionFactory:
        self._configure(False, kw)

    def _configure(self, as_defaults, kw):
        if 'mysql_enable_ndb' in kw:
            debtcollector.deprecate(
                (
                    'Support for the MySQL NDB Cluster storage engine has '
                    'been deprecated and will be removed in a future release.'
                ),
                version='12.1.0',
            )

        if self._started:
            raise AlreadyStartedError(
                "this TransactionFactory is already started")
                "this TransactionFactory is already started"
            )

        not_supported = []
        for k, v in kw.items():
            for dict_ in (
@@ -1233,10 +1224,6 @@ class LegacyEngineFacade(object):
    :keyword mysql_wsrep_sync_wait: value of wsrep_sync_wait for Galera
                                    (defaults to None, which indicates no setting
                                    will be passed)
    :keyword mysql_enable_ndb: If True, transparently enables support for
                               handling MySQL Cluster (NDB).
                               (defaults to False)
                               **DEPRECATED**
    :keyword connection_recycle_time: Time period for connections to be
                                      recycled upon checkout (defaults to 3600)
    :keyword connection_debug: verbosity of SQL debugging information.
@@ -35,7 +35,6 @@ from oslo_db import exception

from oslo_db.sqlalchemy import compat
from oslo_db.sqlalchemy import exc_filters
from oslo_db.sqlalchemy import ndb
from oslo_db.sqlalchemy import utils

LOG = logging.getLogger(__name__)
@@ -147,14 +146,6 @@ def _vet_url(url):
    )


@debtcollector.removals.removed_kwarg(
    'mysql_enable_ndb',
    message=(
        'Support for the MySQL NDB Cluster storage engine has been deprecated '
        'and will be removed in a future release.'
    ),
    version='12.1.0',
)
@debtcollector.renames.renamed_kwarg(
    'idle_timeout',
    'connection_recycle_time',
@@ -162,7 +153,6 @@ def _vet_url(url):
)
def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,
                  mysql_wsrep_sync_wait=None,
                  mysql_enable_ndb=False,
                  connection_recycle_time=3600,
                  connection_debug=0, max_pool_size=None, max_overflow=None,
                  pool_timeout=None, sqlite_synchronous=True,
@@ -203,9 +193,6 @@ def create_engine(sql_connection, sqlite_fk=False, mysql_sql_mode=None,

    engine = sqlalchemy.create_engine(url, **engine_args)

    if mysql_enable_ndb:
        ndb.enable_ndb_support(engine)

    _init_events(
        engine,
        mysql_sql_mode=mysql_sql_mode,
@@ -360,9 +347,6 @@ _init_events(
            "consider enabling TRADITIONAL or STRICT_ALL_TABLES",
            realmode)

    if ndb.ndb_status(engine):
        ndb.init_ndb_events(engine)


@_init_events.dispatch_for("sqlite")
def _init_events(engine, sqlite_synchronous=True, sqlite_fk=False, **kw):
@@ -1,150 +0,0 @@
# Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Core functions for MySQL Cluster (NDB) Support."""

import re

import debtcollector.removals
from sqlalchemy import event, schema
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.types import String as _String
from sqlalchemy.types import to_instance

from oslo_db.sqlalchemy.types import String


engine_regex = re.compile("engine=innodb", re.IGNORECASE)
trans_regex = re.compile("savepoint|rollback|release savepoint", re.IGNORECASE)


@debtcollector.removals.remove(
    message=(
        'Support for the MySQL NDB Cluster storage engine has been deprecated '
        'and will be removed in a future release.'
    ),
    version='12.1.0',
)
def enable_ndb_support(engine):
    """Enable NDB Support.

    Function to flag the MySQL engine dialect to support features specific
    to MySQL Cluster (NDB).
    """
    engine.dialect._oslodb_enable_ndb_support = True


def _ndb_status(engine_or_compiler):
    """Test if NDB Support is enabled.

    Function to test if NDB support is enabled or not.

    .. note::

        This is for internal use only while we deprecate and remove ndb
        support. **Do not use this outside of oslo.db!**
    """
    return getattr(
        engine_or_compiler.dialect,
        '_oslodb_enable_ndb_support',
        False,
    )


@debtcollector.removals.remove(
    message=(
        'Support for the MySQL NDB Cluster storage engine has been deprecated '
        'and will be removed in a future release.'
    ),
    version='12.1.0',
)
def ndb_status(engine_or_compiler):
    """Test if NDB Support is enabled.

    Function to test if NDB support is enabled or not.
    """
    return _ndb_status(engine_or_compiler)


@debtcollector.removals.remove(
    message=(
        'Support for the MySQL NDB Cluster storage engine has been deprecated '
        'and will be removed in a future release.'
    ),
    version='12.1.0',
)
def init_ndb_events(engine):
    """Initialize NDB Events.

    Function starts NDB specific events.
    """
    @event.listens_for(engine, "before_cursor_execute", retval=True)
    def before_cursor_execute(conn, cursor, statement, parameters, context,
                              executemany):
        """Listen for specific SQL strings and replace automatically.

        Function will intercept any raw execute calls and automatically
        convert InnoDB to NDBCLUSTER, drop SAVEPOINT requests, drop
        ROLLBACK requests, and drop RELEASE SAVEPOINT requests.
        """
        if _ndb_status(engine):
            statement = engine_regex.sub("ENGINE=NDBCLUSTER", statement)
            if re.match(trans_regex, statement):
                statement = "SET @oslo_db_ndb_savepoint_rollback_disabled = 0;"

        return statement, parameters


# TODO(stephenfin): This is effectively deprecated and should be removed when
# we remove the rest of this module since it'll be a no-op then.
@compiles(schema.CreateTable, "mysql")
def prefix_inserts(create_table, compiler, **kw):
    """Replace InnoDB with NDBCLUSTER automatically.

    Function will intercept CreateTable() calls and automatically
    convert InnoDB to NDBCLUSTER. Targets compiler events.
    """
    existing = compiler.visit_create_table(create_table, **kw)
    if _ndb_status(compiler):
        existing = engine_regex.sub("ENGINE=NDBCLUSTER", existing)

    return existing


# TODO(stephenfin): This is effectively deprecated and should be removed when
# we remove the rest of this module since it'll be a no-op then.
@compiles(String, "mysql")
def _compile_ndb_string(element, compiler, **kw):
    """Process ndb specific overrides for String.

    Function will intercept mysql_ndb_length and mysql_ndb_type
    arguments to adjust columns automatically.

    mysql_ndb_length argument will adjust the String length
    to the requested value.

    mysql_ndb_type will change the column type to the requested
    data type.
    """
    if not _ndb_status(compiler):
        return compiler.visit_string(element, **kw)

    if element.mysql_ndb_length:
        effective_type = element.adapt(
            _String, length=element.mysql_ndb_length)
        return compiler.visit_string(effective_type, **kw)
    elif element.mysql_ndb_type:
        effective_type = to_instance(element.mysql_ndb_type)
        return compiler.process(effective_type, **kw)
    else:
        return compiler.visit_string(element, **kw)
@@ -12,9 +12,8 @@

import json

import debtcollector.removals
from sqlalchemy.dialects import mysql
from sqlalchemy.types import Integer, Text, TypeDecorator, String as _String
from sqlalchemy.types import Integer, Text, TypeDecorator


class JsonEncodedType(TypeDecorator):
@@ -116,40 +115,3 @@ class SoftDeleteInteger(TypeDecorator):
        if value is None:
            return None
        return int(value)


# NOTE(stephenfin): We deprecate the whole class rather than just the
# ndb-related arguments, since without these arguments this is identical to the
# upstream SQLAlchemy type
@debtcollector.removals.removed_class(
    'String',
    message=(
        'Support for the MySQL NDB Cluster storage engine has been '
        'deprecated and will be removed in a future release. Use the '
        'standard String type from sqlalchemy.types'
    ),
    version='12.1.0',
)
class String(_String):
    """String subclass that implements oslo_db specific options.

    Initial goal is to support ndb-specific flags.

    mysql_ndb_type is used to override the String with another data type.
    mysql_ndb_size is used to adjust the length of the String.
    """

    cache_ok = True
    """This type is safe to cache."""

    def __init__(
        self,
        length,
        mysql_ndb_length=None,
        mysql_ndb_type=None,
        **kw,
    ):
        """Initialize options."""
        super(String, self).__init__(length, **kw)
        self.mysql_ndb_type = mysql_ndb_type
        self.mysql_ndb_length = mysql_ndb_length
@@ -18,14 +18,10 @@

import collections
from collections import abc
import contextlib
import itertools
import logging
import re

from alembic.migration import MigrationContext
from alembic.operations import Operations
import debtcollector.removals
from oslo_utils import timeutils
import sqlalchemy
from sqlalchemy import Boolean
@@ -45,7 +41,6 @@ from sqlalchemy import Table
from oslo_db._i18n import _
from oslo_db import exception
from oslo_db.sqlalchemy import models
from oslo_db.sqlalchemy import ndb

# NOTE(ochuprykov): Add references for backwards compatibility
InvalidSortKey = exception.InvalidSortKey
@@ -848,45 +843,6 @@ def get_non_innodb_tables(connectable, skip_tables=('migrate_version',
    return [i[0] for i in noninnodb]


@debtcollector.removals.remove(
    message=(
        'Support for the MySQL NDB Cluster storage engine has been deprecated '
        'and will be removed in a future release.'
    ),
    version='12.1.0',
)
def get_non_ndbcluster_tables(connectable, skip_tables=None):
    """Get a list of tables which don't use MySQL Cluster (NDB) storage engine.

    :param connectable: a SQLAlchemy Engine or Connection instance
    :param skip_tables: a list of tables which might have a different
                        storage engine
    """
    query_str = """
        SELECT table_name
        FROM information_schema.tables
        WHERE table_schema = :database AND
              engine != 'ndbcluster'
    """

    params = {}
    if skip_tables:
        params = dict(
            ('skip_%s' % i, table_name)
            for i, table_name in enumerate(skip_tables)
        )

        placeholders = ', '.join(':' + p for p in params)
        query_str += ' AND table_name NOT IN (%s)' % placeholders

    params['database'] = connectable.engine.url.database
    query = text(query_str)
    # TODO(stephenfin): What about if this is already a Connection?
    with connectable.connect() as conn, conn.begin():
        nonndbcluster = connectable.execute(query, **params)
        return [i[0] for i in nonndbcluster]


def get_foreign_key_constraint_name(engine, table_name, column_name):
    """Find the name of foreign key in a table, given constrained column name.
@@ -906,93 +862,6 @@ def get_foreign_key_constraint_name(engine, table_name, column_name):
            return fk['name']


@contextlib.contextmanager
def suspend_fk_constraints_for_col_alter(
    engine, table_name, column_name, referents=[],
):
    """Detect foreign key constraints, drop, and recreate.

    This is used to guard against a column ALTER that on some backends
    cannot proceed unless foreign key constraints are not present.

    e.g.::

        from oslo_db.sqlalchemy.util import (
            suspend_fk_constraints_for_col_alter
        )

        with suspend_fk_constraints_for_col_alter(
            migrate_engine, "user_table",
            referents=[
                "local_user", "nonlocal_user", "project"
            ]):
            user_table.c.domain_id.alter(nullable=False)

    :param engine: a SQLAlchemy engine (or connection)

    :param table_name: target table name. All foreign key constraints
     that refer to the table_name / column_name will be dropped and recreated.

    :param column_name: target column name. all foreign key constraints
     which refer to this column, either partially or fully, will be dropped
     and recreated.

    :param referents: sequence of string table names to search for foreign
     key constraints. A future version of this function may no longer
     require this argument, however for the moment it is required.
    """
    debtcollector.deprecate(
        (
            'Support for the MySQL NDB Cluster storage engine has been '
            'deprecated and will be removed in a future release.'
        ),
        version='12.1.0',
    )

    if not ndb._ndb_status(engine):
        yield
    else:
        with engine.connect() as conn:
            with conn.begin():
                insp = inspect(conn)
                fks = []
                for ref_table_name in referents:
                    for fk in insp.get_foreign_keys(ref_table_name):
                        if not fk.get('name'):
                            raise AssertionError("foreign key hasn't a name.")
                        if fk['referred_table'] == table_name and \
                                column_name in fk['referred_columns']:
                            fk['source_table'] = ref_table_name
                            if 'options' not in fk:
                                fk['options'] = {}
                            fks.append(fk)

                ctx = MigrationContext.configure(conn)
                op = Operations(ctx)

                for fk in fks:
                    op.drop_constraint(
                        fk['name'],
                        fk['source_table'],
                        type_="foreignkey",
                    )

            yield

            with conn.begin():
                for fk in fks:
                    op.create_foreign_key(
                        fk['name'], fk['source_table'],
                        fk['referred_table'],
                        fk['constrained_columns'],
                        fk['referred_columns'],
                        onupdate=fk['options'].get('onupdate'),
                        ondelete=fk['options'].get('ondelete'),
                        deferrable=fk['options'].get('deferrable'),
                        initially=fk['options'].get('initially'),
                    )


def make_url(target):
    """Return a ``url.URL`` object"""
    if isinstance(target, (str, sa_url.URL)):
@@ -257,7 +257,6 @@ class MockFacadeTest(test_base.BaseTestCase):
                sql_connection=self.engine_uri,
                **{
                    k: mock.ANY for k in self.factory._engine_cfg.keys()
                    if k not in ('mysql_enable_ndb',)
                },
            )
            if self.slave_uri:
@@ -265,7 +264,6 @@ class MockFacadeTest(test_base.BaseTestCase):
                    sql_connection=self.slave_uri,
                    **{
                        k: mock.ANY for k in self.factory._engine_cfg.keys()
                        if k not in ('mysql_enable_ndb',)
                    },
                )
@@ -1,205 +0,0 @@
# Copyright (c) 2017, Oracle and/or its affiliates. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for MySQL Cluster (NDB) Support."""

import logging
from unittest import mock
import warnings

from oslo_db import exception
from oslo_db.sqlalchemy import enginefacade
from oslo_db.sqlalchemy import engines
from oslo_db.sqlalchemy import ndb
from oslo_db.sqlalchemy import test_fixtures
from oslo_db.sqlalchemy.types import String
from oslo_db.sqlalchemy import utils
from oslo_db.tests import base as test_base

from sqlalchemy import Column
from sqlalchemy import create_engine
from sqlalchemy.dialects.mysql import TEXT
from sqlalchemy.dialects.mysql import TINYTEXT
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import schema
from sqlalchemy import Table
from sqlalchemy import Text

LOG = logging.getLogger(__name__)

_MOCK_CONNECTION = 'mysql+pymysql://'
with warnings.catch_warnings():  # hide deprecation warning
    _TEST_TABLE = Table(
        "test_ndb",
        MetaData(),
        Column('id', Integer, primary_key=True),
        Column('test1', String(255, mysql_ndb_type=TEXT)),
        Column('test2', String(4096, mysql_ndb_type=TEXT)),
        Column('test3', String(255, mysql_ndb_length=64)),
        mysql_engine='InnoDB',
    )


class NDBMockTestBase(test_base.BaseTestCase):
    def setUp(self):
        super(NDBMockTestBase, self).setUp()
        mock_dbapi = mock.Mock()
        self.test_engine = test_engine = create_engine(
            _MOCK_CONNECTION, module=mock_dbapi)
        test_engine.dialect._oslodb_enable_ndb_support = True

        self.addCleanup(
            setattr, test_engine.dialect, "_oslodb_enable_ndb_support", False
        )
        # hide deprecation warnings
        with warnings.catch_warnings():
            ndb.init_ndb_events(test_engine)


class NDBEventTestCase(NDBMockTestBase):

    def test_ndb_createtable_override(self):
        test_engine = self.test_engine
        self.assertRegex(
            str(schema.CreateTable(_TEST_TABLE).compile(
                dialect=test_engine.dialect)),
            "ENGINE=NDBCLUSTER")

    def test_ndb_engine_override(self):
        test_engine = self.test_engine
        statement = "ENGINE=InnoDB"
        for fn in test_engine.dispatch.before_cursor_execute:
            statement, dialect = fn(
                mock.Mock(), mock.Mock(), statement, {}, mock.Mock(), False)
        self.assertEqual(statement, "ENGINE=NDBCLUSTER")

    def test_ndb_savepoint_override(self):
        test_engine = self.test_engine
        statement = "SAVEPOINT xyx"
        for fn in test_engine.dispatch.before_cursor_execute:
            statement, dialect = fn(
                mock.Mock(), mock.Mock(), statement, {}, mock.Mock(), False)
        self.assertEqual(statement,
                         "SET @oslo_db_ndb_savepoint_rollback_disabled = 0;")

    def test_ndb_rollback_override(self):
        test_engine = self.test_engine
        statement = "ROLLBACK TO SAVEPOINT xyz"
        for fn in test_engine.dispatch.before_cursor_execute:
            statement, dialect = fn(
                mock.Mock(), mock.Mock(), statement, {}, mock.Mock(), False)
        self.assertEqual(statement,
                         "SET @oslo_db_ndb_savepoint_rollback_disabled = 0;")

    def test_ndb_rollback_release_override(self):
        test_engine = self.test_engine
        statement = "RELEASE SAVEPOINT xyz"
        for fn in test_engine.dispatch.before_cursor_execute:
            statement, dialect = fn(
                mock.Mock(), mock.Mock(), statement, {}, mock.Mock(), False)
        self.assertEqual(statement,
                         "SET @oslo_db_ndb_savepoint_rollback_disabled = 0;")


class NDBDatatypesTestCase(NDBMockTestBase):
    def test_ndb_string_to_tinytext(self):
        test_engine = self.test_engine
        self.assertEqual("TINYTEXT",
                         str(String(255, mysql_ndb_type=TINYTEXT).compile(
                             dialect=test_engine.dialect)))

    def test_ndb_string_to_text(self):
        test_engine = self.test_engine
        self.assertEqual("TEXT",
                         str(String(4096, mysql_ndb_type=TEXT).compile(
                             dialect=test_engine.dialect)))

    def test_ndb_string_length(self):
        test_engine = self.test_engine
        self.assertEqual('VARCHAR(64)',
                         str(String(255, mysql_ndb_length=64).compile(
                             dialect=test_engine.dialect)))


class NDBDatatypesDefaultTestCase(NDBMockTestBase):
    def setUp(self):
        super(NDBMockTestBase, self).setUp()
        mock_dbapi = mock.Mock()
        self.test_engine = create_engine(_MOCK_CONNECTION, module=mock_dbapi)

    def test_non_ndb_string_to_text(self):
        test_engine = self.test_engine
        self.assertEqual("VARCHAR(255)",
                         str(String(255, mysql_ndb_type=TINYTEXT).compile(
                             dialect=test_engine.dialect)))

    def test_non_ndb_autostringtext(self):
        test_engine = self.test_engine
        self.assertEqual("VARCHAR(4096)",
                         str(String(4096, mysql_ndb_type=TEXT).compile(
                             dialect=test_engine.dialect)))

    def test_non_ndb_autostringsize(self):
        test_engine = self.test_engine
        self.assertEqual('VARCHAR(255)',
                         str(String(255, mysql_ndb_length=64).compile(
                             dialect=test_engine.dialect)))


class NDBOpportunisticTestCase(
    test_fixtures.OpportunisticDBTestMixin, test_base.BaseTestCase,
):

    FIXTURE = test_fixtures.MySQLOpportunisticFixture

    def init_db(self, use_ndb):
        # get the MySQL engine created by the opportunistic
        # provisioning system
        self.engine = enginefacade.writer.get_engine()
        if use_ndb:
            # if we want NDB, make a new local engine that uses the
            # URL / database / schema etc. of the provisioned engine,
            # since NDB-ness is a per-table thing
            with warnings.catch_warnings():  # hide deprecation warnings
                self.engine = engines.create_engine(
                    self.engine.url, mysql_enable_ndb=True
                )
            self.addCleanup(self.engine.dispose)
        self.test_table = _TEST_TABLE
        try:
            self.test_table.create(self.engine)
        except exception.DBNotSupportedError:
            self.skipTest("MySQL NDB Cluster not available")

    def test_ndb_enabled(self):
        self.init_db(True)
        with warnings.catch_warnings():  # hide deprecation warnings
            self.assertTrue(ndb.ndb_status(self.engine))
            self.assertIsInstance(self.test_table.c.test1.type, TINYTEXT)
            self.assertIsInstance(self.test_table.c.test2.type, Text)
            self.assertIsInstance(self.test_table.c.test3.type, String)
            self.assertEqual(64, self.test_table.c.test3.type.length)
            self.assertEqual([], utils.get_non_ndbcluster_tables(self.engine))

    def test_ndb_disabled(self):
        self.init_db(False)
        with warnings.catch_warnings():  # hide deprecation warnings
            self.assertFalse(ndb.ndb_status(self.engine))
            self.assertIsInstance(self.test_table.c.test1.type, String)
            self.assertEqual(255, self.test_table.c.test1.type.length)
            self.assertIsInstance(self.test_table.c.test2.type, String)
            self.assertEqual(4096, self.test_table.c.test2.type.length)
            self.assertIsInstance(self.test_table.c.test3.type, String)
            self.assertEqual(255, self.test_table.c.test3.type.length)
            self.assertEqual([], utils.get_non_innodb_tables(self.engine))
@@ -417,57 +417,6 @@ class EngineFacadeTestCase(test_base.BaseTestCase):
            max_pool_size=10,
            mysql_sql_mode='TRADITIONAL',
            mysql_wsrep_sync_wait=None,
            # NOTE: mysql_enable_ndb should *not* be passed through since it's
            # deprecated and not set in the configopts
            # mysql_enable_ndb=False,
            sqlite_fk=False,
            connection_recycle_time=mock.ANY,
            retry_interval=mock.ANY,
            max_retries=mock.ANY,
            max_overflow=mock.ANY,
            connection_trace=mock.ANY,
            sqlite_synchronous=mock.ANY,
            pool_timeout=mock.ANY,
            thread_checkin=mock.ANY,
            json_serializer=None,
            json_deserializer=None,
            connection_parameters='',
            logging_name=mock.ANY,
        )
        get_maker.assert_called_once_with(
            engine=create_engine(), expire_on_commit=True,
        )

    @mock.patch('oslo_db.sqlalchemy.orm.get_maker')
    @mock.patch('oslo_db.sqlalchemy.engines.create_engine')
    def test_creation_from_config_with_deprecated_opts(
        self, create_engine, get_maker,
    ):
        conf = cfg.ConfigOpts()
        conf.register_opts(db_options.database_opts, group='database')

        overrides = {
            'connection': 'sqlite:///:memory:',
            'slave_connection': None,
            'connection_debug': 100,
            'max_pool_size': 10,
            'mysql_sql_mode': 'TRADITIONAL',
            'mysql_enable_ndb': True,
        }
        for optname, optvalue in overrides.items():
            conf.set_override(optname, optvalue, group='database')

        session.EngineFacade.from_config(conf, expire_on_commit=True)

        create_engine.assert_called_once_with(
            sql_connection='sqlite:///:memory:',
            connection_debug=100,
            max_pool_size=10,
            mysql_sql_mode='TRADITIONAL',
            mysql_wsrep_sync_wait=None,
            # NOTE: mysql_enable_ndb *should* be passed through since it's set
            # in the configopts
            mysql_enable_ndb=True,
            sqlite_fk=False,
            connection_recycle_time=mock.ANY,
            retry_interval=mock.ANY,
@@ -28,7 +28,6 @@ from sqlalchemy.orm import column_property
from sqlalchemy.orm import declarative_base
from sqlalchemy.orm import registry
from sqlalchemy.orm import Session
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import sql
from sqlalchemy.sql.expression import cast
from sqlalchemy.sql import select
@@ -819,103 +818,6 @@ class TestMigrationUtils(db_test_base._DbTestCase):
                                                     'foreign_id')
        self.assertEqual(fkc, 'table_name_2_fk1')

    @db_test_base.backend_specific('mysql', 'postgresql')
    def test_suspend_fk_constraints_for_col_alter(self):

        a = Table(
            'a', self.meta,
            Column('id', Integer, primary_key=True)
        )
        b = Table(
            'b', self.meta,
            Column('key', Integer),
            Column('archive_id', Integer),
            Column('aid', ForeignKey('a.id')),
            PrimaryKeyConstraint("key", "archive_id")
        )
        c = Table(
            'c', self.meta,
            Column('id', Integer, primary_key=True),
            Column('aid', ForeignKey('a.id')),
            Column('key', Integer),
            Column('archive_id', Integer),
            ForeignKeyConstraint(
                ['key', 'archive_id'], ['b.key', 'b.archive_id'],
                name="some_composite_fk")
        )
        self.meta.create_all(self.engine, tables=[a, b, c])

        def get_fk_entries():
            inspector = sqlalchemy.inspect(self.engine)
            return sorted(
                inspector.get_foreign_keys('b') +
                inspector.get_foreign_keys('c'),
                key=lambda fk: fk['referred_table']
            )

        def normalize_fk_entries(fks):
            return [{
                'name': fk['name'],
                'referred_columns': fk['referred_columns'],
                'referred_table': fk['referred_table'],
            } for fk in fks]

        existing_foreign_keys = get_fk_entries()
        self.assertEqual(
            [{'name': mock.ANY,
              'referred_columns': ['id'], 'referred_table': 'a'},
             {'name': mock.ANY,
              'referred_columns': ['id'], 'referred_table': 'a'},
             {'name': 'some_composite_fk',
              'referred_columns': ['key', 'archive_id'],
              'referred_table': 'b'}],
            normalize_fk_entries(existing_foreign_keys)
        )

        with mock.patch("oslo_db.sqlalchemy.ndb._ndb_status",
                        mock.Mock(return_value=True)):
            with utils.suspend_fk_constraints_for_col_alter(
                    self.engine, 'a', 'id', referents=['b', 'c']):
                no_a_foreign_keys = get_fk_entries()
                self.assertEqual(
                    [{'name': 'some_composite_fk',
                      'referred_columns': ['key', 'archive_id'],
                      'referred_table': 'b'}],
                    normalize_fk_entries(no_a_foreign_keys)
                )

        self.assertEqual(existing_foreign_keys, get_fk_entries())

        with mock.patch("oslo_db.sqlalchemy.ndb._ndb_status",
                        mock.Mock(return_value=True)):
            with utils.suspend_fk_constraints_for_col_alter(
                    self.engine, 'b', 'archive_id', referents=['c']):
                self.assertEqual(
                    [{'name': mock.ANY,
                      'referred_columns': ['id'], 'referred_table': 'a'},
                     {'name': mock.ANY,
                      'referred_columns': ['id'], 'referred_table': 'a'}],
                    normalize_fk_entries(get_fk_entries())
                )

        self.assertEqual(existing_foreign_keys, get_fk_entries())

        with utils.suspend_fk_constraints_for_col_alter(
                self.engine, 'a', 'id', referents=['b', 'c']):
            self.assertEqual(existing_foreign_keys, get_fk_entries())

        if self.engine.name == 'mysql':
            self.engine.dialect._oslodb_enable_ndb_support = True

            self.addCleanup(
                setattr, self.engine.dialect, "_oslodb_enable_ndb_support",
                False
            )

            with utils.suspend_fk_constraints_for_col_alter(
                    self.engine, 'a', 'id', referents=['b', 'c']):
                self.assertEqual(no_a_foreign_keys, get_fk_entries())


class PostgresqlTestMigrations(TestMigrationUtils,
                               db_test_base._PostgreSQLOpportunisticTestCase):
@@ -0,0 +1,4 @@
---
upgrade:
  - |
    MySQL NDB Cluster support has been removed.
Loading…
Reference in New Issue
Block a user