Sync db.sqlalchemy from oslo-incubator
This updates db.sqlalchemy from oslo-incubator, from hash
6f3e5a57aea5402031c4cc8863e0e35f7f222794:

  $ python update.py --nodeps --base keystone \
      --dest-dir ../keystone --modules db.sqlalchemy

Change-Id: I2c8c104b6116f3f0cf0f17df196924ab7c20e072
commit 8ec00322ef
parent dbd98a4c85
@@ -59,7 +59,16 @@ class ModelBase(object):
     def get(self, key, default=None):
         return getattr(self, key, default)
 
-    def _get_extra_keys(self):
+    @property
+    def _extra_keys(self):
+        """Specifies custom fields
+
+        Subclasses can override this property to return a list
+        of custom fields that should be included in their dict
+        representation.
+
+        For reference check tests/db/sqlalchemy/test_models.py
+        """
         return []
 
     def __iter__(self):
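A standalone sketch (not part of the commit) of why the sync replaces the _get_extra_keys() method with an _extra_keys property: subclasses now declare extra dict keys declaratively and ModelBase picks them up during iteration. The ModelBase below is a stripped-down stand-in, and the Instance model, its columns, and values are made up for illustration.

# Stripped-down stand-in for the synced ModelBase.
class ModelBase(object):
    def get(self, key, default=None):
        return getattr(self, key, default)

    @property
    def _extra_keys(self):
        # Default: no extra keys beyond the real columns.
        return []

    def __iter__(self):
        columns = ['id', 'hostname']        # stand-in for the real column list
        columns.extend(self._extra_keys)    # the line changed by the next hunk
        self._i = iter(columns)
        return self

    def __next__(self):
        n = next(self._i)
        return n, getattr(self, n)
    next = __next__                         # Python 2 compatibility


class Instance(ModelBase):                  # hypothetical model
    id = 1
    hostname = 'node-1'

    @property
    def name(self):
        return 'instance-%s' % self.id

    @property
    def _extra_keys(self):
        return ['name']                     # exposed alongside the columns


print(dict(Instance()))
# {'id': 1, 'hostname': 'node-1', 'name': 'instance-1'}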
@@ -67,7 +76,7 @@ class ModelBase(object):
         # NOTE(russellb): Allow models to specify other keys that can be looked
         # up, beyond the actual db columns.  An example would be the 'name'
         # property for an Instance.
-        columns.extend(self._get_extra_keys())
+        columns.extend(self._extra_keys)
         self._i = iter(columns)
         return self
 
@@ -89,7 +98,7 @@ class ModelBase(object):
         joined = dict([(k, v) for k, v in six.iteritems(self.__dict__)
                        if not k[0] == '_'])
         local.update(joined)
-        return local.iteritems()
+        return six.iteritems(local)
 
 
 class TimestampMixin(object):
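A tiny sketch of why the return statement changes: dict.iteritems() exists only on Python 2, while six.iteritems() dispatches to iteritems() or items() as appropriate, so the same code runs on both interpreters. The dictionary contents are made up.

import six

local = {'id': 1, 'name': 'instance-1'}

for key, value in six.iteritems(local):   # works on Python 2 and 3
    print(key, value)

# local.iteritems() would raise AttributeError on Python 3.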
@@ -289,7 +289,9 @@ database_opts = [
                deprecated_opts=[cfg.DeprecatedOpt('sql_idle_timeout',
                                                   group='DEFAULT'),
                                 cfg.DeprecatedOpt('sql_idle_timeout',
-                                                  group='DATABASE')],
+                                                  group='DATABASE'),
+                                cfg.DeprecatedOpt('idle_timeout',
+                                                  group='sql')],
                help='timeout before idle sql connections are reaped'),
     cfg.IntOpt('min_pool_size',
                default=1,
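A hedged sketch of what the added DeprecatedOpt buys: an old config file that still sets the option under the legacy [sql] group keeps working while the canonical name lives in the [database] group. The enclosing option is assumed to be the 'idle_timeout' IntOpt from the hunk above; the default value and group name here are only illustrative.

from oslo_config import cfg   # the 2013-era spelling was: from oslo.config import cfg

opts = [
    cfg.IntOpt('idle_timeout',
               default=3600,
               deprecated_opts=[cfg.DeprecatedOpt('sql_idle_timeout',
                                                  group='DEFAULT'),
                                cfg.DeprecatedOpt('sql_idle_timeout',
                                                  group='DATABASE'),
                                cfg.DeprecatedOpt('idle_timeout',
                                                  group='sql')],
               help='timeout before idle sql connections are reaped'),
]

conf = cfg.ConfigOpts()
conf.register_opts(opts, group='database')
conf([])   # no config files; defaults apply

# A deployment whose config file still says
#     [sql]
#     idle_timeout = 7200
# keeps working: the value is read through the deprecated name and surfaces
# as conf.database.idle_timeout (with a deprecation warning logged).
print(conf.database.idle_timeout)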
@@ -601,18 +603,24 @@ def _thread_yield(dbapi_con, con_record):
     time.sleep(0)
 
 
-def _ping_listener(dbapi_conn, connection_rec, connection_proxy):
-    """Ensures that MySQL connections checked out of the pool are alive.
+def _ping_listener(engine, dbapi_conn, connection_rec, connection_proxy):
+    """Ensures that MySQL and DB2 connections are alive.
 
     Borrowed from:
     http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f
     """
+    cursor = dbapi_conn.cursor()
     try:
-        dbapi_conn.cursor().execute('select 1')
-    except dbapi_conn.OperationalError as ex:
-        if ex.args[0] in (2006, 2013, 2014, 2045, 2055):
-            LOG.warning(_('Got mysql server has gone away: %s'), ex)
-            raise sqla_exc.DisconnectionError("Database server went away")
+        ping_sql = 'select 1'
+        if engine.name == 'ibm_db_sa':
+            # DB2 requires a table expression
+            ping_sql = 'select 1 from (values (1)) AS t1'
+        cursor.execute(ping_sql)
+    except Exception as ex:
+        if engine.dialect.is_disconnect(ex, dbapi_conn, cursor):
+            msg = _('Database server has gone away: %s') % ex
+            LOG.warning(msg)
+            raise sqla_exc.DisconnectionError(msg)
         else:
             raise
 
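A small sketch of the new disconnect detection: instead of hand-matching MySQL error codes (2006, 2013, ...), the listener now asks the dialect's own is_disconnect() whether an exception means the server went away. The exception text below is a stand-in; only SQLAlchemy is required.

import sqlalchemy

engine = sqlalchemy.create_engine('sqlite://')   # in-memory engine for illustration
try:
    raise Exception('2006, MySQL server has gone away')   # stand-in error
except Exception as ex:
    # The SQLite dialect returns False here; the MySQL dialect recognises
    # its "server has gone away" error codes.
    print(engine.dialect.is_disconnect(ex, None, None))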
@@ -670,8 +678,9 @@ def create_engine(sql_connection, sqlite_fk=False):
 
     sqlalchemy.event.listen(engine, 'checkin', _thread_yield)
 
-    if 'mysql' in connection_dict.drivername:
-        sqlalchemy.event.listen(engine, 'checkout', _ping_listener)
+    if engine.name in ['mysql', 'ibm_db_sa']:
+        callback = functools.partial(_ping_listener, engine)
+        sqlalchemy.event.listen(engine, 'checkout', callback)
     elif 'sqlite' in connection_dict.drivername:
         if not CONF.sqlite_synchronous:
             sqlalchemy.event.listen(engine, 'connect',
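A runnable sketch (not part of the commit) of the functools.partial trick used above: SQLAlchemy invokes 'checkout' listeners as fn(dbapi_conn, connection_rec, connection_proxy), so the engine has to be pre-bound as the first argument for the new listener signature to fit. The listener body and FakeEngine are stand-ins.

import functools


def _ping_listener(engine, dbapi_conn, connection_rec, connection_proxy):
    # Stand-in body; the real listener pings the connection (see above).
    print('pinging via %s dialect' % engine.name)


class FakeEngine(object):      # stand-in for a real sqlalchemy Engine
    name = 'mysql'


engine = FakeEngine()
callback = functools.partial(_ping_listener, engine)

# SQLAlchemy would call the callback with exactly three arguments on checkout:
callback('dbapi-conn', 'conn-record', 'conn-proxy')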
@@ -23,7 +23,7 @@ import lockfile
 import sqlalchemy
 import sqlalchemy.exc
 
-from keystone.openstack.common.gettextutils import _
+from keystone.openstack.common.gettextutils import _  # noqa
 from keystone.openstack.common import log as logging
 from keystone.openstack.common.py3kcompat import urlutils
 from keystone.openstack.common import test
@@ -133,9 +133,9 @@ def paginate_query(query, model, limit, sort_keys, marker=None,
 
     # Build up an array of sort criteria as in the docstring
     criteria_list = []
-    for i in range(0, len(sort_keys)):
+    for i in range(len(sort_keys)):
         crit_attrs = []
-        for j in range(0, i):
+        for j in range(i):
             model_attr = getattr(model, sort_keys[j])
             crit_attrs.append((model_attr == marker_values[j]))
 
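A runnable sketch of the keyset-pagination criteria these loops build, using plain strings instead of SQLAlchemy expressions. The sort keys and marker values are made up, and the ascending direction is assumed (the real paginate_query also handles descending sorts).

sort_keys = ['created_at', 'id']
marker_values = ['2013-09-01', 42]

criteria_list = []
for i in range(len(sort_keys)):
    # Fix the earlier sort keys with equality, advance on the current one.
    crit_attrs = ['%s == %r' % (sort_keys[j], marker_values[j]) for j in range(i)]
    crit_attrs.append('%s > %r' % (sort_keys[i], marker_values[i]))
    criteria_list.append(' AND '.join(crit_attrs))

print(' OR '.join('(%s)' % c for c in criteria_list))
# (created_at > '2013-09-01') OR (created_at == '2013-09-01' AND id > 42)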
@@ -215,9 +215,9 @@ def drop_unique_constraint(migrate_engine, table_name, uc_name, *columns,
     NullType with the correct column type.
 
     :param migrate_engine: sqlalchemy engine
-    :param table_name: name of table that contains unique constraint.
-    :param uc_name: name of unique constraint that will be dropped.
-    :param columns: columns that are in unique constraint.
+    :param table_name: name of table that contains uniq constraint.
+    :param uc_name: name of uniq constraint that will be dropped.
+    :param columns: columns that are in uniq constraint.
     :param col_name_col_instance: contains pair column_name=column_instance.
                 column_instance is instance of Column. These params
                 are required only for columns that have unsupported