Fix issues with alembic constraint naming
Most constraints were unnamed in CloudKitty, mainly because the default naming convention doesn't name them. Since most primary key and unique constraints are automatically defined by attributes on a column, you can't directly specify a name. We've defined a naming convention in a base model to ensure new tables are created with constraints having sensible names. This still won't fix Alembic SQLite migrations, which can't reproduce the ON DELETE attribute of a ForeignKey — creating all sorts of bugs with recursive deletes. Change-Id: I5f0cc1771082e4c53ad678d94fdf68af1323050d
This commit is contained in:
parent
6f9e9c9151
commit
1b278d99f5
31
cloudkitty/common/db/models.py
Normal file
31
cloudkitty/common/db/models.py
Normal file
@ -0,0 +1,31 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2016 Objectif Libre
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
# @author: Stéphane Albert
|
||||
#
|
||||
from sqlalchemy.ext import declarative
|
||||
|
||||
# Naming convention installed on the declarative base metadata so that
# implicitly-created indexes and constraints get deterministic names
# (required for Alembic to be able to drop/alter them later).
NAMING_CONVENTION = {
    "ix": "ix_%(column_0_label)s",
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(constraint_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s",
}
|
||||
|
||||
|
||||
def get_base():
    """Return a fresh declarative base using the project naming convention.

    Every model package should build its Base through this helper so that
    implicit constraints receive names from NAMING_CONVENTION.
    """
    declarative_base = declarative.declarative_base()
    declarative_base.metadata.naming_convention = NAMING_CONVENTION
    return declarative_base
|
@ -0,0 +1,342 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright 2014 Objectif Libre
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
#
|
||||
# @author: Stéphane Albert
|
||||
#
|
||||
from oslo_db.sqlalchemy import models
|
||||
import sqlalchemy
|
||||
from sqlalchemy.ext import declarative
|
||||
from sqlalchemy import orm
|
||||
from sqlalchemy import schema
|
||||
|
||||
from cloudkitty.common.db import models as ck_models
|
||||
|
||||
Base = ck_models.get_base()
|
||||
|
||||
|
||||
class HashMapBase(models.ModelBase):
    """Common behaviour shared by every hashmap model.

    Provides session handling for saves, dict export helpers and the
    fk_to_resolve hook used to swap internal foreign keys for their
    public identifiers when exporting.
    """

    __table_args__ = {'mysql_charset': "utf8",
                      'mysql_engine': "InnoDB"}
    # Maps an exported column name to the dotted attribute path whose
    # value should replace it in export_model().
    fk_to_resolve = {}

    def save(self, session=None):
        """Persist the row, creating a CloudKitty session when none is given."""
        # Imported lazily to avoid a circular import at module load time.
        from cloudkitty import db

        if session is None:
            session = db.get_session()

        super(HashMapBase, self).save(session=session)

    def as_dict(self):
        """Return the row as a dict, omitting the internal 'id' column."""
        return {col.name: self[col.name]
                for col in self.__table__.columns
                if col.name != 'id'}

    def _recursive_resolve(self, path):
        """Follow the dotted attribute *path* from self.

        Returns None as soon as one hop in the path is missing.
        """
        target = self
        for attr in path.split('.'):
            if not hasattr(target, attr):
                return None
            target = getattr(target, attr)
        return target

    def export_model(self):
        """Return as_dict() with fk_to_resolve columns replaced by the
        values found at their configured attribute paths."""
        exported = self.as_dict()
        exported.update(
            (column, self._recursive_resolve(path))
            for column, path in self.fk_to_resolve.items())
        return exported
|
||||
|
||||
|
||||
class HashMapService(Base, HashMapBase):
    """A hashmap service.

    Used to describe a CloudKitty service such as compute or volume.
    """
    __tablename__ = 'hashmap_services'

    # Internal surrogate key; hidden by HashMapBase.as_dict().
    id = sqlalchemy.Column(
        sqlalchemy.Integer,
        primary_key=True)
    # Public 36-char identifier (UUID-sized), unique per service.
    service_id = sqlalchemy.Column(
        sqlalchemy.String(36),
        nullable=False,
        unique=True)
    name = sqlalchemy.Column(
        sqlalchemy.String(255),
        nullable=False,
        unique=True)
    # lazy='immediate' makes SQLAlchemy load the related rows right after
    # the parent query instead of on first access.
    fields = orm.relationship(
        'HashMapField',
        backref=orm.backref(
            'service',
            lazy='immediate'))
    mappings = orm.relationship(
        'HashMapMapping',
        backref=orm.backref(
            'service',
            lazy='immediate'))
    thresholds = orm.relationship(
        'HashMapThreshold',
        backref=orm.backref(
            'service',
            lazy='immediate'))

    def __repr__(self):
        return ('<HashMapService[{uuid}]: '
                'service={service}>').format(
                    uuid=self.service_id,
                    service=self.name)
|
||||
|
||||
|
||||
class HashMapField(Base, HashMapBase):
    """A hashmap field.

    Used to describe a service metadata such as flavor_id or image_id for
    compute.
    """
    __tablename__ = 'hashmap_fields'
    # On export, replace the internal integer FK with the parent
    # service's public UUID (see HashMapBase.export_model).
    fk_to_resolve = {
        'service_id': 'service.service_id'}

    @declarative.declared_attr
    def __table_args__(cls):
        # declared_attr lets us merge the per-class unique constraints
        # with the shared table options inherited from HashMapBase.
        args = (
            schema.UniqueConstraint(
                'field_id',
                'name',
                name='uniq_field'),
            schema.UniqueConstraint(
                'service_id',
                'name',
                name='uniq_map_service_field'),
            HashMapBase.__table_args__,)
        return args

    # Internal surrogate key; hidden by HashMapBase.as_dict().
    id = sqlalchemy.Column(
        sqlalchemy.Integer,
        primary_key=True)
    # Public 36-char identifier (UUID-sized), unique per field.
    field_id = sqlalchemy.Column(
        sqlalchemy.String(36),
        nullable=False,
        unique=True)
    name = sqlalchemy.Column(
        sqlalchemy.String(255),
        nullable=False)
    # ON DELETE CASCADE: fields are removed with their parent service.
    service_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey(
            'hashmap_services.id',
            ondelete='CASCADE'),
        nullable=False)
    mappings = orm.relationship(
        'HashMapMapping',
        backref=orm.backref(
            'field',
            lazy='immediate'))
    thresholds = orm.relationship(
        'HashMapThreshold',
        backref=orm.backref(
            'field',
            lazy='immediate'))

    def __repr__(self):
        return ('<HashMapField[{uuid}]: '
                'field={field}>').format(
                    uuid=self.field_id,
                    field=self.name)
|
||||
|
||||
|
||||
class HashMapGroup(Base, HashMapBase):
    """A grouping of hashmap calculations.

    Used to group multiple mappings or thresholds into a single calculation.
    """
    __tablename__ = 'hashmap_groups'

    # Internal surrogate key; hidden by HashMapBase.as_dict().
    id = sqlalchemy.Column(
        sqlalchemy.Integer,
        primary_key=True)
    # Public 36-char identifier (UUID-sized), unique per group.
    group_id = sqlalchemy.Column(
        sqlalchemy.String(36),
        nullable=False,
        unique=True)
    name = sqlalchemy.Column(
        sqlalchemy.String(255),
        nullable=False,
        unique=True)
    # lazy='immediate' loads related rows right after the parent query.
    mappings = orm.relationship(
        'HashMapMapping',
        backref=orm.backref(
            'group',
            lazy='immediate'))
    thresholds = orm.relationship(
        'HashMapThreshold',
        backref=orm.backref(
            'group',
            lazy='immediate'))

    def __repr__(self):
        return ('<HashMapGroup[{uuid}]: '
                'name={name}>').format(
                    uuid=self.group_id,
                    name=self.name)
|
||||
|
||||
|
||||
class HashMapMapping(Base, HashMapBase):
    """A mapping between a field or service, a value and a type.

    Used to model the final equation.
    """
    __tablename__ = 'hashmap_mappings'
    # On export, replace internal integer FKs with the related rows'
    # public UUIDs (see HashMapBase.export_model).
    fk_to_resolve = {
        'service_id': 'service.service_id',
        'field_id': 'field.field_id',
        'group_id': 'group.group_id'}

    @declarative.declared_attr
    def __table_args__(cls):
        # Merge the per-class unique constraints with the shared table
        # options inherited from HashMapBase.
        args = (
            schema.UniqueConstraint(
                'value',
                'field_id',
                name='uniq_field_mapping'),
            schema.UniqueConstraint(
                'value',
                'service_id',
                name='uniq_service_mapping'),
            HashMapBase.__table_args__,)
        return args

    # Internal surrogate key; hidden by HashMapBase.as_dict().
    id = sqlalchemy.Column(
        sqlalchemy.Integer,
        primary_key=True)
    # Public 36-char identifier (UUID-sized), unique per mapping.
    mapping_id = sqlalchemy.Column(
        sqlalchemy.String(36),
        nullable=False,
        unique=True)
    # Nullable: presumably service-wide mappings carry no matched value —
    # TODO(review): confirm against the API layer.
    value = sqlalchemy.Column(
        sqlalchemy.String(255),
        nullable=True)
    cost = sqlalchemy.Column(
        sqlalchemy.Numeric(20, 8),
        nullable=False)
    map_type = sqlalchemy.Column(
        sqlalchemy.Enum(
            'flat',
            'rate',
            name='enum_map_type'),
        nullable=False)
    # ON DELETE CASCADE: mappings vanish with their parent service/field.
    service_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey(
            'hashmap_services.id',
            ondelete='CASCADE'),
        nullable=True)
    field_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey(
            'hashmap_fields.id',
            ondelete='CASCADE'),
        nullable=True)
    # ON DELETE SET NULL: mappings survive group deletion, ungrouped.
    group_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey(
            'hashmap_groups.id',
            ondelete='SET NULL'),
        nullable=True)

    def __repr__(self):
        return ('<HashMapMapping[{uuid}]: '
                'type={map_type} {value}={cost}>').format(
                    uuid=self.mapping_id,
                    map_type=self.map_type,
                    value=self.value,
                    cost=self.cost)
|
||||
|
||||
|
||||
class HashMapThreshold(Base, HashMapBase):
    """A threshold matching a service or a field with a level and a type.

    Used to model the final equation.
    """
    __tablename__ = 'hashmap_thresholds'
    # On export, replace internal integer FKs with the related rows'
    # public UUIDs (see HashMapBase.export_model).
    fk_to_resolve = {
        'service_id': 'service.service_id',
        'field_id': 'field.field_id',
        'group_id': 'group.group_id'}

    @declarative.declared_attr
    def __table_args__(cls):
        # Merge the per-class unique constraints with the shared table
        # options inherited from HashMapBase.
        args = (
            schema.UniqueConstraint(
                'level',
                'field_id',
                name='uniq_field_threshold'),
            schema.UniqueConstraint(
                'level',
                'service_id',
                name='uniq_service_threshold'),
            HashMapBase.__table_args__,)
        return args

    # Internal surrogate key; hidden by HashMapBase.as_dict().
    id = sqlalchemy.Column(
        sqlalchemy.Integer,
        primary_key=True)
    # Public 36-char identifier (UUID-sized), unique per threshold.
    threshold_id = sqlalchemy.Column(
        sqlalchemy.String(36),
        nullable=False,
        unique=True)
    level = sqlalchemy.Column(
        sqlalchemy.Numeric(20, 8),
        nullable=True)
    cost = sqlalchemy.Column(
        sqlalchemy.Numeric(20, 8),
        nullable=False)
    # NOTE(review): this Enum is named 'enum_hashmap_type' while
    # HashMapMapping uses 'enum_map_type' for the same values — looks
    # inconsistent; renaming would need a migration, so only flagging it.
    map_type = sqlalchemy.Column(
        sqlalchemy.Enum(
            'flat',
            'rate',
            name='enum_hashmap_type'),
        nullable=False)
    # ON DELETE CASCADE: thresholds vanish with their parent service/field.
    service_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey(
            'hashmap_services.id',
            ondelete='CASCADE'),
        nullable=True)
    field_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey(
            'hashmap_fields.id',
            ondelete='CASCADE'),
        nullable=True)
    # ON DELETE SET NULL: thresholds survive group deletion, ungrouped.
    group_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey(
            'hashmap_groups.id',
            ondelete='SET NULL'),
        nullable=True)

    def __repr__(self):
        return ('<HashMapThreshold[{uuid}]: '
                'type={map_type} {level}={cost}>').format(
                    uuid=self.threshold_id,
                    map_type=self.map_type,
                    level=self.level,
                    cost=self.cost)
|
@ -0,0 +1,244 @@
|
||||
"""Fix unnamed constraints.
|
||||
|
||||
Revision ID: f8c799db4aa0
|
||||
Revises: 10d2738b67df
|
||||
Create Date: 2016-05-18 18:08:19.331412
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'f8c799db4aa0'
|
||||
down_revision = '10d2738b67df'
|
||||
import copy
|
||||
|
||||
from alembic import op
|
||||
import six
|
||||
|
||||
from cloudkitty.rating.hash.db.sqlalchemy.alembic.models \
|
||||
import f8c799db4aa0_fix_unnamed_constraints as models
|
||||
|
||||
# Constraint renames keyed by constraint type, then table name. Each entry
# is an (old_name, new_name, params) tuple; params carries the positional
# and keyword arguments needed to recreate the constraint after dropping
# the old one (consumed by translate_op()).
OPS = {
    'foreignkey': {
        'hashmap_fields': [
            ('hashmap_fields_service_id_fkey',
             'fk_hashmap_fields_service_id_hashmap_services',
             {
                 'args': [
                     'hashmap_services',
                     ['service_id'],
                     ['id']],
                 'kwargs': {
                     'ondelete': 'CASCADE'}})],
        'hashmap_thresholds': [
            ('hashmap_thresholds_field_id_fkey',
             'fk_hashmap_thresholds_field_id_hashmap_fields',
             {
                 'args': [
                     'hashmap_fields',
                     ['field_id'],
                     ['id']],
                 'kwargs': {
                     'ondelete': 'CASCADE'}}),
            ('hashmap_thresholds_group_id_fkey',
             'fk_hashmap_thresholds_group_id_hashmap_groups',
             {
                 'args': [
                     'hashmap_groups',
                     ['group_id'],
                     ['id']],
                 'kwargs': {
                     'ondelete': 'SET NULL'}}),
            ('hashmap_thresholds_service_id_fkey',
             'fk_hashmap_thresholds_service_id_hashmap_services',
             {
                 'args': [
                     'hashmap_services',
                     ['service_id'],
                     ['id']],
                 'kwargs': {
                     'ondelete': 'CASCADE'}})],
        'hashmap_mappings': [
            ('hashmap_maps_field_id_fkey',
             'fk_hashmap_maps_field_id_hashmap_fields',
             {
                 'args': [
                     'hashmap_fields',
                     ['field_id'],
                     ['id']],
                 'kwargs': {
                     'ondelete': 'CASCADE'}}),
            ('hashmap_maps_group_id_fkey',
             'fk_hashmap_maps_group_id_hashmap_groups',
             {
                 'args': [
                     'hashmap_groups',
                     ['group_id'],
                     ['id']],
                 'kwargs': {
                     'ondelete': 'SET NULL'}}),
            # BUG FIX: this entry previously duplicated the field FK args
            # ('hashmap_fields', ['field_id']) — a copy-paste slip. The
            # service FK must join mappings.service_id to
            # hashmap_services.id, matching the model definition.
            ('hashmap_maps_service_id_fkey',
             'fk_hashmap_maps_service_id_hashmap_services',
             {
                 'args': [
                     'hashmap_services',
                     ['service_id'],
                     ['id']],
                 'kwargs': {
                     'ondelete': 'CASCADE'}})]
    },
    'primary': {
        'hashmap_services': [
            ('hashmap_services_pkey',
             'pk_hashmap_services',
             {'args': [['id']]})],
        'hashmap_fields': [
            ('hashmap_fields_pkey',
             'pk_hashmap_fields',
             {'args': [['id']]})],
        'hashmap_groups': [
            ('hashmap_groups_pkey',
             'pk_hashmap_groups',
             {'args': [['id']]})],
        'hashmap_mappings': [
            # The mappings table's PK keeps the historical 'hashmap_maps'
            # name here; POST_OPS renames it to pk_hashmap_mappings.
            ('hashmap_maps_pkey',
             'pk_hashmap_maps',
             {'args': [['id']]})],
        'hashmap_thresholds': [
            ('hashmap_thresholds_pkey',
             'pk_hashmap_thresholds',
             {'args': [['id']]})]
    },
    'unique': {
        'hashmap_services': [
            ('hashmap_services_name_key',
             'uq_hashmap_services_name',
             {'args': [['name']]}),
            ('hashmap_services_service_id_key',
             'uq_hashmap_services_service_id',
             {'args': [['service_id']]})],
        'hashmap_fields': [
            ('hashmap_fields_field_id_key',
             'uq_hashmap_fields_field_id',
             {'args': [['field_id']]})],
        'hashmap_groups': [
            ('hashmap_groups_group_id_key',
             'uq_hashmap_groups_group_id',
             {'args': [['group_id']]}),
            ('hashmap_groups_name_key',
             'uq_hashmap_groups_name',
             {'args': [['name']]})],
        'hashmap_mappings': [
            ('hashmap_maps_mapping_id_key',
             'uq_hashmap_maps_mapping_id',
             {'args': [['mapping_id']]})],
        'hashmap_thresholds': [
            ('hashmap_thresholds_threshold_id_key',
             'uq_hashmap_thresholds_threshold_id',
             {'args': [['threshold_id']]})]}}
|
||||
|
||||
# Renames applied after the main pass: the mappings table's primary key
# historically carried the old 'hashmap_maps' table name and gets its
# final, convention-matching name here.
POST_OPS = {
    'primary': {
        'hashmap_mappings': [
            ('pk_hashmap_maps',
             'pk_hashmap_mappings',
             {'args': [['id']]}),
        ],
    },
}
|
||||
|
||||
|
||||
def upgrade_sqlite():
    """Recreate every table so SQLite picks up the new constraint names.

    NOTE(sheeprine): batch_alter_table automatically recreates the table
    (copying its data) from the current model metadata; a no-op
    alter_column on 'id' is enough to force that recreation — easier than
    an explicit drop and create.
    """
    for table_name, table in six.iteritems(models.Base.metadata.tables):
        with op.batch_alter_table(table_name, copy_from=table) as batch_op:
            batch_op.alter_column('id')
|
||||
|
||||
|
||||
def upgrade_mysql():
    """Rename constraints on MySQL by recreating the tables twice.

    First pass rebuilds the FK-holding tables without their constraints,
    second pass rebuilds everything from the full model metadata so the
    new constraint names apply.
    """
    # FK checks must be off while tables referenced by others are rebuilt.
    op.execute('SET FOREIGN_KEY_CHECKS=0;')
    # Deep copy so clearing constraints doesn't mutate the live metadata.
    tables = copy.deepcopy(models.Base.metadata.tables)
    # Copy first without constraints
    tables['hashmap_fields'].constraints = set()
    tables['hashmap_mappings'].constraints = set()
    tables['hashmap_thresholds'].constraints = set()
    for name, table in six.iteritems(tables):
        with op.batch_alter_table(
                name,
                copy_from=table,
                recreate='always') as batch_op:
            # No-op change: forces batch mode to recreate the table.
            batch_op.alter_column('id')
    # Final copy with constraints
    for name, table in six.iteritems(models.Base.metadata.tables):
        with op.batch_alter_table(
                name,
                copy_from=table,
                recreate='always') as batch_op:
            batch_op.alter_column('id')
    op.execute('SET FOREIGN_KEY_CHECKS=1;')
|
||||
|
||||
|
||||
def translate_op(op_, constraint_type, name, table, *args, **kwargs):
    """Execute one constraint operation through Alembic.

    :param op_: 'drop' to remove the constraint, anything else to create it.
    :param constraint_type: 'primary', 'unique' or 'foreignkey'.
    :param name: constraint name to drop or create.
    :param table: table the constraint belongs to.

    Extra positional/keyword arguments are forwarded to the Alembic
    create_* function.
    """
    if op_ == 'drop':
        op.drop_constraint(name, table, type_=constraint_type)
        return
    if constraint_type == 'primary':
        create = op.create_primary_key
    elif constraint_type == 'unique':
        create = op.create_unique_constraint
    elif constraint_type == 'foreignkey':
        create = op.create_foreign_key
    create(name, table, *args, **kwargs)
|
||||
|
||||
|
||||
def upgrade_postgresql():
    """Rename constraints on PostgreSQL with explicit drop/create pairs."""
    # NOTE(sheeprine): No automagic stuff here.
    # Check if tables need additional work
    conn = op.get_bind()
    res = conn.execute(
        "SELECT * FROM INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS"
        " WHERE CONSTRAINT_NAME = 'hashmap_thresholds_field_id_fkey';")
    # If the old auto-generated FK name is still present the full rename
    # set (OPS) is needed; otherwise only the post-rename pass applies.
    if res.rowcount:
        ops_list = [OPS, POST_OPS]
    else:
        ops_list = [POST_OPS]
    for cur_ops in ops_list:
        # Drop FKs first so the primary/unique constraints they reference
        # can be dropped afterwards.
        for constraint_type in ('foreignkey', 'unique', 'primary'):
            for table_name, constraints in six.iteritems(
                    cur_ops.get(constraint_type, dict())):
                for constraint in constraints:
                    old_name = constraint[0]
                    translate_op(
                        'drop',
                        constraint_type,
                        old_name,
                        table_name)
        # Recreate in the reverse order: primary/unique must exist before
        # the FKs that point at them.
        for constraint_type in ('primary', 'unique', 'foreignkey'):
            for table_name, constraints in six.iteritems(
                    cur_ops.get(constraint_type, dict())):
                for constraint in constraints:
                    new_name = constraint[1]
                    params = constraint[2]
                    translate_op(
                        'create',
                        constraint_type,
                        new_name,
                        table_name,
                        *params.get('args', []),
                        **params.get('kwargs', {}))
|
||||
|
||||
|
||||
def upgrade():
    """Dispatch to the dialect-specific upgrade implementation."""
    dialect_name = op.get_context().dialect.name
    if dialect_name == 'sqlite':
        upgrade_sqlite()
    elif dialect_name == 'mysql':
        upgrade_mysql()
    elif dialect_name == 'postgresql':
        upgrade_postgresql()
|
||||
|
||||
|
||||
def downgrade():
    """Intentionally a no-op.

    NOTE(sheeprine): No need to downgrade here as the naming conventions
    will still apply.
    """
    pass
|
@ -21,7 +21,9 @@ from sqlalchemy.ext import declarative
|
||||
from sqlalchemy import orm
|
||||
from sqlalchemy import schema
|
||||
|
||||
Base = declarative.declarative_base()
|
||||
from cloudkitty.common.db import models as ck_models
|
||||
|
||||
Base = ck_models.get_base()
|
||||
|
||||
|
||||
class HashMapBase(models.ModelBase):
|
||||
|
Loading…
Reference in New Issue
Block a user