diff --git a/doc/source/devref/db_layer.rst b/doc/source/devref/db_layer.rst index 3edad0e88f7..44e1da44609 100644 --- a/doc/source/devref/db_layer.rst +++ b/doc/source/devref/db_layer.rst @@ -76,7 +76,7 @@ many-to-one because each object then needs its own table for the attributes To address this issue, the 'standardattribute' table is available. Any model can add support for this table by inheriting the 'HasStandardAttributes' mixin -in neutron.db.model_base. This mixin will add a standard_attr_id BigInteger +in neutron.db.standard_attr. This mixin will add a standard_attr_id BigInteger column to the model with a foreign key relationship to the 'standardattribute' table. The model will then be able to access any columns of the 'standardattribute' table and any tables related to it. diff --git a/neutron/db/l3_db.py b/neutron/db/l3_db.py index e4f2d044854..79b58ece4d2 100644 --- a/neutron/db/l3_db.py +++ b/neutron/db/l3_db.py @@ -44,6 +44,7 @@ from neutron.db import common_db_mixin from neutron.db import l3_agentschedulers_db as l3_agt from neutron.db import model_base from neutron.db import models_v2 +from neutron.db import standard_attr from neutron.db import standardattrdescription_db as st_attr from neutron.extensions import external_net from neutron.extensions import l3 @@ -90,7 +91,7 @@ class RouterPort(model_base.BASEV2): lazy='joined') -class Router(model_base.HasStandardAttributes, model_base.BASEV2, +class Router(standard_attr.HasStandardAttributes, model_base.BASEV2, model_base.HasId, model_base.HasProject): """Represents a v2 neutron router.""" @@ -110,7 +111,7 @@ class Router(model_base.HasStandardAttributes, model_base.BASEV2, secondary=l3_agt.RouterL3AgentBinding.__table__) -class FloatingIP(model_base.HasStandardAttributes, model_base.BASEV2, +class FloatingIP(standard_attr.HasStandardAttributes, model_base.BASEV2, model_base.HasId, model_base.HasProject): """Represents a floating IP address. diff --git a/neutron/db/model_base.py b/neutron/db/model_base.py index 474d9517e55..4e905dc1bb5 100644 --- a/neutron/db/model_base.py +++ b/neutron/db/model_base.py @@ -17,7 +17,6 @@ import debtcollector from oslo_db.sqlalchemy import models from oslo_utils import uuidutils import sqlalchemy as sa -from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy.ext import declarative from sqlalchemy import orm @@ -127,114 +126,6 @@ class NeutronBaseV2(NeutronBase): BASEV2 = declarative.declarative_base(cls=NeutronBaseV2) -class StandardAttribute(BASEV2, models.TimestampMixin): - """Common table to associate all Neutron API resources. - - By having Neutron objects related to this table, we can associate new - tables that apply to many Neutron objects (e.g. timestamps, rbac entries) - to this table to avoid schema duplication while maintaining referential - integrity. - - NOTE(kevinbenton): This table should not have more columns added to it - unless we are absolutely certain the new column will have a value for - every single type of Neutron resource. Otherwise this table will be filled - with NULL entries for combinations that don't make sense. Additionally, - by keeping this table small we can ensure that performance isn't adversely - impacted for queries on objects. 
- """ - - # sqlite doesn't support auto increment on big integers so we use big int - # for everything but sqlite - id = sa.Column(sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), - primary_key=True, autoincrement=True) - - # NOTE(kevinbenton): this column is redundant information, but it allows - # operators/devs to look at the contents of this table and know which table - # the corresponding object is in. - # 255 was selected as a max just because it's the varchar ceiling in mysql - # before a 2-byte prefix is required. We shouldn't get anywhere near this - # limit with our table names... - resource_type = sa.Column(sa.String(255), nullable=False) - description = sa.Column(sa.String(attr.DESCRIPTION_MAX_LEN)) - - revision_number = sa.Column( - sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), - server_default='0', nullable=False) - - __mapper_args__ = { - # see http://docs.sqlalchemy.org/en/latest/orm/versioning.html for - # details about how this works - "version_id_col": revision_number - } - - -class HasStandardAttributes(object): - @declarative.declared_attr - def standard_attr_id(cls): - return sa.Column( - sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), - sa.ForeignKey(StandardAttribute.id, ondelete="CASCADE"), - unique=True, - nullable=False - ) - - # NOTE(kevinbenton): we have to disable the following pylint check because - # it thinks we are overriding this method in the __init__ method. - #pylint: disable=method-hidden - @declarative.declared_attr - def standard_attr(cls): - return orm.relationship(StandardAttribute, - lazy='joined', - cascade='all, delete-orphan', - single_parent=True, - uselist=False) - - def __init__(self, *args, **kwargs): - standard_attr_keys = ['description', 'created_at', - 'updated_at', 'revision_number'] - standard_attr_kwargs = {} - for key in standard_attr_keys: - if key in kwargs: - standard_attr_kwargs[key] = kwargs.pop(key) - super(HasStandardAttributes, self).__init__(*args, **kwargs) - # here we automatically create the related standard attribute object - self.standard_attr = StandardAttribute( - resource_type=self.__tablename__, **standard_attr_kwargs) - - @declarative.declared_attr - def description(cls): - return association_proxy('standard_attr', 'description') - - @declarative.declared_attr - def created_at(cls): - return association_proxy('standard_attr', 'created_at') - - @declarative.declared_attr - def updated_at(cls): - return association_proxy('standard_attr', 'updated_at') - - def update(self, new_dict): - # ignore the timestamps if they were passed in. For example, this - # happens if code calls update_port with modified results of get_port - new_dict.pop('created_at', None) - new_dict.pop('updated_at', None) - super(HasStandardAttributes, self).update(new_dict) - - @declarative.declared_attr - def revision_number(cls): - return association_proxy('standard_attr', 'revision_number') - - def bump_revision(self): - # SQLAlchemy will bump the version for us automatically if the - # standard attr record is being modified, but we must call this - # for all other modifications or when relevant children are being - # modified (e.g. 
fixed_ips change should bump port revision) - if self.standard_attr.revision_number is None: - # this is a brand new object uncommited so we don't bump now - return - self.standard_attr.revision_number += 1 - - def get_unique_keys(model): try: constraints = model.__table__.constraints diff --git a/neutron/db/models/securitygroup.py b/neutron/db/models/securitygroup.py index c7d86bff15c..56a63c751bd 100644 --- a/neutron/db/models/securitygroup.py +++ b/neutron/db/models/securitygroup.py @@ -18,9 +18,10 @@ from sqlalchemy import orm from neutron.api.v2 import attributes from neutron.db import model_base from neutron.db import models_v2 +from neutron.db import standard_attr -class SecurityGroup(model_base.HasStandardAttributes, model_base.BASEV2, +class SecurityGroup(standard_attr.HasStandardAttributes, model_base.BASEV2, model_base.HasId, model_base.HasProject): """Represents a v2 neutron security group.""" @@ -60,7 +61,7 @@ class SecurityGroupPortBinding(model_base.BASEV2): lazy='joined', cascade='delete')) -class SecurityGroupRule(model_base.HasStandardAttributes, model_base.BASEV2, +class SecurityGroupRule(standard_attr.HasStandardAttributes, model_base.BASEV2, model_base.HasId, model_base.HasProject): """Represents a v2 neutron security group rule.""" diff --git a/neutron/db/models_v2.py b/neutron/db/models_v2.py index 29595f5eecc..55dd7354f86 100644 --- a/neutron/db/models_v2.py +++ b/neutron/db/models_v2.py @@ -22,6 +22,7 @@ from neutron.api.v2 import attributes as attr from neutron.db import model_base from neutron.db.network_dhcp_agent_binding import models as ndab_model from neutron.db import rbac_db_models +from neutron.db import standard_attr # NOTE(kevinbenton): these are here for external projects that expect them @@ -114,7 +115,7 @@ class SubnetRoute(model_base.BASEV2, Route): primary_key=True) -class Port(model_base.HasStandardAttributes, model_base.BASEV2, +class Port(standard_attr.HasStandardAttributes, model_base.BASEV2, HasId, HasTenant): """Represents a port on a Neutron v2 network.""" @@ -173,7 +174,7 @@ class DNSNameServer(model_base.BASEV2): order = sa.Column(sa.Integer, nullable=False, server_default='0') -class Subnet(model_base.HasStandardAttributes, model_base.BASEV2, +class Subnet(standard_attr.HasStandardAttributes, model_base.BASEV2, HasId, HasTenant): """Represents a neutron subnet. @@ -242,7 +243,7 @@ class SubnetPoolPrefix(model_base.BASEV2): primary_key=True) -class SubnetPool(model_base.HasStandardAttributes, model_base.BASEV2, +class SubnetPool(standard_attr.HasStandardAttributes, model_base.BASEV2, HasId, HasTenant): """Represents a neutron subnet pool. 
""" @@ -264,7 +265,7 @@ class SubnetPool(model_base.HasStandardAttributes, model_base.BASEV2, lazy='joined') -class Network(model_base.HasStandardAttributes, model_base.BASEV2, +class Network(standard_attr.HasStandardAttributes, model_base.BASEV2, HasId, HasTenant): """Represents a v2 neutron network.""" diff --git a/neutron/db/provisioning_blocks.py b/neutron/db/provisioning_blocks.py index ad77ceec25e..e5bcbe054c7 100644 --- a/neutron/db/provisioning_blocks.py +++ b/neutron/db/provisioning_blocks.py @@ -23,6 +23,7 @@ from neutron.callbacks import resources from neutron.db import api as db_api from neutron.db import model_base from neutron.db import models_v2 +from neutron.db import standard_attr LOG = logging.getLogger(__name__) PROVISIONING_COMPLETE = 'provisioning_complete' @@ -36,7 +37,7 @@ class ProvisioningBlock(model_base.BASEV2): # the standard attr id of the thing we want to block standard_attr_id = ( sa.Column(sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), - sa.ForeignKey(model_base.StandardAttribute.id, + sa.ForeignKey(standard_attr.StandardAttribute.id, ondelete="CASCADE"), primary_key=True)) # the entity that wants to block the status change (e.g. L2 Agent) diff --git a/neutron/db/standard_attr.py b/neutron/db/standard_attr.py new file mode 100644 index 00000000000..f4b8149c736 --- /dev/null +++ b/neutron/db/standard_attr.py @@ -0,0 +1,129 @@ +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from oslo_db.sqlalchemy import models +import sqlalchemy as sa +from sqlalchemy.ext.associationproxy import association_proxy +from sqlalchemy.ext import declarative + +from neutron.api.v2 import attributes as attr +from neutron.db import model_base + + +class StandardAttribute(model_base.BASEV2, models.TimestampMixin): + """Common table to associate all Neutron API resources. + + By having Neutron objects related to this table, we can associate new + tables that apply to many Neutron objects (e.g. timestamps, rbac entries) + to this table to avoid schema duplication while maintaining referential + integrity. + + NOTE(kevinbenton): This table should not have more columns added to it + unless we are absolutely certain the new column will have a value for + every single type of Neutron resource. Otherwise this table will be filled + with NULL entries for combinations that don't make sense. Additionally, + by keeping this table small we can ensure that performance isn't adversely + impacted for queries on objects. + """ + + # sqlite doesn't support auto increment on big integers so we use big int + # for everything but sqlite + id = sa.Column(sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), + primary_key=True, autoincrement=True) + + # NOTE(kevinbenton): this column is redundant information, but it allows + # operators/devs to look at the contents of this table and know which table + # the corresponding object is in. + # 255 was selected as a max just because it's the varchar ceiling in mysql + # before a 2-byte prefix is required. 
We shouldn't get anywhere near this + # limit with our table names... + resource_type = sa.Column(sa.String(255), nullable=False) + description = sa.Column(sa.String(attr.DESCRIPTION_MAX_LEN)) + + revision_number = sa.Column( + sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), + server_default='0', nullable=False) + + __mapper_args__ = { + # see http://docs.sqlalchemy.org/en/latest/orm/versioning.html for + # details about how this works + "version_id_col": revision_number + } + + +class HasStandardAttributes(object): + @declarative.declared_attr + def standard_attr_id(cls): + return sa.Column( + sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), + sa.ForeignKey(StandardAttribute.id, ondelete="CASCADE"), + unique=True, + nullable=False + ) + + # NOTE(kevinbenton): we have to disable the following pylint check because + # it thinks we are overriding this method in the __init__ method. + #pylint: disable=method-hidden + @declarative.declared_attr + def standard_attr(cls): + return sa.orm.relationship(StandardAttribute, + lazy='joined', + cascade='all, delete-orphan', + single_parent=True, + uselist=False) + + def __init__(self, *args, **kwargs): + standard_attr_keys = ['description', 'created_at', + 'updated_at', 'revision_number'] + standard_attr_kwargs = {} + for key in standard_attr_keys: + if key in kwargs: + standard_attr_kwargs[key] = kwargs.pop(key) + super(HasStandardAttributes, self).__init__(*args, **kwargs) + # here we automatically create the related standard attribute object + self.standard_attr = StandardAttribute( + resource_type=self.__tablename__, **standard_attr_kwargs) + + @declarative.declared_attr + def description(cls): + return association_proxy('standard_attr', 'description') + + @declarative.declared_attr + def created_at(cls): + return association_proxy('standard_attr', 'created_at') + + @declarative.declared_attr + def updated_at(cls): + return association_proxy('standard_attr', 'updated_at') + + def update(self, new_dict): + # ignore the timestamps if they were passed in. For example, this + # happens if code calls update_port with modified results of get_port + new_dict.pop('created_at', None) + new_dict.pop('updated_at', None) + super(HasStandardAttributes, self).update(new_dict) + + @declarative.declared_attr + def revision_number(cls): + return association_proxy('standard_attr', 'revision_number') + + def bump_revision(self): + # SQLAlchemy will bump the version for us automatically if the + # standard attr record is being modified, but we must call this + # for all other modifications or when relevant children are being + # modified (e.g. 
fixed_ips change should bump port revision) + if self.standard_attr.revision_number is None: + # this is a brand new object uncommited so we don't bump now + return + self.standard_attr.revision_number += 1 diff --git a/neutron/db/tag_db.py b/neutron/db/tag_db.py index fff63100349..c7480d51eba 100644 --- a/neutron/db/tag_db.py +++ b/neutron/db/tag_db.py @@ -17,12 +17,13 @@ from sqlalchemy import orm from sqlalchemy.orm import aliased from neutron.db import model_base +from neutron.db import standard_attr class Tag(model_base.BASEV2): standard_attr_id = sa.Column( sa.BigInteger().with_variant(sa.Integer(), 'sqlite'), - sa.ForeignKey(model_base.StandardAttribute.id, ondelete="CASCADE"), + sa.ForeignKey(standard_attr.StandardAttribute.id, ondelete="CASCADE"), nullable=False, primary_key=True) tag = sa.Column(sa.String(60), nullable=False, primary_key=True) standard_attr = orm.relationship( diff --git a/neutron/objects/base.py b/neutron/objects/base.py index 485243edc5b..667c59d7759 100644 --- a/neutron/objects/base.py +++ b/neutron/objects/base.py @@ -24,6 +24,7 @@ import six from neutron._i18n import _ from neutron.db import api as db_api from neutron.db import model_base +from neutron.db import standard_attr from neutron.objects.db import api as obj_db_api from neutron.objects.extensions import standardattributes @@ -307,7 +308,8 @@ class NeutronDbObject(NeutronObject): @classmethod def has_standard_attributes(cls): return bool(cls.db_model and - issubclass(cls.db_model, model_base.HasStandardAttributes)) + issubclass(cls.db_model, + standard_attr.HasStandardAttributes)) @classmethod def modify_fields_to_db(cls, fields): diff --git a/neutron/services/auto_allocate/db.py b/neutron/services/auto_allocate/db.py index b1a1e13c920..4eddd2e7e1a 100644 --- a/neutron/services/auto_allocate/db.py +++ b/neutron/services/auto_allocate/db.py @@ -30,8 +30,8 @@ from neutron.db import api as db_api from neutron.db import common_db_mixin from neutron.db import db_base_plugin_v2 from neutron.db import external_net_db -from neutron.db import model_base from neutron.db import models_v2 +from neutron.db import standard_attr from neutron.extensions import l3 from neutron import manager from neutron.plugins.common import constants @@ -227,8 +227,8 @@ class AutoAllocatedTopologyMixin(common_db_mixin.CommonDbMixin): external_net_db.ExternalNetwork). filter_by(is_default=sql.true()). join(models_v2.Network). - join(model_base.StandardAttribute). - order_by(model_base.StandardAttribute.id).all()) + join(standard_attr.StandardAttribute). 
+ order_by(standard_attr.StandardAttribute.id).all()) if not default_external_networks: LOG.error(_LE("Unable to find default external network " diff --git a/neutron/services/revisions/revision_plugin.py b/neutron/services/revisions/revision_plugin.py index dc6b04e624b..2016c26e92b 100644 --- a/neutron/services/revisions/revision_plugin.py +++ b/neutron/services/revisions/revision_plugin.py @@ -18,7 +18,7 @@ from sqlalchemy.orm import session as se from neutron._i18n import _, _LW from neutron.db import db_base_plugin_v2 -from neutron.db import model_base +from neutron.db import standard_attr from neutron.extensions import revisions from neutron.services import service_base @@ -40,7 +40,7 @@ class RevisionPlugin(service_base.ServicePluginBase): def bump_revisions(self, session, context, instances): # bump revision number for any updated objects in the session for obj in session.dirty: - if isinstance(obj, model_base.HasStandardAttributes): + if isinstance(obj, standard_attr.HasStandardAttributes): obj.bump_revision() # see if any created/updated/deleted objects bump the revision diff --git a/neutron/services/timestamp/timestamp_db.py b/neutron/services/timestamp/timestamp_db.py index 08be8ca1acf..ce7659e41c3 100644 --- a/neutron/services/timestamp/timestamp_db.py +++ b/neutron/services/timestamp/timestamp_db.py @@ -23,7 +23,7 @@ from sqlalchemy import exc as sql_exc from sqlalchemy.orm import session as se from neutron._i18n import _LW -from neutron.db import model_base +from neutron.db import standard_attr LOG = log.getLogger(__name__) @@ -58,10 +58,10 @@ class TimeStamp_db_mixin(object): changed_since = (timeutils. normalize_time(changed_since_string)) target_model_class = list(query._mapper_adapter_map.keys())[0] - query = query.join(model_base.StandardAttribute, + query = query.join(standard_attr.StandardAttribute, target_model_class.standard_attr_id == - model_base.StandardAttribute.id).filter( - model_base.StandardAttribute.updated_at + standard_attr.StandardAttribute.id).filter( + standard_attr.StandardAttribute.updated_at >= changed_since) return query @@ -70,17 +70,17 @@ class TimeStamp_db_mixin(object): while objs_list: obj = objs_list.pop() - if (isinstance(obj, model_base.HasStandardAttributes) + if (isinstance(obj, standard_attr.HasStandardAttributes) and obj.standard_attr_id): obj.updated_at = timeutils.utcnow() def register_db_events(self): - event.listen(model_base.StandardAttribute, 'before_insert', + event.listen(standard_attr.StandardAttribute, 'before_insert', self._add_timestamp) event.listen(se.Session, 'before_flush', self.update_timestamp) def unregister_db_events(self): - self._unregister_db_event(model_base.StandardAttribute, + self._unregister_db_event(standard_attr.StandardAttribute, 'before_insert', self._add_timestamp) self._unregister_db_event(se.Session, 'before_flush', self.update_timestamp) diff --git a/neutron/services/trunk/models.py b/neutron/services/trunk/models.py index 6f8bdaaa905..8ed2d74b9ba 100644 --- a/neutron/services/trunk/models.py +++ b/neutron/services/trunk/models.py @@ -19,10 +19,11 @@ from sqlalchemy import sql from neutron.api.v2 import attributes from neutron.db import model_base from neutron.db import models_v2 +from neutron.db import standard_attr from neutron.services.trunk import constants -class Trunk(model_base.HasStandardAttributes, model_base.BASEV2, +class Trunk(standard_attr.HasStandardAttributes, model_base.BASEV2, model_base.HasId, model_base.HasProject): admin_state_up = sa.Column( diff --git 
a/neutron/tests/unit/db/test_db_base_plugin_v2.py b/neutron/tests/unit/db/test_db_base_plugin_v2.py index 654e466a9d4..0a57513e652 100644 --- a/neutron/tests/unit/db/test_db_base_plugin_v2.py +++ b/neutron/tests/unit/db/test_db_base_plugin_v2.py @@ -52,6 +52,7 @@ from neutron.db import ipam_backend_mixin from neutron.db import l3_db from neutron.db.models import securitygroup as sg_models from neutron.db import models_v2 +from neutron.db import standard_attr from neutron import manager from neutron.tests import base from neutron.tests import tools @@ -6055,8 +6056,8 @@ class DbModelMixin(object): def _get_neutron_attr(self, ctx, attr_id): return ctx.session.query( - models_v2.model_base.StandardAttribute).filter( - models_v2.model_base.StandardAttribute.id == attr_id).one() + standard_attr.StandardAttribute).filter( + standard_attr.StandardAttribute.id == attr_id).one() def _test_standardattr_removed_on_obj_delete(self, ctx, obj): attr_id = obj.standard_attr_id diff --git a/neutron/tests/unit/objects/extensions/test_standardattributes.py b/neutron/tests/unit/objects/extensions/test_standardattributes.py index 3db8895c7bd..ecb6eee2db6 100644 --- a/neutron/tests/unit/objects/extensions/test_standardattributes.py +++ b/neutron/tests/unit/objects/extensions/test_standardattributes.py @@ -17,13 +17,14 @@ from oslo_versionedobjects import fields as obj_fields import sqlalchemy as sa from neutron.db import model_base +from neutron.db import standard_attr from neutron.objects import base as objects_base from neutron.tests.unit.objects import test_base from neutron.tests.unit import testlib_api class FakeDbModelWithStandardAttributes( - model_base.HasStandardAttributes, model_base.BASEV2): + standard_attr.HasStandardAttributes, model_base.BASEV2): id = sa.Column(sa.String(36), primary_key=True, nullable=False) item = sa.Column(sa.String(64))
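
For reference, adopting the relocated mixin works the same way as before; only the import path changes (neutron.db.model_base -> neutron.db.standard_attr), as the updated db_layer.rst hunk notes. The minimal sketch below is not part of this patch: the Widget model and the 'widgets' table name are hypothetical, used only to illustrate how a model inherits HasStandardAttributes from the new module.

    import sqlalchemy as sa

    from neutron.db import model_base
    from neutron.db import standard_attr


    class Widget(standard_attr.HasStandardAttributes, model_base.BASEV2,
                 model_base.HasId, model_base.HasProject):
        """Hypothetical resource that gains description, timestamps and
        revision_number through the standard attribute table.
        """

        __tablename__ = 'widgets'

        name = sa.Column(sa.String(255))


    # HasStandardAttributes.__init__ pops 'description', 'created_at',
    # 'updated_at' and 'revision_number' from kwargs and creates the related
    # StandardAttribute row with resource_type=self.__tablename__, so the
    # proxied attributes are available directly on the instance:
    widget = Widget(name='example', description='demo resource')
    widget.bump_revision()  # no-op until the row has a committed revision

Because the bodies of StandardAttribute and HasStandardAttributes are moved verbatim into neutron/db/standard_attr.py, existing models and plugins only need their imports updated, which is what the remaining hunks in this diff do.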