shim standard_attr db for neutron-lib
This patch shims the attributes of the neutron.db.standard_attr module to reference neutron-lib's implementation to allow consumers to move over to lib's standard_attr at their leisure. Once all consumers are moved over we will remove these neutron shims. Change-Id: Ie3f55432a772b3e51a45252fa57fe5c9fa9cbe40
This commit is contained in:
parent
6d9283c1bc
commit
a962b20ba9
|
@@ -12,247 +12,12 @@
|
||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from neutron_lib.db import constants as db_const
|
from neutron_lib.db import standard_attr
|
||||||
from neutron_lib.db import model_base
|
|
||||||
from neutron_lib.db import sqlalchemytypes
|
|
||||||
from oslo_utils import timeutils
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy import event # noqa
|
|
||||||
from sqlalchemy.ext.associationproxy import association_proxy
|
|
||||||
from sqlalchemy.ext import declarative
|
|
||||||
from sqlalchemy.orm import attributes
|
|
||||||
from sqlalchemy.orm import session as se
|
|
||||||
|
|
||||||
from neutron._i18n import _
|
|
||||||
|
|
||||||
|
|
||||||
class StandardAttribute(model_base.BASEV2):
    """Common table to associate all Neutron API resources.

    By having Neutron objects related to this table, we can associate new
    tables that apply to many Neutron objects (e.g. timestamps, rbac entries)
    to this table to avoid schema duplication while maintaining referential
    integrity.

    NOTE(kevinbenton): This table should not have more columns added to it
    unless we are absolutely certain the new column will have a value for
    every single type of Neutron resource. Otherwise this table will be filled
    with NULL entries for combinations that don't make sense. Additionally,
    by keeping this table small we can ensure that performance isn't adversely
    impacted for queries on objects.
    """

    # sqlite doesn't support auto increment on big integers so we use big int
    # for everything but sqlite
    id = sa.Column(sa.BigInteger().with_variant(sa.Integer(), 'sqlite'),
                   primary_key=True, autoincrement=True)

    # NOTE(kevinbenton): this column is redundant information, but it allows
    # operators/devs to look at the contents of this table and know which table
    # the corresponding object is in.
    # 255 was selected as a max just because it's the varchar ceiling in mysql
    # before a 2-byte prefix is required. We shouldn't get anywhere near this
    # limit with our table names...
    resource_type = sa.Column(sa.String(255), nullable=False)
    description = sa.Column(sa.String(db_const.DESCRIPTION_FIELD_SIZE))

    # Monotonic per-row version counter; see __mapper_args__ below for how
    # SQLAlchemy uses it for optimistic-concurrency version checks.
    revision_number = sa.Column(
        sa.BigInteger().with_variant(sa.Integer(), 'sqlite'),
        server_default='0', nullable=False)
    created_at = sa.Column(sqlalchemytypes.TruncatedDateTime,
                           default=timeutils.utcnow)
    updated_at = sa.Column(sqlalchemytypes.TruncatedDateTime,
                           onupdate=timeutils.utcnow)

    __mapper_args__ = {
        # see http://docs.sqlalchemy.org/en/latest/orm/versioning.html for
        # details about how this works
        "version_id_col": revision_number,
        "version_id_generator": False  # revision plugin increments manually
    }

    def bump_revision(self):
        """Increment revision_number for an already-persisted row.

        A brand new, uncommitted object still has revision_number None;
        in that case there is nothing to bump yet, so this is a no-op.
        """
        if self.revision_number is None:
            # this is a brand new object uncommitted so we don't bump now
            return
        self.revision_number += 1

    def _set_updated_revision_number(self, revision_number, updated_at):
        """Install revision_number/updated_at as committed attribute values.

        set_committed_value records the values as if they were loaded from
        the database, so the attributes are not flagged dirty for flush.
        """
        attributes.set_committed_value(
            self, "revision_number", revision_number)
        attributes.set_committed_value(
            self, "updated_at", updated_at)

    @property
    def _effective_standard_attribute_id(self):
        """Return this row's own id (mirrors the HasStandardAttributes API)."""
        return self.id
|
|
||||||
class HasStandardAttributes(object):
    """Mixin that links a model to a row in the standardattributes table.

    Subclasses get a foreign key to StandardAttribute plus association
    proxies (description/created_at/updated_at/revision_number) so the
    shared columns read and write through the related row.
    """

    @classmethod
    def get_api_collections(cls):
        """Define the API collection this object will appear under.

        This should return a list of API collections that the object
        will be exposed under. Most should be exposed in just one
        collection (e.g. the network model is just exposed under
        'networks').

        This is used by the standard attr extensions to discover which
        resources need to be extended with the standard attr fields
        (e.g. created_at/updated_at/etc).
        """
        # NOTE(kevinbenton): can't use abc because the metaclass conflicts
        # with the declarative base others inherit from.
        if hasattr(cls, 'api_collections'):
            return cls.api_collections
        raise NotImplementedError(_("%s must define api_collections") % cls)

    @classmethod
    def get_api_sub_resources(cls):
        """Define the API sub-resources this object will appear under.

        This should return a list of API sub-resources that the object
        will be exposed under.

        This is used by the standard attr extensions to discover which
        sub-resources need to be extended with the standard attr fields
        (e.g. created_at/updated_at/etc).
        """
        try:
            return cls.api_sub_resources
        except AttributeError:
            return []

    @classmethod
    def get_collection_resource_map(cls):
        """Return the collection-name to member-name map for the model.

        Raises NotImplementedError when the subclass does not define
        collection_resource_map.
        """
        try:
            return cls.collection_resource_map
        except AttributeError:
            raise NotImplementedError(_("%s must define "
                                        "collection_resource_map") % cls)

    @classmethod
    def validate_tag_support(cls):
        """Return True only when the subclass opted in via tag_support."""
        return getattr(cls, 'tag_support', False)

    @declarative.declared_attr
    def standard_attr_id(cls):
        # FK to the shared standardattributes row; CASCADE so deleting the
        # resource removes its standard attribute record as well.
        return sa.Column(
            sa.BigInteger().with_variant(sa.Integer(), 'sqlite'),
            sa.ForeignKey(StandardAttribute.id, ondelete="CASCADE"),
            unique=True,
            nullable=False
        )

    # NOTE(kevinbenton): we have to disable the following pylint check because
    # it thinks we are overriding this method in the __init__ method.
    # pylint: disable=method-hidden
    @declarative.declared_attr
    def standard_attr(cls):
        # Eagerly-joined one-to-one relationship; delete-orphan keeps the
        # StandardAttribute row's lifetime tied to this object.
        return sa.orm.relationship(StandardAttribute,
                                   lazy='joined',
                                   cascade='all, delete-orphan',
                                   single_parent=True,
                                   uselist=False)

    @property
    def _effective_standard_attribute_id(self):
        """Return the id of the related standardattributes row."""
        return self.standard_attr_id

    def __init__(self, *args, **kwargs):
        """Split out standard-attr kwargs and create the related row."""
        standard_attr_keys = ['description', 'created_at',
                              'updated_at', 'revision_number']
        standard_attr_kwargs = {}
        for key in standard_attr_keys:
            if key in kwargs:
                standard_attr_kwargs[key] = kwargs.pop(key)
        super(HasStandardAttributes, self).__init__(*args, **kwargs)
        # here we automatically create the related standard attribute object
        self.standard_attr = StandardAttribute(
            resource_type=self.__tablename__, **standard_attr_kwargs)

    @declarative.declared_attr
    def description(cls):
        # proxied through to the related StandardAttribute row
        return association_proxy('standard_attr', 'description')

    @declarative.declared_attr
    def created_at(cls):
        return association_proxy('standard_attr', 'created_at')

    @declarative.declared_attr
    def updated_at(cls):
        return association_proxy('standard_attr', 'updated_at')

    def update(self, new_dict):
        """Apply new_dict to the object, dropping caller-supplied timestamps."""
        # ignore the timestamps if they were passed in. For example, this
        # happens if code calls update_port with modified results of get_port
        new_dict.pop('created_at', None)
        new_dict.pop('updated_at', None)
        super(HasStandardAttributes, self).update(new_dict)

    @declarative.declared_attr
    def revision_number(cls):
        return association_proxy('standard_attr', 'revision_number')

    def bump_revision(self):
        """Manually bump the revision of the related standard attr row."""
        # SQLAlchemy will bump the version for us automatically if the
        # standard attr record is being modified, but we must call this
        # for all other modifications or when relevant children are being
        # modified (e.g. fixed_ips change should bump port revision)
        self.standard_attr.bump_revision()

    def _set_updated_revision_number(self, revision_number, updated_at):
        """Forward committed revision/updated_at values to the related row."""
        self.standard_attr._set_updated_revision_number(
            revision_number, updated_at)
|
|
||||||
def _resource_model_map_helper(rs_map, resource, subclass):
    """Register subclass under resource in rs_map, rejecting duplicates.

    Raises RuntimeError when two models try to claim the same API
    resource name.
    """
    if resource in rs_map:
        msg = _("Model %(sub)s tried to register for API "
                "resource %(res)s which conflicts with model "
                "%(other)s.") % {'sub': subclass,
                                 'res': resource,
                                 'other': rs_map[resource]}
        raise RuntimeError(msg)
    rs_map[resource] = subclass
|
||||||
|
|
||||||
def get_standard_attr_resource_model_map(include_resources=True,
                                         include_sub_resources=True):
    """Build a map of API resource name to model class.

    Walks every direct subclass of HasStandardAttributes and collects
    its collections and/or sub-resources, raising on duplicate names.
    """
    rs_map = {}
    for subclass in HasStandardAttributes.__subclasses__():
        names = []
        if include_resources:
            names.extend(subclass.get_api_collections())
        if include_sub_resources:
            names.extend(subclass.get_api_sub_resources())
        for name in names:
            _resource_model_map_helper(rs_map, name, subclass)
    return rs_map
|
||||||
|
|
||||||
def get_tag_resource_parent_map():
    """Map collection names to member names for tag-supporting models.

    Only subclasses whose validate_tag_support() returns True are
    included; a duplicate collection name raises RuntimeError.
    """
    parent_map = {}
    for subclass in HasStandardAttributes.__subclasses__():
        if not subclass.validate_tag_support():
            continue
        resource_map = subclass.get_collection_resource_map()
        for collection, resource in resource_map.items():
            if collection in parent_map:
                raise RuntimeError(
                    _("API parent %(collection)s/%(resource)s for "
                      "model %(subclass)s is already registered.") %
                    dict(collection=collection, resource=resource,
                         subclass=subclass))
            parent_map[collection] = resource
    return parent_map
|
||||||
|
|
||||||
@event.listens_for(se.Session, 'after_bulk_delete')
def throw_exception_on_bulk_delete_of_listened_for_objects(delete_context):
    """Reject bulk deletes of models that bump revisions via ORM events.

    Bulk deletes bypass per-object SQLAlchemy event handlers, so models
    marked with 'revises_on_change' must be deleted one at a time.
    """
    target = delete_context.mapper.class_
    if not hasattr(target, 'revises_on_change'):
        return
    raise RuntimeError(_("%s may not be deleted in bulk because it "
                         "bumps the revision of other resources via "
                         "SQLAlchemy event handlers, which are not "
                         "compatible with bulk deletes.") % target)
|
|
@@ -1,152 +0,0 @@
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
||||||
# implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import gc
|
|
||||||
|
|
||||||
from neutron_lib import context
|
|
||||||
from sqlalchemy.ext import declarative
|
|
||||||
import testtools
|
|
||||||
|
|
||||||
from neutron.db import standard_attr
|
|
||||||
from neutron.tests import base
|
|
||||||
from neutron.tests.unit import testlib_api
|
|
||||||
|
|
||||||
|
|
||||||
class StandardAttrTestCase(base.BaseTestCase):
    """Unit tests for the standard_attr resource/parent map helpers."""

    def setUp(self):
        super(StandardAttrTestCase, self).setUp()
        # The test methods define throwaway HasStandardAttributes
        # subclasses; collecting garbage afterwards keeps them from
        # leaking into other tests via __subclasses__().
        self.addCleanup(gc.collect)

    def _make_decl_base(self):
        # construct a new base so we don't interfere with the main
        # base used in the sql test fixtures
        return declarative.declarative_base(
            cls=standard_attr.model_base.NeutronBaseV2)

    def test_standard_attr_resource_model_map(self):
        """Verify resource/sub-resource registration and duplicate rejection."""
        rs_map = standard_attr.get_standard_attr_resource_model_map()
        base = self._make_decl_base()

        class MyModel(standard_attr.HasStandardAttributes,
                      standard_attr.model_base.HasId,
                      base):
            api_collections = ['my_resource', 'my_resource2']
            api_sub_resources = ['my_subresource']

        # the new subclass should now show up under all declared names
        rs_map = standard_attr.get_standard_attr_resource_model_map()
        self.assertEqual(MyModel, rs_map['my_resource'])
        self.assertEqual(MyModel, rs_map['my_resource2'])
        self.assertEqual(MyModel, rs_map['my_subresource'])

        # sub-resources only
        sub_rs_map = standard_attr.get_standard_attr_resource_model_map(
            include_resources=False,
            include_sub_resources=True)
        self.assertNotIn('my_resource', sub_rs_map)
        self.assertNotIn('my_resource2', sub_rs_map)
        self.assertEqual(MyModel, sub_rs_map['my_subresource'])

        # resources only
        nosub_rs_map = standard_attr.get_standard_attr_resource_model_map(
            include_resources=True,
            include_sub_resources=False)
        self.assertEqual(MyModel, nosub_rs_map['my_resource'])
        self.assertEqual(MyModel, nosub_rs_map['my_resource2'])
        self.assertNotIn('my_subresource', nosub_rs_map)

        # a second model claiming an already-registered resource must fail
        class Dup(standard_attr.HasStandardAttributes,
                  standard_attr.model_base.HasId,
                  base):
            api_collections = ['my_resource']

        with testtools.ExpectedException(RuntimeError):
            standard_attr.get_standard_attr_resource_model_map()

    def test_standard_attr_resource_parent_map(self):
        """Verify tag parent map honors tag_support and rejects duplicates."""
        base = self._make_decl_base()

        class TagSupportModel(standard_attr.HasStandardAttributes,
                              standard_attr.model_base.HasId,
                              base):
            collection_resource_map = {'collection_name': 'member_name'}
            tag_support = True

        class TagUnsupportModel(standard_attr.HasStandardAttributes,
                                standard_attr.model_base.HasId,
                                base):
            collection_resource_map = {'collection_name2': 'member_name2'}
            tag_support = False

        # tag_support defaults to False when not declared
        class TagUnsupportModel2(standard_attr.HasStandardAttributes,
                                 standard_attr.model_base.HasId,
                                 base):
            collection_resource_map = {'collection_name3': 'member_name3'}

        parent_map = standard_attr.get_tag_resource_parent_map()
        self.assertEqual('member_name', parent_map['collection_name'])
        self.assertNotIn('collection_name2', parent_map)
        self.assertNotIn('collection_name3', parent_map)

        # duplicate collection registration must raise
        class DupTagSupportModel(standard_attr.HasStandardAttributes,
                                 standard_attr.model_base.HasId,
                                 base):
            collection_resource_map = {'collection_name': 'member_name'}
            tag_support = True

        with testtools.ExpectedException(RuntimeError):
            standard_attr.get_tag_resource_parent_map()
|
||||||
|
|
||||||
class StandardAttrAPIImapctTestCase(testlib_api.SqlTestCase):
    """Test case to determine if a resource has had new fields exposed."""
    # NOTE(review): "Imapct" in the class name is a long-standing typo;
    # left as-is since renaming would change the public test identifier.

    def test_api_collections_are_expected(self):
        # NOTE to reviewers. If this test is being modified, it means the
        # resources being extended by standard attr extensions have changed.
        # Ensure that the patch has made this discoverable to API users.
        # This means a new extension for a new resource or a new extension
        # indicating that an existing resource now has standard attributes.
        # Ensure devref list of resources is updated at
        # doc/source/devref/api_extensions.rst
        expected = ['subnets', 'trunks', 'routers', 'segments',
                    'security_group_rules', 'networks', 'policies',
                    'subnetpools', 'ports', 'security_groups', 'floatingips',
                    'logs', 'network_segment_ranges']
        self.assertEqual(
            set(expected),
            set(standard_attr.get_standard_attr_resource_model_map().keys())
        )

    def test_api_tag_support_is_expected(self):
        # NOTE: If this test is being modified, it means the resources for tag
        # support are extended. It changes tag support API. The API change
        # should be exposed in release note for API users. And also it should
        # be list as other tag support resources in doc/source/devref/tag.rst
        expected = ['subnets', 'trunks', 'routers', 'networks', 'policies',
                    'subnetpools', 'ports', 'security_groups', 'floatingips',
                    'network_segment_ranges']
        self.assertEqual(
            set(expected),
            set(standard_attr.get_tag_resource_parent_map().keys())
        )
|
||||||
|
|
||||||
class StandardAttrRevisesBulkDeleteTestCase(testlib_api.SqlTestCase):
    """Checks the after_bulk_delete guard for revision-bumping models."""

    def test_bulk_delete_protection(self):
        # security group rules increment security groups so they must not be
        # allowed to be deleted in bulk
        mm = standard_attr.get_standard_attr_resource_model_map()
        sg_rule_model = mm['security_group_rules']
        with testtools.ExpectedException(RuntimeError):
            ctx = context.get_admin_context()
            # a bulk Query.delete() bypasses per-object ORM events, so the
            # session-level hook is expected to raise here
            ctx.session.query(sg_rule_model).delete()
|
|
Loading…
Reference in New Issue