Migrate to SQLAlchemy Core

SQLAlchemy ORM is providing no advantages to us, while at the same time
contributing towards the difficulty in implementing complex queries. This
is *not* a SQLAlchemy ORM issue, it's simply our use of the ORM.

Rather than fixing our use of the ORM to be more intelligent, we should
simply drop down to SQLA Core, as the ORM provides no direct benefits.

The PowerDNS backend should also be updated to match, as a separate
review.

Change-Id: Ife5f5740821a23ff119c27c1c22cba3eca837f1b
This commit is contained in:
Kiall Mac Innes 2014-07-26 10:27:11 -07:00
parent 4fcf8cf080
commit de399b7dcf
14 changed files with 821 additions and 751 deletions

View File

@ -131,7 +131,7 @@ class Service(service.Service):
if self.check_for_tlds:
try:
self.storage.find_tld(context, {'name': domain_labels[-1]})
except exceptions.TLDNotFound:
except exceptions.TldNotFound:
raise exceptions.InvalidDomainName('Invalid TLD')
# Now check that the domain name is not the same as a TLD
@ -140,7 +140,7 @@ class Service(service.Service):
self.storage.find_tld(
context,
{'name': stripped_domain_name})
except exceptions.TLDNotFound:
except exceptions.TldNotFound:
pass
else:
raise exceptions.InvalidDomainName(

View File

@ -187,7 +187,7 @@ class DuplicateDomain(Duplicate):
error_type = 'duplicate_domain'
class DuplicateTLD(Duplicate):
class DuplicateTld(Duplicate):
error_type = 'duplicate_tld'
@ -235,7 +235,7 @@ class DomainNotFound(NotFound):
error_type = 'domain_not_found'
class TLDNotFound(NotFound):
class TldNotFound(NotFound):
error_type = 'tld_not_found'

View File

@ -49,8 +49,8 @@ class TLDCommands(base.Commands):
<Error> --> <Line causing the error>
<Error> can be one of the following:
DuplicateTLD - This occurs if the TLD is already present.
InvalidTLD - This occurs if the TLD does not conform to the TLD schema.
DuplicateTld - This occurs if the TLD is already present.
InvalidTld - This occurs if the TLD does not conform to the TLD schema.
InvalidDescription - This occurs if the description does not conform to
the description schema
InvalidLine - This occurs if the line contains more than 2 fields.
@ -81,7 +81,7 @@ class TLDCommands(base.Commands):
def _validate_and_create_tld(self, line, error_lines):
# validate the tld name
if not format.is_tldname(line['name']):
error_lines.append("InvalidTLD --> " +
error_lines.append("InvalidTld --> " +
self._convert_tld_dict_to_str(line))
return 0
# validate the description if there is one
@ -94,8 +94,8 @@ class TLDCommands(base.Commands):
try:
self.central_api.create_tld(self.context, values=line)
return 1
except exceptions.DuplicateTLD:
error_lines.append("DuplicateTLD --> " +
except exceptions.DuplicateTld:
error_lines.append("DuplicateTld --> " +
self._convert_tld_dict_to_str(line))
return 0

View File

@ -367,3 +367,12 @@ class PersistentObjectMixin(object):
This adds the fields that we use in common for all persistent objects.
"""
FIELDS = ['id', 'created_at', 'updated_at', 'version']
class SoftDeleteObjectMixin(object):
    """Mixin providing the common fields for soft-deletable objects.

    Objects mixing this in gain the ``deleted`` and ``deleted_at``
    fields, allowing a row to be flagged as removed rather than being
    physically deleted.
    """
    FIELDS = ['deleted', 'deleted_at']

View File

@ -15,8 +15,8 @@
from designate.objects import base
class Domain(base.DictObjectMixin, base.PersistentObjectMixin,
base.DesignateObject):
class Domain(base.DictObjectMixin, base.SoftDeleteObjectMixin,
base.PersistentObjectMixin, base.DesignateObject):
FIELDS = ['tenant_id', 'name', 'email', 'ttl', 'refresh', 'retry',
'expire', 'minimum', 'parent_domain_id', 'serial', 'description',
'status']

View File

@ -17,6 +17,8 @@ from designate.objects import base
class Record(base.DictObjectMixin, base.PersistentObjectMixin,
base.DesignateObject):
# TODO(kiall): `hash` is an implementation detail of our SQLA driver,
# so we should remove it.
FIELDS = ['data', 'priority', 'domain_id', 'managed',
'managed_resource_type', 'managed_resource_id',
'managed_plugin_name', 'managed_plugin_type', 'hash',

View File

@ -0,0 +1,96 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2010-2011 OpenStack Foundation.
# Copyright 2012 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import sqlalchemy
from oslo.db.sqlalchemy import utils
from designate.i18n import _
from designate.i18n import _LW
LOG = logging.getLogger(__name__)
# Copied from oslo/db/sqlalchemy/utils.py
def paginate_query(query, table, limit, sort_keys, marker=None,
                   sort_dir=None, sort_dirs=None):
    """Apply sorting and marker-based pagination to a SQLAlchemy Core query.

    Pagination works by ordering on ``sort_keys`` and filtering for rows
    that come strictly after the ``marker`` row in that order.

    :param query: SQLAlchemy Core select to extend.
    :param table: Table whose columns are named by ``sort_keys``.
    :param limit: Maximum number of rows, or None for no limit.
    :param sort_keys: List of column names to sort by; should contain a
        unique column (a warning is logged otherwise).
    :param marker: Mapping (keyed by sort key) describing the last row of
        the previous page, or None for the first page.
    :param sort_dir: Single direction ('asc' or 'desc') applied to every
        sort key; mutually exclusive with ``sort_dirs``.
    :param sort_dirs: Per-key list of directions, parallel to ``sort_keys``.
    :returns: The query with ORDER BY, marker WHERE criteria and LIMIT
        applied.
    :raises ValueError: If a sort direction is not 'asc' or 'desc'.
    :raises utils.InvalidSortKey: If a sort key is not a column of ``table``.
    """
    if 'id' not in sort_keys:
        # TODO(justinsb): If this ever gives a false-positive, check
        # the actual primary key, rather than assuming its id
        LOG.warning(_LW('Id not in sort_keys; is sort_keys unique?'))

    # sort_dir and sort_dirs are mutually exclusive ways to spell the
    # same thing.
    assert(not (sort_dir and sort_dirs))

    # Default the sort direction to ascending
    if sort_dirs is None and sort_dir is None:
        sort_dir = 'asc'

    # Ensure a per-column sort direction
    if sort_dirs is None:
        sort_dirs = [sort_dir for _sort_key in sort_keys]

    assert(len(sort_dirs) == len(sort_keys))

    # Add sorting
    for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs):
        try:
            sort_dir_func = {
                'asc': sqlalchemy.asc,
                'desc': sqlalchemy.desc,
            }[current_sort_dir]
        except KeyError:
            raise ValueError(_("Unknown sort direction, "
                               "must be 'desc' or 'asc'"))

        try:
            sort_key_attr = getattr(table.c, current_sort_key)
        except AttributeError:
            # The caller asked to sort by a column the table doesn't have.
            raise utils.InvalidSortKey()

        query = query.order_by(sort_dir_func(sort_key_attr))

    # Add pagination
    if marker is not None:
        marker_values = []
        for sort_key in sort_keys:
            v = marker[sort_key]
            marker_values.append(v)

        # Build up an array of sort criteria as in the docstring: for
        # criterion i, the row ties with the marker on sort keys 0..i-1
        # and comes strictly after it on sort key i.
        criteria_list = []
        for i in range(len(sort_keys)):
            crit_attrs = []
            for j in range(i):
                table_attr = getattr(table.c, sort_keys[j])
                crit_attrs.append((table_attr == marker_values[j]))

            table_attr = getattr(table.c, sort_keys[i])
            if sort_dirs[i] == 'desc':
                crit_attrs.append((table_attr < marker_values[i]))
            else:
                crit_attrs.append((table_attr > marker_values[i]))

            criteria = sqlalchemy.sql.and_(*crit_attrs)
            criteria_list.append(criteria)

        # Any one criterion matching means the row is past the marker.
        f = sqlalchemy.sql.or_(*criteria_list)
        query = query.where(f)

    if limit is not None:
        query = query.limit(limit)

    return query

View File

@ -28,12 +28,12 @@ class Storage(DriverPlugin):
__plugin_type__ = 'storage'
@abc.abstractmethod
def create_quota(self, context, values):
def create_quota(self, context, quota):
"""
Create a Quota.
:param context: RPC Context.
:param values: Values to create the new Quota from.
:param quota: Quota object with the values to be created.
"""
@abc.abstractmethod

File diff suppressed because it is too large Load Diff

View File

@ -1,197 +0,0 @@
# Copyright 2012 Hewlett-Packard Development Company, L.P.
# Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <kiall@managedit.ie>
# Modified: Patrick Galbraith <patg@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import hashlib
from oslo.config import cfg
from oslo.db.sqlalchemy import models as oslo_models
from sqlalchemy import (Column, String, Text, Integer, ForeignKey,
Enum, Boolean, Unicode, UniqueConstraint, event)
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base
from designate.openstack.common import timeutils
from designate.sqlalchemy.types import UUID
from designate.sqlalchemy import models
from designate import utils
CONF = cfg.CONF
RESOURCE_STATUSES = ['ACTIVE', 'PENDING', 'DELETED']
RECORD_TYPES = ['A', 'AAAA', 'CNAME', 'MX', 'SRV', 'TXT', 'SPF', 'NS', 'PTR',
'SSHFP']
TSIG_ALGORITHMS = ['hmac-md5', 'hmac-sha1', 'hmac-sha224', 'hmac-sha256',
'hmac-sha384', 'hmac-sha512']
class Base(models.Base, oslo_models.TimestampMixin):
    """Common declarative base: UUID primary key plus a version counter."""
    id = Column(UUID, default=utils.generate_uuid, primary_key=True)

    # `version` is mapped as SQLAlchemy's version_id_col, giving
    # optimistic-concurrency protection on updates.
    version = Column(Integer, default=1, nullable=False)

    __mapper_args__ = {
        'version_id_col': version
    }

    __table_args__ = {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}

# Rebind Base to a declarative base built from the mixin above; all
# model classes below inherit the id/version/timestamp columns from it.
Base = declarative_base(cls=Base)
class Quota(Base):
    """Per-tenant limit for a single resource; (tenant_id, resource) is unique."""
    __tablename__ = 'quotas'
    __table_args__ = (
        UniqueConstraint('tenant_id', 'resource', name='unique_quota'),
        {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    )

    tenant_id = Column(String(36), default=None, nullable=True)
    resource = Column(String(32), nullable=False)
    hard_limit = Column(Integer(), nullable=False)
class Server(Base):
    """A server entry, identified by a globally unique name."""
    __tablename__ = 'servers'

    name = Column(String(255), nullable=False, unique=True)
class Tld(Base):
    """A TLD entry with a unique name and optional description."""
    __tablename__ = 'tlds'

    name = Column(String(255), nullable=False, unique=True)
    description = Column(Unicode(160), nullable=True)
class Domain(models.SoftDeleteMixin, Base):
    """A domain row; soft-deleted, so (name, deleted) is the unique pair."""
    __tablename__ = 'domains'
    __table_args__ = (
        UniqueConstraint('name', 'deleted', name='unique_domain_name'),
        {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    )

    tenant_id = Column(String(36), default=None, nullable=True)
    name = Column(String(255), nullable=False)
    email = Column(String(255), nullable=False)
    description = Column(Unicode(160), nullable=True)

    # Timer/serial fields defaulted from configuration (serial from the
    # current Unix timestamp).
    ttl = Column(Integer, default=CONF.default_ttl, nullable=False)
    serial = Column(Integer, default=timeutils.utcnow_ts, nullable=False)
    refresh = Column(Integer, default=CONF.default_soa_refresh, nullable=False)
    retry = Column(Integer, default=CONF.default_soa_retry, nullable=False)
    expire = Column(Integer, default=CONF.default_soa_expire, nullable=False)
    minimum = Column(Integer, default=CONF.default_soa_minimum, nullable=False)

    status = Column(Enum(name='resource_statuses', *RESOURCE_STATUSES),
                    nullable=False, server_default='ACTIVE',
                    default='ACTIVE')

    # Deleting a domain removes its recordsets; passive_deletes defers
    # the cascade to the database's ON DELETE handling.
    recordsets = relationship('RecordSet',
                              backref=backref('domain', uselist=False),
                              cascade="all, delete-orphan",
                              passive_deletes=True)

    # Optional self-referential link to a parent domain.
    parent_domain_id = Column(UUID, ForeignKey('domains.id'), default=None,
                              nullable=True)
class RecordSet(Base):
    """Group of records; (domain_id, name, type) is unique per set."""
    __tablename__ = 'recordsets'
    __table_args__ = (
        UniqueConstraint('domain_id', 'name', 'type', name='unique_recordset'),
        {'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8'}
    )

    tenant_id = Column(String(36), default=None, nullable=True)
    domain_id = Column(UUID, ForeignKey('domains.id', ondelete='CASCADE'),
                       nullable=False)
    name = Column(String(255), nullable=False)
    type = Column(Enum(name='record_types', *RECORD_TYPES), nullable=False)
    ttl = Column(Integer, default=None, nullable=True)
    description = Column(Unicode(160), nullable=True)

    # Deleting a recordset removes its records; the DB-level cascade is
    # relied upon (passive_deletes).
    records = relationship('Record',
                           backref=backref('recordset', uselist=False),
                           cascade="all, delete-orphan",
                           passive_deletes=True)
class Record(Base):
    """A single record; uniqueness is enforced via the md5 `hash` column."""
    __tablename__ = 'records'

    tenant_id = Column(String(36), default=None, nullable=True)
    domain_id = Column(UUID, ForeignKey('domains.id', ondelete='CASCADE'),
                       nullable=False)
    recordset_id = Column(UUID,
                          ForeignKey('recordsets.id', ondelete='CASCADE'),
                          nullable=False)

    data = Column(Text, nullable=False)
    priority = Column(Integer, default=None, nullable=True)
    description = Column(Unicode(160), nullable=True)

    # Unique md5 of (recordset_id, data, priority); kept up to date by
    # recalculate_hash() via the before_insert/before_update listeners.
    hash = Column(String(32), nullable=False, unique=True)

    # Bookkeeping for records managed on behalf of plugins/other services.
    managed = Column(Boolean, default=False)
    managed_extra = Column(Unicode(100), default=None, nullable=True)
    managed_plugin_type = Column(Unicode(50), default=None, nullable=True)
    managed_plugin_name = Column(Unicode(50), default=None, nullable=True)
    managed_resource_type = Column(Unicode(50), default=None, nullable=True)
    managed_resource_region = Column(Unicode(100), default=None, nullable=True)
    managed_resource_id = Column(UUID, default=None, nullable=True)
    managed_tenant_id = Column(Unicode(36), default=None, nullable=True)

    status = Column(Enum(name='resource_statuses', *RESOURCE_STATUSES),
                    nullable=False, server_default='ACTIVE',
                    default='ACTIVE')

    def recalculate_hash(self):
        """
        Calculates the hash of the record, used to ensure record uniqueness.
        """
        # NOTE(review): md5.update() is passed a str — fine on Python 2
        # (this code's era); Python 3 would require bytes here.
        md5 = hashlib.md5()
        md5.update("%s:%s:%s" % (self.recordset_id, self.data, self.priority))
        self.hash = md5.hexdigest()
@event.listens_for(Record, "before_insert")
def recalculate_record_hash_before_insert(mapper, connection, instance):
    # Refresh the record's unique hash before the row is first written.
    instance.recalculate_hash()


@event.listens_for(Record, "before_update")
def recalculate_record_hash_before_update(mapper, connection, instance):
    # Refresh the record's unique hash whenever the row is modified.
    instance.recalculate_hash()
class TsigKey(Base):
    """A TSIG key: unique name, algorithm from TSIG_ALGORITHMS, and secret."""
    __tablename__ = 'tsigkeys'

    name = Column(String(255), nullable=False, unique=True)
    algorithm = Column(Enum(name='tsig_algorithms', *TSIG_ALGORITHMS),
                       nullable=False)
    secret = Column(String(255), nullable=False)
class Blacklists(Base):
    """A blacklist entry: a unique pattern string plus optional description."""
    __tablename__ = 'blacklists'

    pattern = Column(String(255), nullable=False, unique=True)
    description = Column(Unicode(160), nullable=True)

View File

@ -0,0 +1,190 @@
# Copyright 2012-2014 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <kiall@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import (Table, MetaData, Column, String, Text, Integer, CHAR,
DateTime, Enum, Boolean, Unicode, UniqueConstraint,
ForeignKeyConstraint)
from oslo.config import cfg
from designate import utils
from designate.openstack.common import timeutils
from designate.sqlalchemy.types import UUID
CONF = cfg.CONF
# Enumerations shared by the table definitions below.
RESOURCE_STATUSES = ['ACTIVE', 'PENDING', 'DELETED']
RECORD_TYPES = ['A', 'AAAA', 'CNAME', 'MX', 'SRV', 'TXT', 'SPF', 'NS', 'PTR',
                'SSHFP']
TSIG_ALGORITHMS = ['hmac-md5', 'hmac-sha1', 'hmac-sha224', 'hmac-sha256',
                   'hmac-sha384', 'hmac-sha512']

metadata = MetaData()

# Every table repeats the same bookkeeping columns: a UUID `id` primary
# key, a `version` counter defaulting to 1, and created_at/updated_at
# timestamps (updated_at is set on UPDATE only).

# NOTE(review): the ORM Quota model carried
# UniqueConstraint('tenant_id', 'resource', name='unique_quota'); it is
# absent here — confirm that is intentional.
quotas = Table('quotas', metadata,
    Column('id', UUID, default=utils.generate_uuid, primary_key=True),
    Column('version', Integer(), default=1, nullable=False),
    Column('created_at', DateTime, default=lambda: timeutils.utcnow()),
    Column('updated_at', DateTime, onupdate=lambda: timeutils.utcnow()),

    Column('tenant_id', String(36), default=None, nullable=True),
    Column('resource', String(32), nullable=False),
    Column('hard_limit', Integer(), nullable=False),

    mysql_engine='InnoDB',
    mysql_charset='utf8',
)

servers = Table('servers', metadata,
    Column('id', UUID, default=utils.generate_uuid, primary_key=True),
    Column('version', Integer(), default=1, nullable=False),
    Column('created_at', DateTime, default=lambda: timeutils.utcnow()),
    Column('updated_at', DateTime, onupdate=lambda: timeutils.utcnow()),

    Column('name', String(255), nullable=False, unique=True),

    mysql_engine='InnoDB',
    mysql_charset='utf8',
)

tlds = Table('tlds', metadata,
    Column('id', UUID, default=utils.generate_uuid, primary_key=True),
    Column('version', Integer(), default=1, nullable=False),
    Column('created_at', DateTime, default=lambda: timeutils.utcnow()),
    Column('updated_at', DateTime, onupdate=lambda: timeutils.utcnow()),

    Column('name', String(255), nullable=False, unique=True),
    Column('description', Unicode(160), nullable=True),

    mysql_engine='InnoDB',
    mysql_charset='utf8',
)

# Domains are soft-deleted: `deleted` defaults to '0' and participates in
# the unique (name, deleted) constraint, so a deleted name can be reused.
domains = Table('domains', metadata,
    Column('id', UUID, default=utils.generate_uuid, primary_key=True),
    Column('version', Integer(), default=1, nullable=False),
    Column('created_at', DateTime, default=lambda: timeutils.utcnow()),
    Column('updated_at', DateTime, onupdate=lambda: timeutils.utcnow()),
    Column('deleted', CHAR(32), nullable=False, default='0',
           server_default='0'),
    Column('deleted_at', DateTime, nullable=True, default=None),

    Column('tenant_id', String(36), default=None, nullable=True),
    Column('name', String(255), nullable=False),
    Column('email', String(255), nullable=False),
    Column('description', Unicode(160), nullable=True),

    # Timer/serial fields defaulted from configuration (serial from the
    # current Unix timestamp).
    Column('ttl', Integer, default=CONF.default_ttl, nullable=False),
    Column('serial', Integer, default=timeutils.utcnow_ts, nullable=False),
    Column('refresh', Integer, default=CONF.default_soa_refresh,
           nullable=False),
    Column('retry', Integer, default=CONF.default_soa_retry, nullable=False),
    Column('expire', Integer, default=CONF.default_soa_expire, nullable=False),
    Column('minimum', Integer, default=CONF.default_soa_minimum,
           nullable=False),
    Column('status', Enum(name='resource_statuses', *RESOURCE_STATUSES),
           nullable=False, server_default='ACTIVE', default='ACTIVE'),

    # Self-referential parent link; nulled out if the parent goes away.
    Column('parent_domain_id', UUID, default=None, nullable=True),

    UniqueConstraint('name', 'deleted', name='unique_domain_name'),
    ForeignKeyConstraint(['parent_domain_id'],
                         ['domains.id'],
                         ondelete='SET NULL'),

    mysql_engine='InnoDB',
    mysql_charset='utf8',
)

recordsets = Table('recordsets', metadata,
    Column('id', UUID, default=utils.generate_uuid, primary_key=True),
    Column('version', Integer(), default=1, nullable=False),
    Column('created_at', DateTime, default=lambda: timeutils.utcnow()),
    Column('updated_at', DateTime, onupdate=lambda: timeutils.utcnow()),

    Column('tenant_id', String(36), default=None, nullable=True),
    Column('domain_id', UUID, nullable=False),
    Column('name', String(255), nullable=False),
    Column('type', Enum(name='record_types', *RECORD_TYPES), nullable=False),
    Column('ttl', Integer, default=None, nullable=True),
    Column('description', Unicode(160), nullable=True),

    UniqueConstraint('domain_id', 'name', 'type', name='unique_recordset'),
    ForeignKeyConstraint(['domain_id'], ['domains.id'], ondelete='CASCADE'),

    mysql_engine='InnoDB',
    mysql_charset='utf8',
)

records = Table('records', metadata,
    Column('id', UUID, default=utils.generate_uuid, primary_key=True),
    Column('version', Integer(), default=1, nullable=False),
    Column('created_at', DateTime, default=lambda: timeutils.utcnow()),
    Column('updated_at', DateTime, onupdate=lambda: timeutils.utcnow()),

    Column('tenant_id', String(36), default=None, nullable=True),
    Column('domain_id', UUID, nullable=False),
    Column('recordset_id', UUID, nullable=False),
    Column('data', Text, nullable=False),
    Column('priority', Integer, default=None, nullable=True),
    Column('description', Unicode(160), nullable=True),

    # Unique md5 digest used to enforce record uniqueness.
    Column('hash', String(32), nullable=False, unique=True),

    # Bookkeeping for records managed on behalf of plugins/other services.
    Column('managed', Boolean, default=False),
    Column('managed_extra', Unicode(100), default=None, nullable=True),
    Column('managed_plugin_type', Unicode(50), default=None, nullable=True),
    Column('managed_plugin_name', Unicode(50), default=None, nullable=True),
    Column('managed_resource_type', Unicode(50), default=None, nullable=True),
    Column('managed_resource_region', Unicode(100), default=None,
           nullable=True),
    Column('managed_resource_id', UUID, default=None, nullable=True),
    Column('managed_tenant_id', Unicode(36), default=None, nullable=True),

    Column('status', Enum(name='resource_statuses', *RESOURCE_STATUSES),
           nullable=False, server_default='ACTIVE', default='ACTIVE'),

    ForeignKeyConstraint(['domain_id'], ['domains.id'], ondelete='CASCADE'),
    ForeignKeyConstraint(['recordset_id'], ['recordsets.id'],
                         ondelete='CASCADE'),

    mysql_engine='InnoDB',
    mysql_charset='utf8',
)

tsigkeys = Table('tsigkeys', metadata,
    Column('id', UUID, default=utils.generate_uuid, primary_key=True),
    Column('version', Integer(), default=1, nullable=False),
    Column('created_at', DateTime, default=lambda: timeutils.utcnow()),
    Column('updated_at', DateTime, onupdate=lambda: timeutils.utcnow()),

    Column('name', String(255), nullable=False, unique=True),
    Column('algorithm', Enum(name='tsig_algorithms', *TSIG_ALGORITHMS),
           nullable=False),
    Column('secret', String(255), nullable=False),

    mysql_engine='InnoDB',
    mysql_charset='utf8',
)

blacklists = Table('blacklists', metadata,
    Column('id', UUID, default=utils.generate_uuid, primary_key=True),
    Column('version', Integer(), default=1, nullable=False),
    Column('created_at', DateTime, default=lambda: timeutils.utcnow()),
    Column('updated_at', DateTime, onupdate=lambda: timeutils.utcnow()),

    Column('pattern', String(255), nullable=False, unique=True),
    Column('description', Unicode(160), nullable=True),

    mysql_engine='InnoDB',
    mysql_charset='utf8',
)

View File

@ -42,11 +42,6 @@ from designate.network_api import fake as fake_network_api
from designate import network_api
from designate import objects
# NOTE: If eventlet isn't patched and there's a exc tests block
import eventlet
eventlet.monkey_patch(os=False)
LOG = logging.getLogger(__name__)
cfg.CONF.import_opt('storage_driver', 'designate.central',
@ -447,7 +442,7 @@ class TestCase(base.BaseTestCase):
for index in range(len(self.default_tld_fixtures)):
try:
self.create_default_tld(fixture=index)
except exceptions.DuplicateTLD:
except exceptions.DuplicateTld:
pass
def create_tsigkey(self, **kwargs):

View File

@ -322,7 +322,7 @@ class CentralServiceTest(CentralTestCase):
# Fetch the tld again, ensuring an exception is raised
self.assertRaises(
exceptions.TLDNotFound,
exceptions.TldNotFound,
self.central_service.get_tld,
self.admin_context, tld['id'])

View File

@ -14,6 +14,7 @@
# License for the specific language governing permissions and limitations
# under the License.
import uuid
import math
import testtools
@ -49,15 +50,27 @@ class StorageTestCase(object):
Given an array of created items we iterate through them making sure
they match up to things returned by paged results.
"""
found = method(self.admin_context, limit=5)
x = 0
for i in xrange(0, len(data)):
self.assertEqual(data[i]['id'], found[x]['id'])
x += 1
if x == len(found):
x = 0
found = method(
self.admin_context, limit=5, marker=found[-1:][0]['id'])
results = None
item_number = 0
for current_page in range(0, int(math.ceil(float(len(data)) / 2))):
LOG.debug('Validating results on page %d', current_page)
if results is not None:
results = method(
self.admin_context, limit=2, marker=results[-1]['id'])
else:
results = method(self.admin_context, limit=2)
LOG.critical('Results: %d', len(results))
for result_number, result in enumerate(results):
LOG.debug('Validating result %d on page %d', result_number,
current_page)
self.assertEqual(
data[item_number]['id'], results[result_number]['id'])
item_number += 1
def test_paging_marker_not_found(self):
with testtools.ExpectedException(exceptions.MarkerNotFound):
@ -93,7 +106,7 @@ class StorageTestCase(object):
values = self.get_quota_fixture()
values['tenant_id'] = self.admin_context.tenant
result = self.storage.create_quota(self.admin_context, values=values)
result = self.storage.create_quota(self.admin_context, values)
self.assertIsNotNone(result['id'])
self.assertIsNotNone(result['created_at'])
@ -290,12 +303,12 @@ class StorageTestCase(object):
self.assertEqual(1, len(actual))
self.assertEqual(server['name'], actual[0]['name'])
# Order of found items later will be reverse of the order they are
# created
created = [self.create_server(
name='ns%s.example.org.' % i) for i in xrange(10, 20)]
created.insert(0, server)
def test_find_servers_paging(self):
# Create 10 Servers
created = [self.create_server(name='ns%d.example.org.' % i)
for i in xrange(10)]
# Ensure we can page through the results.
self._ensure_paging(created, self.storage.find_servers)
def test_find_servers_criterion(self):
@ -415,12 +428,12 @@ class StorageTestCase(object):
self.assertEqual(tsig['algorithm'], actual[0]['algorithm'])
self.assertEqual(tsig['secret'], actual[0]['secret'])
# Order of found items later will be reverse of the order they are
# created
created = [self.create_tsigkey(name='tsig%s.' % i)
for i in xrange(10, 20)]
created.insert(0, tsig)
def test_find_tsigkeys_paging(self):
# Create 10 TSIG Keys
created = [self.create_tsigkey(name='tsig-%s' % i)
for i in xrange(10)]
# Ensure we can page through the results.
self._ensure_paging(created, self.storage.find_tsigkeys)
def test_find_tsigkeys_criterion(self):
@ -612,12 +625,12 @@ class StorageTestCase(object):
self.assertEqual(domain['name'], actual[0]['name'])
self.assertEqual(domain['email'], actual[0]['email'])
# Order of found items later will be reverse of the order they are
# created XXXX
created = [self.create_domain(name='x%s.org.' % i)
for i in xrange(10, 20)]
created.insert(0, domain)
def test_find_domains_paging(self):
# Create 10 Domains
created = [self.create_domain(name='example-%d.org.' % i)
for i in xrange(10)]
# Ensure we can page through the results.
self._ensure_paging(created, self.storage.find_domains)
def test_find_domains_criterion(self):
@ -868,12 +881,14 @@ class StorageTestCase(object):
self.assertEqual(recordset_one['name'], actual[0]['name'])
self.assertEqual(recordset_one['type'], actual[0]['type'])
# Order of found items later will be reverse of the order they are
# created
created = [self.create_recordset(
domain, name='test%s' % i + '.%s') for i in xrange(10, 20)]
created.insert(0, recordset_one)
def test_find_recordsets_paging(self):
domain = self.create_domain(name='example.org.')
# Create 10 RecordSets
created = [self.create_recordset(domain, name='r-%d.example.org.' % i)
for i in xrange(10)]
# Ensure we can page through the results.
self._ensure_paging(created, self.storage.find_recordsets)
def test_find_recordsets_criterion(self):
@ -1235,13 +1250,15 @@ class StorageTestCase(object):
self.assertEqual(record['data'], actual[0]['data'])
self.assertIn('status', record)
# Order of found items later will be reverse of the order they are
# created
created = [self.create_record(
domain, recordset, data='192.0.0.%s' % i)
for i in xrange(10, 20)]
created.insert(0, record)
def test_find_records_paging(self):
domain = self.create_domain()
recordset = self.create_recordset(domain, type='A')
# Create 10 Records
created = [self.create_record(domain, recordset, data='192.0.2.%d' % i)
for i in xrange(10)]
# Ensure we can page through the results.
self._ensure_paging(created, self.storage.find_records)
def test_find_records_criterion(self):