Restructured SQL code

We have a lot of abstraction that isn't really needed. This patch
removes some of that abstraction and reorganizes the code accordingly.

- Merged storage and sqlalchemy folders.
- Deprecated [central].storage_driver.
- Minor cleanup to SQL code.
- Cleaned up comments and unit tests.

Change-Id: I8f0508aedcc028b1a6e74c790c5d757a0d6c98d7
This commit is contained in:
Erik Olof Gunnar Andersson 2023-04-15 19:53:11 -07:00
parent 590064e8d9
commit 0c30fc491b
52 changed files with 4049 additions and 4365 deletions

View File

@ -90,9 +90,7 @@ class Service(service.RPCService):
@property
def storage(self):
if not self._storage:
# Get a storage connection
storage_driver = cfg.CONF['service:central'].storage_driver
self._storage = storage.get_storage(storage_driver)
self._storage = storage.get_storage()
return self._storage
@property

View File

@ -18,10 +18,10 @@ from sqlalchemy import MetaData, Table, select, func
import designate.conf
from designate.i18n import _
from designate.sqlalchemy import sql
from designate.storage import sql
# This import is not used, but is needed to register the storage:sqlalchemy
# group.
import designate.storage.impl_sqlalchemy # noqa
import designate.storage.sqlalchemy # noqa
from designate import utils

View File

@ -26,6 +26,9 @@ CENTRAL_OPTS = [
cfg.IntOpt('threads', default=1000,
help='Number of central greenthreads to spawn'),
cfg.StrOpt('storage_driver', default='sqlalchemy',
deprecated_for_removal=True,
deprecated_reason='Alternative storage drivers are no longer '
                  'supported.',
help='The storage driver to use'),
cfg.IntOpt('max_zone_name_len', default=255,
help="Maximum zone name length"),

View File

@ -31,11 +31,11 @@ LOG = logging.getLogger(__name__)
class DatabaseCommands(base.Commands):
def _get_alembic_config(self, db_url=None, stringio_buffer=sys.stdout):
alembic_dir = os.path.join(os.path.dirname(__file__),
os.pardir, 'storage/impl_sqlalchemy')
os.pardir, 'storage/sqlalchemy')
alembic_cfg = Config(os.path.join(alembic_dir, 'alembic.ini'),
stdout=stringio_buffer)
alembic_cfg.set_main_option(
'script_location', 'designate.storage.impl_sqlalchemy:alembic')
'script_location', 'designate.storage.sqlalchemy:alembic')
if db_url:
alembic_cfg.set_main_option('sqlalchemy.url', db_url)
else:

View File

@ -54,9 +54,7 @@ class Service(service.Service):
@property
def storage(self):
if not self._storage:
self._storage = storage.get_storage(
CONF['service:mdns'].storage_driver
)
self._storage = storage.get_storage()
return self._storage
@property

View File

@ -54,8 +54,7 @@ class Service(service.RPCService):
@property
def storage(self):
if not self._storage:
storage_driver = cfg.CONF['service:producer'].storage_driver
self._storage = storage.get_storage(storage_driver)
self._storage = storage.get_storage()
return self._storage
@property

View File

@ -13,7 +13,6 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
from designate import exceptions
@ -31,10 +30,7 @@ class StorageQuota(base.Quota):
def __init__(self):
super(StorageQuota, self).__init__()
# TODO(kiall): Should this be tied to central's config?
storage_driver = cfg.CONF['service:central'].storage_driver
self.storage = storage.get_storage(storage_driver)
self.storage = storage.get_storage()
def _get_quotas(self, context, tenant_id):
quotas = self.storage.find_quotas(context, {

View File

@ -23,19 +23,17 @@ from oslo_db import exception as db_exception
from oslo_log import log as logging
from oslo_utils import excutils
from designate.sqlalchemy import sql
from designate.storage import base
from designate.storage import sql
from designate.storage import sqlalchemy
LOG = logging.getLogger(__name__)
RETRY_STATE = threading.local()
def get_storage(storage_driver):
"""Return the engine class from the provided engine name"""
cls = base.Storage.get_driver(storage_driver)
return cls()
def get_storage():
    """Return a new SQLAlchemyStorage storage engine instance.

    SQLAlchemy is the only supported storage engine.
    """
    return sqlalchemy.SQLAlchemyStorage()
def _retry_on_deadlock(exc):

View File

@ -1,858 +0,0 @@
# Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <kiall@managedit.ie>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from designate.plugin import DriverPlugin
class Storage(DriverPlugin, metaclass=abc.ABCMeta):
    """Abstract base class for storage plugins.

    Defines the full storage interface (quotas, TLDs, TSIG keys, tenants,
    zones, recordsets, records, blacklists, pools, zone imports/exports and
    service statuses). Concrete drivers are discovered via the
    'designate.storage' entry-point namespace.
    """

    __plugin_ns__ = 'designate.storage'
    __plugin_type__ = 'storage'

    @abc.abstractmethod
    def create_quota(self, context, quota):
        """
        Create a Quota.

        :param context: RPC Context.
        :param quota: Quota object with the values to be created.
        """

    @abc.abstractmethod
    def get_quota(self, context, quota_id):
        """
        Get a Quota via ID.

        :param context: RPC Context.
        :param quota_id: Quota ID to get.
        """

    @abc.abstractmethod
    def find_quotas(self, context, criterion=None, marker=None,
                    limit=None, sort_key=None, sort_dir=None):
        """
        Find Quotas

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        :param marker: Resource ID from which after the requested page will
                       start after
        :param limit: Integer limit of objects of the page size after the
                      marker
        :param sort_key: Key from which to sort after.
        :param sort_dir: Direction to sort after using sort_key.
        """

    @abc.abstractmethod
    def find_quota(self, context, criterion):
        """
        Find a single Quota.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def update_quota(self, context, quota):
        """
        Update a Quota

        :param context: RPC Context.
        :param quota: Quota to update.
        """

    @abc.abstractmethod
    def delete_quota(self, context, quota_id):
        """
        Delete a Quota via ID.

        :param context: RPC Context.
        :param quota_id: Delete a Quota via ID
        """

    @abc.abstractmethod
    def create_tld(self, context, tld):
        """
        Create a TLD.

        :param context: RPC Context.
        :param tld: Tld object with the values to be created.
        """

    @abc.abstractmethod
    def get_tld(self, context, tld_id):
        """
        Get a TLD via ID.

        :param context: RPC Context.
        :param tld_id: TLD ID to get.
        """

    @abc.abstractmethod
    def find_tlds(self, context, criterion=None, marker=None,
                  limit=None, sort_key=None, sort_dir=None):
        """
        Find TLDs

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        :param marker: Resource ID from which after the requested page will
                       start after
        :param limit: Integer limit of objects of the page size after the
                      marker
        :param sort_key: Key from which to sort after.
        :param sort_dir: Direction to sort after using sort_key.
        """

    @abc.abstractmethod
    def find_tld(self, context, criterion):
        """
        Find a single TLD.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def update_tld(self, context, tld):
        """
        Update a TLD

        :param context: RPC Context.
        :param tld: TLD to update.
        """

    @abc.abstractmethod
    def delete_tld(self, context, tld_id):
        """
        Delete a TLD via ID.

        :param context: RPC Context.
        :param tld_id: Delete a TLD via ID
        """

    @abc.abstractmethod
    def create_tsigkey(self, context, tsigkey):
        """
        Create a TSIG Key.

        :param context: RPC Context.
        :param tsigkey: TsigKey object with the values to be created.
        """

    @abc.abstractmethod
    def find_tsigkeys(self, context, criterion=None,
                      marker=None, limit=None, sort_key=None, sort_dir=None):
        """
        Find TSIG Keys.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        :param marker: Resource ID from which after the requested page will
                       start after
        :param limit: Integer limit of objects of the page size after the
                      marker
        :param sort_key: Key from which to sort after.
        :param sort_dir: Direction to sort after using sort_key.
        """

    @abc.abstractmethod
    def get_tsigkey(self, context, tsigkey_id):
        """
        Get a TSIG Key via ID.

        :param context: RPC Context.
        :param tsigkey_id: Server ID to get.
        """

    @abc.abstractmethod
    def update_tsigkey(self, context, tsigkey):
        """
        Update a TSIG Key

        :param context: RPC Context.
        :param tsigkey: TSIG Key to update.
        """

    @abc.abstractmethod
    def delete_tsigkey(self, context, tsigkey_id):
        """
        Delete a TSIG Key via ID.

        :param context: RPC Context.
        :param tsigkey_id: Delete a TSIG Key via ID
        """

    @abc.abstractmethod
    def find_tenants(self, context):
        """
        Find all Tenants.

        :param context: RPC Context.
        """

    @abc.abstractmethod
    def get_tenant(self, context, tenant_id):
        """
        Get a Tenant via ID.

        :param context: RPC Context.
        :param tenant_id: ID of the Tenant.
        """

    @abc.abstractmethod
    def count_tenants(self, context):
        """
        Count tenants

        :param context: RPC Context.
        """

    @abc.abstractmethod
    def create_zone(self, context, zone):
        """
        Create a new Zone.

        :param context: RPC Context.
        :param zone: Zone object with the values to be created.
        """

    @abc.abstractmethod
    def get_zone(self, context, zone_id, apply_tenant_criteria=True):
        """
        Get a Zone via its ID.

        :param context: RPC Context.
        :param zone_id: ID of the Zone.
        :param apply_tenant_criteria: Whether to filter results by project_id.
        """

    @abc.abstractmethod
    def find_zones(self, context, criterion=None, marker=None,
                   limit=None, sort_key=None, sort_dir=None):
        """
        Find zones

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        :param marker: Resource ID from which after the requested page will
                       start after
        :param limit: Integer limit of objects of the page size after the
                      marker
        :param sort_key: Key from which to sort after.
        :param sort_dir: Direction to sort after using sort_key.
        """

    @abc.abstractmethod
    def find_zone(self, context, criterion):
        """
        Find a single Zone.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def update_zone(self, context, zone):
        """
        Update a Zone

        :param context: RPC Context.
        :param zone: Zone object.
        """

    @abc.abstractmethod
    def delete_zone(self, context, zone_id):
        """
        Delete a Zone

        :param context: RPC Context.
        :param zone_id: Zone ID to delete.
        """

    @abc.abstractmethod
    def purge_zone(self, context, zone):
        """
        Purge a Zone

        :param context: RPC Context.
        :param zone: Zone to delete.
        """

    @abc.abstractmethod
    def count_zones(self, context, criterion=None):
        """
        Count zones

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def share_zone(self, context, shared_zone):
        """
        Share zone

        :param context: RPC Context.
        :param shared_zone: Shared Zone dict
        """

    @abc.abstractmethod
    def unshare_zone(self, context, zone_id, shared_zone_id):
        """
        Unshare zone

        :param context: RPC Context.
        :param zone_id: Zone Id the share belongs to.
        :param shared_zone_id: Shared Zone Id
        """

    @abc.abstractmethod
    def find_shared_zones(self, context, criterion=None, marker=None,
                          limit=None, sort_key=None, sort_dir=None):
        """
        Find shared zones

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        :param marker: Resource ID from which after the requested page will
                       start after
        :param limit: Integer limit of objects of the page size after the
                      marker
        :param sort_key: Key from which to sort after.
        :param sort_dir: Direction to sort after using sort_key.
        """

    @abc.abstractmethod
    def get_shared_zone(self, context, zone_id, shared_zone_id):
        """
        Get a shared zone via ID

        :param context: RPC Context.
        :param zone_id: Zone Id the share belongs to.
        :param shared_zone_id: Shared Zone Id
        """

    @abc.abstractmethod
    def is_zone_shared_with_project(self, zone_id, project_id):
        """
        Checks if a zone is shared with a project.

        :param zone_id: The zone ID to check.
        :param project_id: The project ID to check.
        :returns: Boolean True/False if the zone is shared with the project.
        """

    @abc.abstractmethod
    def delete_zone_shares(self, zone_id):
        """
        Delete all of the zone shares for a specific zone.

        :param zone_id: The zone ID to check.
        """

    @abc.abstractmethod
    def create_recordset(self, context, zone_id, recordset):
        """
        Create a recordset on a given Zone ID

        :param context: RPC Context.
        :param zone_id: Zone ID to create the recordset in.
        :param recordset: RecordSet object with the values to be created.
        """

    @abc.abstractmethod
    def find_recordsets(self, context, criterion=None, marker=None, limit=None,
                        sort_key=None, sort_dir=None, force_index=False,
                        apply_tenant_criteria=True):
        """
        Find RecordSets.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        :param marker: Resource ID from which after the requested page will
                       start after
        :param limit: Integer limit of objects of the page size after the
                      marker
        :param sort_key: Key from which to sort after.
        :param sort_dir: Direction to sort after using sort_key.
        :param apply_tenant_criteria: Whether to filter results by project_id.
        """

    @abc.abstractmethod
    def find_recordsets_axfr(self, context, criterion=None):
        """
        Find RecordSets.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def find_recordset(self, context, criterion, apply_tenant_criteria=True):
        """
        Find a single RecordSet.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        :param apply_tenant_criteria: Whether to filter results by project_id.
        """

    @abc.abstractmethod
    def update_recordset(self, context, recordset):
        """
        Update a recordset

        :param context: RPC Context.
        :param recordset: RecordSet to update
        """

    @abc.abstractmethod
    def delete_recordset(self, context, recordset_id):
        """
        Delete a recordset

        :param context: RPC Context.
        :param recordset_id: RecordSet ID to delete
        """

    @abc.abstractmethod
    def count_recordsets(self, context, criterion=None):
        """
        Count recordsets

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def create_record(self, context, zone_id, recordset_id, record):
        """
        Create a record on a given Zone ID

        :param context: RPC Context.
        :param zone_id: Zone ID to create the record in.
        :param recordset_id: RecordSet ID to create the record in.
        :param record: Record object with the values to be created.
        """

    @abc.abstractmethod
    def get_record(self, context, record_id):
        """
        Get a record via ID

        :param context: RPC Context.
        :param record_id: Record ID to get
        """

    @abc.abstractmethod
    def find_records(self, context, criterion=None, marker=None,
                     limit=None, sort_key=None, sort_dir=None):
        """
        Find Records.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        :param marker: Resource ID from which after the requested page will
                       start after
        :param limit: Integer limit of objects of the page size after the
                      marker
        :param sort_key: Key from which to sort after.
        :param sort_dir: Direction to sort after using sort_key.
        """

    @abc.abstractmethod
    def find_record(self, context, criterion):
        """
        Find a single Record.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def update_record(self, context, record):
        """
        Update a record

        :param context: RPC Context.
        :param record: Record to update
        """

    @abc.abstractmethod
    def delete_record(self, context, record_id):
        """
        Delete a record

        :param context: RPC Context.
        :param record_id: Record ID to delete
        """

    @abc.abstractmethod
    def count_records(self, context, criterion=None):
        """
        Count records

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def create_blacklist(self, context, blacklist):
        """
        Create a Blacklist.

        :param context: RPC Context.
        :param blacklist: Blacklist object with the values to be created.
        """

    @abc.abstractmethod
    def get_blacklist(self, context, blacklist_id):
        """
        Get a Blacklist via ID.

        :param context: RPC Context.
        :param blacklist_id: Blacklist ID to get.
        """

    @abc.abstractmethod
    def find_blacklists(self, context, criterion=None, marker=None,
                        limit=None, sort_key=None, sort_dir=None):
        """
        Find Blacklists

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        :param marker: Resource ID from which after the requested page will
                       start after
        :param limit: Integer limit of objects of the page size after the
                      marker
        :param sort_key: Key from which to sort after.
        :param sort_dir: Direction to sort after using sort_key.
        """

    @abc.abstractmethod
    def find_blacklist(self, context, criterion):
        """
        Find a single Blacklist.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def update_blacklist(self, context, blacklist):
        """
        Update a Blacklist

        :param context: RPC Context.
        :param blacklist: Blacklist to update.
        """

    @abc.abstractmethod
    def delete_blacklist(self, context, blacklist_id):
        """
        Delete a Blacklist via ID.

        :param context: RPC Context.
        :param blacklist_id: Delete a Blacklist via ID
        """

    @abc.abstractmethod
    def create_pool(self, context, pool):
        """
        Create a Pool.

        :param context: RPC Context.
        :param pool: Pool object with the values to be created.
        """

    @abc.abstractmethod
    def find_pools(self, context, criterion=None, marker=None,
                   limit=None, sort_key=None, sort_dir=None):
        """
        Find all Pools

        :param context: RPC Context.
        :param criterion: Criteria by which to filter
        :param marker: Resource ID used by paging. The next page will start
                       at the next resource after the marker
        :param limit: Integer limit of objects on the page
        :param sort_key: Key used to sort the returned list
        :param sort_dir: Directions to sort after using sort_key
        """

    @abc.abstractmethod
    def find_pool(self, context, criterion):
        """
        Find a single Pool.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def get_pool(self, context, pool_id):
        """
        Get a Pool via the id

        :param context: RPC Context.
        :param pool_id: The ID of the pool to get
        """

    @abc.abstractmethod
    def update_pool(self, context, pool):
        """
        Update the specified pool

        :param context: RPC Context.
        :param pool: Pool to update.
        """

    @abc.abstractmethod
    def delete_pool(self, context, pool_id):
        """
        Delete the pool with the matching id

        :param context: RPC Context.
        :param pool_id: The ID of the pool to be deleted
        """

    @abc.abstractmethod
    def create_pool_attribute(self, context, pool_id, pool_attribute):
        """
        Create a PoolAttribute.

        :param context: RPC Context.
        :param pool_id: The ID of the pool to which the attribute belongs.
        :param pool_attribute: PoolAttribute object with the values to be
                               created.
        """

    @abc.abstractmethod
    def find_pool_attributes(self, context, criterion=None, marker=None,
                             limit=None, sort_key=None, sort_dir=None):
        """
        Find all PoolAttributes

        :param context: RPC Context
        :param criterion: Criteria by which to filter
        :param marker: Resource ID used by paging. The next page will start
                       at the next resource after the marker
        :param limit: Integer limit of objects on the page
        :param sort_key: Key used to sort the returned list
        :param sort_dir: Directions to sort after using sort_key
        """

    @abc.abstractmethod
    def find_pool_attribute(self, context, criterion):
        """
        Find a single PoolAttribute

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def get_pool_attribute(self, context, pool_attribute_id):
        """
        Get a PoolAttribute via the ID

        :param context: RPC Context.
        :param pool_attribute_id: The ID of the PoolAttribute to get
        """

    @abc.abstractmethod
    def update_pool_attribute(self, context, pool_attribute):
        """
        Update the specified PoolAttribute

        :param context: RPC Context.
        :param pool_attribute: PoolAttribute to update
        """

    @abc.abstractmethod
    def delete_pool_attribute(self, context, pool_attribute_id):
        """
        Delete the PoolAttribute with the matching id

        :param context: RPC Context.
        :param pool_attribute_id: The ID of the PoolAttribute to be deleted
        """

    @abc.abstractmethod
    def create_zone_import(self, context, zone_import):
        """
        Create a Zone Import.

        :param context: RPC Context.
        :param zone_import: Zone Import object with the values to be created.
        """

    @abc.abstractmethod
    def get_zone_import(self, context, zone_import_id):
        """
        Get a Zone Import via ID.

        :param context: RPC Context.
        :param zone_import_id: Zone Import ID to get.
        """

    @abc.abstractmethod
    def find_zone_imports(self, context, criterion=None, marker=None,
                          limit=None, sort_key=None, sort_dir=None):
        """
        Find Zone Imports

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        :param marker: Resource ID from which after the requested page will
                       start after
        :param limit: Integer limit of objects of the page size after the
                      marker
        :param sort_key: Key from which to sort after.
        :param sort_dir: Direction to sort after using sort_key.
        """

    @abc.abstractmethod
    def find_zone_import(self, context, criterion):
        """
        Find a single Zone Import.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def update_zone_import(self, context, zone_import):
        """
        Update a Zone Import

        :param context: RPC Context.
        :param zone_import: Zone Import to update.
        """

    @abc.abstractmethod
    def increment_serial(self, context, zone_id):
        """
        Increment serial of a Zone

        :param context: RPC Context.
        :param zone_id: ID of the Zone.
        """

    @abc.abstractmethod
    def delete_zone_import(self, context, zone_import_id):
        """
        Delete a Zone Import via ID.

        :param context: RPC Context.
        :param zone_import_id: Delete a Zone Import via ID
        """

    @abc.abstractmethod
    def create_zone_export(self, context, zone_export):
        """
        Create a Zone Export.

        :param context: RPC Context.
        :param zone_export: Zone Export object with the values to be created.
        """

    @abc.abstractmethod
    def get_zone_export(self, context, zone_export_id):
        """
        Get a Zone Export via ID.

        :param context: RPC Context.
        :param zone_export_id: Zone Export ID to get.
        """

    @abc.abstractmethod
    def find_zone_exports(self, context, criterion=None, marker=None,
                          limit=None, sort_key=None, sort_dir=None):
        """
        Find Zone Exports

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        :param marker: Resource ID from which after the requested page will
                       start after
        :param limit: Integer limit of objects of the page size after the
                      marker
        :param sort_key: Key from which to sort after.
        :param sort_dir: Direction to sort after using sort_key.
        """

    @abc.abstractmethod
    def find_zone_export(self, context, criterion):
        """
        Find a single Zone Export.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def update_zone_export(self, context, zone_export):
        """
        Update a Zone Export

        :param context: RPC Context.
        :param zone_export: Zone Export to update.
        """

    @abc.abstractmethod
    def delete_zone_export(self, context, zone_export_id):
        """
        Delete a Zone Export via ID.

        :param context: RPC Context.
        :param zone_export_id: Delete a Zone Export via ID
        """

    @abc.abstractmethod
    def find_service_statuses(self, context, criterion=None, marker=None,
                              limit=None, sort_key=None, sort_dir=None):
        """
        Retrieve status for services

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        :param marker: Resource ID from which after the requested page will
                       start after
        :param limit: Integer limit of objects of the page size after the
                      marker
        :param sort_key: Key from which to sort after.
        :param sort_dir: Direction to sort after using sort_key.
        """

    @abc.abstractmethod
    def find_service_status(self, context, criterion):
        """
        Find a single Service Status.

        :param context: RPC Context.
        :param criterion: Criteria to filter by.
        """

    @abc.abstractmethod
    def update_service_status(self, context, service_status):
        """
        Update the Service status for a service.

        :param context: RPC Context.
        :param service_status: Set the status for a service.
        """

View File

@ -1,5 +1,5 @@
Please use the "designate-manage database" command for database management.
Developers adding new migrations can run 'alembic revision -m "<migration title>"' from
the designate/storage/impl_sqlalchemy directory where the alembic.ini file is
the designate/storage/sqlalchemy directory where the alembic.ini file is
located.

View File

@ -24,8 +24,8 @@ from alembic import op
from oslo_utils import timeutils
import sqlalchemy as sa
from designate.sqlalchemy.types import UUID
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.types import UUID
from designate import utils
# revision identifiers, used by Alembic.

View File

@ -22,7 +22,7 @@ Create Date: 2022-08-01 16:53:34.612019
"""
from alembic import op
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
# revision identifiers, used by Alembic.
revision = '15b34ff3ecb8'

View File

@ -23,8 +23,8 @@ Create Date: 2022-08-01 16:41:55.139558
from alembic import op
import sqlalchemy as sa
from designate.sqlalchemy.types import UUID
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.types import UUID
from designate import utils
# revision identifiers, used by Alembic.

View File

@ -22,7 +22,7 @@ Create Date: 2022-08-01 17:13:01.429689
"""
from alembic import op
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
# revision identifiers, used by Alembic.
revision = '7977deaa5167'

View File

@ -23,8 +23,8 @@ Create Date: 2022-07-29 18:41:19.427853
from alembic import op
import sqlalchemy as sa
from designate.sqlalchemy.types import UUID
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.types import UUID
# revision identifiers, used by Alembic.
revision = '867a331ce1fc'

View File

@ -23,7 +23,7 @@ Create Date: 2022-08-01 17:32:21.386556
from alembic import op
import sqlalchemy as sa
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
# revision identifiers, used by Alembic.
revision = '91eb1eb7c882'

View File

@ -25,7 +25,7 @@ from alembic import op
from oslo_log import log as logging
import sqlalchemy as sa
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
# revision identifiers, used by Alembic.
revision = '93a00a815f07'

View File

@ -23,7 +23,7 @@ Create Date: 2022-07-29 21:30:12.127816
from alembic import op
import sqlalchemy as sa
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
# revision identifiers, used by Alembic.
revision = 'a69b45715cc1'

View File

@ -20,7 +20,7 @@ Create Date: 2022-09-22 20:50:03.056609
from alembic import op
import sqlalchemy as sa
from designate.sqlalchemy.types import UUID
from designate.storage.sqlalchemy.types import UUID
from designate import utils
# revision identifiers, used by Alembic.

View File

@ -23,7 +23,7 @@ Create Date: 2022-08-01 17:25:33.058845
from alembic import op
import sqlalchemy as sa
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
# revision identifiers, used by Alembic.
revision = 'b8999fd10721'

View File

@ -26,7 +26,7 @@ from alembic import op
from oslo_log import log as logging
import sqlalchemy as sa
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
# revision identifiers, used by Alembic.
revision = 'bfcfc4a07487'

View File

@ -26,8 +26,8 @@ from oslo_utils import timeutils
import sqlalchemy as sa
from designate.conf import central
from designate.sqlalchemy.types import UUID
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.types import UUID
# revision identifiers, used by Alembic.
revision = 'c9f427f7180a'

View File

@ -24,8 +24,8 @@ from alembic import op
from oslo_utils import timeutils
import sqlalchemy as sa
from designate.sqlalchemy.types import UUID
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.types import UUID
from designate import utils
# revision identifiers, used by Alembic.

View File

@ -22,7 +22,7 @@ Create Date: 2022-07-29 20:41:51.855014
"""
from alembic import op
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
# revision identifiers, used by Alembic.
revision = 'd9a1883e93e9'

View File

@ -23,7 +23,7 @@ Create Date: 2022-08-01 17:34:45.569101
from alembic import op
import sqlalchemy as sa
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
# revision identifiers, used by Alembic.
revision = 'e5e2199ed76e'

View File

@ -23,7 +23,7 @@ Create Date: 2022-07-29 21:18:35.403634
from alembic import op
import sqlalchemy as sa
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
# revision identifiers, used by Alembic.
revision = 'f9f969f9d85e'

View File

@ -25,12 +25,80 @@ from sqlalchemy import select, or_, between, func, distinct
from designate import exceptions
from designate import objects
from designate.sqlalchemy import sql
from designate.sqlalchemy import utils
from designate.storage import sql
from designate.storage.sqlalchemy import tables
from designate.storage.sqlalchemy import utils
LOG = logging.getLogger(__name__)
RECORDSET_QUERY_TABLES = (
# RS Info
tables.recordsets.c.id, # 0 - RS ID
tables.recordsets.c.version, # 1 - RS Version
tables.recordsets.c.created_at, # 2 - RS Created
tables.recordsets.c.updated_at, # 3 - RS Updated
tables.recordsets.c.tenant_id, # 4 - RS Tenant
tables.recordsets.c.zone_id, # 5 - RS Zone
tables.recordsets.c.name, # 6 - RS Name
tables.recordsets.c.type, # 7 - RS Type
tables.recordsets.c.ttl, # 8 - RS TTL
tables.recordsets.c.description, # 9 - RS Desc
# R Info
tables.records.c.id, # 10 - R ID
tables.records.c.version, # 11 - R Version
tables.records.c.created_at, # 12 - R Created
tables.records.c.updated_at, # 13 - R Updated
tables.records.c.tenant_id, # 14 - R Tenant
tables.records.c.zone_id, # 15 - R Zone
tables.records.c.recordset_id, # 16 - R RSet
tables.records.c.data, # 17 - R Data
tables.records.c.description, # 18 - R Desc
tables.records.c.hash, # 19 - R Hash
tables.records.c.managed, # 20 - R Mngd Flg
tables.records.c.managed_plugin_name, # 21 - R Mngd Plg
tables.records.c.managed_resource_type, # 22 - R Mngd Type
tables.records.c.managed_resource_region, # 23 - R Mngd Rgn
tables.records.c.managed_resource_id, # 24 - R Mngd ID
tables.records.c.managed_tenant_id, # 25 - R Mngd T ID
tables.records.c.status, # 26 - R Status
tables.records.c.action, # 27 - R Action
tables.records.c.serial # 28 - R Serial
)
RECORDSET_MAP = {
'id': 0,
'version': 1,
'created_at': 2,
'updated_at': 3,
'tenant_id': 4,
'zone_id': 5,
'name': 6,
'type': 7,
'ttl': 8,
'description': 9,
}
RECORD_MAP = {
'id': 10,
'version': 11,
'created_at': 12,
'updated_at': 13,
'tenant_id': 14,
'zone_id': 15,
'recordset_id': 16,
'data': 17,
'description': 18,
'hash': 19,
'managed': 20,
'managed_plugin_name': 21,
'managed_resource_type': 22,
'managed_resource_region': 23,
'managed_resource_id': 24,
'managed_tenant_id': 25,
'status': 26,
'action': 27,
'serial': 28,
}
def _set_object_from_model(obj, model, **extra):
"""Update a DesignateObject with the values from a SQLA Model"""
@ -230,13 +298,11 @@ class SQLAlchemy(object, metaclass=abc.ABCMeta):
except ValueError as value_error:
raise exceptions.ValueError(str(value_error))
def _find_recordsets_with_records(self, context, criterion, zones_table,
recordsets_table, records_table,
one=False, marker=None, limit=None,
sort_key=None, sort_dir=None, query=None,
def _find_recordsets_with_records(self, context, criterion,
marker=None, limit=None,
sort_key=None, sort_dir=None,
apply_tenant_criteria=True,
force_index=False):
sort_key = sort_key or 'created_at'
sort_dir = sort_dir or 'asc'
data = criterion.pop('data', None)
@ -247,37 +313,39 @@ class SQLAlchemy(object, metaclass=abc.ABCMeta):
# needs to use the correct table index for different sort keys
index_hint = utils.get_rrset_index(sort_key) if force_index else None
rzjoin = recordsets_table.join(
zones_table,
recordsets_table.c.zone_id == zones_table.c.id)
rzjoin = tables.recordsets.join(
tables.zones,
tables.recordsets.c.zone_id == tables.zones.c.id
)
if filtering_records:
rzjoin = rzjoin.join(
records_table,
recordsets_table.c.id == records_table.c.recordset_id)
tables.records,
tables.recordsets.c.id == tables.records.c.recordset_id
)
inner_q = (
select(recordsets_table.c.id, # 0 - RS ID
zones_table.c.name). # 1 - ZONE NAME
select(tables.recordsets.c.id, # 0 - RS ID
tables.zones.c.name). # 1 - ZONE NAME
select_from(rzjoin).
where(zones_table.c.deleted == '0')
where(tables.zones.c.deleted == '0')
)
count_q = (
select(func.count(distinct(recordsets_table.c.id))).
select_from(rzjoin).where(zones_table.c.deleted == '0')
select(func.count(distinct(tables.recordsets.c.id))).
select_from(rzjoin).where(tables.zones.c.deleted == '0')
)
if index_hint:
inner_q = inner_q.with_hint(recordsets_table, index_hint,
inner_q = inner_q.with_hint(tables.recordsets, index_hint,
dialect_name='mysql')
if marker is not None:
marker = utils.check_marker(recordsets_table, marker)
marker = utils.check_marker(tables.recordsets, marker)
try:
inner_q = utils.paginate_query(
inner_q, recordsets_table, limit,
inner_q, tables.recordsets, limit,
[sort_key, 'id'], marker=marker,
sort_dir=sort_dir)
@ -292,26 +360,26 @@ class SQLAlchemy(object, metaclass=abc.ABCMeta):
if apply_tenant_criteria:
inner_q = self._apply_tenant_criteria(
context, recordsets_table, inner_q,
context, tables.recordsets, inner_q,
include_null_tenant=False)
count_q = self._apply_tenant_criteria(context, recordsets_table,
count_q = self._apply_tenant_criteria(context, tables.recordsets,
count_q,
include_null_tenant=False)
inner_q = self._apply_criterion(recordsets_table, inner_q, criterion)
count_q = self._apply_criterion(recordsets_table, count_q, criterion)
inner_q = self._apply_criterion(tables.recordsets, inner_q, criterion)
count_q = self._apply_criterion(tables.recordsets, count_q, criterion)
if filtering_records:
records_criterion = dict((k, v) for k, v in (
('data', data), ('status', status)) if v is not None)
inner_q = self._apply_criterion(records_table, inner_q,
inner_q = self._apply_criterion(tables.records, inner_q,
records_criterion)
count_q = self._apply_criterion(records_table, count_q,
count_q = self._apply_criterion(tables.records, count_q,
records_criterion)
inner_q = self._apply_deleted_criteria(context, recordsets_table,
inner_q = self._apply_deleted_criteria(context, tables.recordsets,
inner_q)
count_q = self._apply_deleted_criteria(context, recordsets_table,
count_q = self._apply_deleted_criteria(context, tables.recordsets,
count_q)
# Get the list of IDs needed.
@ -339,87 +407,18 @@ class SQLAlchemy(object, metaclass=abc.ABCMeta):
total_count = 0 if result is None else result[0]
# Join the 2 required tables
rjoin = recordsets_table.outerjoin(
records_table,
records_table.c.recordset_id == recordsets_table.c.id)
query = select(
# RS Info
recordsets_table.c.id, # 0 - RS ID
recordsets_table.c.version, # 1 - RS Version
recordsets_table.c.created_at, # 2 - RS Created
recordsets_table.c.updated_at, # 3 - RS Updated
recordsets_table.c.tenant_id, # 4 - RS Tenant
recordsets_table.c.zone_id, # 5 - RS Zone
recordsets_table.c.name, # 6 - RS Name
recordsets_table.c.type, # 7 - RS Type
recordsets_table.c.ttl, # 8 - RS TTL
recordsets_table.c.description, # 9 - RS Desc
# R Info
records_table.c.id, # 10 - R ID
records_table.c.version, # 11 - R Version
records_table.c.created_at, # 12 - R Created
records_table.c.updated_at, # 13 - R Updated
records_table.c.tenant_id, # 14 - R Tenant
records_table.c.zone_id, # 15 - R Zone
records_table.c.recordset_id, # 16 - R RSet
records_table.c.data, # 17 - R Data
records_table.c.description, # 18 - R Desc
records_table.c.hash, # 19 - R Hash
records_table.c.managed, # 20 - R Mngd Flg
records_table.c.managed_plugin_name, # 21 - R Mngd Plg
records_table.c.managed_resource_type, # 22 - R Mngd Type
records_table.c.managed_resource_region, # 23 - R Mngd Rgn
records_table.c.managed_resource_id, # 24 - R Mngd ID
records_table.c.managed_tenant_id, # 25 - R Mngd T ID
records_table.c.status, # 26 - R Status
records_table.c.action, # 27 - R Action
records_table.c.serial # 28 - R Serial
).select_from(rjoin)
query = query.where(
recordsets_table.c.id.in_(formatted_ids)
rjoin = tables.recordsets.outerjoin(
tables.records,
tables.records.c.recordset_id == tables.recordsets.c.id
)
# These make looking up indexes for the Raw Rows much easier,
# and maintainable
query = select(RECORDSET_QUERY_TABLES).select_from(rjoin)
rs_map = {
"id": 0,
"version": 1,
"created_at": 2,
"updated_at": 3,
"tenant_id": 4,
"zone_id": 5,
"name": 6,
"type": 7,
"ttl": 8,
"description": 9,
}
query = query.where(
tables.recordsets.c.id.in_(formatted_ids)
)
r_map = {
"id": 10,
"version": 11,
"created_at": 12,
"updated_at": 13,
"tenant_id": 14,
"zone_id": 15,
"recordset_id": 16,
"data": 17,
"description": 18,
"hash": 19,
"managed": 20,
"managed_plugin_name": 21,
"managed_resource_type": 22,
"managed_resource_region": 23,
"managed_resource_id": 24,
"managed_tenant_id": 25,
"status": 26,
"action": 27,
"serial": 28,
}
query, sort_dirs = utils.sort_query(query, recordsets_table,
query, sort_dirs = utils.sort_query(query, tables.recordsets,
[sort_key, 'id'],
sort_dir=sort_dir)
@ -447,11 +446,11 @@ class SQLAlchemy(object, metaclass=abc.ABCMeta):
# Set up a new rrset
current_rrset = objects.RecordSet()
rrset_id = record[rs_map['id']]
rrset_id = record[RECORDSET_MAP['id']]
# Add all the loaded vars into RecordSet object
for key, value in rs_map.items():
for key, value in RECORDSET_MAP.items():
setattr(current_rrset, key, record[value])
current_rrset.zone_name = id_zname_map[current_rrset.id]
@ -459,20 +458,20 @@ class SQLAlchemy(object, metaclass=abc.ABCMeta):
current_rrset.records = objects.RecordList()
if record[r_map['id']] is not None:
if record[RECORD_MAP['id']] is not None:
rrdata = objects.Record()
for key, value in r_map.items():
for key, value in RECORD_MAP.items():
setattr(rrdata, key, record[value])
current_rrset.records.append(rrdata)
else:
# We've already got a rrset, add the rdata
if record[r_map['id']] is not None:
if record[RECORD_MAP['id']] is not None:
rrdata = objects.Record()
for key, value in r_map.items():
for key, value in RECORD_MAP.items():
setattr(rrdata, key, record[value])
current_rrset.records.append(rrdata)

View File

@ -21,7 +21,7 @@ from oslo_config import cfg
from oslo_db.sqlalchemy import types
from oslo_utils import timeutils
from designate.sqlalchemy.types import UUID
from designate.storage.sqlalchemy.types import UUID
from designate import utils

View File

@ -26,7 +26,7 @@ from sqlalchemy import select
from designate import exceptions
from designate.i18n import _
from designate.sqlalchemy import sql
from designate.storage import sql
LOG = log.getLogger(__name__)

View File

@ -49,7 +49,7 @@ CONF.import_opt('storage_driver', 'designate.central',
group='service:central')
CONF.import_opt('auth_strategy', 'designate.api',
group='service:api')
CONF.import_opt('connection', 'designate.storage.impl_sqlalchemy',
CONF.import_opt('connection', 'designate.storage.sqlalchemy',
group='storage:sqlalchemy')
CONF.import_opt('emitter_type', 'designate.heartbeat_emitter',
group="heartbeat_emitter")
@ -397,8 +397,7 @@ class TestCase(base.BaseTestCase):
self.admin_context = self.get_admin_context()
self.admin_context_all_tenants = self.get_admin_context(
all_tenants=True)
storage_driver = CONF['service:central'].storage_driver
self.storage = storage.get_storage(storage_driver)
self.storage = storage.get_storage()
# Setup the Default Pool with some useful settings
self._setup_default_pool()

View File

@ -37,8 +37,8 @@ import testtools
from designate import exceptions
from designate import objects
from designate.storage.impl_sqlalchemy import tables
from designate.storage import sql
from designate.storage.sqlalchemy import tables
import designate.tests
from designate.tests import fixtures
from designate import utils

View File

@ -21,8 +21,8 @@ from oslo_log import log as logging
from oslo_utils import timeutils
from designate.producer import tasks
from designate.storage.impl_sqlalchemy import tables
from designate.storage import sql
from designate.storage.sqlalchemy import tables
from designate.tests import fixtures
from designate.tests import TestCase
from designate.worker import rpcapi as worker_api

View File

@ -19,7 +19,7 @@ from unittest import mock
import sqlalchemy as sa
from sqlalchemy.sql import operators
from designate.sqlalchemy import base
from designate.storage.sqlalchemy import base
from designate.tests import TestCase
metadata = sa.MetaData()

File diff suppressed because it is too large Load Diff

View File

@ -1,123 +0,0 @@
# Copyright 2012 Managed I.T.
#
# Author: Kiall Mac Innes <kiall@managedit.ie>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from sqlalchemy import text
from designate import storage
from designate.storage import sql
from designate.tests.test_storage import StorageTestCase
from designate.tests import TestCase
LOG = logging.getLogger(__name__)
class SqlalchemyStorageTest(StorageTestCase, TestCase):
def setUp(self):
super(SqlalchemyStorageTest, self).setUp()
self.storage = storage.get_storage('sqlalchemy')
def test_schema_table_names(self):
table_names = [
'blacklists',
'pool_also_notifies',
'pool_attributes',
'pool_nameservers',
'pool_ns_records',
'pool_target_masters',
'pool_target_options',
'pool_targets',
'pools',
'quotas',
'records',
'recordsets',
'service_statuses',
'shared_zones',
'tlds',
'tsigkeys',
'zone_attributes',
'zone_masters',
'zone_tasks',
'zone_transfer_accepts',
'zone_transfer_requests',
'zones'
]
inspector = self.storage.get_inspector()
actual_table_names = inspector.get_table_names()
# We have transitioned database schema migration tools.
# They use different tracking tables. Accomidate that one or both
# may exist in this test.
migration_table_found = False
if ('migrate_version' in actual_table_names or
'alembic_version' in actual_table_names):
migration_table_found = True
self.assertTrue(
migration_table_found, 'A DB migration table was not found.'
)
try:
actual_table_names.remove('migrate_version')
except ValueError:
pass
try:
actual_table_names.remove('alembic_version')
except ValueError:
pass
self.assertEqual(table_names, actual_table_names)
def test_schema_table_indexes(self):
with sql.get_read_session() as session:
indexes_t = session.execute(
text("SELECT * FROM sqlite_master WHERE type = 'index';"))
indexes = {} # table name -> index names -> cmd
for _, index_name, table_name, num, cmd in indexes_t:
if index_name.startswith("sqlite_"):
continue # ignore sqlite-specific indexes
if table_name not in indexes:
indexes[table_name] = {}
indexes[table_name][index_name] = cmd
expected = {
"records": {
"record_created_at": "CREATE INDEX record_created_at ON records (created_at)", # noqa
"records_tenant": "CREATE INDEX records_tenant ON records (tenant_id)", # noqa
"update_status_index": "CREATE INDEX update_status_index ON records (status, zone_id, tenant_id, created_at, serial)", # noqa
},
"recordsets": {
"recordset_created_at": "CREATE INDEX recordset_created_at ON recordsets (created_at)", # noqa
"recordset_type_name": "CREATE INDEX recordset_type_name ON recordsets (type, name)", # noqa
"reverse_name_dom_id": "CREATE INDEX reverse_name_dom_id ON recordsets (reverse_name, zone_id)", # noqa
"rrset_type_domainid": "CREATE INDEX rrset_type_domainid ON recordsets (type, zone_id)", # noqa
"rrset_updated_at": "CREATE INDEX rrset_updated_at ON recordsets (updated_at)", # noqa
"rrset_zoneid": "CREATE INDEX rrset_zoneid ON recordsets (zone_id)", # noqa
"rrset_type": "CREATE INDEX rrset_type ON recordsets (type)", # noqa
"rrset_ttl": "CREATE INDEX rrset_ttl ON recordsets (ttl)", # noqa
"rrset_tenant_id": "CREATE INDEX rrset_tenant_id ON recordsets (tenant_id)", # noqa
},
"zones": {
"delayed_notify": "CREATE INDEX delayed_notify ON zones (delayed_notify)", # noqa
"increment_serial": "CREATE INDEX increment_serial ON zones (increment_serial)", # noqa
"reverse_name_deleted": "CREATE INDEX reverse_name_deleted ON zones (reverse_name, deleted)", # noqa
"zone_created_at": "CREATE INDEX zone_created_at ON zones (created_at)", # noqa
"zone_deleted": "CREATE INDEX zone_deleted ON zones (deleted)",
"zone_tenant_deleted": "CREATE INDEX zone_tenant_deleted ON zones (tenant_id, deleted)", # noqa
}
}
self.assertDictEqual(expected, indexes)

File diff suppressed because it is too large Load Diff

View File

@ -15,7 +15,7 @@ from unittest import mock
import oslotest.base
from designate.storage.impl_sqlalchemy.alembic import legacy_utils
from designate.storage.sqlalchemy.alembic import legacy_utils
class TestLegacyUtils(oslotest.base.BaseTestCase):

View File

@ -29,6 +29,7 @@ import designate.central.service
from designate.central.service import Service
from designate import exceptions
from designate import objects
from designate.storage import sqlalchemy
from designate.tests.fixtures import random_seed
from designate.tests import TestCase
@ -223,7 +224,7 @@ class CentralBasic(TestCase):
super(CentralBasic, self).setUp()
self.CONF = self.useFixture(cfg_fixture.Config(cfg.CONF)).conf
self.CONF([], project='designate')
mock_storage = mock.Mock(spec=designate.storage.base.Storage)
mock_storage = mock.Mock(spec=sqlalchemy.SQLAlchemyStorage)
pool_list = objects.PoolList.from_list(
[

View File

@ -18,7 +18,7 @@ from sqlalchemy.schema import MetaData
from sqlalchemy.schema import Table
from designate.cmd import status
from designate.sqlalchemy import sql
from designate.storage import sql
from designate import tests
@ -69,7 +69,7 @@ class TestDuplicateServiceStatus(tests.TestCase):
self.assertEqual(upgradecheck.Code.SUCCESS,
checks._duplicate_service_status().code)
@mock.patch('designate.sqlalchemy.sql.get_read_session')
@mock.patch('designate.storage.sql.get_read_session')
@mock.patch('designate.storage.sql.get_read_engine')
def test_failure(self, mock_get_engine, mock_get_read):
mock_sql_execute = mock.Mock()

View File

@ -111,8 +111,7 @@ class Service(service.RPCService):
@property
def storage(self):
if not self._storage:
storage_driver = cfg.CONF['service:worker'].storage_driver
self._storage = storage.get_storage(storage_driver)
self._storage = storage.get_storage()
return self._storage
@property

View File

@ -123,9 +123,7 @@ class Task(TaskConfig):
@property
def storage(self):
if not self._storage:
# Get a storage connection
storage_driver = cfg.CONF['service:central'].storage_driver
self._storage = storage.get_storage(storage_driver)
self._storage = storage.get_storage()
return self._storage
@property

View File

@ -9,7 +9,7 @@ Storage
Storage Base
=============
.. automodule:: designate.storage.base
.. automodule:: designate.storage.sqlalchemy
:members:
:undoc-members:
:show-inheritance:

View File

@ -63,9 +63,6 @@ designate.api.admin.extensions =
quotas = designate.api.admin.controllers.extensions.quotas:QuotasController
zones = designate.api.admin.controllers.extensions.zones:ZonesController
designate.storage =
sqlalchemy = designate.storage.impl_sqlalchemy:SQLAlchemyStorage
designate.notification.handler =
fake = designate.notification_handler.fake:FakeHandler
nova_fixed = designate.notification_handler.nova:NovaFixedHandler