pyupgrade changes for Python 3.8+

Result of running

$ pyupgrade --py38-plus $(git ls-files | grep ".py$")

This was inspired by Nova [1] and Octavia [2]

Fixed PEP8 errors introduced by pyupgrade by running:

$ autopep8 --select=E127,E128,E501 --max-line-length 79 -r \
  --in-place designate

and manual updates.
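
For illustration, a condensed before/after sketch based on the BaseView
changes in the diff below (attribute setup omitted, so this is not the
verbatim source):

    # Before: Python 2 compatible spellings
    class BaseView(object):
        def __init__(self):
            super(BaseView, self).__init__()

        def _get_base_href(self, parents=None):
            href = "%s/v2/%s" % (self.base_uri, self._collection_name)
            return href.rstrip('?')

    # After: what pyupgrade --py38-plus emits
    class BaseView:
        def __init__(self):
            super().__init__()

        def _get_base_href(self, parents=None):
            href = f"{self.base_uri}/v2/{self._collection_name}"
            return href.rstrip('?')

Other mechanical rewrites in this change include IOError -> OSError,
socket.error -> OSError, dict([...])/set([...]) calls replaced with
dict/set comprehensions, and "for x in y: yield x" loops replaced with
"yield from y".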

[1]: https://review.opendev.org/c/openstack/nova/+/896986
[2]: https://review.opendev.org/c/openstack/octavia/+/899263

Change-Id: Idfa757d7ba238012db116fdb3e98cc7c5ff4b169
Erik Olof Gunnar Andersson 2023-10-31 10:18:40 -07:00
parent 7ef4c01e02
commit 68fc28527a
200 changed files with 751 additions and 703 deletions


@ -60,7 +60,7 @@ class SampleHandler(NotificationHandler):
zone_id = cfg.CONF[self.name].zone_id
zone_name = cfg.CONF[self.name].zone_name
record_name = '%s.%s' % (payload['instance_id'], zone_name)
record_name = '{}.{}'.format(payload['instance_id'], zone_name)
context = DesignateContext().elevated()
context.all_tenants = True


@ -200,7 +200,7 @@ def main():
zone.to_file(args.write)
else:
zone.to_stdout()
except IOError as e:
except OSError as e:
LOG.error(e)


@ -27,6 +27,6 @@ class ZonesController(rest.RestController):
return '.zones'
def __init__(self):
super(ZonesController, self).__init__()
super().__init__()
export = export.ExportController()


@ -22,7 +22,7 @@ from designate.api.v2.controllers import errors
LOG = logging.getLogger(__name__)
class RootController(object):
class RootController:
"""
This is /admin/ Controller. Pecan will find all controllers via the object
properties attached to this.


@ -26,7 +26,7 @@ LOG = logging.getLogger(__name__)
CONF = cfg.CONF
class BaseView(object):
class BaseView:
"""
The Views are responsible for converting to/from the "internal" and
"external" representations of collections and resources. This includes
@ -40,7 +40,7 @@ class BaseView(object):
_collection_name = None
def __init__(self):
super(BaseView, self).__init__()
super().__init__()
self.base_uri = CONF['service:api']['api_base_uri'].rstrip('/')
@ -160,13 +160,13 @@ class BaseView(object):
return links
def _get_base_href(self, parents=None):
href = "%s/v2/%s" % (self.base_uri, self._collection_name)
href = f"{self.base_uri}/v2/{self._collection_name}"
return href.rstrip('?')
def _get_resource_href(self, request, item, parents=None):
base_href = self._get_base_href(parents)
href = "%s/%s" % (base_href, item['id'])
href = "{}/{}".format(base_href, item['id'])
return href.rstrip('?')
@ -178,7 +178,7 @@ class BaseView(object):
base_href = self._get_base_href(parents)
href = "%s?%s" % (base_href, parse.urlencode(params))
href = f"{base_href}?{parse.urlencode(params)}"
return href.rstrip('?')


@ -130,7 +130,7 @@ class ContextMiddleware(base.Middleware):
class KeystoneContextMiddleware(ContextMiddleware):
def __init__(self, application):
super(KeystoneContextMiddleware, self).__init__(application)
super().__init__(application)
LOG.info('Starting designate keystonecontext middleware')
@ -170,7 +170,7 @@ class KeystoneContextMiddleware(ContextMiddleware):
class NoAuthContextMiddleware(ContextMiddleware):
def __init__(self, application):
super(NoAuthContextMiddleware, self).__init__(application)
super().__init__(application)
LOG.info('Starting designate noauthcontext middleware')
@ -188,7 +188,7 @@ class NoAuthContextMiddleware(ContextMiddleware):
class TestContextMiddleware(ContextMiddleware):
def __init__(self, application, tenant_id=None, user_id=None):
super(TestContextMiddleware, self).__init__(application)
super().__init__(application)
LOG.critical('Starting designate testcontext middleware')
LOG.critical('**** DO NOT USE IN PRODUCTION ****')
@ -223,7 +223,7 @@ class TestContextMiddleware(ContextMiddleware):
class MaintenanceMiddleware(base.Middleware):
def __init__(self, application):
super(MaintenanceMiddleware, self).__init__(application)
super().__init__(application)
LOG.info('Starting designate maintenance middleware')
@ -257,7 +257,7 @@ class NormalizeURIMiddleware(base.Middleware):
class FaultWrapperMiddleware(base.Middleware):
def __init__(self, application):
super(FaultWrapperMiddleware, self).__init__(application)
super().__init__(application)
LOG.info('Starting designate faultwrapper middleware')
@ -338,7 +338,7 @@ class FaultWrapperMiddleware(base.Middleware):
class APIv2ValidationErrorMiddleware(base.Middleware):
def __init__(self, application):
super(APIv2ValidationErrorMiddleware, self).__init__(application)
super().__init__(application)
self.api_version = 'API_v2'
LOG.info('Starting designate validation middleware')


@ -26,17 +26,17 @@ LOG = logging.getLogger(__name__)
class Service(service.WSGIService):
def __init__(self):
super(Service, self).__init__(
super().__init__(
self.wsgi_application,
self.service_name,
cfg.CONF['service:api'].listen,
)
def start(self):
super(Service, self).start()
super().start()
def stop(self, graceful=True):
super(Service, self).stop(graceful)
super().stop(graceful)
@property
def service_name(self):


@ -19,7 +19,7 @@ from pecan import expose
from designate import exceptions
class ErrorsController(object):
class ErrorsController:
@expose(content_type='text/plain')
@expose(content_type='text/dns')


@ -29,7 +29,7 @@ from designate.api.v2.controllers import tsigkeys
from designate.api.v2.controllers import zones
class RootController(object):
class RootController:
"""
This is /v2/ Controller. Pecan will find all controllers via the object
properties attached to this.


@ -31,7 +31,7 @@ from designate.api.v2.controllers.zones.tasks.xfr import XfrController
LOG = logging.getLogger(__name__)
class TasksController(object):
class TasksController:
transfer_accepts = TRA()
transfer_requests = TRC()


@ -27,7 +27,7 @@ GOOD_STATUSES = [
def get_backend(target):
cls = base.Backend.get_driver(target.type)
message = "Backend Driver '%s' loaded. Has status of '%s'" % (
message = "Backend Driver '{}' loaded. Has status of '{}'".format(
target.type, cls.__backend_status__
)


@ -33,7 +33,7 @@ class Backend(DriverPlugin):
__backend_status__ = 'untested'
def __init__(self, target):
super(Backend, self).__init__()
super().__init__()
self.target = target
self.options = target.options


@ -28,7 +28,7 @@ from designate import exceptions
LOG = logging.getLogger(__name__)
class AkamaiClient(object):
class AkamaiClient:
def __init__(self, client_token=None, client_secret=None,
access_token=None, host=None):
session = requests.Session()
@ -164,7 +164,7 @@ class AkamaiBackend(base.Backend):
__backend_status__ = 'untested'
def __init__(self, target):
super(AkamaiBackend, self).__init__(target)
super().__init__(target)
self._host = self.options.get('host', '127.0.0.1')
self._port = int(self.options.get('port', 53))


@ -40,7 +40,7 @@ class Bind9Backend(base.Backend):
__backend_status__ = 'integrated'
def __init__(self, target):
super(Bind9Backend, self).__init__(target)
super().__init__(target)
self._host = self.options.get('host', '127.0.0.1')
self._port = int(self.options.get('port', 53))
@ -83,7 +83,7 @@ class Bind9Backend(base.Backend):
for master in self.masters:
host = master['host']
port = master['port']
masters.append('%s port %s' % (host, port))
masters.append(f'{host} port {port}')
# Ensure different MiniDNS instances are targeted for AXFRs
random.shuffle(masters)
@ -113,7 +113,7 @@ class Bind9Backend(base.Backend):
rndc_op = [
'showzone',
'%s %s' % (zone['name'].rstrip('.'), view),
'{} {}'.format(zone['name'].rstrip('.'), view),
]
try:
self._execute_rndc(rndc_op)
@ -137,10 +137,10 @@ class Bind9Backend(base.Backend):
rndc_op = [
'delzone',
'%s %s' % (zone['name'].rstrip('.'), view),
'{} {}'.format(zone['name'].rstrip('.'), view),
]
if (self._clean_zonefile or (zone_params and
zone_params.get('hard_delete'))):
zone_params.get('hard_delete'))):
rndc_op.insert(1, '-clean')
try:
@ -174,7 +174,7 @@ class Bind9Backend(base.Backend):
for master in self.masters:
host = master['host']
port = master['port']
masters.append('%s port %s' % (host, port))
masters.append(f'{host} port {port}')
# Ensure different MiniDNS instances are targeted for AXFRs
random.shuffle(masters)
@ -206,7 +206,7 @@ class Bind9Backend(base.Backend):
try:
rndc_call = self._rndc_call_base + rndc_op
LOG.debug('Executing RNDC call: %r with timeout %s',
rndc_call, self._rndc_timeout)
rndc_call, self._rndc_timeout)
utils.execute(*rndc_call, timeout=self._rndc_timeout)
except (utils.processutils.ProcessExecutionError,
subprocess.TimeoutExpired) as e:


@ -34,7 +34,7 @@ class DesignateBackend(base.Backend):
__backend_status__ = 'untested'
def __init__(self, target):
super(DesignateBackend, self).__init__(target)
super().__init__(target)
self.auth_url = self.options.get('auth_url')
self.username = self.options.get('username')
@ -76,7 +76,7 @@ class DesignateBackend(base.Backend):
LOG.info('Creating zone %(d_id)s / %(d_name)s',
{'d_id': zone['id'], 'd_name': zone['name']})
masters = ["%s:%s" % (i.host, i.port) for i in self.masters]
masters = [f'{i.host}:{i.port}' for i in self.masters]
self.client.zones.create(
zone.name, 'SECONDARY', masters=masters)


@ -34,6 +34,7 @@ CFG_GROUP_NAME = 'backend:dynect'
class DynClientError(exceptions.Backend):
"""The base exception class for all HTTP exceptions.
"""
def __init__(self, data=None, job_id=None, msgs=None,
http_status=None, url=None, method=None, details=None):
self.data = data
@ -44,14 +45,16 @@ class DynClientError(exceptions.Backend):
self.url = url
self.method = method
self.details = details
formatted_string = "%s (HTTP %s to %s - %s) - %s" % (self.msgs,
self.method,
self.url,
self.http_status,
self.details)
formatted_string = '{} (HTTP {} to {} - {}) - {}'.format(
self.msgs,
self.method,
self.url,
self.http_status,
self.details
)
if job_id:
formatted_string += " (Job-ID: %s)" % job_id
super(DynClientError, self).__init__(formatted_string)
formatted_string += f' (Job-ID: {job_id})'
super().__init__(formatted_string)
@staticmethod
def from_response(response, details=None):
@ -90,12 +93,13 @@ class DynClientOperationBlocked(exceptions.BadRequest, DynClientError):
error_type = 'operation_blocked'
class DynClient(object):
class DynClient:
"""
DynECT service client.
https://help.dynect.net/rest/
"""
def __init__(self, customer_name, user_name, password,
endpoint="https://api.dynect.net:443",
api_version='3.5.6', headers=None, verify=True, retries=1,
@ -141,7 +145,7 @@ class DynClient(object):
]
for element in kwargs['headers']:
header = "-H '%s: %s'" % (element, kwargs['headers'][element])
header = "-H '{}: {}'".format(element, kwargs['headers'][element])
string_parts.append(header)
LOG.debug("REQ: %s", " ".join(string_parts))
@ -205,7 +209,7 @@ class DynClient(object):
start_time = time.monotonic()
resp = self.http.request(method, url, **kwargs)
if self.timings:
self.times.append(("%s %s" % (method, url),
self.times.append((f"{method} {url}",
start_time, time.monotonic()))
self._http_log_resp(resp)
@ -303,7 +307,7 @@ class DynECTBackend(base.Backend):
__backend_status__ = 'untested'
def __init__(self, target):
super(DynECTBackend, self).__init__(target)
super().__init__(target)
self.customer_name = self.options.get('customer_name')
self.username = self.options.get('username')


@ -31,7 +31,7 @@ class InfobloxBackend(base.Backend):
__plugin_name__ = 'infoblox'
def __init__(self, *args, **kwargs):
super(InfobloxBackend, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.infoblox = object_manipulator.InfobloxObjectManipulator(
connector.Infoblox(self.options))


@ -27,7 +27,7 @@ CONF = cfg.CONF
LOG = log.getLogger(__name__)
class Infoblox(object):
class Infoblox:
"""Infoblox class
Defines methods for getting, creating, updating and


@ -27,14 +27,14 @@ class InfobloxExceptionBase(exceptions.Backend):
def __init__(self, **kwargs):
try:
super(InfobloxExceptionBase, self).__init__(self.message % kwargs)
super().__init__(self.message % kwargs)
self.msg = self.message % kwargs
except Exception:
if self.use_fatal_exceptions():
raise
else:
# at least get the core message out if something happened
super(InfobloxExceptionBase, self).__init__(self.message)
super().__init__(self.message)
def __unicode__(self):
return str(self.msg)
@ -55,7 +55,7 @@ class InfobloxException(InfobloxExceptionBase):
"""Generic Infoblox Exception."""
def __init__(self, response, **kwargs):
self.response = response
super(InfobloxException, self).__init__(**kwargs)
super().__init__(**kwargs)
class InfobloxIsMisconfigured(InfobloxExceptionBase):


@ -20,7 +20,7 @@ from designate.i18n import _
LOG = log.getLogger(__name__)
class InfobloxObjectManipulator(object):
class InfobloxObjectManipulator:
FIELDS = ['ttl', 'use_ttl']
def __init__(self, connector):
@ -73,8 +73,8 @@ class InfobloxObjectManipulator(object):
def create_multi_tenant_dns_view(self, net_view, tenant):
if not net_view:
net_view = "%s.%s" % (self.connector.network_view, tenant)
dns_view = "%s.%s" % (self.connector.dns_view, net_view)
net_view = f"{self.connector.network_view}.{tenant}"
dns_view = f"{self.connector.dns_view}.{net_view}"
try:
self.create_network_view(


@ -30,7 +30,7 @@ class NS1Backend(base.Backend):
__backend_status__ = 'untested'
def __init__(self, target):
super(NS1Backend, self).__init__(target)
super().__init__(target)
self.api_endpoint = "https://" + self.options.get('api_endpoint')
self.api_token = self.options.get('api_token')
@ -43,7 +43,9 @@ class NS1Backend(base.Backend):
}
def _build_url(self, zone):
return "%s/v1/zones/%s" % (self.api_endpoint, zone.name.rstrip('.'))
return '{}/v1/zones/{}'.format(
self.api_endpoint, zone.name.rstrip('.')
)
def _get_master(self):
try:
@ -103,16 +105,16 @@ class NS1Backend(base.Backend):
# check if the zone was actually created
if self._check_zone_exists(zone):
LOG.info("%s was created with an error. Deleting zone",
zone.name)
zone.name)
try:
self.delete_zone(context, zone)
except exceptions.Backend:
LOG.error('Could not delete errored zone %s',
zone.name)
zone.name)
raise exceptions.Backend(e)
else:
LOG.info("Can't create zone %s because it already exists",
zone.name)
zone.name)
def delete_zone(self, context, zone, zone_params=None):
"""Delete a DNS zone"""


@ -19,7 +19,6 @@
# under the License.
import random
import socket
import ssl
import eventlet
@ -40,7 +39,7 @@ class NSD4Backend(base.Backend):
NSDCT_VERSION = 'NSDCT1'
def __init__(self, target):
super(NSD4Backend, self).__init__(target)
super().__init__(target)
self.host = self.options.get('host', '127.0.0.1')
self.port = int(self.options.get('port', 8952))
@ -56,7 +55,7 @@ class NSD4Backend(base.Backend):
keyfile=self.keyfile,
certfile=self.certfile)
stream = sock.makefile()
stream.write('%s %s\n' % (self.NSDCT_VERSION, command))
stream.write(f'{self.NSDCT_VERSION} {command}\n')
stream.flush()
result = stream.read()
stream.close()
@ -68,7 +67,7 @@ class NSD4Backend(base.Backend):
LOG.debug('Executing NSD4 control call: %s on %s',
command, self.host)
result = self._command(command)
except (ssl.SSLError, socket.error) as e:
except (ssl.SSLError, OSError) as e:
LOG.debug('NSD4 control call failure: %s' % e)
raise exceptions.Backend(e)
if result.rstrip("\n") != 'ok':
@ -80,12 +79,12 @@ class NSD4Backend(base.Backend):
for master in self.masters:
host = master['host']
port = master['port']
masters.append('%s port %s' % (host, port))
masters.append(f'{host} port {port}')
# Ensure different MiniDNS instances are targeted for AXFRs
random.shuffle(masters)
command = 'addzone %s %s' % (zone['name'], self.pattern)
command = 'addzone {} {}'.format(zone['name'], self.pattern)
try:
self._execute_nsd4(command)


@ -32,7 +32,7 @@ class PDNS4Backend(base.Backend):
__backend_status__ = 'integrated'
def __init__(self, target):
super(PDNS4Backend, self).__init__(target)
super().__init__(target)
self.api_endpoint = self.options.get('api_endpoint')
self.api_token = self.options.get('api_token')
@ -45,7 +45,7 @@ class PDNS4Backend(base.Backend):
def _build_url(self, zone=''):
r_url = urllib.parse.urlparse(self.api_endpoint)
return "%s://%s/api/v1/servers/localhost/zones%s%s" % (
return "{}://{}/api/v1/servers/localhost/zones{}{}".format(
r_url.scheme, r_url.netloc, '/' if zone else '', zone)
def _check_zone_exists(self, zone):


@ -34,7 +34,7 @@ def reset():
@profiler.trace_cls("rpc")
@rpc_logging(LOG, 'central')
class CentralAPI(object):
class CentralAPI:
"""
Client side of the central RPC API.


@ -64,7 +64,7 @@ class Service(service.RPCService):
self._storage = None
self._quota = None
super(Service, self).__init__(
super().__init__(
self.service_name, cfg.CONF['service:central'].topic,
threads=cfg.CONF['service:central'].threads,
)
@ -103,12 +103,12 @@ class Service(service.RPCService):
LOG.warning("Managed Resource Tenant ID is not properly "
"configured")
super(Service, self).start()
super().start()
self.coordination.start()
def stop(self, graceful=True):
self.coordination.stop()
super(Service, self).stop(graceful)
super().stop(graceful)
@property
def worker_api(self):
@ -233,8 +233,10 @@ class Service(service.RPCService):
except Exception:
continue
else:
msg = ('RecordSet belongs in a child zone: %s' %
child_zone['name'])
msg = (
'RecordSet belongs in a child zone: {}'
.format(child_zone['name'])
)
raise exceptions.InvalidRecordSetLocation(msg)
def _is_valid_recordset_records(self, recordset):
@ -1812,7 +1814,7 @@ class Service(service.RPCService):
}
records = self.find_records(elevated_context, criterion)
records = dict([(r['managed_extra'], r) for r in records])
records = {r['managed_extra']: r for r in records}
invalid = []
data = {}
@ -1873,7 +1875,8 @@ class Service(service.RPCService):
def _get_floatingip(self, context, region, floatingip_id, fips):
if (region, floatingip_id) not in fips:
raise exceptions.NotFound(
'FloatingIP %s in %s is not associated for project "%s"' % (
'FloatingIP {} in {} is not associated for project '
'"{}"'.format(
floatingip_id, region, context.project_id
)
)
@ -2023,7 +2026,7 @@ class Service(service.RPCService):
elevated_context, criterion=criterion
)
except exceptions.RecordNotFound:
msg = 'No such FloatingIP %s:%s' % (region, floatingip_id)
msg = f'No such FloatingIP {region}:{floatingip_id}'
raise exceptions.NotFound(msg)
self._delete_or_update_managed_recordset(
@ -2309,10 +2312,10 @@ class Service(service.RPCService):
return updated_pool
# Find the current NS hostnames
existing_ns = set([n.hostname for n in original_pool_ns_records])
existing_ns = {n.hostname for n in original_pool_ns_records}
# Find the desired NS hostnames
request_ns = set([n.hostname for n in pool.ns_records])
request_ns = {n.hostname for n in pool.ns_records}
# Get the NS's to be created and deleted, ignoring the ones that
# are in both sets, as those haven't changed.
@ -2790,7 +2793,7 @@ class Service(service.RPCService):
zone_import.status = 'COMPLETE'
zone_import.zone_id = zone.id
zone_import.message = (
'%(name)s imported' % {'name': zone.name}
f'{zone.name} imported'
)
except exceptions.DuplicateZone:
zone_import.status = 'ERROR'
@ -2842,7 +2845,7 @@ class Service(service.RPCService):
criterion['task_type'] = 'IMPORT'
return self.storage.find_zone_imports(context, criterion, marker,
limit, sort_key, sort_dir)
limit, sort_key, sort_dir)
@rpc.expected_exceptions()
def get_zone_import(self, context, zone_import_id):
@ -2912,7 +2915,7 @@ class Service(service.RPCService):
@rpc.expected_exceptions()
def find_zone_exports(self, context, criterion=None, marker=None,
limit=None, sort_key=None, sort_dir=None):
limit=None, sort_key=None, sort_dir=None):
if policy.enforce_new_defaults():
target = {constants.RBAC_PROJECT_ID: context.project_id}
@ -2928,7 +2931,7 @@ class Service(service.RPCService):
criterion['task_type'] = 'EXPORT'
return self.storage.find_zone_exports(context, criterion, marker,
limit, sort_key, sort_dir)
limit, sort_key, sort_dir)
@rpc.expected_exceptions()
def get_zone_export(self, context, zone_export_id):


@ -53,7 +53,7 @@ def methods_of(obj):
def get_available_commands():
em = ExtensionManager('designate.manage')
return dict([(e.name, e.plugin) for e in em.extensions])
return {e.name: e.plugin for e in em.extensions}
def add_command_parsers(subparsers):


@ -23,7 +23,7 @@ LOG = logging.getLogger(__name__)
class ZoneLockLocal(threading.local):
def __init__(self):
super(ZoneLockLocal, self).__init__()
super().__init__()
self._held = set()
def hold(self, name):


@ -25,7 +25,7 @@ LOG = logging.getLogger(__name__)
class NotificationThreadLocal(threading.local):
def __init__(self):
super(NotificationThreadLocal, self).__init__()
super().__init__()
self.stack = 0
self.queue = collections.deque()


@ -20,7 +20,7 @@ import designate.exceptions
class ExceptionThreadLocal(threading.local):
def __init__(self):
super(ExceptionThreadLocal, self).__init__()
super().__init__()
self.depth = 0
def reset_depth(self):


@ -30,7 +30,7 @@ if profiler_opts:
profiler_opts.set_defaults(CONF)
class WsgiMiddleware(object):
class WsgiMiddleware:
def __init__(self, application, **kwargs):
self.application = application


@ -46,7 +46,7 @@ class DesignateContext(context.RequestContext):
edit_managed_records=False, hide_counts=False,
client_addr=None, user_auth_plugin=None,
hard_delete=False, delete_shares=False, **kwargs):
super(DesignateContext, self).__init__(**kwargs)
super().__init__(**kwargs)
self.user_auth_plugin = user_auth_plugin
self.service_catalog = service_catalog
@ -68,7 +68,7 @@ class DesignateContext(context.RequestContext):
return self.from_dict(d)
def to_dict(self):
d = super(DesignateContext, self).to_dict()
d = super().to_dict()
# Override the user_identity field to account for TSIG. When a TSIG key
# is used as authentication e.g. via MiniDNS, it will act as a form
@ -232,7 +232,7 @@ class _ContextAuthPlugin(plugin.BaseAuthPlugin):
auth data.
"""
def __init__(self, auth_token, sc):
super(_ContextAuthPlugin, self).__init__()
super().__init__()
self.auth_token = auth_token
self.service_catalog = ksa_service_catalog.ServiceCatalogV2(sc)
@ -257,7 +257,7 @@ class _ContextAuthPlugin(plugin.BaseAuthPlugin):
urlkw[k] = kwargs[k]
endpoint = endpoint_override or self.service_catalog.url_for(**urlkw)
return super(_ContextAuthPlugin, self).get_endpoint_data(
return super().get_endpoint_data(
session, endpoint_override=endpoint,
discover_versions=discover_versions, **kwargs)


@ -35,7 +35,7 @@ def _retry_if_tooz_error(exception):
return isinstance(exception, tooz.coordination.ToozError)
class Coordination(object):
class Coordination:
def __init__(self, name, tg, grouping_enabled=False):
# NOTE(eandersson): Workaround until tooz handles the conversion.
if not isinstance(name, bytes):
@ -128,7 +128,7 @@ class Coordination(object):
)
class Partitioner(object):
class Partitioner:
def __init__(self, coordinator, group_id, my_id, partitions):
self._coordinator = coordinator
self._group_id = group_id


@ -30,7 +30,7 @@ CONF = designate.conf.CONF
LOG = logging.getLogger(__name__)
class DNSMiddleware(object):
class DNSMiddleware:
"""Base DNS Middleware class with some utility methods"""
def __init__(self, application):
self.application = application
@ -69,7 +69,7 @@ class SerializationMiddleware(DNSMiddleware):
"""DNS Middleware to serialize/deserialize DNS Packets"""
def __init__(self, application, tsig_keyring=None):
super(SerializationMiddleware, self).__init__(application)
super().__init__(application)
self.tsig_keyring = tsig_keyring
def __call__(self, request):
@ -148,7 +148,7 @@ class TsigInfoMiddleware(DNSMiddleware):
"""Middleware which looks up the information available for a TsigKey"""
def __init__(self, application, storage):
super(TsigInfoMiddleware, self).__init__(application)
super().__init__(application)
self.storage = storage
def process_request(self, request):


@ -41,7 +41,7 @@ class TsigKeyring(dict):
"""Implements the DNSPython KeyRing API, backed by the Designate DB"""
def __init__(self, storage):
super(TsigKeyring, self).__init__()
super().__init__()
self.storage = storage
def __getitem__(self, key):
@ -63,7 +63,7 @@ class TsigKeyring(dict):
return default
class ZoneLock(object):
class ZoneLock:
"""A Lock across all zones that enforces a rate limit on NOTIFYs"""
def __init__(self, delay):
@ -209,28 +209,28 @@ def do_axfr(zone_name, servers, source=None):
port=srv['port'], source=source
)
raw_zone = dns.zone.from_xfr(xfr, relativize=False)
LOG.debug("AXFR Successful for %s", raw_zone.origin.to_text())
LOG.debug('AXFR Successful for %s', raw_zone.origin.to_text())
return raw_zone
except eventlet.Timeout as t:
if t == to:
LOG.error("AXFR timed out for %(name)s from %(host)s",
LOG.error('AXFR timed out for %(name)s from %(host)s',
log_info)
continue
except dns.exception.FormError:
LOG.error("Zone %(name)s is not present on %(host)s."
"Trying next server.", log_info)
except socket.error:
LOG.error("Connection error when doing AXFR for %(name)s from "
"%(host)s", log_info)
LOG.error('Zone %(name)s is not present on %(host)s.'
'Trying next server.', log_info)
except OSError:
LOG.error('Connection error when doing AXFR for %(name)s from '
'%(host)s', log_info)
except Exception:
LOG.exception("Problem doing AXFR %(name)s from %(host)s. "
"Trying next server.", log_info)
LOG.exception('Problem doing AXFR %(name)s from %(host)s. '
'Trying next server.', log_info)
finally:
to.cancel()
raise exceptions.XFRFailure(
"XFR failed for %(name)s. No servers in %(servers)s was reached." %
{"name": zone_name, "servers": servers}
'XFR failed for %(name)s. No servers in %(servers)s was reached.' %
{'name': zone_name, 'servers': servers}
)


@ -26,7 +26,7 @@ class DesignateException(Exception):
self.errors = kwargs.pop('errors', None)
self.object = kwargs.pop('object', None)
super(DesignateException, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
if args and isinstance(args[0], str):
self.error_message = args[0]
@ -41,13 +41,13 @@ class RelationNotLoaded(DesignateException):
error_type = 'relation_not_loaded'
def __init__(self, *args, **kwargs):
self.relation = kwargs.pop('relation', None)
super(RelationNotLoaded, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.error_message = ("%(relation)s is not loaded on %(object)s" %
{"relation": self.relation, "object": self.object.obj_name()})
self.error_message = ('{relation} is not loaded on {object}'.format(
relation=self.relation, object=self.object.obj_name()
))
def __str__(self):
return self.error_message


@ -89,7 +89,7 @@ def check_explicit_underscore_import(logical_line, filename):
custom_underscore_check.match(logical_line)):
UNDERSCORE_IMPORT_FILES.append(filename)
elif (translated_log.match(logical_line) or
string_translation.match(logical_line)):
string_translation.match(logical_line)):
yield(0, "D703: Found use of _() without explicit import of _!")


@ -40,7 +40,7 @@ class HeartbeatEmitter(plugin.DriverPlugin):
__plugin_type__ = 'heartbeat_emitter'
def __init__(self, service_name, **kwargs):
super(HeartbeatEmitter, self).__init__()
super().__init__()
self._status = 'UP'
self._stats = {}
@ -100,7 +100,7 @@ class RpcEmitter(HeartbeatEmitter):
__plugin_name__ = 'rpc'
def __init__(self, service_name, rpc_api=None, **kwargs):
super(RpcEmitter, self).__init__(service_name, **kwargs)
super().__init__(service_name, **kwargs)
self.rpc_api = rpc_api
def transmit(self, status):


@ -34,7 +34,7 @@ def name(name):
return _decorator
class Commands(object):
class Commands:
def __init__(self):
self.context = context.DesignateContext.get_admin_context(
request_id='designate-manage'


@ -73,7 +73,7 @@ class DatabaseCommands(base.Commands):
stringio_buffer=latest_buffer))
latest = latest_buffer.getvalue().replace('\n', ' ')
latest_buffer.close()
print("Current: %s Latest: %s" % (current, latest))
print(f'Current: {current} Latest: {latest}')
def sync(self, db_url=None, stringio_buffer=sys.stdout):
alembic_command.upgrade(


@ -36,7 +36,7 @@ CONF = cfg.CONF
class PoolCommands(base.Commands):
def __init__(self):
super(PoolCommands, self).__init__()
super().__init__()
self.central_api = None
self.dry_run = False
self.skip_verify_drivers = False
@ -258,7 +258,7 @@ class PoolCommands(base.Commands):
@staticmethod
def _load_config(filename):
with open(filename, 'r') as stream:
with open(filename) as stream:
return yaml.safe_load(stream)
@staticmethod


@ -56,7 +56,7 @@ class TLDCommands(base.Commands):
"""
def __init__(self):
super(TLDCommands, self).__init__()
super().__init__()
def _startup(self):
rpc.init(cfg.CONF)


@ -40,7 +40,7 @@ CONF.import_opt('default_pool_id', 'designate.central',
TSIG_RRSIZE = 10 + 64 + 160 + 1
class RequestHandler(object):
class RequestHandler:
def __init__(self, storage, tg):
self._worker_api = None
@ -72,18 +72,15 @@ class RequestHandler(object):
# It is permissible for a server to send an AXFR response when
# receiving an IXFR request.
if q_rrset.rdtype in (dns.rdatatype.AXFR, dns.rdatatype.IXFR):
for response in self._handle_axfr(request):
yield response
yield from self._handle_axfr(request)
return
else:
for response in self._handle_record_query(request):
yield response
yield from self._handle_record_query(request)
return
elif request.opcode() == dns.opcode.NOTIFY:
for response in self._handle_notify(request):
yield response
yield from self._handle_notify(request)
return
else:


@ -34,7 +34,7 @@ class Service(service.Service):
def __init__(self):
self._storage = None
super(Service, self).__init__(
super().__init__(
self.service_name, threads=cfg.CONF['service:mdns'].threads,
)
self.dns_service = service.DNSService(
@ -45,12 +45,12 @@ class Service(service.Service):
)
def start(self):
super(Service, self).start()
super().start()
self.dns_service.start()
def stop(self, graceful=True):
self.dns_service.stop()
super(Service, self).stop(graceful)
super().stop(graceful)
@property
def storage(self):


@ -21,8 +21,7 @@ from designate.utils import generate_uuid
LOG = logging.getLogger(__name__)
POOL = dict([(generate_uuid(), '192.168.2.%s' % i) for i in
range(0, 254)])
POOL = {generate_uuid(): '192.168.2.%s' % i for i in range(0, 254)}
ALLOCATIONS = {}


@ -34,7 +34,7 @@ class NotificationHandler(ExtensionPlugin):
__plugin_type__ = 'handler'
def __init__(self, *args, **kw):
super(NotificationHandler, self).__init__(*args, **kw)
super().__init__(*args, **kw)
self.central_api = central_rpcapi.CentralAPI()
@abc.abstractmethod


@ -40,7 +40,7 @@ class NovaFixedHandler(BaseAddressHandler):
]
def _get_ip_data(self, addr_dict):
data = super(NovaFixedHandler, self)._get_ip_data(addr_dict)
data = super()._get_ip_data(addr_dict)
data['label'] = addr_dict['label']
return data


@ -61,7 +61,7 @@ class NotificationPlugin(DriverPlugin):
__plugin_ns__ = 'designate.notification.plugin'
def __init__(self):
super(NotificationPlugin, self).__init__()
super().__init__()
@abc.abstractmethod
def emit(self, notification_type, context, result, *args, **kwargs):


@ -30,7 +30,7 @@ class APIv2Adapter(base.DesignateAdapter):
@classmethod
def render_list(cls, list_objects, *args, **kwargs):
r_list = super(APIv2Adapter, cls).render_list(
r_list = super().render_list(
list_objects, *args, **kwargs)
if (cls.MODIFICATIONS['options'].get('links', True) and
@ -49,7 +49,7 @@ class APIv2Adapter(base.DesignateAdapter):
@classmethod
def render_object(cls, obj, *args, **kwargs):
new_obj = super(APIv2Adapter, cls).render_object(obj, *args, **kwargs)
new_obj = super().render_object(obj, *args, **kwargs)
if (cls.MODIFICATIONS['options'].get('links', True) and
'request' in kwargs):
@ -63,7 +63,7 @@ class APIv2Adapter(base.DesignateAdapter):
@classmethod
def parse(cls, values, output_object, *args, **kwargs):
return super(APIv2Adapter, cls).parse(
return super().parse(
cls.ADAPTER_FORMAT, values, output_object, *args, **kwargs)
#####################
@ -81,7 +81,7 @@ class APIv2Adapter(base.DesignateAdapter):
base_uri = cls._get_base_url(request)
path = cls._get_path(request, obj)
return {'self': '%s%s/%s' % (base_uri, path, obj.id)}
return {'self': f'{base_uri}{path}/{obj.id}'}
@classmethod
def _get_path(cls, request, *args):
@ -132,7 +132,7 @@ class APIv2Adapter(base.DesignateAdapter):
base_uri = cls._get_base_url(request)
href = "%s%s?%s" % (
href = '{}{}?{}'.format(
base_uri,
cls._get_path(request),
parse.urlencode(params))


@ -49,7 +49,7 @@ class FloatingIPAPIv2Adapter(base.APIv2Adapter):
@classmethod
def _get_resource_links(cls, obj, request):
return {
'self': '%s%s/%s' % (
'self': '{}{}/{}'.format(
cls._get_base_url(request),
cls._get_path(request),
obj.key


@ -46,6 +46,6 @@ class PoolNsRecordListAPIv2Adapter(base.APIv2Adapter):
@classmethod
def render_list(cls, list_objects, *args, **kwargs):
r_list = super(PoolNsRecordListAPIv2Adapter, cls).render_list(
r_list = super().render_list(
list_objects, *args, **kwargs)
return r_list[cls.MODIFICATIONS['options']['collection_name']]


@ -115,7 +115,7 @@ class RecordSetAPIv2Adapter(base.APIv2Adapter):
# Do a single assignment, preserves the object change fields
recordset.records = new_recordset_records
return super(RecordSetAPIv2Adapter, cls).parse_object(
return super().parse_object(
new_recordset, recordset, *args, **kwargs)
@classmethod
@ -126,7 +126,7 @@ class RecordSetAPIv2Adapter(base.APIv2Adapter):
to_insert = ''
if 'zones' not in path and obj is not None:
insert_zones = True
to_insert = 'zones/{0}'.format(obj.zone_id)
to_insert = f'zones/{obj.zone_id}'
item_path = ''
for part in path:
@ -134,7 +134,7 @@ class RecordSetAPIv2Adapter(base.APIv2Adapter):
item_path += '/' + part
return item_path
elif insert_zones and to_insert and part == 'v2':
item_path += '/v2/{0}'.format(to_insert)
item_path += f'/v2/{to_insert}'
insert_zones = False # make sure only insert once if needed
else:
item_path += '/' + part


@ -38,12 +38,15 @@ class ServiceStatusAPIv2Adapter(base.APIv2Adapter):
@classmethod
def render_object(cls, obj, *args, **kwargs):
new_obj = super(ServiceStatusAPIv2Adapter, cls).render_object(
new_obj = super().render_object(
obj, *args, **kwargs
)
new_obj['links']['self'] = (
'%s/v2/%s/%s' % (cls._get_base_url(kwargs['request']),
'service_statuses', new_obj['id'])
'{}/v2/{}/{}'.format(
cls._get_base_url(kwargs['request']),
'service_statuses',
new_obj['id']
)
)
return new_obj


@ -39,17 +39,17 @@ class SharedZoneAPIv2Adapter(base.APIv2Adapter):
@classmethod
def render_object(cls, object, *args, **kwargs):
obj = super(SharedZoneAPIv2Adapter, cls).render_object(
obj = super().render_object(
object, *args, **kwargs)
if obj['zone_id'] is not None:
obj['links']['self'] = (
'%s/v2/zones/%s/shares/%s' % (
'{}/v2/zones/{}/shares/{}'.format(
cls._get_base_url(kwargs['request']), obj['zone_id'],
obj['id']))
obj['links']['zone'] = (
'%s/v2/zones/%s' % (cls._get_base_url(kwargs['request']),
obj['zone_id']))
'{}/v2/zones/{}'.format(cls._get_base_url(kwargs['request']),
obj['zone_id']))
return obj
@ -74,7 +74,7 @@ class SharedZoneListAPIv2Adapter(base.APIv2Adapter):
base_uri = cls._get_base_url(request)
href = "%s%s?%s" % (
href = '{}{}?{}'.format(
base_uri,
request.path,
parse.urlencode(params))


@ -39,7 +39,7 @@ class ValidationErrorAPIv2Adapter(base.APIv2Adapter):
@classmethod
def render_object(cls, error, *args, **kwargs):
# Do the usual rename
error_dict = super(ValidationErrorAPIv2Adapter, cls).render_object(
error_dict = super().render_object(
error, *args, **kwargs)
# Currently JSON Schema doesn't add the path on for required items


@ -76,7 +76,7 @@ class ZoneAPIv2Adapter(base.APIv2Adapter):
*args, **kwargs)
del values['attributes']
return super(ZoneAPIv2Adapter, cls).parse_object(
return super().parse_object(
values, obj, *args, **kwargs
)


@ -46,7 +46,7 @@ class ZoneExportAPIv2Adapter(base.APIv2Adapter):
@classmethod
def render_object(cls, obj, *args, **kwargs):
new_obj = super(ZoneExportAPIv2Adapter, cls).render_object(
new_obj = super().render_object(
obj, *args, **kwargs
)
@ -57,7 +57,9 @@ class ZoneExportAPIv2Adapter(base.APIv2Adapter):
cls._get_path(kwargs['request']))[0]
new_obj['links']['export'] = (
'%s/%s' % (base_uri, new_obj['location'].split('://')[1])
'{}/{}'.format(
base_uri, new_obj['location'].split('://')[1]
)
)
return new_obj


@ -41,13 +41,15 @@ class ZoneImportAPIv2Adapter(base.APIv2Adapter):
@classmethod
def render_object(cls, obj, *args, **kwargs):
new_obj = super(ZoneImportAPIv2Adapter, cls).render_object(
new_obj = super().render_object(
obj, *args, **kwargs)
if new_obj['zone_id'] is not None:
new_obj['links']['zone'] = (
'%s/v2/%s/%s' % (cls._get_base_url(kwargs['request']), 'zones',
new_obj['zone_id'])
'{}/v2/{}/{}'.format(
cls._get_base_url(
kwargs['request']), 'zones', new_obj['zone_id']
)
)
return new_obj


@ -43,12 +43,14 @@ class ZoneTransferAcceptAPIv2Adapter(base.APIv2Adapter):
@classmethod
def render_object(cls, obj, *args, **kwargs):
new_obj = super(ZoneTransferAcceptAPIv2Adapter, cls).render_object(
new_obj = super().render_object(
obj, *args, **kwargs
)
new_obj['links']['zone'] = (
'%s/v2/%s/%s' % (cls._get_base_url(kwargs['request']),
'zones', new_obj['zone_id'])
'{}/v2/{}/{}'.format(
cls._get_base_url(kwargs['request']),
'zones', new_obj['zone_id']
)
)
return new_obj


@ -60,7 +60,7 @@ class ZoneTransferRequestAPIv2Adapter(base.APIv2Adapter):
@classmethod
def render_object(cls, obj, *args, **kwargs):
new_obj = super(ZoneTransferRequestAPIv2Adapter, cls).render_object(
new_obj = super().render_object(
obj, *args, **kwargs
)
try:


@ -29,7 +29,7 @@ class DesignateObjectAdapterMetaclass(type):
cls._adapter_classes = {}
return
key = '%s:%s' % (cls.adapter_format(), cls.adapter_object())
key = f'{cls.adapter_format()}:{cls.adapter_object()}'
if key not in cls._adapter_classes:
cls._adapter_classes[key] = cls
else:
@ -43,7 +43,7 @@ class DesignateObjectAdapterMetaclass(type):
)
class DesignateAdapter(object, metaclass=DesignateObjectAdapterMetaclass):
class DesignateAdapter(metaclass=DesignateObjectAdapterMetaclass):
"""docstring for DesignateObjectAdapter"""
ADAPTER_FORMAT = None
ADAPTER_OBJECT = objects.DesignateObject
@ -62,9 +62,9 @@ class DesignateAdapter(object, metaclass=DesignateObjectAdapterMetaclass):
if obj_format is None:
obj_format = cls.ADAPTER_FORMAT
if isinstance(obj, objects.DesignateObject):
key = '%s:%s' % (obj_format, obj.obj_name())
key = f'{obj_format}:{obj.obj_name()}'
else:
key = '%s:%s' % (obj_format, obj)
key = f'{obj_format}:{obj}'
try:
return cls._adapter_classes[key]
except KeyError as e:
@ -283,8 +283,8 @@ class DesignateAdapter(object, metaclass=DesignateObjectAdapterMetaclass):
if error_keys:
raise exceptions.InvalidObject(
'Provided object does not match schema. Keys {0} are not '
'valid for {1}'.format(
'Provided object does not match schema. Keys {} are not '
'valid for {}'.format(
error_keys, cls.MODIFICATIONS['options']['resource_name']
)
)


@ -28,7 +28,7 @@ class YAMLAdapter(base.DesignateAdapter):
@classmethod
def parse(cls, values, output_object, *args, **kwargs):
obj = super(YAMLAdapter, cls).parse(
obj = super().parse(
cls.ADAPTER_FORMAT, values, output_object, *args, **kwargs)
return obj


@ -28,7 +28,7 @@ LOG = logging.getLogger(__name__)
def _get_attrname(name):
return "_obj_{}".format(name)
return f'_obj_{name}'
def get_dict_attr(klass, attr):
@ -49,7 +49,7 @@ class DesignateObject(base.VersionedObject):
if name not in self.fields:
raise TypeError("__init__() got an unexpected keyword "
"argument '%(name)s'" % {'name': name})
super(DesignateObject, self).__init__(self, *args, **kwargs)
super().__init__(self, *args, **kwargs)
self._obj_original_values = dict()
self.FIELDS = self.fields
@ -57,7 +57,7 @@ class DesignateObject(base.VersionedObject):
def _make_obj_str(cls, data):
msg = "<%s" % cls.obj_name()
for key in cls.STRING_KEYS:
msg += " %s:'%s'" % (key, data.get(key))
msg += f" {key}:'{data.get(key)}'"
msg += ">"
return msg
@ -141,7 +141,7 @@ class DesignateObject(base.VersionedObject):
'type': self.obj_name(),
'name': name,
})
super(DesignateObject, self).__setattr__(name, value)
super().__setattr__(name, value)
def __eq__(self, other):
if self.__class__ != other.__class__:
@ -285,7 +285,7 @@ class DesignateObject(base.VersionedObject):
field.coerce(self, name, value) # Check value
except Exception:
raise exceptions.InvalidObject(
"{} is invalid".format(name))
f'{name} is invalid')
elif not field.nullable:
# Check required is True ~ nullable is False
errors = ValidationErrorList()
@ -296,8 +296,8 @@ class DesignateObject(base.VersionedObject):
e.message = "'%s' is a required property" % name
errors.append(e)
raise exceptions.InvalidObject(
"Provided object does not match "
"schema", errors=errors, object=self)
'Provided object does not match '
'schema', errors=errors, object=self)
def obj_attr_is_set(self, name):
"""
@ -445,7 +445,7 @@ class AttributeListObjectMixin(ListObjectMixin):
return default
class PersistentObjectMixin(object):
class PersistentObjectMixin:
"""
Mixin class for Persistent objects.
@ -459,7 +459,7 @@ class PersistentObjectMixin(object):
}
class SoftDeleteObjectMixin(object):
class SoftDeleteObjectMixin:
"""
Mixin class for Soft-Deleted objects.
@ -471,7 +471,7 @@ class SoftDeleteObjectMixin(object):
}
class PagedListObjectMixin(object):
class PagedListObjectMixin:
"""
Mixin class for List objects.
@ -517,8 +517,8 @@ class DesignateRegistry(base.VersionedObjectRegistry):
LOG.exception(
'Error setting %{obj_name}s.%{field_name}s',
{
"obj_name": self.obj_name(),
"field_name": name
'obj_name': self.obj_name(),
'field_name': name
})
setattr(cls, name, property(getter, setter, attr.fdel))


@ -13,7 +13,6 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import dns.exception
from dns import ipv4
import re
@ -36,7 +35,7 @@ class PolymorphicObject(ovoo_fields.Object):
def coerce(self, obj, attr, value):
if hasattr(value, '__bases__'):
check_value = value.__bases__[0]
super(PolymorphicObject, self).coerce(obj, attr, check_value)
super().coerce(obj, attr, check_value)
return value
@ -44,7 +43,7 @@ class PolymorphicObjectField(ovoo_fields.AutoTypedField):
def __init__(self, objtype, subclasses=False, **kwargs):
self.AUTO_TYPE = PolymorphicObject(objtype, subclasses)
self.objname = objtype
super(PolymorphicObjectField, self).__init__(**kwargs)
super().__init__(**kwargs)
class ListOfObjectsField(ovoo_fields.ListOfObjectsField):
@ -56,21 +55,21 @@ class ObjectFields(ovoo_fields.ObjectField):
self.AUTO_TYPE = ovoo_fields.List(
ovoo_fields.Object(objtype, subclasses))
self.objname = objtype
super(ObjectFields, self).__init__(objtype, **kwargs)
super().__init__(objtype, **kwargs)
self.relation = relation
class IntegerFields(IntegerField):
def __init__(self, nullable=False, default=ovoo_fields.UnspecifiedDefault,
read_only=False, minimum=0, maximum=None):
super(IntegerFields, self).__init__(nullable=nullable,
default=default,
read_only=read_only)
super().__init__(
nullable=nullable, default=default, read_only=read_only
)
self.min = minimum
self.max = maximum
def coerce(self, obj, attr, value):
value = super(IntegerFields, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if value is None:
return value
if value < self.min:
@ -87,9 +86,9 @@ class StringFields(ovoo_fields.StringField):
def __init__(self, nullable=False, read_only=False,
default=ovoo_fields.UnspecifiedDefault, description='',
maxLength=None):
super(StringFields, self).__init__(nullable=nullable, default=default,
read_only=read_only)
super().__init__(
nullable=nullable, default=default, read_only=read_only
)
self.description = description
self.maxLength = maxLength
@ -97,7 +96,7 @@ class StringFields(ovoo_fields.StringField):
if value is None:
return self._null(obj, attr)
else:
value = super(StringFields, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if self.maxLength and len(value) > self.maxLength:
raise ValueError('Value too long for %s' % attr)
return value
@ -119,7 +118,7 @@ class UUIDFields(ovoo_fields.AutoTypedField):
class DateTimeField(ovoo_fields.DateTimeField):
def __init__(self, tzinfo_aware=False, **kwargs):
super(DateTimeField, self).__init__(tzinfo_aware, **kwargs)
super().__init__(tzinfo_aware, **kwargs)
class ObjectField(ovoo_fields.ObjectField):
@ -134,7 +133,7 @@ class IPV4AddressField(ovoo_fields.IPV4AddressField):
ipv4.inet_aton(str(value))
except dns.exception.SyntaxError:
raise ValueError()
value = super(IPV4AddressField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
# we use this field as a string, not need a netaddr.IPAdress
# as oslo.versionedobjects is using
return str(value)
@ -143,7 +142,7 @@ class IPV4AddressField(ovoo_fields.IPV4AddressField):
class IPV6AddressField(ovoo_fields.IPV6AddressField):
def coerce(self, obj, attr, value):
value = super(IPV6AddressField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
# we use this field as a string, not need a netaddr.IPAdress
# as oslo.versionedobjects is using
return str(value)
@ -152,7 +151,7 @@ class IPV6AddressField(ovoo_fields.IPV6AddressField):
class IPV4AndV6AddressField(ovoo_fields.IPV4AndV6AddressField):
def coerce(self, obj, attr, value):
value = super(IPV4AndV6AddressField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
# we use this field as a string, not need a netaddr.IPAdress
# as oslo.versionedobjects is using
return str(value)
@ -169,15 +168,15 @@ class Enum(ovoo_fields.Enum):
class EnumField(ovoo_fields.BaseEnumField):
def __init__(self, valid_values, **kwargs):
self.AUTO_TYPE = Enum(valid_values=valid_values)
super(EnumField, self).__init__(**kwargs)
super().__init__(**kwargs)
class DomainField(StringFields):
def __init__(self, **kwargs):
super(DomainField, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(DomainField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if value is None:
return
domain = value.split('.')
@ -193,10 +192,10 @@ class DomainField(StringFields):
class EmailField(StringFields):
def __init__(self, **kwargs):
super(EmailField, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(EmailField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if value.count('@') != 1:
raise ValueError("%s is not an email" % value)
email = value.replace('@', '.')
@ -207,10 +206,10 @@ class EmailField(StringFields):
class HostField(StringFields):
def __init__(self, **kwargs):
super(HostField, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(HostField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if value is None:
return
hostname = value.split('.')
@ -226,10 +225,10 @@ class HostField(StringFields):
class SRVField(StringFields):
def __init__(self, **kwargs):
super(SRVField, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(SRVField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if value is None:
return
srvtype = value.split('.')
@ -245,10 +244,10 @@ class SRVField(StringFields):
class TxtField(StringFields):
def __init__(self, **kwargs):
super(TxtField, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(TxtField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if value.endswith('\\'):
raise ValueError("Do NOT put '\\' into end of TXT record")
return value
@ -256,10 +255,10 @@ class TxtField(StringFields):
class Sshfp(StringFields):
def __init__(self, **kwargs):
super(Sshfp, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(Sshfp, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if not constants.RE_SSHFP_FINGERPRINT.match("%s" % value):
raise ValueError("Host name %s is not a SSHFP record" % value)
return value
@ -267,10 +266,10 @@ class Sshfp(StringFields):
class TldField(StringFields):
def __init__(self, **kwargs):
super(TldField, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(TldField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if not constants.RE_TLDNAME.match(value):
raise ValueError("%s is not a TLD" % value)
return value
@ -278,10 +277,10 @@ class TldField(StringFields):
class NaptrFlagsField(StringFields):
def __init__(self, **kwargs):
super(NaptrFlagsField, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(NaptrFlagsField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if (len(value) > 255):
raise ValueError("NAPTR record flags field cannot be longer than"
" 255 characters" % value)
@ -292,10 +291,10 @@ class NaptrFlagsField(StringFields):
class NaptrServiceField(StringFields):
def __init__(self, **kwargs):
super(NaptrServiceField, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(NaptrServiceField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if (len(value) > 255):
raise ValueError("NAPTR record service field cannot be longer than"
" 255 characters" % value)
@ -306,10 +305,10 @@ class NaptrServiceField(StringFields):
class NaptrRegexpField(StringFields):
def __init__(self, **kwargs):
super(NaptrRegexpField, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(NaptrRegexpField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if (len(value) > 255):
raise ValueError("NAPTR record regexp field cannot be longer than"
" 255 characters" % value)
@ -321,10 +320,10 @@ class NaptrRegexpField(StringFields):
class CaaPropertyField(StringFields):
def __init__(self, **kwargs):
super(CaaPropertyField, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(CaaPropertyField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
prpt = value.split(' ', 1)
tag = prpt[0]
val = prpt[1]
@ -373,10 +372,10 @@ class CaaPropertyField(StringFields):
class CertTypeField(StringFields):
def __init__(self, **kwargs):
super(CertTypeField, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(CertTypeField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if not constants.RE_CERT_TYPE.match("%s" % value):
raise ValueError("Cert type %s is not a valid Mnemonic or "
"value" % value)
@ -385,10 +384,10 @@ class CertTypeField(StringFields):
class CertAlgoField(StringFields):
def __init__(self, **kwargs):
super(CertAlgoField, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(CertAlgoField, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if not constants.RE_CERT_ALGO.match("%s" % value):
raise ValueError("Cert Algo %s is not a valid Mnemonic or "
"value" % value)
@ -421,12 +420,13 @@ class BaseObjectField(ovoo_fields.AutoTypedField):
class IPOrHost(IPV4AndV6AddressField):
def __init__(self, nullable=False, read_only=False,
default=ovoo_fields.UnspecifiedDefault):
super(IPOrHost, self).__init__(nullable=nullable,
default=default, read_only=read_only)
super().__init__(
nullable=nullable, default=default, read_only=read_only
)
def coerce(self, obj, attr, value):
try:
value = super(IPOrHost, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
except ValueError:
if not constants.RE_ZONENAME.match(value):
raise ValueError("%s is not IP address or host name" % value)
@ -435,10 +435,10 @@ class IPOrHost(IPV4AndV6AddressField):
class DenylistFields(StringFields):
def __init__(self, **kwargs):
super(DenylistFields, self).__init__(**kwargs)
super().__init__(**kwargs)
def coerce(self, obj, attr, value):
value = super(DenylistFields, self).coerce(obj, attr, value)
value = super().coerce(obj, attr, value)
if value is None:
return self._null(obj, attr)


@ -40,7 +40,7 @@ class FloatingIP(base.DictObjectMixin, base.PersistentObjectMixin,
@property
def key(self):
return '%s:%s' % (self.region, self.id)
return f'{self.region}:{self.id}'
@base.DesignateRegistry.register


@ -20,7 +20,7 @@ from designate.objects import fields
class Record(base.DesignateObject, base.PersistentObjectMixin,
base.DictObjectMixin):
def __init__(self, *args, **kwargs):
super(Record, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
fields = {
'shard': fields.IntegerFields(nullable=True, minimum=0, maximum=4095),


@ -36,7 +36,7 @@ cfg.CONF.import_opt('supported_record_type', 'designate')
class RecordSet(base.DesignateObject, base.DictObjectMixin,
base.PersistentObjectMixin):
def __init__(self, *args, **kwargs):
super(RecordSet, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
@property
def action(self):
@ -190,8 +190,10 @@ class RecordSet(base.DesignateObject, base.DictObjectMixin,
error_indexes.append(i)
except Exception as e:
error_message = ('Provided object is not valid. Got a %s error'
' with message %s' % (type(e).__name__, str(e)))
error_message = (
'Provided object is not valid. Got a %s error with '
'message %s' % (type(e).__name__, str(e))
)
raise exceptions.InvalidObject(error_message)
else:
@ -202,7 +204,7 @@ class RecordSet(base.DesignateObject, base.DictObjectMixin,
try:
# Run the actual validate code
super(RecordSet, self).validate()
super().validate()
except exceptions.InvalidObject as e:
raise e


@ -84,7 +84,7 @@ class TXT(Record):
for record_string in record_strings:
# add back the delimiting quotes after
# strip and split for each string
record_string = '"{}"'.format(record_string)
record_string = f'"{record_string}"'
# further validate each string individually
self._validate_record_single_string(value=record_string)
else:


@ -20,7 +20,7 @@ class ServiceStatus(base.DesignateObject, base.DictObjectMixin,
base.PersistentObjectMixin):
def __init__(self, *args, **kwargs):
super(ServiceStatus, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
fields = {
"service_name": fields.StringFields(),


@ -19,7 +19,7 @@ from designate.objects import fields
@base.DesignateRegistry.register
class Tenant(base.DesignateObject, base.DictObjectMixin):
def __init__(self, *args, **kwargs):
super(Tenant, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
fields = {
'id': fields.AnyField(nullable=True),


@ -20,7 +20,7 @@ from designate.objects import fields
class Tld(base.DictObjectMixin, base.PersistentObjectMixin,
base.DesignateObject):
def __init__(self, *args, **kwargs):
super(Tld, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
fields = {
'name': fields.TldField(maxLength=255),


@ -20,11 +20,12 @@ from designate.objects import fields
class TsigKey(base.DictObjectMixin, base.PersistentObjectMixin,
base.DesignateObject):
def __init__(self, *args, **kwargs):
super(TsigKey, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
fields = {
'name': fields.StringFields(nullable=False, maxLength=160),
'algorithm': fields.EnumField(nullable=False,
'algorithm': fields.EnumField(
nullable=False,
valid_values=[
'hmac-md5',
'hmac-sha1',
@ -35,8 +36,8 @@ class TsigKey(base.DictObjectMixin, base.PersistentObjectMixin,
]
),
'secret': fields.StringFields(maxLength=160),
'scope': fields.EnumField(nullable=False,
valid_values=['POOL', 'ZONE']
'scope': fields.EnumField(
nullable=False, valid_values=['POOL', 'ZONE']
),
'resource_id': fields.UUIDFields(nullable=False)
}


@ -24,7 +24,7 @@ from designate import utils
class Zone(base.DesignateObject, base.DictObjectMixin,
base.PersistentObjectMixin, base.SoftDeleteObjectMixin):
def __init__(self, *args, **kwargs):
super(Zone, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
fields = {
'shard': fields.IntegerFields(nullable=True, minimum=0, maximum=4095),
@ -133,7 +133,7 @@ class Zone(base.DesignateObject, base.DictObjectMixin,
errors.append(e)
self._raise(errors)
super(Zone, self).validate()
super().validate()
except exceptions.RelationNotLoaded as ex:
errors = ValidationErrorList()
e = ValidationError()


@ -19,14 +19,15 @@ from designate.objects import fields
@base.DesignateRegistry.register
class ZoneExport(base.DictObjectMixin, base.PersistentObjectMixin,
base.DesignateObject):
base.DesignateObject):
fields = {
'status': fields.EnumField(nullable=True,
valid_values=["ACTIVE", "PENDING",
"DELETED", "ERROR", "COMPLETE"]
'status': fields.EnumField(
nullable=True, valid_values=[
'ACTIVE', 'PENDING', 'DELETED', 'ERROR', 'COMPLETE'
]
),
'task_type': fields.EnumField(nullable=True,
valid_values=["EXPORT"]
'task_type': fields.EnumField(
nullable=True, valid_values=['EXPORT']
),
'tenant_id': fields.StringFields(nullable=True),
'location': fields.StringFields(nullable=True),
@ -37,7 +38,7 @@ class ZoneExport(base.DictObjectMixin, base.PersistentObjectMixin,
@base.DesignateRegistry.register
class ZoneExportList(base.ListObjectMixin, base.DesignateObject,
base.PagedListObjectMixin):
base.PagedListObjectMixin):
LIST_ITEM_TYPE = ZoneExport
fields = {

View File

@ -19,14 +19,15 @@ from designate.objects import fields
@base.DesignateRegistry.register
class ZoneImport(base.DictObjectMixin, base.PersistentObjectMixin,
base.DesignateObject):
base.DesignateObject):
fields = {
'status': fields.EnumField(nullable=True,
valid_values=["ACTIVE", "PENDING",
"DELETED", "ERROR", "COMPLETE"]
'status': fields.EnumField(
nullable=True, valid_values=[
'ACTIVE', 'PENDING', 'DELETED', 'ERROR', 'COMPLETE'
]
),
'task_type': fields.EnumField(nullable=True,
valid_values=["IMPORT"]
'task_type': fields.EnumField(
nullable=True, valid_values=['IMPORT']
),
'tenant_id': fields.StringFields(nullable=True),
'message': fields.StringFields(nullable=True, maxLength=160),
@ -36,7 +37,7 @@ class ZoneImport(base.DictObjectMixin, base.PersistentObjectMixin,
@base.DesignateRegistry.register
class ZoneImportList(base.ListObjectMixin, base.DesignateObject,
base.PagedListObjectMixin):
base.PagedListObjectMixin):
LIST_ITEM_TYPE = ZoneImport
fields = {

View File

@ -26,7 +26,7 @@ class ZoneMaster(base.DesignateObject,
base.PersistentObjectMixin,
base.SoftDeleteObjectMixin):
def __init__(self, *args, **kwargs):
super(ZoneMaster, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
fields = {
'zone_id': fields.UUIDFields(nullable=True),
@ -35,12 +35,12 @@ class ZoneMaster(base.DesignateObject,
}
def to_data(self):
return "{}:{}".format(self.host, self.port)
return f'{self.host}:{self.port}'
@classmethod
def from_data(cls, data):
host, port = utils.split_host_port(data)
dict_data = {"host": host, "port": port}
dict_data = {'host': host, 'port': port}
return cls(**dict_data)
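
Hunks like the to_data() change above replace '{}:{}'.format(...) and '%s:%s' % (...) with f-strings; pyupgrade only does this when each placeholder maps to a simple expression, so the result is equivalent. A quick sketch with stand-in values, not Designate data:

    host, port = '192.0.2.1', 53

    percent_style = '%s:%s' % (host, port)
    format_style = '{}:{}'.format(host, port)
    f_string = f'{host}:{port}'

    assert percent_style == format_style == f_string == '192.0.2.1:53'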

View File

@ -24,7 +24,7 @@ class ZoneTransferAccept(base.DictObjectMixin, base.PersistentObjectMixin,
'zone_transfer_request_id': fields.UUIDFields(nullable=True),
'tenant_id': fields.StringFields(nullable=True),
'status': fields.EnumField(nullable=True, valid_values=[
"ACTIVE", "PENDING", "DELETED", "ERROR", "COMPLETE"
'ACTIVE', 'PENDING', 'DELETED', 'ERROR', 'COMPLETE'
]),
'key': fields.StringFields(maxLength=160),
'zone_id': fields.UUIDFields(nullable=True),

View File

@ -27,7 +27,8 @@ class ZoneTransferRequest(base.DictObjectMixin, base.PersistentObjectMixin,
'tenant_id': fields.StringFields(nullable=True),
'target_tenant_id': fields.StringFields(nullable=True),
'status': fields.EnumField(nullable=True, valid_values=[
"ACTIVE", "PENDING", "DELETED", "ERROR", "COMPLETE"]),
'ACTIVE', 'PENDING', 'DELETED', 'ERROR', 'COMPLETE'
]),
'zone_name': fields.StringFields(nullable=True, maxLength=255),
}

View File

@ -24,7 +24,7 @@ LOG = logging.getLogger(__name__)
CONF = cfg.CONF
class Plugin(object, metaclass=abc.ABCMeta):
class Plugin(metaclass=abc.ABCMeta):
__plugin_ns__ = None
__plugin_name__ = None
@ -32,7 +32,7 @@ class Plugin(object, metaclass=abc.ABCMeta):
def __init__(self):
self.name = self.get_canonical_name()
LOG.debug("Loaded plugin %s", self.name)
LOG.debug('Loaded plugin %s', self.name)
@classmethod
def get_canonical_name(cls):
@ -41,7 +41,7 @@ class Plugin(object, metaclass=abc.ABCMeta):
"""
type_ = cls.get_plugin_type()
name = cls.get_plugin_name()
return "%s:%s" % (type_, name)
return f'{type_}:{name}'
@classmethod
def get_plugin_name(cls):

View File

@ -41,7 +41,7 @@ class Service(service.RPCService):
self._storage = None
self._quota = None
super(Service, self).__init__(
super().__init__(
self.service_name, cfg.CONF['service:producer'].topic,
threads=cfg.CONF['service:producer'].threads,
)
@ -59,7 +59,7 @@ class Service(service.RPCService):
return rpcapi.CentralAPI.get_instance()
def start(self):
super(Service, self).start()
super().start()
self.coordination.start()
self._partitioner = coordination.Partitioner(
@ -91,7 +91,7 @@ class Service(service.RPCService):
self.tg.add_timer(interval, task)
def stop(self, graceful=True):
super(Service, self).stop(graceful)
super().stop(graceful)
self.coordination.stop()
def _rebalance(self, my_partitions, members, event):

View File

@ -36,7 +36,7 @@ class PeriodicTask(plugin.ExtensionPlugin):
__plugin_type__ = 'producer_task'
def __init__(self):
super(PeriodicTask, self).__init__()
super().__init__()
self.my_partitions = None
@property
@ -73,8 +73,7 @@ class PeriodicTask(plugin.ExtensionPlugin):
else:
kwargs["marker"] = items[-1].id
for i in items:
yield i
yield from items
def _iter_zones(self, ctxt, criterion=None):
criterion = criterion or {}
@ -90,7 +89,7 @@ class DeletedZonePurgeTask(PeriodicTask):
__plugin_name__ = 'zone_purge'
def __init__(self):
super(DeletedZonePurgeTask, self).__init__()
super().__init__()
def __call__(self):
"""Call the Central API to perform a purge of deleted zones based on
@ -126,7 +125,7 @@ class PeriodicExistsTask(PeriodicTask):
__plugin_name__ = 'periodic_exists'
def __init__(self):
super(PeriodicExistsTask, self).__init__()
super().__init__()
self.notifier = rpc.get_notifier('producer')
@staticmethod
@ -219,7 +218,7 @@ class PeriodicGenerateDelayedNotifyTask(PeriodicTask):
__plugin_name__ = 'delayed_notify'
def __init__(self):
super(PeriodicGenerateDelayedNotifyTask, self).__init__()
super().__init__()
def __call__(self):
"""Fetch a list of zones with the delayed_notify flag set up to
@ -271,7 +270,7 @@ class PeriodicIncrementSerialTask(PeriodicTask):
__plugin_name__ = 'increment_serial'
def __init__(self):
super(PeriodicIncrementSerialTask, self).__init__()
super().__init__()
def __call__(self):
ctxt = context.DesignateContext.get_admin_context()
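
The _iter pagination helper above now uses yield from instead of re-yielding each item in a for loop; for this flat delegation the two forms are equivalent. A small sketch with plain lists standing in for the pages returned by the Central API:

    def iter_pages(pages):
        for items in pages:
            # 'yield from items' behaves like 'for i in items: yield i' here.
            yield from items

    assert list(iter_pages([[1, 2], [3]])) == [1, 2, 3]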

View File

@ -38,7 +38,7 @@ class Quota(DriverPlugin, metaclass=abc.ABCMeta):
resources_exceeded.append(resource)
else:
raise exceptions.QuotaResourceUnknown(
"'%s' is not a valid quota resource." % resource
f"'{resource}' is not a valid quota resource."
)
if resources_exceeded:

View File

@ -29,7 +29,7 @@ class StorageQuota(base.Quota):
__plugin_name__ = 'storage'
def __init__(self):
super(StorageQuota, self).__init__()
super().__init__()
self.storage = storage.get_storage()
def _get_quotas(self, context, tenant_id):
@ -37,7 +37,7 @@ class StorageQuota(base.Quota):
'tenant_id': tenant_id,
})
return dict((q['resource'], q['hard_limit']) for q in quotas)
return {q['resource']: q['hard_limit'] for q in quotas}
def get_quota(self, context, tenant_id, resource):
context = context.deepcopy()
@ -56,8 +56,9 @@ class StorageQuota(base.Quota):
context.all_tenants = True
if resource not in list(self.get_default_quotas(context).keys()):
raise exceptions.QuotaResourceUnknown("%s is not a valid quota "
"resource" % resource)
raise exceptions.QuotaResourceUnknown(
f'{resource} is not a valid quota resource'
)
try:
self._create_quota(context, tenant_id, resource, hard_limit)
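
In _get_quotas above, dict((q['resource'], q['hard_limit']) for q in quotas) becomes a dict comprehension; both build the same mapping, the comprehension just skips the intermediate tuples fed to the dict() constructor. A sketch with made-up quota rows:

    quotas = [
        {'resource': 'zones', 'hard_limit': 10},
        {'resource': 'zone_recordsets', 'hard_limit': 500},
    ]

    via_constructor = dict((q['resource'], q['hard_limit']) for q in quotas)
    via_comprehension = {q['resource']: q['hard_limit'] for q in quotas}

    assert via_constructor == via_comprehension
    assert via_comprehension == {'zones': 10, 'zone_recordsets': 500}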

View File

@ -225,7 +225,7 @@ def get_notifier(service=None, host=None, publisher_id=None):
if NOTIFIER is None:
raise AssertionError("'NOTIFIER' must not be None")
if not publisher_id:
publisher_id = "%s.%s" % (service, host or CONF.host)
publisher_id = f'{service}.{host or CONF.host}'
return NOTIFIER.prepare(publisher_id=publisher_id)

View File

@ -21,7 +21,7 @@ from designate import exceptions
LOG = logging.getLogger(__name__)
class Scheduler(object):
class Scheduler:
"""Scheduler that schedules zones based on the filters provided on the zone
and other inputs.

View File

@ -22,7 +22,7 @@ from designate import utils
LOG = logging.getLogger(__name__)
class Schema(object):
class Schema:
def __init__(self, version, name):
self.raw_schema = utils.load_schema(version, name)
self.validator = jsonschema.Draft4Validator(self.raw_schema)
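
The class Scheduler(object): and class Schema(object): headers above become class Scheduler: and class Schema:, and the Plugin and SQLAlchemy base classes likewise drop object while keeping only metaclass=abc.ABCMeta. On Python 3 every class is new-style, so listing object explicitly is redundant. An illustrative check with a made-up class name, not one from the tree:

    import abc

    class Example(metaclass=abc.ABCMeta):
        pass

    # object is still in the MRO even though it is no longer spelled out.
    assert Example.__mro__ == (Example, object)
    assert isinstance(Example, abc.ABCMeta)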

View File

@ -46,7 +46,7 @@ LOG = logging.getLogger(__name__)
class Service(service.Service):
def __init__(self, name, threads=None):
threads = threads or 1000
super(Service, self).__init__(threads)
super().__init__(threads)
self.name = name
self.host = CONF.host
@ -64,16 +64,16 @@ class Service(service.Service):
'name': self.name,
'version': version.version_info.version_string()
})
super(Service, self).start()
super().start()
def stop(self, graceful=True):
LOG.info('Stopping %(name)s service', {'name': self.name})
super(Service, self).stop(graceful)
super().stop(graceful)
class RPCService(Service):
def __init__(self, name, rpc_topic, threads=None):
super(RPCService, self).__init__(name, threads)
super().__init__(name, threads)
LOG.debug("Creating RPC Server on topic '%s' for %s",
rpc_topic, self.name)
@ -84,7 +84,7 @@ class RPCService(Service):
self.rpc_topic = rpc_topic
def start(self):
super(RPCService, self).start()
super().start()
target = messaging.Target(topic=self.rpc_topic, server=self.host)
self.rpc_server = rpc.get_server(target, self.endpoints)
self.rpc_server.start()
@ -93,15 +93,15 @@ class RPCService(Service):
def stop(self, graceful=True):
if self.rpc_server:
self.rpc_server.stop()
super(RPCService, self).stop(graceful)
super().stop(graceful)
def wait(self):
super(RPCService, self).wait()
super().wait()
class WSGIService(Service):
def __init__(self, app, name, listen, max_url_len=None):
super(WSGIService, self).__init__(name)
super().__init__(name)
self.app = app
self.name = name
@ -125,20 +125,20 @@ class WSGIService(Service):
def start(self):
for server in self.servers:
server.start()
super(WSGIService, self).start()
super().start()
def stop(self, graceful=True):
for server in self.servers:
server.stop()
super(WSGIService, self).stop(graceful)
super().stop(graceful)
def wait(self):
for server in self.servers:
server.wait()
super(WSGIService, self).wait()
super().wait()
class DNSService(object):
class DNSService:
_TCP_RECV_MAX_SIZE = 65535
def __init__(self, app, tg, listen, tcp_backlog, tcp_recv_timeout):
@ -227,7 +227,7 @@ class DNSService(object):
# ensure no exceptions are generated from within.
except socket.timeout:
pass
except socket.error as e:
except OSError as e:
if client:
client.close()
errname = errno.errorcode[e.args[0]]
@ -314,7 +314,7 @@ class DNSService(object):
'port': port
}
)
except socket.error as e:
except OSError as e:
errname = errno.errorcode[e.args[0]]
LOG.warning(
'Socket error %(err)s from: %(host)s:%(port)d',
@ -374,7 +374,7 @@ class DNSService(object):
payload)
except socket.timeout:
pass
except socket.error as e:
except OSError as e:
errname = errno.errorcode[e.args[0]]
addr = addr or (None, 0)
LOG.warning(

View File
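
The except socket.error handlers in DNSService above become except OSError because, since Python 3.3 (PEP 3151), socket.error is simply an alias of OSError (as is IOError), so the rewritten clauses catch exactly the same exceptions. A quick standard-library-only check of that aliasing:

    import socket

    # PEP 3151 folded the legacy names into OSError.
    assert socket.error is OSError
    assert IOError is OSError
    assert issubclass(socket.timeout, OSError)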

@ -28,7 +28,7 @@ LOG = logging.getLogger(__name__)
class Service(service.Service):
def __init__(self):
super(Service, self).__init__(
super().__init__(
self.service_name, threads=cfg.CONF['service:sink'].threads
)
@ -65,7 +65,7 @@ class Service(service.Service):
return subscriptions
def start(self):
super(Service, self).start()
super().start()
# Setup notification subscriptions and start consuming
targets = self._get_targets()
@ -90,7 +90,7 @@ class Service(service.Service):
'Unable to gracefully stop the notification listener: %s', e
)
super(Service, self).stop(graceful)
super().stop(graceful)
def _get_targets(self):
"""

View File

@ -36,7 +36,7 @@ class SQLAlchemyStorage(base.SQLAlchemy):
__plugin_name__ = 'sqlalchemy'
def __init__(self):
super(SQLAlchemyStorage, self).__init__()
super().__init__()
def get_inspector(self):
return sql.get_inspector()
@ -499,11 +499,11 @@ class SQLAlchemyStorage(base.SQLAlchemy):
if zone.obj_attr_is_set('attributes'):
# Gather the Attribute ID's we have
have = set([r.id for r in self._find_zone_attributes(
context, {'zone_id': zone.id})])
have = {r.id for r in self._find_zone_attributes(
context, {'zone_id': zone.id})}
# Prep some lists of changes
keep = set([])
keep = set()
create = []
update = []
@ -539,11 +539,11 @@ class SQLAlchemyStorage(base.SQLAlchemy):
if zone.obj_attr_is_set('masters'):
# Gather the Attribute ID's we have
have = set([r.id for r in self._find_zone_masters(
context, {'zone_id': zone.id})])
have = {r.id for r in self._find_zone_masters(
context, {'zone_id': zone.id})}
# Prep some lists of changes
keep = set([])
keep = set()
create = []
update = []
@ -597,7 +597,7 @@ class SQLAlchemyStorage(base.SQLAlchemy):
if zone.type == 'SECONDARY':
# Purge anything that shouldn't be there :P
for i in set([i.id for i in existing.values()]) - keep:
for i in {i.id for i in existing.values()} - keep:
self.delete_recordset(context, i)
if tenant_id_changed:
@ -1075,11 +1075,11 @@ class SQLAlchemyStorage(base.SQLAlchemy):
if recordset.obj_attr_is_set('records'):
# Gather the Record ID's we have
have_records = set([r.id for r in self._find_records(
context, {'recordset_id': recordset.id})])
have_records = {r.id for r in self._find_records(
context, {'recordset_id': recordset.id})}
# Prep some lists of changes
keep_records = set([])
keep_records = set()
create_records = []
update_records = []
@ -1174,8 +1174,8 @@ class SQLAlchemyStorage(base.SQLAlchemy):
Calculates the hash of the record, used to ensure record uniqueness.
"""
md5sum = md5(usedforsecurity=False)
md5sum.update(('%s:%s' % (record.recordset_id,
record.data)).encode('utf-8'))
md5sum.update(('{}:{}'.format(record.recordset_id,
record.data)).encode('utf-8'))
return md5sum.hexdigest()
@ -1652,7 +1652,7 @@ class SQLAlchemyStorage(base.SQLAlchemy):
context, {'id': pool_nameserver_id}, one=True)
def find_pool_nameservers(self, context, criterion=None, marker=None,
limit=None, sort_key=None, sort_dir=None):
limit=None, sort_key=None, sort_dir=None):
return self._find_pool_nameservers(context, criterion, marker=marker,
limit=limit, sort_key=sort_key,
sort_dir=sort_dir)
@ -1752,7 +1752,7 @@ class SQLAlchemyStorage(base.SQLAlchemy):
have_items.add(r.id)
# Prep some lists of changes
keep_items = set([])
keep_items = set()
create_items = []
update_items = []
@ -1809,7 +1809,7 @@ class SQLAlchemyStorage(base.SQLAlchemy):
have_items.add(r.id)
# Prep some lists of changes
keep_items = set([])
keep_items = set()
create_items = []
update_items = []
@ -1961,8 +1961,8 @@ class SQLAlchemyStorage(base.SQLAlchemy):
def find_pool_also_notifies(self, context, criterion=None, marker=None,
limit=None, sort_key=None, sort_dir=None):
return self._find_pool_also_notifies(context, criterion, marker=marker,
limit=limit, sort_key=sort_key,
sort_dir=sort_dir)
limit=limit, sort_key=sort_key,
sort_dir=sort_dir)
def find_pool_also_notify(self, context, criterion):
return self._find_pool_also_notifies(context, criterion, one=True)
@ -2085,10 +2085,12 @@ class SQLAlchemyStorage(base.SQLAlchemy):
def count_zone_transfer_accept(self, context, criterion=None):
query = select(func.count(tables.zone_transfer_accepts.c.id))
query = self._apply_criterion(tables.zone_transfer_accepts,
query, criterion)
query = self._apply_deleted_criteria(context,
tables.zone_transfer_accepts, query)
query = self._apply_criterion(
tables.zone_transfer_accepts, query, criterion
)
query = self._apply_deleted_criteria(
context, tables.zone_transfer_accepts, query
)
with sql.get_read_session() as session:
resultproxy = session.execute(query)
@ -2202,7 +2204,7 @@ class SQLAlchemyStorage(base.SQLAlchemy):
one=True)
def find_zone_imports(self, context, criterion=None, marker=None,
limit=None, sort_key=None, sort_dir=None):
limit=None, sort_key=None, sort_dir=None):
"""
Find Zone Imports

View File
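
The storage hunks above also replace set([r.id for r in ...]) with set comprehensions and the empty set([]) with set(); the resulting sets are identical, the comprehension simply avoids building a throwaway list first. A sketch using types.SimpleNamespace objects in place of Designate records:

    from types import SimpleNamespace

    records = [SimpleNamespace(id=i) for i in ('a', 'b', 'c')]

    via_list = set([r.id for r in records])
    via_comprehension = {r.id for r in records}

    assert via_list == via_comprehension == {'a', 'b', 'c'}
    assert set() == set([])  # set() is the idiomatic empty set; {} is a dict.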

@ -43,6 +43,6 @@ def upgrade() -> None:
sa.Column('target_project_id', sa.String(36), nullable=False),
sa.UniqueConstraint('zone_id', 'project_id', 'target_project_id',
name='unique_shared_zone'),
name='unique_shared_zone'),
sa.ForeignKeyConstraint(['zone_id'], ['zones.id'], ondelete='CASCADE'),
)

View File

@ -138,10 +138,10 @@ def upgrade() -> None:
sa.Column('transferred_at', sa.DateTime, default=None),
sa.Column('shard', sa.SmallInteger, nullable=False),
sa.UniqueConstraint('name', 'deleted', 'pool_id',
name='unique_domain_name'),
name='unique_domain_name'),
sa.ForeignKeyConstraint(['parent_domain_id'],
['domains.id'],
ondelete='SET NULL'),
['domains.id'],
ondelete='SET NULL'),
sa.Index('zone_deleted', 'deleted'),
sa.Index('zone_tenant_deleted', 'tenant_id', 'deleted'),
sa.Index('reverse_name_deleted', 'reverse_name', 'deleted'),

View File

@ -119,10 +119,10 @@ def _set_listobject_from_models(obj, models):
return obj
class SQLAlchemy(object, metaclass=abc.ABCMeta):
class SQLAlchemy(metaclass=abc.ABCMeta):
def __init__(self):
super(SQLAlchemy, self).__init__()
super().__init__()
self.local_store = threading.local()
@staticmethod
@ -376,8 +376,8 @@ class SQLAlchemy(object, metaclass=abc.ABCMeta):
count_q = self._apply_criterion(tables.recordsets, count_q, criterion)
if filtering_records:
records_criterion = dict((k, v) for k, v in (
('data', data), ('status', status)) if v is not None)
records_criterion = {k: v for k, v in (
('data', data), ('status', status)) if v is not None}
inner_q = self._apply_criterion(tables.records, inner_q,
records_criterion)
count_q = self._apply_criterion(tables.records, count_q,

View File

@ -61,13 +61,13 @@ def paginate_query(query, table, limit, sort_keys, marker=None,
crit_attrs = []
for j in range(i):
table_attr = getattr(table.c, sort_keys[j])
crit_attrs.append((table_attr == marker_values[j]))
crit_attrs.append(table_attr == marker_values[j])
table_attr = getattr(table.c, sort_keys[i])
if sort_dirs[i] == 'desc':
crit_attrs.append((table_attr < marker_values[i]))
crit_attrs.append(table_attr < marker_values[i])
else:
crit_attrs.append((table_attr > marker_values[i]))
crit_attrs.append(table_attr > marker_values[i])
criteria = sqlalchemy.sql.and_(*crit_attrs)
criteria_list.append(criteria)

View File

@ -331,7 +331,7 @@ class TestCase(base.BaseTestCase):
}]
def setUp(self):
super(TestCase, self).setUp()
super().setUp()
self.CONF = self.useFixture(cfg_fixture.Config(CONF)).conf

View File

@ -53,7 +53,7 @@ class CoordinatorFixture(fixtures.Fixture):
self._kwargs = kwargs
def setUp(self):
super(CoordinatorFixture, self).setUp()
super().setUp()
self.coordinator = tooz.coordination.get_coordinator(
*self._args, **self._kwargs)
@ -67,7 +67,7 @@ class RPCFixture(fixtures.Fixture):
self.conf = conf
def setUp(self):
super(RPCFixture, self).setUp()
super().setUp()
rpc.init(self.conf)
self.addCleanup(self.deinit)
@ -85,7 +85,7 @@ class ServiceFixture(fixtures.Fixture):
@mock.patch.object(designate.service.DNSService, '_start')
def setUp(self, mock_start):
super(ServiceFixture, self).setUp()
super().setUp()
LOG.info('Starting service %s (%s)', self.svc_name, id(self.svc))
self.svc.start()
self.addCleanup(self.stop)
@ -111,7 +111,7 @@ class ServiceFixture(fixtures.Fixture):
class PolicyFixture(fixtures.Fixture):
def setUp(self):
super(PolicyFixture, self).setUp()
super().setUp()
self.addCleanup(policy.reset)
@ -142,7 +142,7 @@ class DatabaseFixture(fixtures.Fixture):
return path
def __init__(self):
super(DatabaseFixture, self).__init__()
super().__init__()
# Create the Golden DB
self.golden_db = self._mktemp()
@ -157,19 +157,19 @@ class DatabaseFixture(fixtures.Fixture):
self.url = 'sqlite:///%s' % self.working_copy
def setUp(self):
super(DatabaseFixture, self).setUp()
super().setUp()
shutil.copyfile(self.golden_db, self.working_copy)
def tearDown(self):
# This is currently unused
super(DatabaseFixture, self).tearDown()
super().tearDown()
LOG.debug("Deleting %s", self.working_copy)
os.unlink(self.working_copy)
class NetworkAPIFixture(fixtures.Fixture):
def setUp(self):
super(NetworkAPIFixture, self).setUp()
super().setUp()
self.api = network_api.get_network_api(cfg.CONF.network_api)
self.fake = fake_network_api
self.addCleanup(self.fake.reset_floatingips)
@ -177,11 +177,11 @@ class NetworkAPIFixture(fixtures.Fixture):
class ZoneManagerTaskFixture(fixtures.Fixture):
def __init__(self, task_cls):
super(ZoneManagerTaskFixture, self).__init__()
super().__init__()
self._task_cls = task_cls
def setUp(self):
super(ZoneManagerTaskFixture, self).setUp()
super().setUp()
self.task = self._task_cls()
self.task.on_partition_change(range(0, 4095), None, None)
@ -224,7 +224,7 @@ class StandardLogging(fixtures.Fixture):
"""
def setUp(self):
super(StandardLogging, self).setUp()
super().setUp()
# set root logger to debug
root = std_logging.getLogger()

View File

@ -35,7 +35,7 @@ INVALID_ID = [
class AdminApiTestCase(designate.tests.TestCase):
def setUp(self):
super(AdminApiTestCase, self).setUp()
super().setUp()
# Ensure the v2 API is enabled
self.config(enable_api_admin=True, group='service:api')
@ -61,7 +61,7 @@ class AdminApiTestCase(designate.tests.TestCase):
self.app = None
self.client = None
super(AdminApiTestCase, self).tearDown()
super().tearDown()
def _assert_invalid_uuid(self, method, url_format, *args, **kw):
"""

View File

@ -1,4 +1,3 @@
# coding=utf-8
# COPYRIGHT 2014 Rackspace
#
# Author: Tim Simmons <tim.simmons@rackspace.com>
@ -25,7 +24,7 @@ cfg.CONF.import_opt('enabled_extensions_admin', 'designate.api.admin',
class AdminApiQuotasTest(AdminApiTestCase):
def setUp(self):
self.config(enabled_extensions_admin=['quotas'], group='service:api')
super(AdminApiQuotasTest, self).setUp()
super().setUp()
def test_get_quotas(self):
self.policy({'get_quotas': '@'})
@ -33,7 +32,7 @@ class AdminApiQuotasTest(AdminApiTestCase):
response = self.client.get('/quotas/%s' % context.project_id,
headers={'X-Test-Tenant-Id':
context.project_id})
context.project_id})
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
@ -56,7 +55,7 @@ class AdminApiQuotasTest(AdminApiTestCase):
response = self.client.get('/quotas/%s' % 'a',
headers={'X-Test-Tenant-Id':
context.project_id})
context.project_id})
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
@ -69,12 +68,12 @@ class AdminApiQuotasTest(AdminApiTestCase):
response = self.client.patch_json('/quotas/%s' % 'a', body,
status=200,
headers={'X-Test-Tenant-Id':
context.project_id})
context.project_id})
self.assertEqual(200, response.status_int)
response = self.client.get('/quotas/%s' % 'a',
headers={'X-Test-Tenant-Id':
context.project_id})
context.project_id})
new_count = response.json['quota']['zones']
@ -111,7 +110,7 @@ class AdminApiQuotasTest(AdminApiTestCase):
response = self.client.get('/quotas/%s' % 'a',
headers={'X-Test-Tenant-Id':
context.project_id})
context.project_id})
self.assertEqual(200, response.status_int)
self.assertEqual('application/json', response.content_type)
@ -124,23 +123,23 @@ class AdminApiQuotasTest(AdminApiTestCase):
response = self.client.patch_json('/quotas/%s' % 'a', body,
status=200,
headers={'X-Test-Tenant-Id':
context.project_id})
context.project_id})
self.assertEqual(200, response.status_int)
response = self.client.get('/quotas/%s' % 'a',
headers={'X-Test-Tenant-Id':
context.project_id})
context.project_id})
new_count = response.json['quota']['zones']
self.assertNotEqual(current_count, new_count)
response = self.client.delete('/quotas/%s' % 'a',
headers={'X-Test-Tenant-Id':
context.project_id}, status=204)
headers={'X-Test-Tenant-Id':
context.project_id}, status=204)
response = self.client.get('/quotas/%s' % 'a',
headers={'X-Test-Tenant-Id':
context.project_id})
context.project_id})
newest_count = response.json['quota']['zones']
self.assertNotEqual(new_count, newest_count)
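
The # coding=utf-8 cookie removed at the top of this test module (and of the reports test below) is unnecessary on Python 3, where UTF-8 is the default source encoding per PEP 3120. A tiny illustration, unrelated to the Designate code itself:

    # No coding cookie is needed for non-ASCII literals in a Python 3 file.
    name = 'zóna.example.org.'
    assert name.encode() == b'z\xc3\xb3na.example.org.'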

View File

@ -1,4 +1,3 @@
# coding=utf-8
# COPYRIGHT 2015 Rackspace
#
# Author: Betsy Luzader <betsy.luzader@rackspace.com>
@ -25,7 +24,7 @@ cfg.CONF.import_opt('enabled_extensions_admin', 'designate.api.admin',
class AdminApiReportsTest(AdminApiTestCase):
def setUp(self):
self.config(enabled_extensions_admin=['reports'], group='service:api')
super(AdminApiReportsTest, self).setUp()
super().setUp()
def test_get_counts(self):
self.policy({'count_tenants': '@'})

View File

@ -27,7 +27,7 @@ from designate import rpc
import designate.tests
class FakeRequest(object):
class FakeRequest:
def __init__(self):
self.headers = {}
self.environ = {}

View File

@ -25,7 +25,7 @@ import designate.tests
class ApiServiceTest(designate.tests.TestCase):
def setUp(self):
super(ApiServiceTest, self).setUp()
super().setUp()
self.config(listen=['0.0.0.0:0'], group='service:api')

View File

@ -35,7 +35,7 @@ INVALID_ID = [
class ApiV2TestCase(designate.tests.TestCase):
def setUp(self):
super(ApiV2TestCase, self).setUp()
super().setUp()
# Create the application
self.app = api_v2.factory({})
@ -61,7 +61,7 @@ class ApiV2TestCase(designate.tests.TestCase):
self.app = None
self.client = None
super(ApiV2TestCase, self).tearDown()
super().tearDown()
def _assert_invalid_uuid(self, method, url_format, *args, **kw):
"""

Some files were not shown because too many files have changed in this diff.