Merge "Fix flake8 N534 untranslated exception message"
commit aa0540a87b
@@ -207,9 +207,10 @@ OF_PROTOCOL_TO_VERSION = {

 def version_from_protocol(protocol):
     if protocol not in OF_PROTOCOL_TO_VERSION:
-        raise Exception("unknown OVS protocol string, cannot compare: %s, "
-                        "(known: %s)" % (protocol,
-                                         list(OF_PROTOCOL_TO_VERSION)))
+        raise Exception(_("unknown OVS protocol string, cannot compare: "
+                          "%(protocol)s, (known: %(known)s)") %
+                        {'protocol': protocol,
+                         'known': list(OF_PROTOCOL_TO_VERSION)})
     return OF_PROTOCOL_TO_VERSION[protocol]

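This first hunk is the template for the whole series: the literal format string moves inside _() (imported from neutron._i18n), the % interpolation moves outside the call so gettext is handed the raw msgid, and positional %s placeholders become named ones that a translator can reorder. A minimal sketch of why the operator order matters (the br_name value is illustrative, not from the patch):

from neutron._i18n import _

br_name = "br-int"  # illustrative value only

# Wrong: interpolation runs first, so the catalog is asked to translate
# "unknown bridge br-int", a msgid that was never extracted.
bad = _("unknown bridge %s" % br_name)

# Right: the literal msgid is looked up, then interpolated.
good = _("unknown bridge %s") % br_name

# Named placeholders let a translation reorder the values freely.
multi = _("bridge %(name)s is in state %(state)s") % {
    'name': br_name, 'state': 'up'}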
@@ -395,7 +396,7 @@ class OVSBridge(BaseOVS):
             # broken here
             LOG.exception("Timed out retrieving datapath_id on bridge %s.",
                           self.br_name)
-        raise RuntimeError('No datapath_id on bridge %s' % self.br_name)
+        raise RuntimeError(_('No datapath_id on bridge %s') % self.br_name)

     def do_action_flows(self, action, kwargs_list, use_bundle=False):
         # we can't mix strict and non-strict, so we'll use the first kw

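Note what is not wrapped here: the LOG.exception(...) text stays untranslated on purpose. OpenStack dropped translation of log messages some releases ago, so the N534 check only fires on strings handed to raised exceptions, which can propagate to API clients. A self-contained sketch of the split (the report_timeout helper is hypothetical, for illustration only):

import logging

from neutron._i18n import _

LOG = logging.getLogger(__name__)


def report_timeout(br_name):  # hypothetical helper, not from the patch
    # Operator-facing log text: left untranslated; N534 ignores LOG calls.
    LOG.error("Timed out retrieving datapath_id on bridge %s.", br_name)
    # User-facing exception text: wrapped in _(), flagged otherwise.
    raise RuntimeError(_('No datapath_id on bridge %s') % br_name)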
@@ -409,8 +410,8 @@ class OVSBridge(BaseOVS):
                 # cookie to match flows whatever their cookie is
                 kw.pop('cookie')
                 if kw.get('cookie_mask'):  # non-zero cookie mask
-                    raise Exception("cookie=COOKIE_ANY but cookie_mask "
-                                    "set to %s" % kw.get('cookie_mask'))
+                    raise Exception(_("cookie=COOKIE_ANY but cookie_mask "
+                                      "set to %s") % kw.get('cookie_mask'))
             elif 'cookie' in kw:
                 # a cookie was specified, use it
                 kw['cookie'] = check_cookie_mask(kw['cookie'])

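The split string in this hunk relies on compile-time concatenation of adjacent literals: both halves sit inside one _() call, so the extracted msgid is the complete sentence, placeholder included. A short sketch (the mask value is illustrative, not from the patch):

from neutron._i18n import _

mask = 0xffffffff  # illustrative value only

# Adjacent literals join before _() ever runs, producing the single msgid
# "cookie=COOKIE_ANY but cookie_mask set to %s".
msg = _("cookie=COOKIE_ANY but cookie_mask "
        "set to %s") % mask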
@@ -16,6 +16,7 @@ import os

 from oslo_log import log as logging

+from neutron._i18n import _

 LOG = logging.getLogger(__name__)

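Each touched module gains this same one-line import. The _ it pulls in comes from neutron._i18n, which follows the standard oslo.i18n integration boilerplate, roughly as below (a sketch of the usual pattern, not a verbatim copy of neutron's file):

import oslo_i18n

DOMAIN = "neutron"

_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)

# The primary translation function, imported everywhere as "_".
_ = _translators.primary


def get_available_languages():
    return oslo_i18n.get_available_languages(DOMAIN)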
@@ -104,9 +105,10 @@ class ItemAllocator(object):
             # The number of address pairs allocated from the
             # pool depends upon the prefix length specified
             # in DVR_FIP_LL_CIDR
-            raise RuntimeError("Cannot allocate item of type:"
-                               " %s from pool using file %s"
-                               % (self.ItemClass, self.state_file))
+            raise RuntimeError(_("Cannot allocate item of type: "
+                                 "%(class)s from pool using file %(file)s")
+                               % {'class': self.ItemClass,
+                                  'file': self.state_file})

         self.allocations[key] = self.pool.pop()
         self._write_allocations()

@@ -22,6 +22,7 @@ from neutron_lib import constants as lib_const
 from oslo_log import log as logging
 from oslo_utils import netutils

+from neutron._i18n import _
 from neutron.agent import firewall
 from neutron.agent.linux.openvswitch_firewall import constants as ovsfw_consts
 from neutron.agent.linux.openvswitch_firewall import exceptions

@@ -239,9 +240,9 @@ class ConjIdMap(object):
         """
         if direction not in [lib_const.EGRESS_DIRECTION,
                              lib_const.INGRESS_DIRECTION]:
-            raise ValueError("Invalid direction '%s'" % direction)
+            raise ValueError(_("Invalid direction '%s'") % direction)
         if ethertype not in [lib_const.IPv4, lib_const.IPv6]:
-            raise ValueError("Invalid ethertype '%s'" % ethertype)
+            raise ValueError(_("Invalid ethertype '%s'") % ethertype)

         return self.id_map[(sg_id, remote_sg_id, direction, ethertype)]

@@ -18,6 +18,7 @@ import collections
 import netaddr
 from neutron_lib import constants as n_consts

+from neutron._i18n import _
 from neutron.agent.linux.openvswitch_firewall import constants as ovsfw_consts
 from neutron.common import utils
 from neutron.plugins.ml2.drivers.openvswitch.agent.common import constants \

@@ -59,8 +60,8 @@ def _assert_mergeable_rules(rule_conj_list):
             rule1.pop('port_range_max', None)
             if rule_tmpl != rule1:
                 raise RuntimeError(
-                    "Incompatible SG rules detected: %(rule1)s and %(rule2)s. "
-                    "They cannot be merged. This should not happen." %
+                    _("Incompatible SG rules detected: %(rule1)s and %(rule2)s. "
+                      "They cannot be merged. This should not happen.") %
                     {'rule1': rule_tmpl, 'rule2': rule})

@@ -17,6 +17,7 @@ from neutron_lib.callbacks import registry
 from neutron_lib import context as n_ctx
 from oslo_log import log as logging

+from neutron._i18n import _
 from neutron.api.rpc.callbacks.consumer import registry as registry_rpc
 from neutron.api.rpc.callbacks import events as events_rpc
 from neutron.api.rpc.handlers import resources_rpc

@@ -42,7 +43,7 @@ class RemoteResourceCache(object):

     def _type_cache(self, rtype):
         if rtype not in self.resource_types:
-            raise RuntimeError("Resource cache not tracking %s" % rtype)
+            raise RuntimeError(_("Resource cache not tracking %s") % rtype)
         return self._cache_by_type_and_id[rtype]

     def start_watcher(self):

@@ -158,8 +158,8 @@ def _moved_global(old_name, new_module=None, new_name=None):

     """
     if not (new_module or new_name):
-        raise AssertionError("'new_module' and 'new_name' "
-                             "must not be both None")
+        raise AssertionError(_("'new_module' and 'new_name' "
+                               "must not be both None"))
     if isinstance(new_module, _MovedGlobals):
         # The new module has been shimmed, get the original
         new_module = new_module._mg__old_ref

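As in this hunk, even messages with no interpolation at all get wrapped, and the msgid must be a literal at the call site: extraction tools build the catalog by scanning the source for _("...") calls, so a string that reaches _() through a variable would never be collected. A sketch (the check_args helper is hypothetical, for illustration only):

from neutron._i18n import _


def check_args(new_module, new_name):  # hypothetical helper, not from the patch
    if not (new_module or new_name):
        # The full literal lives inside _() so the extractor can see it.
        raise AssertionError(_("'new_module' and 'new_name' "
                               "must not be both None"))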
@@ -643,7 +643,7 @@ def wait_until_true(predicate, timeout=60, sleep=1, exception=None):
         if exception is not None:
             # pylint: disable=raising-bad-type
             raise exception
-        raise WaitTimeout("Timed out after %d seconds" % timeout)
+        raise WaitTimeout(_("Timed out after %d seconds") % timeout)


 class _AuthenticBase(object):

@@ -16,6 +16,7 @@ from alembic.operations import ops
 from alembic.util import Dispatcher
 from alembic.util import rev_id as new_rev_id

+from neutron._i18n import _
 from neutron.db.migration import cli

 _ec_dispatcher = Dispatcher()

@@ -107,8 +108,8 @@ def _alter_column(context, directive, phase):
         return directive
     else:
         raise NotImplementedError(
-            "Don't know if operation is an expand or "
-            "contract at the moment: %s" % directive)
+            _("Don't know if operation is an expand or "
+              "contract at the moment: %s") % directive)


 @_ec_dispatcher.dispatch_for(ops.ModifyTableOps)

@@ -18,6 +18,7 @@ from neutron_lib.callbacks import resources
 from neutron_lib.db import api as db_api
 from oslo_log import log as logging

+from neutron._i18n import _
 from neutron.db import models_v2
 from neutron.objects import provisioning_blocks as pb_obj

@@ -121,7 +122,7 @@ def provisioning_complete(context, object_id, object_type, entity):
     # this can't be called in a transaction to avoid REPEATABLE READ
     # tricking us into thinking there are remaining provisioning components
     if context.session.is_active:
-        raise RuntimeError("Must not be called in a transaction")
+        raise RuntimeError(_("Must not be called in a transaction"))
     standard_attr_id = _get_standard_attr_id(context, object_id,
                                              object_type)
     if not standard_attr_id:

@@ -161,10 +162,10 @@ def is_object_blocked(context, object_id, object_type):
 def _get_standard_attr_id(context, object_id, object_type):
     model = _RESOURCE_TO_MODEL_MAP.get(object_type)
     if not model:
-        raise RuntimeError("Could not find model for %s. If you are "
-                           "adding provisioning blocks for a new resource "
-                           "you must call add_model_for_resource during "
-                           "initialization for your type." % object_type)
+        raise RuntimeError(_("Could not find model for %s. If you are "
+                             "adding provisioning blocks for a new resource "
+                             "you must call add_model_for_resource during "
+                             "initialization for your type.") % object_type)
     obj = (context.session.query(model.standard_attr_id).
            enable_eagerloads(False).
            filter_by(id=object_id).first())

@@ -96,7 +96,7 @@ class HasStandardAttributes(object):
         # with the declarative base others inherit from.
         if hasattr(cls, 'api_collections'):
             return cls.api_collections
-        raise NotImplementedError("%s must define api_collections" % cls)
+        raise NotImplementedError(_("%s must define api_collections") % cls)

     @classmethod
     def get_api_sub_resources(cls):

@@ -119,8 +119,8 @@ class HasStandardAttributes(object):
         try:
             return cls.collection_resource_map
         except AttributeError:
-            raise NotImplementedError("%s must define "
-                                      "collection_resource_map" % cls)
+            raise NotImplementedError(_("%s must define "
+                                        "collection_resource_map") % cls)

     @classmethod
     def validate_tag_support(cls):

@@ -191,8 +191,9 @@ class HasStandardAttributes(object):

 def _resource_model_map_helper(rs_map, resource, subclass):
     if resource in rs_map:
-        raise RuntimeError("Model %(sub)s tried to register for API resource "
-                           "%(res)s which conflicts with model %(other)s." %
+        raise RuntimeError(_("Model %(sub)s tried to register for API "
+                             "resource %(res)s which conflicts with model "
+                             "%(other)s.") %
                            dict(sub=subclass,
                                 other=rs_map[resource],
                                 res=resource))

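This hunk also reflows the message to make room for the _( prefix without breaking the line-length limit. Because adjacent literals are concatenated before translation, moving the physical line breaks does not change the msgid, which is easy to check directly:

# Both spellings concatenate to the identical msgid; only the physical
# line breaks moved to make room for "_(".
a = ("Model %(sub)s tried to register for API resource "
     "%(res)s which conflicts with model %(other)s.")
b = ("Model %(sub)s tried to register for API "
     "resource %(res)s which conflicts with model "
     "%(other)s.")
assert a == b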
@@ -231,8 +232,8 @@ def get_tag_resource_parent_map():
 @event.listens_for(se.Session, 'after_bulk_delete')
 def throw_exception_on_bulk_delete_of_listened_for_objects(delete_context):
     if hasattr(delete_context.mapper.class_, 'revises_on_change'):
-        raise RuntimeError("%s may not be deleted in bulk because it "
-                           "bumps the revision of other resources via "
-                           "SQLAlchemy event handlers, which are not "
-                           "compatible with bulk deletes." %
+        raise RuntimeError(_("%s may not be deleted in bulk because it "
+                             "bumps the revision of other resources via "
+                             "SQLAlchemy event handlers, which are not "
+                             "compatible with bulk deletes.") %
                            delete_context.mapper.class_)

@@ -24,6 +24,7 @@ import pecan
 from pecan import request
 from six.moves import urllib

+from neutron._i18n import _
 from neutron.api.views import versions as versions_view
 from neutron import manager
 from neutron.pecan_wsgi.controllers import extensions as ext_ctrl

@@ -43,8 +44,8 @@ _CORE_RESOURCES = {net_def.RESOURCE_NAME: net_def.COLLECTION_NAME,

 def _load_version_info(version_info):
     if version_info['id'] in _VERSION_INFO:
-        raise AssertionError("ID %s must not be in "
-                             "VERSION_INFO" % version_info['id'])
+        raise AssertionError(_("ID %s must not be in "
+                               "VERSION_INFO") % version_info['id'])
     _VERSION_INFO[version_info['id']] = version_info

@@ -130,7 +130,8 @@ class OpenFlowSwitchMixin(object):
         if cookie == ovs_lib.COOKIE_ANY:
             cookie = 0
             if cookie_mask != 0:
-                raise Exception("cookie=COOKIE_ANY but cookie_mask set to %s" %
-                                cookie_mask)
+                raise Exception(_("cookie=COOKIE_ANY but cookie_mask set to "
+                                  "%s") %
+                                cookie_mask)
         elif cookie == COOKIE_DEFAULT:
             cookie = self._default_cookie

@@ -266,8 +267,8 @@ class BundledOpenFlowBridge(object):
                 return under
             return functools.partial(under, active_bundle=dict(
                 id=self.active_bundle, bundle_flags=self.bundle_flags))
-        raise AttributeError("Only install_* or uninstall_* methods "
-                             "can be used")
+        raise AttributeError(_("Only install_* or uninstall_* methods "
+                               "can be used"))

     def __enter__(self):
         if self.active_bundle is not None:

@@ -285,7 +286,7 @@ class BundledOpenFlowBridge(object):
             reply = self.br._send_msg(msg, reply_cls=ofpp.ONFBundleCtrlMsg)
             if reply.type != ofp.ONF_BCT_OPEN_REPLY:
                 raise RuntimeError(
-                    "Unexpected reply type %d != ONF_BCT_OPEN_REPLY" %
+                    _("Unexpected reply type %d != ONF_BCT_OPEN_REPLY") %
                     reply.type)
             return self
         except Exception:

@@ -313,7 +314,7 @@ class BundledOpenFlowBridge(object):
                 if reply.type != expected_reply:
                     # The bundle ID may be in a bad state. Let's leave it
                     # in active_bundles so that we will never use it again.
-                    raise RuntimeError("Unexpected reply type %d" % reply.type)
+                    raise RuntimeError(_("Unexpected reply type %d") % reply.type)
                 self.br.active_bundles.remove(self.active_bundle)
         finally:
             # It is possible the bundle is kept open, but this must be

@@ -17,6 +17,7 @@
 from oslo_log import log as logging
 from oslo_utils import excutils

+from neutron._i18n import _
 from neutron.agent.common import ovs_lib
 from neutron.plugins.ml2.drivers.openvswitch.agent.common import constants \
     as ovs_consts

@@ -46,7 +47,7 @@ class OVSAgentBridge(ofswitch.OpenFlowSwitchMixin,
         LOG.info("Bridge %(br_name)s has datapath-ID %(dpid)s",
                  {"br_name": self.br_name, "dpid": dpid})
         if dpid is None:
-            raise RuntimeError("Unknown datapath id.")
+            raise RuntimeError(_("Unknown datapath id."))
         self._cached_dpid = int(dpid, 16)
         try:
             dp = self._get_dp_by_dpid(self._cached_dpid)

@@ -23,6 +23,7 @@ from neutron_lib import context as n_ctx
 from oslo_concurrency import lockutils
 from oslo_log import log as logging

+from neutron._i18n import _
 from neutron.api.rpc.callbacks import events as rpc_events
 from neutron.api.rpc.handlers import resources_rpc
 from neutron.db import api as db_api

@@ -123,7 +124,7 @@ class _ObjectChangeHandler(object):
             return callback_kwargs[id_kwarg]
         if self._resource in callback_kwargs:
             return callback_kwargs[self._resource]['id']
-        raise RuntimeError("Couldn't find resource ID in callback event")
+        raise RuntimeError(_("Couldn't find resource ID in callback event"))


 class OVOServerRpcInterface(object):

@@ -21,6 +21,7 @@ from oslo_utils import excutils
 from sqlalchemy import exc as sql_exc
 from sqlalchemy.orm import session as se

+from neutron._i18n import _
 from neutron.db import api as db_api
 from neutron.db.quota import api as quota_api

@@ -52,7 +53,7 @@ def _count_resource(context, collection_name, tenant_id):
         except (NotImplementedError, AttributeError):
             pass
     raise NotImplementedError(
-        'No plugins that support counting %s found.' % collection_name)
+        _('No plugins that support counting %s found.') % collection_name)


 class BaseResource(object):

@@ -314,10 +315,10 @@ class TrackedResource(BaseResource):

     def _except_bulk_delete(self, delete_context):
         if delete_context.mapper.class_ == self._model_class:
-            raise RuntimeError("%s may not be deleted in bulk because "
-                               "it is tracked by the quota engine via "
-                               "SQLAlchemy event handlers, which are not "
-                               "compatible with bulk deletes." %
+            raise RuntimeError(_("%s may not be deleted in bulk because "
+                                 "it is tracked by the quota engine via "
+                                 "SQLAlchemy event handlers, which are not "
+                                 "compatible with bulk deletes.") %
                                self._model_class)

     def register_events(self):

tox.ini
@@ -157,12 +157,10 @@ commands = sphinx-build -W -b linkcheck doc/source doc/build/linkcheck
 # E128 continuation line under-indented for visual indent
 # H405 multi line docstring summary not separated with an empty line
 # N530 direct neutron imports not allowed
-# TODO(ihrachys) figure out what to do with N534
-# N534 Untranslated exception message
 # TODO(amotoki) check the following new rules should be fixed or ignored
 # E731 do not assign a lambda expression, use a def
 # W504 line break after binary operator
-ignore = E125,E126,E128,E731,H405,N530,N534,W504
+ignore = E125,E126,E128,E731,H405,N530,W504
 # H106: Don't put vim configuration in source files
 # H203: Use assertIs(Not)None to check for None
 # H204: Use assert(Not)Equal to check for equality
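With the TODO comment and the N534 entry gone from the ignore list, the check is enforced tree-wide on the next pep8 run (tox -e pep8 in neutron's tree). The real check ships with neutron-lib's hacking extensions; as a rough illustration of the shape of such a flake8/hacking check (a deliberately simplified sketch, not the actual neutron-lib code):

import re

# Matches a raise whose exception argument opens with a bare string
# literal, i.e. a message that never went through _().
_UNTRANSLATED_RAISE = re.compile(r"raise\s+\w+\s*\(\s*['\"]")


def check_untranslated_exception(logical_line):
    """Simplified stand-in for the N534 hacking check."""
    if _UNTRANSLATED_RAISE.search(logical_line):
        yield (0, "N534 Untranslated exception message")

flake8 calls such a function once per logical line and reports each yielded (offset, message) pair.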