Start running pylint for pep8 tests
When 'tox -e pep8' was run it didn't run any pylint checks; let's start doing that. This change also fixes the issues pylint found. pylint 2.6.0 added two new checkers [1] that make the pep8 validation fail:

- raise-missing-from (W0707)
- super-with-arguments (R1725)

[1] http://pylint.pycqa.org/en/latest/technical_reference/features.html

Change-Id: Ib83f30696269c968f41f9c1131596268a4412658
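For illustration, a minimal sketch (hypothetical code, not taken from this repository) of the two patterns the new checkers flag and the style of fix applied throughout this change:

    class Base:
        def __init__(self, value):
            self.value = value

    class Child(Base):
        def __init__(self, value):
            # R1725 (super-with-arguments) flags the old Python 2 spelling
            # super(Child, self).__init__(value); the zero-argument form
            # below is the preferred replacement.
            super().__init__(value)

    def to_int(data):
        try:
            return int(data)
        except ValueError as e:
            # W0707 (raise-missing-from) flags re-raising a different
            # exception without chaining; 'from e' keeps the original
            # traceback attached as __cause__.
            raise RuntimeError("'%s' is not an integer" % data) from e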
parent 18129456d8
commit e92e98f8c6

.pylintrc (new file, 138 lines)
@@ -0,0 +1,138 @@
# The format of this file isn't really documented; just use --generate-rcfile
[MASTER]
# Add <file or directory> to the black list. It should be a base name, not a
# path. You may set this option multiple times.
ignore=.git,tests

[MESSAGES CONTROL]
# NOTE(gus): This is a long list. A number of these are important and
# should be re-enabled once the offending code is fixed (or marked
# with a local disable)
disable=
# "F" Fatal errors that prevent further processing
import-error,
# "I" Informational noise
c-extension-no-member,
locally-disabled,
# "E" Error for important programming issues (likely bugs)
access-member-before-definition,
no-member,
no-method-argument,
no-self-argument,
not-an-iterable,
# "W" Warnings for stylistic problems or minor programming issues
abstract-method,
arguments-differ,
attribute-defined-outside-init,
bad-builtin,
bad-indentation,
broad-except,
dangerous-default-value,
deprecated-lambda,
expression-not-assigned,
fixme,
global-statement,
keyword-arg-before-vararg,
literal-comparison,
no-init,
non-parent-init-called,
not-callable,
protected-access,
redefined-builtin,
redefined-outer-name,
signature-differs,
star-args,
super-init-not-called,
super-on-old-class,
unpacking-non-sequence,
unused-argument,
unused-import,
unused-variable,
useless-super-delegation,
using-constant-test,
# TODO(dougwig) - disable nonstandard-exception while we have neutron_lib shims
nonstandard-exception,
# "C" Coding convention violations
bad-continuation,
consider-iterating-dictionary,
consider-using-enumerate,
invalid-name,
len-as-condition,
misplaced-comparison-constant,
missing-docstring,
singleton-comparison,
superfluous-parens,
ungrouped-imports,
wrong-import-order,
# "R" Refactor recommendations
abstract-class-little-used,
abstract-class-not-used,
consider-merging-isinstance,
consider-using-ternary,
duplicate-code,
inconsistent-return-statements,
interface-not-implemented,
no-else-return,
no-self-use,
redefined-argument-from-local,
simplifiable-if-statement,
too-few-public-methods,
too-many-ancestors,
too-many-arguments,
too-many-branches,
too-many-instance-attributes,
too-many-lines,
too-many-locals,
too-many-nested-blocks,
too-many-public-methods,
too-many-return-statements,
too-many-statements,
# new for python3 version of pylint
consider-using-set-comprehension,
unnecessary-pass,
useless-object-inheritance

[BASIC]
# Variable names can be 1 to 31 characters long, with lowercase and underscores
variable-rgx=[a-z_][a-z0-9_]{0,30}$

# Argument names can be 2 to 31 characters long, with lowercase and underscores
argument-rgx=[a-z_][a-z0-9_]{1,30}$

# Method names should be at least 3 characters long
# and be lowercased with underscores
method-rgx=([a-z_][a-z0-9_]{2,}|setUp|tearDown)$

# Module names matching neutron_lib-* are ok (files in bin/)
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+)|(neutron_lib-[a-z0-9_-]+))$

# Don't require docstrings on tests.
no-docstring-rgx=((__.*__)|([tT]est.*)|setUp|tearDown)$

[FORMAT]
# Maximum number of characters on a single line.
max-line-length=79

[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
# _ is used by our localization
additional-builtins=_

[CLASSES]
# List of interface methods to ignore, separated by a comma.
ignore-iface-methods=

[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=
# should use oslo_serialization.jsonutils
json

[TYPECHECK]
# List of module names for which member attributes should not be checked
ignored-modules=six.moves,_MovedItems

[REPORTS]
# Tells whether to display a full report or only the messages
reports=no
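Messages that stay enabled can still be silenced on a single line with an inline pragma; that is the "local disable" the NOTE in [MESSAGES CONTROL] refers to. A minimal sketch (hypothetical module, not repository code):

    # The [IMPORTS] section above deprecates the stdlib json module in
    # favour of oslo_serialization.jsonutils; an intentional exception
    # can be marked locally so only this line is exempted.
    import json  # pylint: disable=deprecated-module

    def dump(data):
        # Return the JSON representation of data.
        return json.dumps(data)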
@@ -1,7 +1,7 @@
alabaster==0.7.10
alembic==0.8.10
amqp==2.1.1
appdirs==1.3.0
appdirs==1.4.3
Babel==2.3.4
cachetools==2.0.0
contextlib2==0.4.0

@@ -18,6 +18,7 @@ futurist==1.2.0
greenlet==0.4.15
imagesize==0.7.1
iso8601==0.1.11
isort==4.3.21
Jinja2==2.10
keystoneauth1==3.4.0
kombu==4.6.1
@@ -32,9 +32,9 @@ def convert_to_boolean(data):
"""
try:
return strutils.bool_from_string(data, strict=True)
except ValueError:
except ValueError as e:
msg = _("'%s' cannot be converted to boolean") % data
raise n_exc.InvalidInput(error_message=msg)
raise n_exc.InvalidInput(error_message=msg) from e

def convert_to_boolean_if_not_none(data):

@@ -58,9 +58,9 @@ def convert_to_int(data):
"""
try:
return int(data)
except (ValueError, TypeError):
except (ValueError, TypeError) as e:
msg = _("'%s' is not an integer") % data
raise n_exc.InvalidInput(error_message=msg)
raise n_exc.InvalidInput(error_message=msg) from e

def convert_to_int_if_not_none(data):

@@ -96,9 +96,9 @@ def convert_to_positive_float_or_none(val):
val = float(val)
if val < 0:
raise ValueError()
except (ValueError, TypeError):
except (ValueError, TypeError) as e:
msg = _("'%s' must be a non negative decimal") % val
raise n_exc.InvalidInput(error_message=msg)
raise n_exc.InvalidInput(error_message=msg) from e
return val

@@ -210,8 +210,8 @@ def convert_cidr_to_canonical_format(value):
cidr = netaddr.IPNetwork(value)
return str(convert_ip_to_canonical_format(
cidr.ip)) + "/" + str(cidr.prefixlen)
except netaddr.core.AddrFormatError:
raise n_exc.InvalidInput(error_message=error_message)
except netaddr.core.AddrFormatError as e:
raise n_exc.InvalidInput(error_message=error_message) from e

def convert_string_to_case_insensitive(data):

@@ -226,9 +226,9 @@ def convert_string_to_case_insensitive(data):
"""
try:
return data.lower()
except AttributeError:
except AttributeError as e:
error_message = _("Input value %s must be string type") % data
raise n_exc.InvalidInput(error_message=error_message)
raise n_exc.InvalidInput(error_message=error_message) from e

def convert_to_protocol(data):

@@ -260,8 +260,8 @@ def convert_to_protocol(data):
return data
else:
raise n_exc.InvalidInput(error_message=error_message)
except n_exc.InvalidInput:
raise n_exc.InvalidInput(error_message=error_message)
except n_exc.InvalidInput as e:
raise n_exc.InvalidInput(error_message=error_message) from e

def convert_to_string(data):
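To make the effect of the added 'from e' concrete, a small self-contained sketch (hypothetical InvalidInput class, not the neutron_lib exception): chaining records the original error on __cause__, so nothing is lost when the converter re-raises.

    class InvalidInput(Exception):
        pass

    def convert_to_int(data):
        try:
            return int(data)
        except (ValueError, TypeError) as e:
            raise InvalidInput("'%s' is not an integer" % data) from e

    try:
        convert_to_int("not-a-number")
    except InvalidInput as exc:
        # The original ValueError is preserved as the explicit cause.
        assert isinstance(exc.__cause__, ValueError)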
@@ -262,5 +262,5 @@ class APIExtensionDescriptor(ExtensionDescriptor):
"""
if extension_attrs_map is None:
extension_attrs_map = cls.get_extended_resources('2.0')
super(APIExtensionDescriptor, cls()).update_attributes_map(
attributes, extension_attrs_map=extension_attrs_map)
super(APIExtensionDescriptor, cls).update_attributes_map(
cls, attributes, extension_attrs_map=extension_attrs_map)
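A side note on the hunk above: zero-argument super() also works inside classmethods, where Python supplies the enclosing class and cls automatically. A minimal sketch with hypothetical classes, not the real descriptors:

    class ExtensionDescriptor:
        @classmethod
        def update_attributes_map(cls, attributes, extension_attrs_map=None):
            return {'class': cls.__name__, 'attributes': attributes}

    class APIExtensionDescriptor(ExtensionDescriptor):
        @classmethod
        def update_attributes_map(cls, attributes, extension_attrs_map=None):
            # super() here is equivalent to
            # super(APIExtensionDescriptor, cls).
            return super().update_attributes_map(
                attributes, extension_attrs_map=extension_attrs_map)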
@@ -162,14 +162,14 @@ def validate_values(data, valid_values=None, valid_values_display=None):
{'data': data, 'valid_values': valid_values_display})
LOG.debug(msg)
return msg
except TypeError:
except TypeError as e:
# This is a programming error
msg = (_("'data' of type '%(typedata)s' and 'valid_values' "
"of type '%(typevalues)s' are not "
"compatible for comparison") %
{'typedata': type(data),
'typevalues': type(valid_values)})
raise TypeError(msg)
raise TypeError(msg) from e
else:
# This is a programming error
msg = (_("'valid_values' does not support membership operations"))

@@ -377,7 +377,7 @@ def validate_mac_address(data, valid_values=None):
valid_mac = False

if valid_mac:
valid_mac = (not netaddr.EUI(data) in
valid_mac = (netaddr.EUI(data) not in
map(netaddr.EUI, constants.INVALID_MAC_ADDRESSES))
# TODO(arosen): The code in this file should be refactored
# so it catches the correct exceptions. validate_no_whitespace

@@ -868,8 +868,8 @@ def _extract_validator(key_validator):
return (validator_name,
validators[validator_name],
validator_params)
except KeyError:
raise UndefinedValidator(validator_name)
except KeyError as e:
raise UndefinedValidator(validator_name) from e
return None, None, None

@@ -1119,7 +1119,7 @@ def validate_subnet_service_types(service_types, valid_values=None):
if not isinstance(service_type, str):
raise n_exc.InvalidInputSubnetServiceType(
service_type=service_type)
elif not service_type.startswith(tuple(prefixes)):
if not service_type.startswith(tuple(prefixes)):
raise n_exc.InvalidSubnetServiceType(service_type=service_type)

@@ -1141,8 +1141,7 @@ def validate_ethertype(ethertype, valid_values=None):
if ethertype_str == "IPV4":
ethertype_str = "IP"
ethertype_name = 'ETH_TYPE_' + ethertype_str
if ethertype_name in os_ken_ethertypes:
ethertype = os_ken_ethertypes[ethertype_name]
ethertype = os_ken_ethertypes.get(ethertype_name, ethertype)
except TypeError:
# Value of ethertype cannot be coerced into a string, like None
pass

@@ -1150,8 +1149,8 @@ def validate_ethertype(ethertype, valid_values=None):
if isinstance(ethertype, str):
ethertype = int(ethertype, 16)
if (isinstance(ethertype, int) and
constants.ETHERTYPE_MIN <= ethertype and
ethertype <= constants.ETHERTYPE_MAX):
constants.ETHERTYPE_MIN <= ethertype <=
constants.ETHERTYPE_MAX):
return None
except ValueError:
pass
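The stylistic rewrites in the validator hunks above are behaviour-preserving; a short sketch with hypothetical values shows the equivalences being asked for:

    ETHERTYPE_MIN, ETHERTYPE_MAX = 0x0, 0xFFFF

    def ethertype_in_range(value):
        # Chained comparison, equivalent to:
        #   ETHERTYPE_MIN <= value and value <= ETHERTYPE_MAX
        return ETHERTYPE_MIN <= value <= ETHERTYPE_MAX

    def mac_is_valid(mac, invalid_macs):
        # 'not in', same truth table as: not mac in invalid_macs
        return mac not in invalid_macs

    assert ethertype_in_range(0x0800)
    assert mac_is_valid('fa:16:3e:00:00:01', {'00:00:00:00:00:00'})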
@@ -107,7 +107,7 @@ class DBEventPayload(EventPayload):
def __init__(self, context, metadata=None, request_body=None,
states=None, resource_id=None, desired_state=None):

super(DBEventPayload, self).__init__(
super().__init__(
context, metadata=metadata, request_body=request_body,
states=states, resource_id=resource_id)

@@ -138,8 +138,7 @@ class DBEventPayload(EventPayload):
:returns: If this payload has a desired_state its returned, otherwise
latest_state is returned.
"""
return (self.desired_state or
super(DBEventPayload, self).latest_state)
return (self.desired_state or super().latest_state)

class APIEventPayload(EventPayload):

@@ -149,7 +148,7 @@ class APIEventPayload(EventPayload):
metadata=None, request_body=None, states=None,
resource_id=None, collection_name=None):

super(APIEventPayload, self).__init__(
super().__init__(
context, metadata=metadata, request_body=request_body,
states=states, resource_id=resource_id)
@@ -40,7 +40,7 @@ class ContextBase(oslo_context.RequestContext):
# prefer project_name, as that's what's going to be set by
# keystone. Fall back to tenant_name if for some reason it's blank.
kwargs.setdefault('project_name', tenant_name)
super(ContextBase, self).__init__(
super().__init__(
is_admin=is_admin, user_id=user_id, **kwargs)

self.user_name = user_name

@@ -72,7 +72,7 @@ class ContextBase(oslo_context.RequestContext):
self.project_name = tenant_name

def to_dict(self):
context = super(ContextBase, self).to_dict()
context = super().to_dict()
context.update({
'user_id': self.user_id,
'tenant_id': self.project_id,

@@ -85,7 +85,7 @@ class ContextBase(oslo_context.RequestContext):
return context

def to_policy_values(self):
values = super(ContextBase, self).to_policy_values()
values = super().to_policy_values()
values['tenant_id'] = self.project_id
values['is_admin'] = self.is_admin

@@ -138,7 +138,7 @@ _TransactionConstraint = collections.namedtuple(

class Context(ContextBaseWithSession):
def __init__(self, *args, **kwargs):
super(Context, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self._session = None
self._txn_constraint = None

@@ -146,13 +146,13 @@ class Context(ContextBaseWithSession):
def session(self):
# TODO(akamyshnikova): checking for session attribute won't be needed
# when reader and writer will be used
if hasattr(super(Context, self), 'session'):
if hasattr(super(), 'session'):
if self._session:
warnings.warn('context.session is used with and without '
'new enginefacade. Please update the code to '
'use new enginefacede consistently.',
DeprecationWarning)
return super(Context, self).session
return super().session
if self._session is None:
self._session = db_api.get_writer_session()
@@ -216,18 +216,17 @@ def retry_if_session_inactive(context_var_name='context'):
# deals with the horrors of finding args of already decorated
# functions
ctx_arg_index = p_util.getargspec(f).args.index(context_var_name)
except ValueError:
except ValueError as e:
msg = _("Could not find position of var %s") % context_var_name
raise RuntimeError(msg)
raise RuntimeError(msg) from e
f_with_retry = retry_db_errors(f)

@functools.wraps(f)
def wrapped(*args, **kwargs):
# only use retry wrapper if we aren't nested in an active
# transaction
if context_var_name in kwargs:
context = kwargs[context_var_name]
else:
context = kwargs.get(context_var_name)
if context is None:
context = args[ctx_arg_index]
method = f if context.session.is_active else f_with_retry
return method(*args, **kwargs)
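The dict.get() simplification above assumes the caller never passes context=None explicitly; under that assumption the two forms pick the same object. A small sketch with hypothetical arguments:

    def pick_context(args, kwargs, ctx_arg_index, name='context'):
        # Equivalent to:
        #   kwargs[name] if name in kwargs else args[ctx_arg_index]
        # as long as kwargs never maps name to None.
        context = kwargs.get(name)
        if context is None:
            context = args[ctx_arg_index]
        return context

    assert pick_context(('positional-ctx',), {}, 0) == 'positional-ctx'
    assert pick_context((), {'context': 'keyword-ctx'}, 0) == 'keyword-ctx'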
@@ -129,9 +129,9 @@ class HasStandardAttributes(object):
def get_collection_resource_map(cls):
try:
return cls.collection_resource_map
except AttributeError:
raise NotImplementedError(_("%s must define "
"collection_resource_map") % cls)
except AttributeError as e:
raise NotImplementedError(
_("%s must define collection_resource_map") % cls) from e

@classmethod
def validate_tag_support(cls):

@@ -168,7 +168,7 @@ class HasStandardAttributes(object):
for key in standard_attr_keys:
if key in kwargs:
standard_attr_kwargs[key] = kwargs.pop(key)
super(HasStandardAttributes, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
# here we automatically create the related standard attribute object
self.standard_attr = StandardAttribute(
resource_type=self.__tablename__, **standard_attr_kwargs)

@@ -190,7 +190,7 @@ class HasStandardAttributes(object):
# happens if code calls update_port with modified results of get_port
new_dict.pop('created_at', None)
new_dict.pop('updated_at', None)
super(HasStandardAttributes, self).update(new_dict)
super().update(new_dict)

@declarative.declared_attr
def revision_number(cls):
@@ -38,11 +38,12 @@ def get_and_validate_sort_keys(sorts, model):
for sort_key in sort_keys:
try:
sort_key_attr = getattr(model, sort_key)
except AttributeError:
except AttributeError as e:
# Extension attributes don't support sorting. Because it
# existed in attr_info, it will be caught here.
msg = _("'%s' is an invalid attribute for sort key") % sort_key
raise n_exc.BadRequest(resource=model.__tablename__, msg=msg)
raise n_exc.BadRequest(
resource=model.__tablename__, msg=msg) from e
if isinstance(sort_key_attr.property,
properties.RelationshipProperty):
msg = _("Attribute '%(attr)s' references another resource and "
@@ -33,14 +33,14 @@ class NeutronException(Exception):

def __init__(self, **kwargs):
try:
super(NeutronException, self).__init__(self.message % kwargs)
super().__init__(self.message % kwargs)
self.msg = self.message % kwargs
except Exception:
with excutils.save_and_reraise_exception() as ctxt:
if not self.use_fatal_exceptions():
ctxt.reraise = False
# at least get the core message out if something happened
super(NeutronException, self).__init__(self.message)
super().__init__(self.message)

def __str__(self):
return self.msg

@@ -200,7 +200,7 @@ class SubnetInUse(InUse):
if 'reason' not in kwargs:
kwargs['reason'] = _("One or more ports have an IP allocation "
"from this subnet")
super(SubnetInUse, self).__init__(**kwargs)
super().__init__(**kwargs)

class SubnetPoolInUse(InUse):

@@ -219,7 +219,7 @@ class SubnetPoolInUse(InUse):
def __init__(self, **kwargs):
if 'reason' not in kwargs:
kwargs['reason'] = _("Two or more concurrent subnets allocated")
super(SubnetPoolInUse, self).__init__(**kwargs)
super().__init__(**kwargs)

class PortInUse(InUse):

@@ -389,7 +389,7 @@ class Invalid(NeutronException):
"""A generic base class for invalid errors."""
def __init__(self, message=None):
self.message = message
super(Invalid, self).__init__()
super().__init__()

class InvalidInput(BadRequest):

@@ -481,7 +481,7 @@ class NetworkTunnelRangeError(NeutronException):
# Convert tunnel_range tuple to 'start:end' format for display
if isinstance(kwargs['tunnel_range'], tuple):
kwargs['tunnel_range'] = "%d:%d" % kwargs['tunnel_range']
super(NetworkTunnelRangeError, self).__init__(**kwargs)
super().__init__(**kwargs)

class PolicyInitError(NeutronException):

@@ -517,7 +517,7 @@ class MultipleExceptions(Exception):
:param args: Passed onto parent constructor.
:param kwargs: Passed onto parent constructor.
"""
super(MultipleExceptions, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.inner_exceptions = exceptions

@@ -552,7 +552,7 @@ class NetworkVlanRangeError(NeutronException):
# Convert vlan_range tuple to 'start:end' format for display
if isinstance(kwargs['vlan_range'], tuple):
kwargs['vlan_range'] = "%d:%d" % kwargs['vlan_range']
super(NetworkVlanRangeError, self).__init__(**kwargs)
super().__init__(**kwargs)

class PhysicalNetworkNameError(NeutronException):

@@ -670,7 +670,7 @@ class DuplicatedExtension(NeutronException):

class DriverCallError(MultipleExceptions):
def __init__(self, exc_list=None):
super(DriverCallError, self).__init__(exc_list or [])
super().__init__(exc_list or [])

class DeviceIDNotOwnedByTenant(Conflict):

@@ -809,7 +809,7 @@ class PortBindingError(NeutronException):

class ProcessExecutionError(RuntimeError):
def __init__(self, message, returncode):
super(ProcessExecutionError, self).__init__(message)
super().__init__(message)
self.returncode = returncode
@@ -27,7 +27,7 @@ class RouterInUse(exceptions.InUse):
def __init__(self, **kwargs):
if 'reason' not in kwargs:
kwargs['reason'] = "still has ports"
super(RouterInUse, self).__init__(**kwargs)
super().__init__(**kwargs)

class RouterInterfaceNotFound(exceptions.NotFound):

@@ -85,7 +85,7 @@ class RouterNotFoundInRouterFactory(exceptions.NeutronException):
class FloatingIpSetupException(exceptions.NeutronException):
def __init__(self, message=None):
self.message = message
super(FloatingIpSetupException, self).__init__()
super().__init__()

class AbortSyncRouters(exceptions.NeutronException):

@@ -95,4 +95,4 @@ class AbortSyncRouters(exceptions.NeutronException):
class IpTablesApplyException(exceptions.NeutronException):
def __init__(self, message=None):
self.message = message
super(IpTablesApplyException, self).__init__()
super().__init__()
@@ -40,7 +40,7 @@ CONF = cfg.CONF
class PluginDirectoryFixture(fixtures.Fixture):

def __init__(self, plugin_directory=None):
super(PluginDirectoryFixture, self).__init__()
super().__init__()
self.plugin_directory = (
plugin_directory or directory._PluginDirectory())

@@ -69,7 +69,7 @@ class CallbackRegistryFixture(fixtures.Fixture):
_get_callback_manager(). Otherwise a new instance of CallbacksManager
is used.
"""
super(CallbackRegistryFixture, self).__init__()
super().__init__()
self.callback_manager = callback_manager or manager.CallbacksManager()
self.patcher = None

@@ -258,7 +258,7 @@ class PlacementAPIClientFixture(fixtures.Fixture):

:param placement_api_client: Placement API client object.
"""
super(PlacementAPIClientFixture, self).__init__()
super().__init__()
self.placement_api_client = placement_api_client

def _setUp(self):
@@ -38,10 +38,10 @@ class RangeConstrainedInteger(obj_fields.Integer):
try:
self._start = int(start)
self._end = int(end)
except (TypeError, ValueError):
except (TypeError, ValueError) as e:
raise o_exc.NeutronRangeConstrainedIntegerInvalidLimit(
start=start, end=end)
super(RangeConstrainedInteger, self).__init__(**kwargs)
start=start, end=end) from e
super().__init__(**kwargs)

def coerce(self, obj, attr, value):
if not isinstance(value, int):

@@ -50,14 +50,13 @@ class RangeConstrainedInteger(obj_fields.Integer):
if not self._start <= value <= self._end:
msg = _("Field value %s is invalid") % value
raise ValueError(msg)
return super(RangeConstrainedInteger, self).coerce(obj, attr, value)
return super().coerce(obj, attr, value)

class IPNetworkPrefixLen(RangeConstrainedInteger):
"""IP network (CIDR) prefix length custom Enum"""
def __init__(self, **kwargs):
super(IPNetworkPrefixLen, self).__init__(
start=0, end=lib_constants.IPv6_BITS, **kwargs)
super().__init__(start=0, end=lib_constants.IPv6_BITS, **kwargs)

class IPNetworkPrefixLenField(obj_fields.AutoTypedField):

@@ -66,7 +65,7 @@ class IPNetworkPrefixLenField(obj_fields.AutoTypedField):

class PortRange(RangeConstrainedInteger):
def __init__(self, start=lib_constants.PORT_RANGE_MIN, **kwargs):
super(PortRange, self).__init__(start=start,
super().__init__(start=start,
end=lib_constants.PORT_RANGE_MAX,
**kwargs)

@@ -81,7 +80,7 @@ class PortRangeWith0Field(obj_fields.AutoTypedField):

class VlanIdRange(RangeConstrainedInteger):
def __init__(self, **kwargs):
super(VlanIdRange, self).__init__(start=lib_constants.MIN_VLAN_TAG,
super().__init__(start=lib_constants.MIN_VLAN_TAG,
end=lib_constants.MAX_VLAN_TAG,
**kwargs)

@@ -106,7 +105,7 @@ class DomainName(obj_fields.String):
if len(value) > lib_db_const.FQDN_FIELD_SIZE:
msg = _("Domain name %s is too long") % value
raise ValueError(msg)
return super(DomainName, self).coerce(obj, attr, value)
return super().coerce(obj, attr, value)

class DomainNameField(obj_fields.AutoTypedField):

@@ -123,7 +122,7 @@ class IntegerEnum(obj_fields.Integer):
msg = _("Possible value %s is not an integer") % value
raise ValueError(msg)
self._valid_values = valid_values
super(IntegerEnum, self).__init__(**kwargs)
super().__init__(**kwargs)

def coerce(self, obj, attr, value):
if not isinstance(value, int):

@@ -136,13 +135,13 @@ class IntegerEnum(obj_fields.Integer):
{'value': value, 'values': self._valid_values}
)
raise ValueError(msg)
return super(IntegerEnum, self).coerce(obj, attr, value)
return super().coerce(obj, attr, value)

class IPVersionEnum(IntegerEnum):
"""IP version integer Enum"""
def __init__(self, **kwargs):
super(IPVersionEnum, self).__init__(
super().__init__(
valid_values=lib_constants.IP_ALLOWED_VERSIONS, **kwargs)

@@ -152,8 +151,7 @@ class IPVersionEnumField(obj_fields.AutoTypedField):

class DscpMark(IntegerEnum):
def __init__(self, valid_values=None, **kwargs):
super(DscpMark, self).__init__(
valid_values=lib_constants.VALID_DSCP_MARKS)
super().__init__(valid_values=lib_constants.VALID_DSCP_MARKS)

class DscpMarkField(obj_fields.AutoTypedField):

@@ -176,7 +174,7 @@ class EtherTypeEnumField(obj_fields.AutoTypedField):
class IpProtocolEnum(obj_fields.Enum):
"""IP protocol number Enum"""
def __init__(self, **kwargs):
super(IpProtocolEnum, self).__init__(
super().__init__(
valid_values=list(
itertools.chain(
lib_constants.IP_PROTOCOL_MAP.keys(),

@@ -205,7 +203,7 @@ class MACAddress(obj_fields.FieldType):
if not isinstance(value, netaddr.EUI):
msg = _("Field value %s is not a netaddr.EUI") % value
raise ValueError(msg)
return super(MACAddress, self).coerce(obj, attr, value)
return super().coerce(obj, attr, value)

@staticmethod
def to_primitive(obj, attr, value):

@@ -215,9 +213,9 @@ class MACAddress(obj_fields.FieldType):
def from_primitive(obj, attr, value):
try:
return net_utils.AuthenticEUI(value)
except Exception:
except Exception as e:
msg = _("Field value %s is not a netaddr.EUI") % value
raise ValueError(msg)
raise ValueError(msg) from e

class MACAddressField(obj_fields.AutoTypedField):

@@ -237,9 +235,9 @@ class DictOfMiscValues(obj_fields.FieldType):
if isinstance(value, str):
try:
return jsonutils.loads(value)
except Exception:
except Exception as e:
msg = _("Field value %s is not stringified JSON") % value
raise ValueError(msg)
raise ValueError(msg) from e
msg = (_("Field value %s is not type of dict or stringified JSON")
% value)
raise ValueError(msg)

@@ -276,7 +274,7 @@ class IPNetwork(obj_fields.FieldType):
if not isinstance(value, netaddr.IPNetwork):
msg = _("Field value %s is not a netaddr.IPNetwork") % value
raise ValueError(msg)
return super(IPNetwork, self).coerce(obj, attr, value)
return super().coerce(obj, attr, value)

@staticmethod
def to_primitive(obj, attr, value):

@@ -286,9 +284,9 @@ class IPNetwork(obj_fields.FieldType):
def from_primitive(obj, attr, value):
try:
return net_utils.AuthenticIPNetwork(value)
except Exception:
except Exception as e:
msg = _("Field value %s is not a netaddr.IPNetwork") % value
raise ValueError(msg)
raise ValueError(msg) from e

class IPNetworkField(obj_fields.AutoTypedField):
@@ -37,7 +37,7 @@ class NeutronDbObjectDuplicateEntry(exceptions.Conflict):
"for attribute(s) %(attributes)s with value(s) %(values)s")

def __init__(self, object_class, db_exception):
super(NeutronDbObjectDuplicateEntry, self).__init__(
super().__init__(
object_type=reflection.get_class_name(object_class,
fully_qualified=False),
attributes=db_exception.columns,

@@ -55,7 +55,7 @@ class NeutronPrimaryKeyMissing(exceptions.BadRequest):
"%(missing_keys)s")

def __init__(self, object_class, missing_keys):
super(NeutronPrimaryKeyMissing, self).__init__(
super().__init__(
object_type=reflection.get_class_name(object_class,
fully_qualified=False),
missing_keys=missing_keys
@@ -21,7 +21,7 @@ from neutron_lib.services.logapi import constants as log_const
class SecurityEvent(obj_fields.String):
def __init__(self, valid_values, **kwargs):
self._valid_values = valid_values
super(SecurityEvent, self).__init__(**kwargs)
super().__init__(**kwargs)

def coerce(self, obj, attr, value):
if value not in self._valid_values:

@@ -31,7 +31,7 @@ class SecurityEvent(obj_fields.String):
{'value': value, 'values': self._valid_values}
)
raise ValueError(msg)
return super(SecurityEvent, self).coerce(obj, attr, value)
return super().coerce(obj, attr, value)

class SecurityEventField(obj_fields.AutoTypedField):
@@ -50,7 +50,7 @@ class StringMatchingFilterObj(FilterObj):
class StringContains(StringMatchingFilterObj):

def __init__(self, matching_string):
super(StringContains, self).__init__()
super().__init__()
self.contains = matching_string

def filter(self, column):

@@ -60,7 +60,7 @@ class StringContains(StringMatchingFilterObj):
class StringStarts(StringMatchingFilterObj):

def __init__(self, matching_string):
super(StringStarts, self).__init__()
super().__init__()
self.starts = matching_string

def filter(self, column):

@@ -70,7 +70,7 @@ class StringStarts(StringMatchingFilterObj):
class StringEnds(StringMatchingFilterObj):

def __init__(self, matching_string):
super(StringEnds, self).__init__()
super().__init__()
self.ends = matching_string

def filter(self, column):

@@ -80,7 +80,7 @@ class StringEnds(StringMatchingFilterObj):
class NotIn(FilterObj):

def __init__(self, value):
super(NotIn, self).__init__()
super().__init__()
self.value = value

def filter(self, column):

@@ -90,7 +90,7 @@ class NotIn(FilterObj):
class NotEqual(FilterObj):

def __init__(self, value):
super(NotEqual, self).__init__()
super().__init__()
self.value = value

def filter(self, column):
@@ -64,7 +64,6 @@ def _check_placement_api_available(f):
# to avoid losing information.
raise n_exc.PlacementClientError(
msg=exc.response.text.replace('\n', ' '))
else:
raise
return wrapper

@@ -79,7 +78,7 @@ class UUIDEncoder(jsonutils.JSONEncoder):
def default(self, o):
if isinstance(o, uuid.UUID):
return str(o)
return super(UUIDEncoder, self).default(o)
return super().default(o)

class NoAuthClient(object):

@@ -233,7 +232,6 @@ class PlacementAPIClient(object):
if e.response.json()[
'errors'][0]['code'] == 'placement.concurrent_update':
continue
else:
raise

raise n_exc.PlacementResourceProviderGenerationConflict(

@@ -277,6 +275,7 @@ class PlacementAPIClient(object):
the same name.
:returns: The updated resource provider.
"""
# pylint: disable=raise-missing-from
url = '/resource_providers/%s' % resource_provider['uuid']
# update does not tolerate if the uuid is repeated in the body
update_body = resource_provider.copy()

@@ -325,6 +324,7 @@ class PlacementAPIClient(object):
:raises PlacementResourceProviderNotFound: For failure to find resource
:returns: The Resource Provider matching the UUID.
"""
# pylint: disable=raise-missing-from
url = '/resource_providers/%s' % resource_provider_uuid
try:
return self._get(url).json()

@@ -407,6 +407,7 @@ class PlacementAPIClient(object):
server side.
:returns: The updated set of inventory records.
"""
# pylint: disable=raise-missing-from
url = '/resource_providers/%s/inventories' % resource_provider_uuid
body = {
'resource_provider_generation': resource_provider_generation,

@@ -438,7 +439,6 @@ class PlacementAPIClient(object):
if "No resource provider with uuid" in e.details:
raise n_exc.PlacementResourceProviderNotFound(
resource_provider=resource_provider_uuid)
else:
raise

@_check_placement_api_available

@@ -461,11 +461,10 @@ class PlacementAPIClient(object):
if "No resource provider with uuid" in e.details:
raise n_exc.PlacementResourceProviderNotFound(
resource_provider=resource_provider_uuid)
elif "No inventory of class" in e.details:
if "No inventory of class" in e.details:
raise n_exc.PlacementInventoryNotFound(
resource_provider=resource_provider_uuid,
resource_class=resource_class)
else:
raise

@_check_placement_api_available

@@ -489,11 +488,10 @@ class PlacementAPIClient(object):
if "No resource provider with uuid" in e.details:
raise n_exc.PlacementResourceProviderNotFound(
resource_provider=resource_provider_uuid)
elif _("No inventory of class") in e.details:
if _("No inventory of class") in e.details:
raise n_exc.PlacementInventoryNotFound(
resource_provider=resource_provider_uuid,
resource_class=resource_class)
else:
raise

@_check_placement_api_available

@@ -549,6 +547,7 @@ class PlacementAPIClient(object):
:returns: The list of aggregates together with the resource provider
generation.
"""
# pylint: disable=raise-missing-from
url = '/resource_providers/%s/aggregates' % resource_provider_uuid
try:
return self._get(url).json()

@@ -570,6 +569,7 @@ class PlacementAPIClient(object):
:raises PlacementTraitNotFound: If the trait name not found.
:returns: Evaluates to True if the trait exists.
"""
# pylint: disable=raise-missing-from
url = '/traits/%s' % name
try:
return self._get(url)

@@ -594,6 +594,7 @@ class PlacementAPIClient(object):
:raises PlacementTraitNotFound: If the trait did not exist.
:returns: None.
"""
# pylint: disable=raise-missing-from
url = '/traits/%s' % (name)
try:
self._delete(url)

@@ -626,6 +627,7 @@ class PlacementAPIClient(object):
:returns: The new traits of the resource provider together with the
resource provider generation.
"""
# pylint: disable=raise-missing-from
url = '/resource_providers/%s/traits' % (resource_provider_uuid)
body = {
'resource_provider_generation': resource_provider_generation,

@@ -653,6 +655,7 @@ class PlacementAPIClient(object):
:returns: The associated traits of the resource provider together
with the resource provider generation.
"""
# pylint: disable=raise-missing-from
url = '/resource_providers/%s/traits' % (resource_provider_uuid)
try:
return self._get(url).json()

@@ -670,6 +673,7 @@ class PlacementAPIClient(object):
is not found.
:returns: None.
"""
# pylint: disable=raise-missing-from
url = '/resource_providers/%s/traits' % (resource_provider_uuid)
try:
self._delete(url)

@@ -692,6 +696,7 @@ class PlacementAPIClient(object):
is not found.
:returns: The name of resource class and its set of links.
"""
# pylint: disable=raise-missing-from
url = '/resource_classes/%s' % (name)
try:
return self._get(url).json()

@@ -728,6 +733,7 @@ class PlacementAPIClient(object):
is not found.
:returns: None.
"""
# pylint: disable=raise-missing-from
url = '/resource_classes/%s' % (name)
try:
self._delete(url)

@@ -776,7 +782,6 @@ class PlacementAPIClient(object):
resp = e.response.json()
if resp['errors'][0]['code'] == 'placement.concurrent_update':
continue
else:
raise
raise n_exc.PlacementAllocationGenerationConflict(
consumer=consumer_uuid)
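Where translating an exception without chaining is intentional, the new W0707 warning is silenced locally rather than globally, as the '# pylint: disable=raise-missing-from' comments above do. A minimal sketch (hypothetical helper, not the placement client API):

    class ClientError(Exception):
        pass

    def get_json(fetch, url):
        # pylint: disable=raise-missing-from
        try:
            return fetch(url)
        except KeyError:
            # Deliberately not chained; the lookup failure is translated
            # into a client-level error and the original traceback is not
            # considered useful here.
            raise ClientError('request failed for %s' % url)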
@@ -125,11 +125,11 @@ def _parse_bandwidth_value(bw_str):
bw = int(bw_str)
if bw < 0:
raise ValueError()
except ValueError:
except ValueError as e:
raise ValueError(_(
'Cannot parse resource_provider_bandwidths. '
'Expected: non-negative integer bandwidth value, got: %s') %
bw_str)
bw_str) from e
return bw

@@ -160,10 +160,10 @@ def parse_rp_bandwidths(bandwidths):
bandwidth += '::'
try:
device, egress_str, ingress_str = bandwidth.split(':')
except ValueError:
except ValueError as e:
raise ValueError(_(
'Cannot parse resource_provider_bandwidths. '
'Expected: DEVICE:EGRESS:INGRESS, got: %s') % bandwidth)
'Expected: DEVICE:EGRESS:INGRESS, got: %s') % bandwidth) from e
if device in rv:
raise ValueError(_(
'Cannot parse resource_provider_bandwidths. '

@@ -216,11 +216,11 @@ def parse_rp_inventory_defaults(inventory_defaults):
inventory_defaults['allocation_ratio'])
if inventory_defaults['allocation_ratio'] < 0:
raise ValueError()
except ValueError:
except ValueError as e:
raise ValueError(_(
'Cannot parse inventory_defaults.allocation_ratio. '
'Expected: non-negative float, got: %s') %
inventory_defaults['allocation_ratio'])
inventory_defaults['allocation_ratio']) from e

# the others are ints
for key in ('min_unit', 'max_unit', 'reserved', 'step_size'):

@@ -229,12 +229,12 @@ def parse_rp_inventory_defaults(inventory_defaults):
inventory_defaults[key] = int(inventory_defaults[key])
if inventory_defaults[key] < 0:
raise ValueError()
except ValueError:
except ValueError as e:
raise ValueError(_(
'Cannot parse inventory_defaults.%(key)s. '
'Expected: non-negative int, got: %(got)s') % {
'key': key,
'got': inventory_defaults[key],
})
}) from e

return inventory_defaults
@@ -306,7 +306,7 @@ def _fixup_res_dict(context, attr_name, res_dict, check_allow_post=True):
except web_exc.HTTPBadRequest as e:
# convert webob exception into ValueError as these functions are
# for internal use. webob exception doesn't make sense.
raise ValueError(e.detail)
raise ValueError(e.detail) from e
attr_ops.fill_post_defaults(res_dict, check_allow_post=check_allow_post)
attr_ops.convert_values(res_dict)
return res_dict
@@ -189,7 +189,7 @@ class BackingOffClient(oslo_messaging.RPCClient):
increase the timeout for the given call method.
"""
def prepare(self, *args, **kwargs):
ctx = super(BackingOffClient, self).prepare(*args, **kwargs)
ctx = super().prepare(*args, **kwargs)
# don't back off contexts that explicitly set a timeout
if 'timeout' in kwargs:
return _ContextWrapper(ctx)

@@ -255,7 +255,7 @@ def get_notifier(service=None, host=None, publisher_id=None):
class RequestContextSerializer(om_serializer.Serializer):
"""Convert RPC common context into Neutron Context."""
def __init__(self, base=None):
super(RequestContextSerializer, self).__init__()
super().__init__()
self._base = base

def serialize_entity(self, ctxt, entity):

@@ -295,7 +295,7 @@ class Service(service.Service):
A service enables rpc by listening to queues based on topic and host.
"""
def __init__(self, host, topic, manager=None, serializer=None):
super(Service, self).__init__()
super().__init__()
self.host = host
self.topic = topic
self.serializer = serializer

@@ -305,7 +305,7 @@ class Service(service.Service):
self.manager = manager

def start(self):
super(Service, self).start()
super().start()

self.conn = Connection()
LOG.debug("Creating Consumer connection for Service %s",

@@ -330,14 +330,14 @@ class Service(service.Service):
self.conn.close()
except Exception: # nosec
pass
super(Service, self).stop()
super().stop()

class Connection(object):
"""A utility class that manages a collection of RPC servers."""

def __init__(self):
super(Connection, self).__init__()
super().__init__()
self.servers = []

def create_consumer(self, topic, endpoints, fanout=False):
@@ -70,7 +70,6 @@ def random_mac_generator(base_mac):
mac = form.format(beginning, *numbers)
if mac in seen:
continue
else:
seen.add(mac)
yield mac

@@ -91,13 +90,13 @@ def is_port_trusted(port):

class _AuthenticBase(object):
def __init__(self, addr, **kwargs):
super(_AuthenticBase, self).__init__(addr, **kwargs)
super().__init__(addr, **kwargs)
self._initial_value = addr

def __str__(self):
if isinstance(self._initial_value, str):
return self._initial_value
return super(_AuthenticBase, self).__str__()
return super().__str__()

# NOTE(ihrachys): override deepcopy because netaddr.* classes are
# slot-based and hence would not copy _initial_value
@@ -118,12 +118,12 @@ def load_class_by_alias_or_classname(namespace, name):
# Fallback to class name
try:
class_to_load = importutils.import_class(name)
except (ImportError, ValueError):
except (ImportError, ValueError) as e:
LOG.error("Error loading class by alias",
exc_info=e1_info)
LOG.error("Error loading class by class name",
exc_info=True)
raise ImportError(_("Class not found."))
raise ImportError(_("Class not found.")) from e
return class_to_load
@@ -8,6 +8,8 @@ bandit!=1.6.0,>=1.1.0 # Apache-2.0
coverage!=4.4,>=4.0 # Apache-2.0
fixtures>=3.0.0 # Apache-2.0/BSD
flake8-import-order==0.12 # LGPLv3
pylint>=2.2.0 # GPLv2
isort==4.3.21 # MIT
python-subunit>=1.0.0 # Apache-2.0/BSD
oslotest>=3.2.0 # Apache-2.0
reno>=3.1.0 # Apache-2.0
tools/coding-checks.sh (new executable file, 58 lines)

@@ -0,0 +1,58 @@
#!/bin/sh

set -eu

usage () {
echo "Usage: $0 [OPTION]..."
echo "Run Neutron-lib's coding check(s)"
echo ""
echo " -Y, --pylint [<basecommit>] Run pylint check on the entire neutron_lib module or just files changed in basecommit (e.g. HEAD~1)"
echo " -h, --help Print this usage message"
echo
exit 0
}

process_options () {
i=1
while [ $i -le $# ]; do
eval opt=\$$i
case $opt in
-h|--help) usage;;
-Y|--pylint) pylint=1;;
*) scriptargs="$scriptargs $opt"
esac
i=$((i+1))
done
}

run_pylint () {
local target="${scriptargs:-all}"

if [ "$target" = "all" ]; then
files="neutron_lib"
else
case "$target" in
*HEAD~[0-9]*) files=$(git diff --diff-filter=AM --name-only $target -- "*.py");;
*) echo "$target is an unrecognized basecommit"; exit 1;;
esac
fi

echo "Running pylint..."
echo "You can speed this up by running it on 'HEAD~[0-9]' (e.g. HEAD~1, this change only)..."
if [ -n "${files}" ]; then
pylint --rcfile=.pylintrc --output-format=colorized ${files}
else
echo "No python changes in this commit, pylint check not required."
exit 0
fi
}

scriptargs=
pylint=1

process_options $@

if [ $pylint -eq 1 ]; then
run_pylint
exit 0
fi
tox.ini (2 lines changed)

@@ -17,6 +17,7 @@ deps =
-c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/master/upper-constraints.txt}
-r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
whitelist_externals = sh
commands =
stestr run {posargs}

@@ -24,6 +25,7 @@ commands =
commands =
flake8
{toxinidir}/tools/check_samples.sh
sh ./tools/coding-checks.sh --pylint '{posargs}'
{[testenv:bandit]commands}

[testenv:releasenotes]