Pylint: Enable Check for Dangerous Default Value
Pylint currently ignores the following warning:

    W0102: dangerous-default-value

When a mutable object (for example a list or dict) is used as a default
argument value, it is created once at function definition time and shared
by every call, so any call that mutates it changes the default for all
later calls. Enable W0102 and convert the affected signatures to the
None-sentinel idiom to avoid these errors.

Change-Id: If1ab0a7ced03990e851932faeae14846dc1fb03b
Story: 2004515
Task: 28268
Signed-off-by: Eric Barrett <eric.barrett@windriver.com>
Parent: c6a18c4833
Commit: 81ff1b841f
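
For context, a minimal sketch of the trap W0102 flags and the None-sentinel idiom this change applies throughout the tree (the function and argument names below are illustrative, not taken from the sysinv code):

    # Dangerous: the list is created once, when the def statement executes,
    # and that same object is reused by every call that omits 'bucket'.
    def append_item(item, bucket=[]):
        bucket.append(item)
        return bucket

    append_item(1)   # [1]
    append_item(2)   # [1, 2]  <- the shared default now carries state

    # Safe: use None as a sentinel and build a fresh list on each call.
    def append_item_fixed(item, bucket=None):
        if bucket is None:
            bucket = []
        bucket.append(item)
        return bucket

    append_item_fixed(1)   # [1]
    append_item_fixed(2)   # [2]
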
@@ -34,7 +34,6 @@ load-plugins=
 # The following warnings should be fixed:
 # fixme (todo, xxx, fixme)
 # W0101: unreachable
-# W0102: dangerous-default-value
 # W0105: pointless-string-statement
 # W0106: expression-not-assigned
 # W0107: unnecessary-pass
@@ -80,7 +79,7 @@ load-plugins=
 # E1124: redundant-keyword-arg
 # E1136: unsubscriptable-object
 # E1205: logging-too-many-args
-disable=C, R, fixme, W0101, W0102, W0105, W0106, W0107, W0108, W0110, W0123, W0150,
+disable=C, R, fixme, W0101, W0105, W0106, W0107, W0108, W0110, W0123, W0150,
        W0201, W0211, W0212, W0221, W0223, W0231, W0235, W0311, W0402, W0403, W0404,
        W0603, W0612, W0613, W0621, W0622, W0631, W0632, W0701, W0703,
        W1113, W1201, W1401, W1505,

@@ -336,9 +336,11 @@ class ClusterController(rest.RestController):

     @wsme_pecan.wsexpose(ClusterCollection, [Query], types.uuid, types.uuid,
                          int, wtypes.text, wtypes.text)
-    def get_all(self, q=[], parent_uuid=None,
+    def get_all(self, q=None, parent_uuid=None,
                 marker=None, limit=None, sort_key='id', sort_dir='asc'):
         """Retrieve a list of Clusters."""
+        if q is None:
+            q = []
         return self._get_cluster_collection(parent_uuid, marker, limit,
                                             sort_key, sort_dir, q=q)

@@ -454,7 +454,9 @@ class KubeAppHelper(object):
                 "state of the patch(es)."))
         return response

-    def _patch_report_app_dependencies(self, name, patches=[]):
+    def _patch_report_app_dependencies(self, name, patches=None):
+        if patches is None:
+            patches = []
         try:
             system = self._dbapi.isystem_get_one()
             patch_api.patch_report_app_dependencies(

@@ -198,9 +198,11 @@ class ServiceParameterController(rest.RestController):
     @wsme_pecan.wsexpose(ServiceParameterCollection, [Query],
                          types.uuid, wtypes.text,
                          wtypes.text, wtypes.text, wtypes.text)
-    def get_all(self, q=[], marker=None, limit=None,
+    def get_all(self, q=None, marker=None, limit=None,
                 sort_key='id', sort_dir='asc'):
         """Retrieve a list of service parameters."""
+        if q is None:
+            q = []
         sort_key = ['section', 'name']
         return self._get_service_parameter_collection(marker, limit,
                                                       sort_key,

@@ -33,9 +33,11 @@ class AuthTokenMiddleware(auth_token.AuthProtocol):
     for public routes in the API.

     """
-    def __init__(self, app, conf, public_api_routes=[]):
+    def __init__(self, app, conf, public_api_routes=None):
         self._sysinv_app = app
         route_pattern_tpl = '%s(\.json|\.xml)?$'
+        if public_api_routes is None:
+            public_api_routes = []

         try:
             self.public_api_routes = [re.compile(route_pattern_tpl % route_tpl)

@@ -57,7 +57,7 @@ class ActivatedExtensionManager(enabled.EnabledExtensionManager):
    """

    def __init__(self, namespace, enabled_names, invoke_on_load=True,
-                invoke_args=(), invoke_kwds={}):
+                invoke_args=(), invoke_kwds=None):

        def local_check_func(ext):
            return should_use_extension(namespace, ext, enabled_names)

@@ -67,5 +67,5 @@ class ActivatedExtensionManager(enabled.EnabledExtensionManager):
            check_func=local_check_func,
            invoke_on_load=invoke_on_load,
            invoke_args=invoke_args,
-           invoke_kwds=invoke_kwds,
+           invoke_kwds=invoke_kwds if invoke_kwds is not None else {},
        )

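The extension-manager hunks above show a variant of the same fix: the parameter keeps its None default and is only translated back to an empty dict at the point where it is forwarded, rather than being reassigned at the top of __init__. A minimal sketch of that call-site pattern (class and parameter names are illustrative):

    class Widget(object):
        def __init__(self, options=None):
            # Resolve the sentinel where the value is consumed instead of
            # reassigning the parameter up front.
            self._options = options if options is not None else {}
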
@@ -121,11 +121,13 @@ class Health(object):

         return success, allowed, affecting

-    def get_alarms_degrade(self, context, alarm_ignore_list=[],
+    def get_alarms_degrade(self, context, alarm_ignore_list=None,
                            entity_instance_id_filter=""):
         """Return all the alarms that cause the degrade"""
         db_alarms = fmclient(context).alarm.list(include_suppress=True)
         degrade_alarms = []
+        if alarm_ignore_list is None:
+            alarm_ignore_list = []

         for db_alarm in db_alarms:
             if isinstance(db_alarm, tuple):

@@ -55,7 +55,9 @@ class PeriodicService(rpc_service.Service, periodic_task.PeriodicTasks):
                                context=admin_context)


-def prepare_service(argv=[]):
+def prepare_service(argv=None):
+    if argv is None:
+        argv = []
     rpc.set_defaults(control_exchange='sysinv')
     cfg.set_defaults(log.log_opts,
                      default_log_levels=['amqplib=WARN',

@@ -2449,11 +2449,14 @@ class DockerHelper(object):
         return None

     def make_armada_request(self, request, manifest_file='', overrides_str='',
-                            app_releases=[], logfile=None):
+                            app_releases=None, logfile=None):

         if logfile is None:
             logfile = request + '.log'

+        if app_releases is None:
+            app_releases = []
+
         rc = True

         # Instruct Armada to use the tiller service since it does not properly

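Note that make_armada_request already used the sentinel idiom for logfile, whose real default is derived from another argument at call time; this change simply gives app_releases the same treatment. A generic sketch of defaults that must be computed per call (the names are illustrative):

    def run_job(name, logfile=None, tags=None):
        # Defaults that depend on another argument, or that are mutable,
        # cannot safely live in the signature; compute them on each call.
        if logfile is None:
            logfile = name + '.log'
        if tags is None:
            tags = []
        return logfile, tags
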
@@ -732,7 +732,9 @@ class ConductorManager(service.PeriodicService):
         return line

     def _dnsmasq_addn_host_entry_to_string(self, ip_addr, hostname,
-                                           aliases=[]):
+                                           aliases=None):
+        if aliases is None:
+            aliases = []
         line = "%s %s" % (ip_addr, hostname)
         for alias in aliases:
             line = "%s %s" % (line, alias)

@@ -10073,7 +10075,7 @@ class ConductorManager(service.PeriodicService):
         """
         return self._helm.get_helm_application_overrides(app_name, cnamespace)

-    def merge_overrides(self, context, file_overrides=[], set_overrides=[]):
+    def merge_overrides(self, context, file_overrides=None, set_overrides=None):
         """Merge the file and set overrides into a single chart overrides.

         :param context: request context.

@@ -1692,7 +1692,7 @@ class ConductorAPI(sysinv.openstack.common.rpc.proxy.RpcProxy):
                               app_name=app_name,
                               cnamespace=cnamespace))

-    def merge_overrides(self, context, file_overrides=[], set_overrides=[]):
+    def merge_overrides(self, context, file_overrides=None, set_overrides=None):
         """Merge the file and set overrides into a single chart overrides.

         :param context: request context.
@@ -1701,6 +1701,10 @@ class ConductorAPI(sysinv.openstack.common.rpc.proxy.RpcProxy):
         :returns: merged overrides string

         """
+        if file_overrides is None:
+            file_overrides = []
+        if set_overrides is None:
+            set_overrides = []
         return self.call(context,
                          self.make_msg('merge_overrides',
                                        file_overrides=file_overrides,

@@ -377,7 +377,7 @@ class HelmOperator(object):
             LOG.debug("Chart %s can be found in repo: %s" % (chart_name, repo))
         return metadata_name, repo, chart_tarfile

-    def merge_overrides(self, file_overrides=[], set_overrides=[]):
+    def merge_overrides(self, file_overrides=None, set_overrides=None):
         """ Merge helm overrides together.

         :param values: A dict of different types of user override values,
@@ -385,6 +385,10 @@ class HelmOperator(object):
                'set' (which generally specify one override).
         """

+        if file_overrides is None:
+            file_overrides = []
+        if set_overrides is None:
+            set_overrides = []
         # At this point we have potentially two separate types of overrides
         # specified by system or user, values from files and values passed in
         # via --set . We need to ensure that we call helm using the same

@@ -400,7 +400,7 @@ class OpenstackBaseHelm(base.BaseHelm):

         return newprivatekey, newpublickey

-    def _oslo_multistring_override(self, name=None, values=[]):
+    def _oslo_multistring_override(self, name=None, values=None):
         """
         Generate helm multistring dictionary override for specified option
         name with multiple values.

@@ -28,7 +28,10 @@ class DBError(Exception):

 class DBDuplicateEntry(DBError):
     """Wraps an implementation specific exception."""
-    def __init__(self, columns=[], inner_exception=None):
-        self.columns = columns
+    def __init__(self, columns=None, inner_exception=None):
+        if columns is None:
+            self.columns = []
+        else:
+            self.columns = columns
         super(DBDuplicateEntry, self).__init__(inner_exception)

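DBDuplicateEntry resolves the sentinel with an explicit if/else and assigns the attribute in each branch. A tempting shortcut is self.columns = columns or [], but that would also discard a deliberately passed empty sequence, so the explicit is None test is the safer idiom; a sketch of that variant (the exception name is illustrative):

    class RecordError(Exception):
        def __init__(self, columns=None, inner_exception=None):
            # 'columns or []' would also replace an explicitly passed empty
            # sequence; test for the None sentinel specifically.
            if columns is None:
                self.columns = []
            else:
                self.columns = columns
            super(RecordError, self).__init__(inner_exception)
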
@@ -29,7 +29,7 @@ class CommandFilter(object):
         self.args = args
         self.real_exec = None

-    def get_exec(self, exec_dirs=[]):
+    def get_exec(self, exec_dirs=None):
         """Returns existing executable, or empty string if none found."""
         if self.real_exec is not None:
             return self.real_exec

@@ -37,7 +37,7 @@ class CommandFilter(object):
         if self.exec_path.startswith('/'):
             if os.access(self.exec_path, os.X_OK):
                 self.real_exec = self.exec_path
-        else:
+        elif exec_dirs is not None:
             for binary_path in exec_dirs:
                 expanded_path = os.path.join(binary_path, self.exec_path)
                 if os.access(expanded_path, os.X_OK):

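In get_exec the fix takes yet another shape: rather than substituting an empty list, the directory search is only entered when a real list was supplied, so the bare else: becomes elif exec_dirs is not None:. A sketch of guarding the iteration on the sentinel (the function and parameter names are illustrative):

    import os

    def find_executable(name, search_dirs=None):
        # Absolute paths are checked directly; otherwise search only the
        # supplied directories, skipping the loop entirely when None.
        if name.startswith('/'):
            if os.access(name, os.X_OK):
                return name
        elif search_dirs is not None:
            for directory in search_dirs:
                candidate = os.path.join(directory, name)
                if os.access(candidate, os.X_OK):
                    return candidate
        return ''
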
@@ -49,7 +49,7 @@ class CommandFilter(object):
         """Only check that the first argument (command) matches exec_path."""
         return os.path.basename(self.exec_path) == userargs[0]

-    def get_command(self, userargs, exec_dirs=[]):
+    def get_command(self, userargs, exec_dirs=None):
         """Returns command to execute (with sudo -u if run_as != root)."""
         to_exec = self.get_exec(exec_dirs=exec_dirs) or self.exec_path
         if (self.run_as != 'root'):

@@ -122,7 +122,9 @@ class PathFilter(CommandFilter):
                 args_equal_or_pass and
                 paths_are_within_base_dirs)

-    def get_command(self, userargs, exec_dirs=[]):
+    def get_command(self, userargs, exec_dirs=None):
+        if exec_dirs is None:
+            exec_dirs = []
         command, arguments = userargs[0], userargs[1:]

         # convert path values to canonical ones; copy other args as is

@@ -146,7 +148,9 @@ class DnsmasqFilter(CommandFilter):
             return True
         return False

-    def get_command(self, userargs, exec_dirs=[]):
+    def get_command(self, userargs, exec_dirs=None):
+        if exec_dirs is None:
+            exec_dirs = []
         to_exec = self.get_exec(exec_dirs=exec_dirs) or self.exec_path
         dnsmasq_pos = userargs.index('dnsmasq')
         return [to_exec] + userargs[dnsmasq_pos + 1:]

@@ -121,7 +121,7 @@ def load_filters(filters_path):
     return filterlist


-def match_filter(filter_list, userargs, exec_dirs=[]):
+def match_filter(filter_list, userargs, exec_dirs=None):
    """
    Checks user command and arguments through command filters and
    returns the first matching filter.
@@ -130,6 +130,8 @@ def match_filter(filter_list, userargs, exec_dirs=[]):
          best filter match.
    """
    first_not_executable_filter = None
+   if exec_dirs is None:
+       exec_dirs = []

    for f in filter_list:
        if f.match(userargs):

@@ -68,9 +68,10 @@ def get_reqs_from_files(requirements_files):
     return []


-def parse_requirements(requirements_files=['requirements.txt',
-                                           'tools/pip-requires']):
+def parse_requirements(requirements_files=None):
     requirements = []
+    if requirements_files is None:
+        requirements_files = ['requirements.txt', 'tools/pip-requires']
     for line in get_reqs_from_files(requirements_files):
         # For the requirements list, we need to inject only the portion
         # after egg= so that distutils knows the package it's looking for

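parse_requirements is the case where the original default was a meaningful, non-empty list; the sentinel replacement therefore has to rebuild that list inside the body rather than fall back to an empty one. A sketch of the non-empty-default case (the file names follow the hunk above; the function name is illustrative):

    def read_requirement_paths(paths=None):
        # Recreate the intended default on every call so callers can never
        # share and mutate a module-level list between invocations.
        if paths is None:
            paths = ['requirements.txt', 'tools/pip-requires']
        return list(paths)
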
@@ -97,8 +98,9 @@ def parse_requirements(requirements_files=['requirements.txt',
     return requirements


-def parse_dependency_links(requirements_files=['requirements.txt',
-                                               'tools/pip-requires']):
+def parse_dependency_links(requirements_files=None):
+    if requirements_files is None:
+        requirements_files = ['requirements.txt', 'tools/pip-requires']
     dependency_links = []
     # dependency_links inject alternate locations to find packages listed
     # in requirements