Ensure default arguments are not mutable

* add hacking check: no_mutable_default_args()
* replace the mutable default arguments "[]" and "{}" in function
  definitions with None, and create a fresh object inside the function.

ref: http://docs.python-guide.org/en/latest/writing/gotchas/
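
For illustration, a minimal sketch of the gotcha described in the reference
above (the function names are made up and not part of this change): the
default object is created once, at definition time, and shared by every call.

    def append_bad(item, bucket=[]):
        bucket.append(item)
        return bucket

    append_bad(1)  # [1]
    append_bad(2)  # [1, 2] -- the same list object is reused across calls

    # The pattern applied throughout this change: default to None and
    # create a fresh object inside the function body.
    def append_good(item, bucket=None):
        bucket = bucket or []
        bucket.append(item)
        return bucket

    append_good(1)  # [1]
    append_good(2)  # [2]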

Closes-Bug: #1530282
Change-Id: Ice6f7654211b73d7f8bc3ca0e4dfae3dca354397
Javeme 2015-12-29 20:37:16 +08:00
parent a573e6d8a1
commit 3cb08bc52b
24 changed files with 87 additions and 34 deletions

@@ -26,6 +26,7 @@ Imports
 Dictionaries/Lists
 ------------------
+- [S360] Ensure default arguments are not mutable.
 - [S368] Must use a dict comprehension instead of a dict constructor with a
   sequence of key-value pairs. For more information, please refer to
   http://legacy.python.org/dev/peps/pep-0274/

@@ -114,7 +114,8 @@ def check_plugin_name_and_version(template, plugin_names, plugin_versions):
 # that the node group template usage checks there can be reused
 # without incurring unnecessary dependencies
 def check_node_group_template_usage(node_group_template_id,
-                                    cluster_list, cluster_template_list=[]):
+                                    cluster_list, cluster_template_list=None):
+    cluster_template_list = cluster_template_list or []
     cluster_users = []
     template_users = []
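
A note on the replacement idiom used here and in the hunks below: "value =
value or []" also swaps out an explicitly passed empty (falsy) container for
a new one, which is harmless when the argument is only read, while an
"is None" check preserves whatever object the caller passed. A minimal
sketch with made-up names, not part of this patch:

    def strict(items=None):
        # Only None triggers a new list; a caller-supplied empty list is kept.
        if items is None:
            items = []
        return items

    def loose(items=None):
        # Any falsy argument ([], {}, 0, "") is replaced with a new list.
        items = items or []
        return items

    passed = []
    assert strict(passed) is passed
    assert loose(passed) is not passed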

@@ -182,7 +182,7 @@ class AmbariPluginProvider(p.ProvisioningPluginBase):
             return edp_engine.EDPOozieEngine(cluster)
         return None
-    def get_edp_job_types(self, versions=[]):
+    def get_edp_job_types(self, versions=None):
         res = {}
         for version in self.get_versions():
             if not versions or version in versions:

@@ -71,7 +71,7 @@ class CDHPluginProvider(p.ProvisioningPluginBase):
         return self._get_version_handler(
             cluster.hadoop_version).get_edp_engine(cluster, job_type)
-    def get_edp_job_types(self, versions=[]):
+    def get_edp_job_types(self, versions=None):
         res = {}
         for vers in self.version_factory.get_versions():
             if not versions or vers in versions:

@@ -94,7 +94,7 @@ class FakePluginProvider(p.ProvisioningPluginBase):
     def get_edp_engine(self, cluster, job_type):
         return edp_engine.FakeJobEngine()
-    def get_edp_job_types(self, versions=[]):
+    def get_edp_job_types(self, versions=None):
         res = {}
         for vers in self.get_versions():
             if not versions or vers in versions:

@@ -349,7 +349,7 @@ class AmbariPlugin(p.ProvisioningPluginBase):
             self.version_factory.get_version_handler(cluster.hadoop_version))
         return version_handler.get_edp_engine(cluster, job_type)
-    def get_edp_job_types(self, versions=[]):
+    def get_edp_job_types(self, versions=None):
         res = {}
         for vers in self.version_factory.get_versions():
             if not versions or vers in versions:

@@ -68,7 +68,7 @@ class MapRPlugin(p.ProvisioningPluginBase):
         v_handler = self._get_handler(cluster.hadoop_version)
         return v_handler.get_edp_engine(cluster, job_type)
-    def get_edp_job_types(self, versions=[]):
+    def get_edp_job_types(self, versions=None):
         res = {}
         for vers in self.get_versions():
             if not versions or vers in versions:

@@ -66,7 +66,7 @@ class ProvisioningPluginBase(plugins_base.PluginInterface):
         pass
     @plugins_base.optional
-    def get_edp_job_types(self, versions=[]):
+    def get_edp_job_types(self, versions=None):
         return {}
     @plugins_base.optional

@@ -502,7 +502,7 @@ class SparkProvider(p.ProvisioningPluginBase):
         return None
-    def get_edp_job_types(self, versions=[]):
+    def get_edp_job_types(self, versions=None):
         res = {}
         for vers in self.get_versions():
             if not versions or vers in versions:

@@ -108,7 +108,7 @@ class StormProvider(p.ProvisioningPluginBase):
         return None
-    def get_edp_job_types(self, versions=[]):
+    def get_edp_job_types(self, versions=None):
         res = {}
         for vers in self.get_versions():
             if not versions or vers in versions:

@@ -72,7 +72,7 @@ class VanillaProvider(p.ProvisioningPluginBase):
         return self._get_version_handler(
             cluster.hadoop_version).get_edp_engine(cluster, job_type)
-    def get_edp_job_types(self, versions=[]):
+    def get_edp_job_types(self, versions=None):
         res = {}
         for vers in self.version_factory.get_versions():
             if not versions or vers in versions:

@@ -24,9 +24,14 @@ class HiveWorkflowCreator(base_workflow.OozieWorkflowCreator):
         hive_elem = self.doc.getElementsByTagName('hive')[0]
         hive_elem.setAttribute('xmlns', 'uri:oozie:hive-action:0.2')
-    def build_workflow_xml(self, script, job_xml, prepare={},
-                           configuration=None, params={},
-                           files=[], archives=[]):
+    def build_workflow_xml(self, script, job_xml, prepare=None,
+                           configuration=None, params=None,
+                           files=None, archives=None):
+        prepare = prepare or {}
+        params = params or {}
+        files = files or []
+        archives = archives or []
         for k in sorted(prepare):
             self._add_to_prepare_element(k, prepare[k])

@@ -23,12 +23,18 @@ class JavaWorkflowCreator(base_workflow.OozieWorkflowCreator):
         super(JavaWorkflowCreator, self).__init__('java')
     def build_workflow_xml(self, main_class,
-                           prepare={},
+                           prepare=None,
                            job_xml=None,
                            configuration=None,
                            java_opts=None,
-                           arguments=[],
-                           files=[], archives=[]):
+                           arguments=None,
+                           files=None,
+                           archives=None):
+        prepare = prepare or {}
+        arguments = arguments or []
+        files = files or []
+        archives = archives or []
         for k in sorted(prepare):
             self._add_to_prepare_element(k, prepare[k])

@@ -21,10 +21,15 @@ class MapReduceWorkFlowCreator(base_workflow.OozieWorkflowCreator):
     def __init__(self):
         super(MapReduceWorkFlowCreator, self).__init__('map-reduce')
-    def build_workflow_xml(self, prepare={},
+    def build_workflow_xml(self, prepare=None,
                            job_xml=None, configuration=None,
-                           files=[], archives=[],
-                           streaming={}):
+                           files=None, archives=None,
+                           streaming=None):
+        prepare = prepare or {}
+        files = files or []
+        archives = archives or []
+        streaming = streaming or {}
         for k in sorted(prepare):
             self._add_to_prepare_element(k, prepare[k])

@@ -22,9 +22,15 @@ class PigWorkflowCreator(base_workflow.OozieWorkflowCreator):
     def __init__(self):
         super(PigWorkflowCreator, self).__init__('pig')
-    def build_workflow_xml(self, script_name, prepare={},
-                           job_xml=None, configuration=None, params={},
-                           arguments=[], files=[], archives=[]):
+    def build_workflow_xml(self, script_name, prepare=None,
+                           job_xml=None, configuration=None, params=None,
+                           arguments=None, files=None, archives=None):
+        prepare = prepare or {}
+        params = params or {}
+        arguments = arguments or []
+        files = files or []
+        archives = archives or []
         for k in sorted(prepare):
             self._add_to_prepare_element(k, prepare[k])

@@ -24,11 +24,16 @@ class ShellWorkflowCreator(base_workflow.OozieWorkflowCreator):
     def __init__(self):
         super(ShellWorkflowCreator, self).__init__('shell')
-    def build_workflow_xml(self, script_name, prepare={},
-                           job_xml=None, configuration=None, env_vars={},
-                           arguments=[], files=[]):
+    def build_workflow_xml(self, script_name, prepare=None,
+                           job_xml=None, configuration=None, env_vars=None,
+                           arguments=None, files=None):
         x.add_attributes_to_element(self.doc, self.tag_name, self.SHELL_XMLNS)
+        prepare = prepare or {}
+        env_vars = env_vars or {}
+        arguments = arguments or []
+        files = files or []
         for k in sorted(prepare):
             self._add_to_prepare_element(k, prepare[k])

@@ -22,8 +22,8 @@ class Command(object):
 class Config(object):
-    def __init__(self, option_values={}):
-        self.command = Command(option_values)
+    def __init__(self, option_values=None):
+        self.command = Command(option_values or {})
 class Logger(object):

@@ -22,7 +22,8 @@ from sahara.tests.unit.db.templates import common as c
 class Config(c.Config):
-    def __init__(self, option_values={}):
+    def __init__(self, option_values=None):
+        option_values = option_values or {}
         if "name" not in option_values:
             option_values["name"] = "delete"
         super(Config, self).__init__(option_values)

@@ -78,7 +78,8 @@ worker_json = {
 class Config(c.Config):
-    def __init__(self, option_values={}):
+    def __init__(self, option_values=None):
+        option_values = option_values or {}
         if "name" not in option_values:
             option_values["name"] = "update"
         super(Config, self).__init__(option_values)

@@ -26,9 +26,9 @@ class FakeNGT(object):
 class FakeCluster(object):
-    def __init__(self, name, node_groups=[], cluster_template_id=None):
+    def __init__(self, name, node_groups=None, cluster_template_id=None):
         self.name = name
-        self.node_groups = node_groups
+        self.node_groups = node_groups or []
         self.cluster_template_id = cluster_template_id

@@ -43,13 +43,13 @@ class BaseTestClusterTemplate(base.SaharaWithDbTestCase):
                               image_username='root', volume_type=volume_type)
         return ng1, ng2
-    def _make_cluster(self, mng_network, ng1, ng2, anti_affinity=[]):
+    def _make_cluster(self, mng_network, ng1, ng2, anti_affinity=None):
         return tu.create_cluster("cluster", "tenant1", "general",
                                  "2.6.0", [ng1, ng2],
                                  user_keypair_id='user_key',
                                  neutron_management_network=mng_network,
                                  default_image_id='1', image_id=None,
-                                 anti_affinity=anti_affinity)
+                                 anti_affinity=anti_affinity or [])
 class TestClusterTemplate(BaseTestClusterTemplate):

@@ -64,3 +64,11 @@ class HackingTestCase(testtools.TestCase):
             "import json", "path"))))
         self.assertEqual(1, len(list(checks.use_jsonutils(
             "import json as jsonutils", "path"))))
+    def test_no_mutable_default_args(self):
+        self.assertEqual(0, len(list(checks.no_mutable_default_args(
+            "def foo (bar):"))))
+        self.assertEqual(1, len(list(checks.no_mutable_default_args(
+            "def foo (bar=[]):"))))
+        self.assertEqual(1, len(list(checks.no_mutable_default_args(
+            "def foo (bar={}):"))))

@@ -117,6 +117,17 @@ def use_jsonutils(logical_line, filename):
                      " of json")
+def no_mutable_default_args(logical_line):
+    """Check to prevent mutable default argument in sahara code.
+    S360
+    """
+    msg = "S360: Method's default argument shouldn't be mutable!"
+    mutable_default_args = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])")
+    if mutable_default_args.match(logical_line):
+        yield (0, msg)
 def factory(register):
     register(import_db_only_in_conductor)
     register(hacking_no_author_attr)
@@ -130,3 +141,4 @@ def factory(register):
     register(logging_checks.no_translate_debug_logs)
     register(logging_checks.accepted_log_levels)
     register(use_jsonutils)
+    register(no_mutable_default_args)
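
As a rough sketch of what the new check catches (illustration only, not part
of the patch): the regular expression flags literal empty "[]" or "{}"
defaults on a def line, so other mutable defaults such as dict(), set() or
non-empty literals are not reported.

    import re

    # Same pattern as in no_mutable_default_args() above.
    pattern = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])")

    flagged = [
        "def foo(bar=[]):",
        "def foo(bar={}, baz=1):",
    ]
    missed = [
        "def foo(bar=dict()):",   # mutable, but not a literal {}
        "def foo(bar=[1, 2]):",   # non-empty literal
    ]

    assert all(pattern.match(line) for line in flagged)
    assert not any(pattern.match(line) for line in missed)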

@@ -39,7 +39,9 @@ def start_subprocess():
                             stderr=subprocess.PIPE)
-def run_in_subprocess(proc, func, args=(), kwargs={}, interactive=False):
+def run_in_subprocess(proc, func, args=None, kwargs=None, interactive=False):
+    args = args or ()
+    kwargs = kwargs or {}
     try:
         pickle.dump(func, proc.stdin)
         pickle.dump(args, proc.stdin)