Merge "Leverage dict comprehension in PEP-0274"
This commit is contained in:
commit
c21810df77
@ -22,3 +22,10 @@ readable. Follow these guidelines:
|
||||
Imports
|
||||
-------
|
||||
- [S366, S367] Organize your imports according to the ``Import order``
|
||||
|
||||
Dictionaries/Lists
|
||||
------------------
|
||||
|
||||
- [S368] Must use a dict comprehension instead of a dict constructor with a
|
||||
sequence of key-value pairs. For more information, please refer to
|
||||
http://legacy.python.org/dev/peps/pep-0274/
|
||||
|
@ -171,7 +171,7 @@ class ClouderaUtilsV5(cu.ClouderaUtils):
|
||||
core_site_safety_valve = ''
|
||||
if self.pu.c_helper.is_swift_enabled(cluster):
|
||||
configs = swift_helper.get_swift_configs()
|
||||
confs = dict((c['name'], c['value']) for c in configs)
|
||||
confs = {c['name']: c['value'] for c in configs}
|
||||
core_site_safety_valve = xmlutils.create_elements_xml(confs)
|
||||
all_confs = {
|
||||
'HDFS': {
|
||||
|
@ -238,7 +238,7 @@ class ClouderaUtilsV530(cu.ClouderaUtils):
|
||||
core_site_safety_valve = ''
|
||||
if self.pu.c_helper.is_swift_enabled(cluster):
|
||||
configs = swift_helper.get_swift_configs()
|
||||
confs = dict((c['name'], c['value']) for c in configs)
|
||||
confs = {c['name']: c['value'] for c in configs}
|
||||
core_site_safety_valve = xmlutils.create_elements_xml(confs)
|
||||
all_confs = {
|
||||
'HDFS': {
|
||||
|
@ -464,7 +464,7 @@ def generate_job_cleanup_config(cluster):
|
||||
|
||||
|
||||
def extract_name_values(configs):
    """Build a {name: value} dict from a sequence of config entries.

    :param configs: iterable of mappings, each with 'name' and 'value' keys
    :returns: dict mapping each entry's name to its value; when names
              repeat, the last entry wins
    """
    # Diff residue left the old dict()-constructor line above the
    # comprehension, making the second return unreachable; keep only the
    # dict-comprehension form (PEP-0274 / S368).
    return {cfg['name']: cfg['value'] for cfg in configs}
|
||||
|
||||
|
||||
def make_hadoop_path(base_dirs, suffix):
|
||||
|
@ -132,7 +132,7 @@ def generate_storm_setup_script(env_configs):
|
||||
|
||||
|
||||
def extract_name_values(configs):
    """Build a {name: value} dict from a sequence of config entries.

    :param configs: iterable of mappings, each with 'name' and 'value' keys
    :returns: dict mapping each entry's name to its value; when names
              repeat, the last entry wins
    """
    # Diff residue left the old dict()-constructor line above the
    # comprehension, making the second return unreachable; keep only the
    # dict-comprehension form (PEP-0274 / S368).
    return {cfg['name']: cfg['value'] for cfg in configs}
|
||||
|
||||
|
||||
def _set_config(cfg, gen_cfg, name=None):
|
||||
|
@ -450,7 +450,7 @@ def generate_setup_script(storage_paths, env_configs, append_oozie=False):
|
||||
|
||||
|
||||
def extract_name_values(configs):
    """Build a {name: value} dict from a sequence of config entries.

    :param configs: iterable of mappings, each with 'name' and 'value' keys
    :returns: dict mapping each entry's name to its value; when names
              repeat, the last entry wins
    """
    # Diff residue left the old dict()-constructor line above the
    # comprehension, making the second return unreachable; keep only the
    # dict-comprehension form (PEP-0274 / S368).
    return {cfg['name']: cfg['value'] for cfg in configs}
|
||||
|
||||
|
||||
def extract_hadoop_path(lst, hadoop_dir):
|
||||
|
@ -185,8 +185,8 @@ class MapReduceFactory(BaseFactory):
|
||||
|
||||
def _get_streaming(self, job_dict):
|
||||
prefix = 'edp.streaming.'
|
||||
return dict((k[len(prefix):], v) for (k, v) in six.iteritems(
|
||||
job_dict['edp_configs']) if k.startswith(prefix))
|
||||
return {k[len(prefix):]: v for (k, v) in six.iteritems(
|
||||
job_dict['edp_configs']) if k.startswith(prefix)}
|
||||
|
||||
def get_workflow_xml(self, cluster, job_configs, input_data, output_data,
|
||||
hdfs_user):
|
||||
|
51
sahara/tests/unit/utils/test_hacking.py
Normal file
51
sahara/tests/unit/utils/test_hacking.py
Normal file
@ -0,0 +1,51 @@
|
||||
# Copyright 2015 EasyStack Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import testtools
|
||||
|
||||
from sahara.utils.hacking import checks
|
||||
|
||||
|
||||
class HackingTestCase(testtools.TestCase):
    def test_dict_constructor_with_list_copy(self):
        """S368 fires on dict() over key-value sequences, not plain dict()."""
        check = checks.dict_constructor_with_list_copy

        # Lines that must each produce exactly one S368 violation.
        flagged = [
            " dict([(i, connect_info[i])",
            " attrs = dict([(k, _from_json(v))",
            " type_names = dict((value, key) for key, value in",
            " dict((value, key) for key, value in",
            "foo(param=dict((k, v) for k, v in bar.items()))",
            " dict([[i,i] for i in range(3)])",
            " dd = dict([i,i] for i in range(3))",
        ]
        for line in flagged:
            self.assertEqual(1, len(list(check(line))))

        # Legitimate dict() usage that must pass the check cleanly.
        accepted = [
            " dict()",
            " create_kwargs = dict(snapshot=snapshot,",
            " self._render_dict(xml, data_el, data.__dict__)",
        ]
        for line in accepted:
            self.assertEqual(0, len(list(check(line))))
|
@ -84,6 +84,18 @@ def check_oslo_namespace_imports(logical_line):
|
||||
logical_line))
|
||||
|
||||
|
||||
def dict_constructor_with_list_copy(logical_line):
    """Flag dict() built from a sequence of key-value pairs.

    Such constructors should be dict comprehensions instead (PEP-0274).

    S368
    """
    # re.match with an inline pattern is equivalent to compiling first;
    # the re module caches compiled patterns internally.
    if re.match(r".*\bdict\((\[)?(\(|\[)", logical_line):
        yield (0, 'S368: Must use a dict comprehension instead of a dict '
               'constructor with a sequence of key-value pairs.')
|
||||
|
||||
|
||||
def factory(register):
|
||||
register(import_db_only_in_conductor)
|
||||
register(hacking_no_author_attr)
|
||||
@ -92,3 +104,4 @@ def factory(register):
|
||||
register(commit_message.OnceGitCheckCommitTitleLength)
|
||||
register(import_checks.hacking_import_groups)
|
||||
register(import_checks.hacking_import_groups_together)
|
||||
register(dict_constructor_with_list_copy)
|
||||
|
@ -106,7 +106,7 @@ class SaharaImageManager(images.ImageManager):
|
||||
"""Adds tags to the specified image."""
|
||||
tags = _ensure_tags(tags)
|
||||
|
||||
self.set_meta(image, dict((PROP_TAG + tag, True) for tag in tags))
|
||||
self.set_meta(image, {PROP_TAG + tag: True for tag in tags})
|
||||
|
||||
def untag(self, image, tags):
|
||||
"""Removes tags from the specified image."""
|
||||
|
@ -71,7 +71,7 @@ def get_images():
|
||||
|
||||
def get_limits():
    """Fetch the absolute client limits as a plain dict.

    :returns: dict mapping each absolute limit's name to its value
    """
    limits = client().limits.get().absolute
    # Diff residue left the old dict()-constructor return above the
    # comprehension (unreachable); keep only the comprehension.  'lim'
    # instead of 'l' avoids the ambiguous single-letter name (PEP 8 E741).
    return {lim.name: lim.value for lim in limits}
|
||||
|
||||
|
||||
def get_user_keypair(cluster):
|
||||
|
@ -43,8 +43,8 @@ class BaseResource(object):
|
||||
|
||||
def to_dict(self):
    """Serialize this resource's public attributes to a dict.

    :returns: copy of the instance __dict__ with every key rejected by
              self._filter_field() removed
    """
    dictionary = self.__dict__.copy()
    # Diff residue left the old dict()-constructor return above the
    # comprehension (unreachable); keep only the comprehension.  Plain
    # .items() replaces the Python-2-only .iteritems() — the resulting
    # dict is identical and works on Python 3 as well.
    return {k: v for k, v in dictionary.items()
            if not self._filter_field(k)}
|
||||
|
||||
def as_resource(self):
|
||||
return Resource(self.__resource_name__, self.to_dict())
|
||||
|
Loading…
Reference in New Issue
Block a user