Enabled checks for E226, E251, E265
Those checks are also enabled in Horizon and should be enabled here as well. * E226 missing whitespace around arithmetic operator * E251 unexpected spaces around keyword / parameter equals * E265 block comment should start with '# ' Change-Id: Ib44fd1b327c23752acfc5b98cd468881b781c816
This commit is contained in:
parent
1bc3465a8b
commit
f3c15c6f54
|
@ -43,7 +43,7 @@ class CopyClusterTemplate(create_flow.ConfigureClusterTemplate):
|
|||
entry_point, *args,
|
||||
**kwargs)
|
||||
|
||||
#init Node Groups
|
||||
# init Node Groups
|
||||
|
||||
for step in self.steps:
|
||||
if isinstance(step, create_flow.ConfigureNodegroups):
|
||||
|
|
|
@ -282,7 +282,7 @@ class ConfigureClusterTemplate(whelpers.ServiceParametersWorkflow,
|
|||
plugin, hadoop_version = whelpers.\
|
||||
get_plugin_and_hadoop_version(request)
|
||||
|
||||
#TODO(nkonovalov): Fix client to support default_image_id
|
||||
# TODO(nkonovalov): Fix client to support default_image_id
|
||||
sahara.cluster_templates.create(
|
||||
context["general_cluster_template_name"],
|
||||
plugin,
|
||||
|
|
|
@ -120,7 +120,7 @@ class InstancesTable(tables.DataTable):
|
|||
|
||||
class Meta:
|
||||
name = "cluster_instances"
|
||||
#just ignoring the name
|
||||
# just ignoring the name
|
||||
verbose_name = _(" ")
|
||||
|
||||
|
||||
|
|
|
@ -197,7 +197,7 @@ class ConfigureCluster(whelpers.StatusFormatMixin, workflows.Workflow):
|
|||
def handle(self, request, context):
|
||||
try:
|
||||
sahara = saharaclient.client(request)
|
||||
#TODO(nkonovalov) Implement AJAX Node Groups
|
||||
# TODO(nkonovalov) Implement AJAX Node Groups
|
||||
node_groups = None
|
||||
|
||||
plugin, hadoop_version = whelpers.\
|
||||
|
|
|
@ -55,7 +55,7 @@ class ScaleCluster(cl_create_flow.ConfigureCluster,
|
|||
plugin = cluster.plugin_name
|
||||
hadoop_version = cluster.hadoop_version
|
||||
|
||||
#init deletable nodegroups
|
||||
# init deletable nodegroups
|
||||
deletable = dict()
|
||||
for group in cluster.node_groups:
|
||||
deletable[group["name"]] = "false"
|
||||
|
@ -70,7 +70,7 @@ class ScaleCluster(cl_create_flow.ConfigureCluster,
|
|||
entry_point, *args,
|
||||
**kwargs)
|
||||
|
||||
#init Node Groups
|
||||
# init Node Groups
|
||||
|
||||
for step in self.steps:
|
||||
if isinstance(step, clt_create_flow.ConfigureNodegroups):
|
||||
|
|
|
@ -70,7 +70,7 @@ class CopyNodegroupTemplate(create_flow.ConfigureNodegroupTemplate):
|
|||
hadoop_version)
|
||||
plugin_node_processes = plugin_details.node_processes
|
||||
for process in template.node_processes:
|
||||
#need to know the service
|
||||
# need to know the service
|
||||
_service = None
|
||||
for service, processes in plugin_node_processes.items():
|
||||
if process in processes:
|
||||
|
|
|
@ -414,7 +414,7 @@ class UITestCase(unittest2.TestCase):
|
|||
if not undelete_names and len(names) > 1:
|
||||
msg = "Success: Deleted Job binarie"
|
||||
|
||||
if undelete_names and len(names)-len(undelete_names) > 1:
|
||||
if undelete_names and len(names) - len(undelete_names) > 1:
|
||||
info_msg = 'Info: Deleted Job binarie'
|
||||
|
||||
if undelete_names and len(undelete_names) > 1:
|
||||
|
@ -456,7 +456,7 @@ class UITestCase(unittest2.TestCase):
|
|||
message = 'Success: Deleted Job execution'
|
||||
actual_message = self.find_alert_message(
|
||||
"alert-success", first_character=2,
|
||||
last_character=len(message)+2)
|
||||
last_character=len(message) + 2)
|
||||
self.assertEqual(actual_message, message)
|
||||
|
||||
def unregister_images(self, names, undelete_names=[],
|
||||
|
@ -467,7 +467,7 @@ class UITestCase(unittest2.TestCase):
|
|||
self.delete_and_validate(url, delete_button_id, names, undelete_names,
|
||||
finally_delete, succes_msg=msg,)
|
||||
|
||||
#-------------------------helpers_methods--------------------------------------
|
||||
# -------------------------helpers_methods-------------------------------------
|
||||
|
||||
@staticmethod
|
||||
def connect_to_swift():
|
||||
|
@ -627,7 +627,7 @@ class UITestCase(unittest2.TestCase):
|
|||
driver.find_element_by_id(
|
||||
"image_registry__action_register").click()
|
||||
else:
|
||||
#Add existing tags in the list
|
||||
# Add existing tags in the list
|
||||
list_for_check_tags = driver.\
|
||||
find_element(by=by.By.LINK_TEXT, value=image_name).\
|
||||
find_element_by_xpath('../../td[3]').text.split('\n')
|
||||
|
@ -669,7 +669,7 @@ class UITestCase(unittest2.TestCase):
|
|||
self.fail("Tag:%s, %s is unknown" % (first, second))
|
||||
if tags_to_remove:
|
||||
for tag in tags_to_remove:
|
||||
#click "x" in tag
|
||||
# click "x" in tag
|
||||
driver.find_element_by_xpath(
|
||||
"//div[@id='image_tags_list']//span[contains(.,'%s')]//i"
|
||||
% tag).click()
|
||||
|
@ -709,10 +709,10 @@ class UITestCase(unittest2.TestCase):
|
|||
def check_alert(self, alert, expected_message, list_obj, deleted=True):
|
||||
self.await_element(by.By.CLASS_NAME, alert)
|
||||
actual_message = self.find_alert_message(
|
||||
alert, first_character=2, last_character=len(expected_message)+2)
|
||||
alert, first_character=2, last_character=len(expected_message) + 2)
|
||||
self.assertEqual(actual_message, expected_message)
|
||||
not_expected_objs = list(set(self.find_alert_message(
|
||||
alert, first_character=len(expected_message)+2).split(
|
||||
alert, first_character=len(expected_message) + 2).split(
|
||||
", ")).symmetric_difference(set(list_obj)))
|
||||
if not_expected_objs:
|
||||
self.fail("have deleted objects: %s" % not_expected_objs)
|
||||
|
|
|
@ -38,8 +38,8 @@ class UINegativeCreateClusterTemplateTest(base.UITestCase):
|
|||
{"HDFS Parameters:io.file.buffer.size": "str"},
|
||||
{"MapReduce Parameters:mapreduce.job.counters.max":
|
||||
"str"}],
|
||||
positive=False, close_window=False, message=
|
||||
'Details, HDFS Parameters, MapReduce Parameters, '
|
||||
positive=False, close_window=False,
|
||||
message='Details, HDFS Parameters, MapReduce Parameters, '
|
||||
'Template Name:This field is required., '
|
||||
'HDFS Parameters:io.file.buffer.size:Enter a whole number., '
|
||||
'MapReduce Parameters:mapreduce.job.counters.max:'
|
||||
|
|
|
@ -35,10 +35,10 @@ class UICreateClusterTemplate(base.UITestCase):
|
|||
self.create_cluster_template(
|
||||
"selenium-clstr-tmpl", {'selenium-master': 1,
|
||||
'selenium-worker': 2},
|
||||
cfg.vanilla, anti_affinity_groups=["NN", "DN", "TT"], params=
|
||||
[{"General Parameters:Enable Swift": False},
|
||||
{"HDFS Parameters:dfs.replication": 2},
|
||||
{"MapReduce Parameters:mapred.output.compress": False}])
|
||||
cfg.vanilla, anti_affinity_groups=["NN", "DN", "TT"],
|
||||
params=[{"General Parameters:Enable Swift": False},
|
||||
{"HDFS Parameters:dfs.replication": 2},
|
||||
{"MapReduce Parameters:mapred.output.compress": False}])
|
||||
msg = 'Error: Cluster template with name \'selenium-clstr-tmpl\'' \
|
||||
' already exists'
|
||||
self.create_cluster_template('selenium-clstr-tmpl',
|
||||
|
|
|
@ -61,8 +61,8 @@ class UICreateCluster(base.UITestCase):
|
|||
{'selenium-master': 1,
|
||||
'selenium-del2': 2},
|
||||
cfg.vanilla,
|
||||
anti_affinity_groups=
|
||||
["NN", "DN", "TT", "JT"])
|
||||
anti_affinity_groups=["NN", "DN",
|
||||
"TT", "JT"])
|
||||
self.create_cluster('selenium-cl', 'selenium-cl-tmpl',
|
||||
cfg.vanilla, await_run=await_run)
|
||||
if not cfg.vanilla.skip_edp_test:
|
||||
|
|
|
@ -114,17 +114,18 @@ HdpGroup = [
|
|||
default='1.3.2',
|
||||
help="hadoop version for plugin"),
|
||||
cfg.DictOpt('processes',
|
||||
default=
|
||||
{"NN": 0, "DN": 1, "SNN": 2, "HDFS_CLIENT": 3,
|
||||
" ZOOKEEPER_SERVER": 4, "ZOOKEEPER_CLIENT": 5,
|
||||
"AMBARI_SERVER": 6, "HCAT": 7, "SQOOP": 8,
|
||||
"JT": 9, "TT": 10, "MAPREDUCE_CLIENT": 11, "HIVE_SERVER": 12,
|
||||
"HIVE_METASTORE": 13, "HIVE_CLIENT": 14, "MYSQL_SERVER": 15,
|
||||
"PIG": 16, "WEBHCAT_SERVER": 17, "OOZIE_SERVER": 18,
|
||||
"OOZIE_CLIENT": 19, "GANGLIA_SERVER": 20, "NAGIOS_SERVER": 21,
|
||||
"HBASE_MASTER": 22, "HBASE_REGIONSERVER": 23,
|
||||
"HBASE_CLIENT": 24},
|
||||
help='numbers of processes for hdp in saharadashboard'),
|
||||
default={
|
||||
"NN": 0, "DN": 1, "SNN": 2, "HDFS_CLIENT": 3,
|
||||
" ZOOKEEPER_SERVER": 4, "ZOOKEEPER_CLIENT": 5,
|
||||
"AMBARI_SERVER": 6, "HCAT": 7, "SQOOP": 8,
|
||||
"JT": 9, "TT": 10, "MAPREDUCE_CLIENT": 11,
|
||||
"HIVE_SERVER": 12, "HIVE_METASTORE": 13,
|
||||
"HIVE_CLIENT": 14, "MYSQL_SERVER": 15,
|
||||
"PIG": 16, "WEBHCAT_SERVER": 17, "OOZIE_SERVER": 18,
|
||||
"OOZIE_CLIENT": 19, "GANGLIA_SERVER": 20,
|
||||
"NAGIOS_SERVER": 21, "HBASE_MASTER": 22,
|
||||
"HBASE_REGIONSERVER": 23, "HBASE_CLIENT": 24
|
||||
}, help='numbers of processes for hdp in saharadashboard'),
|
||||
cfg.StrOpt('base_image',
|
||||
default='ib-centos-6-4-64-hdp-13',
|
||||
help="image name for start hdp cluster")
|
||||
|
|
5
tox.ini
5
tox.ini
|
@ -28,11 +28,8 @@ commands = flake8
|
|||
commands = {posargs}
|
||||
|
||||
[flake8]
|
||||
# E226 missing whitespace around arithmetic operator
|
||||
# E251 unexpected spaces around keyword / parameter equals
|
||||
# E265 block comment should start with '# '
|
||||
# H904 Wrap long lines in parentheses instead of a backslash
|
||||
ignore = E226,E251,E265,H904
|
||||
ignore = H904
|
||||
show-source = true
|
||||
builtins = _
|
||||
exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,tools,horizon
|
||||
|
|
Loading…
Reference in New Issue