Enable: E128 continuation line under-indented for visual indent

Change-Id: I27ae9d6e0f3a132f1bdaa86a172adc366f451d09
Closes-Bug: #1398540
This commit is contained in:
Samta Rangare 2014-12-06 18:53:19 +05:30
parent 5ba1af5699
commit d4c76d451a
17 changed files with 76 additions and 80 deletions

View File

@@ -96,10 +96,10 @@ class CeilometerDriver(DataSourceDriver):
{'fieldname': 'generated', 'translator': value_trans},
{'fieldname': 'traits', 'col': 'trait_id',
'translator': {'translation-type': 'VDICT',
'table-name': EVENT_TRAITS,
'id-col': 'trait_id',
'key-col': 'key', 'val-col': 'value',
'translator': value_trans}})}
'table-name': EVENT_TRAITS,
'id-col': 'trait_id',
'key-col': 'key', 'val-col': 'value',
'translator': value_trans}})}
def __init__(self, name='', keys='', inbox=None, datapath=None, args=None):
super(CeilometerDriver, self).__init__(name, keys, inbox,
@@ -115,7 +115,7 @@ class CeilometerDriver(DataSourceDriver):
@classmethod
def get_translators(cls):
return (cls.meters_translator, cls.alarms_translator,
cls.events_translator)
cls.events_translator)
def update_from_datasource(self):
"""Read Data from Ceilometer datasource to fill

View File

@@ -955,7 +955,7 @@ class DataSourceDriver(deepsix.deepSix):
else:
text = runtime.iterstr(result)
self.log("prepush_processor for <%s> returning with %s items",
dataindex, text)
dataindex, text)
return result
def d6run(self):

View File

@@ -109,7 +109,7 @@ class SwiftDriver(DataSourceDriver):
generated from OBJ: CONTAINERS.
"""
row_data = SwiftDriver.convert_objs(obj,
self.containers_translator)
self.containers_translator)
container_tables = (self.CONTAINERS)
self.state[container_tables] = set()
@@ -126,7 +126,7 @@ class SwiftDriver(DataSourceDriver):
generated from OBJ: OBJECTS.
"""
row_data = SwiftDriver.convert_objs(obj,
self.objects_translator)
self.objects_translator)
object_tables = (self.OBJECTS)
self.state[object_tables] = set()

View File

@@ -492,7 +492,7 @@ class deepSix(greenthread.GreenThread):
def publish(self, dataindex, newdata, key=''):
self.log_debug("publishing to dataindex %s with data %s",
dataindex, newdata)
dataindex, newdata)
if dataindex not in self.pubdata:
self.pubdata[dataindex] = pubData(dataindex)

View File

@@ -198,10 +198,10 @@ def load_data_service(service_name, config, cage, rootdir):
module_path = os.path.join(rootdir, module_path)
if module_name not in sys.modules:
LOG.info("Trying to create module %s from %s",
module_name, module_path)
module_name, module_path)
cage.loadModule(module_name, module_path)
LOG.info("Trying to create service %s with module %s",
service_name, module_name)
service_name, module_name)
cage.createservice(name=service_name, moduleName=module_name,
args=config)

View File

@@ -74,7 +74,7 @@ class DseRuntime (runtime.Runtime, deepsix.deepSix):
def receive_data_full(self, msg):
"""Handler for when dataservice publishes full table."""
self.log("received full data msg for %s: %s",
msg.header['dataindex'], runtime.iterstr(msg.body.data))
msg.header['dataindex'], runtime.iterstr(msg.body.data))
literals = []
tablename = msg.header['dataindex']
service = msg.replyTo
@@ -91,12 +91,12 @@ class DseRuntime (runtime.Runtime, deepsix.deepSix):
"Update not permitted." + '\n'.join(str(x) for x in changes))
else:
self.log("full data msg for %s caused %d changes: %s",
tablename, len(changes), runtime.iterstr(changes))
tablename, len(changes), runtime.iterstr(changes))
def receive_data_update(self, msg):
"""Handler for when dataservice publishes a delta."""
self.log("received update data msg for %s: %s",
msg.header['dataindex'], runtime.iterstr(msg.body.data))
msg.header['dataindex'], runtime.iterstr(msg.body.data))
events = msg.body.data
for event in events:
assert compile.is_atom(event.formula), \
@@ -119,7 +119,7 @@ class DseRuntime (runtime.Runtime, deepsix.deepSix):
def receive_policy_update(self, msg):
self.log("received policy-update msg %s",
runtime.iterstr(msg.body.data))
runtime.iterstr(msg.body.data))
# update the policy and subscriptions to data tables.
self.last_policy_change = self.process_policy_update(msg.body.data)
@@ -144,14 +144,14 @@ class DseRuntime (runtime.Runtime, deepsix.deepSix):
add = newtables - oldtables
rem = oldtables - newtables
self.log("Tables:: Old: %s, new: %s, add: %s, rem: %s",
oldtables, newtables, add, rem)
oldtables, newtables, add, rem)
# subscribe to the new tables (loading services as required)
for table in add:
if not self.reserved_tablename(table):
(service, tablename) = parse_tablename(table)
if service is not None:
self.log("Subscribing to new (service, table): (%s, %s)",
service, tablename)
service, tablename)
self.subscribe(service, tablename,
callback=self.receive_data)
@@ -167,5 +167,5 @@ class DseRuntime (runtime.Runtime, deepsix.deepSix):
(service, tablename) = parse_tablename(table)
if service is not None:
self.log("Unsubscribing to new (service, table): (%s, %s)",
service, tablename)
service, tablename)
self.unsubscribe(service, tablename)

View File

@@ -167,7 +167,8 @@ class iterstr(object):
def __getattribute__(self, name):
if self.__interpolated is None:
self.__interpolated = ("[" +
";".join([str(x) for x in self.__iterable]) + "]")
";".join([str(x) for x in self.__iterable])
+ "]")
return getattr(self.__interpolated, name)
@@ -469,8 +470,8 @@ class TopDownTheory(Theory):
# LOG.debug("Top_down_evaluation returned: %s", bindings)
if len(bindings) > 0:
self.log(query.tablename(), "Found answer %s",
"[" + ",".join([str(query.plug(x))
for x in bindings]) + "]")
"[" + ",".join([str(query.plug(x))
for x in bindings]) + "]")
return [query.plug(x) for x in bindings]
def explain(self, query, tablenames, find_all=True):
@@ -1903,7 +1904,7 @@ class MaterializedViewTheory(TopDownTheory):
bindings = self.top_down_evaluation(
delta_rule.variables(), delta_rule.body, binding)
self.log(event.formula.table, "new bindings after top-down: %s",
",".join([str(x) for x in bindings]))
",".join([str(x) for x in bindings]))
if delta_rule.trigger.is_negated():
insert_delete = not event.insert
@@ -2180,7 +2181,7 @@ class Runtime (object):
tablenames = set(tablenames) | formula_tables
self.table_log(None, "Initializing tables %s with %s",
iterstr(tablenames), iterstr(actual_formulas))
iterstr(tablenames), iterstr(actual_formulas))
# implement initialization by computing the requisite
# update.
theory = self.get_target(target)
@@ -2191,7 +2192,7 @@ class Runtime (object):
to_add = [Event(formula_) for formula_ in to_add]
to_rem = [Event(formula_, insert=False) for formula_ in to_rem]
self.table_log(None, "Initialize converted to update with %s and %s",
iterstr(to_add), iterstr(to_rem))
iterstr(to_add), iterstr(to_rem))
return self.update(to_add + to_rem, target=target)
def insert(self, formula, target=None):
@@ -2565,14 +2566,14 @@ class Runtime (object):
# apply SEQUENCE
self.table_log(query.tablename(), "** Simulate: Applying sequence %s",
iterstr(sequence))
iterstr(sequence))
undo = self.project(sequence, theory, action_theory)
# query the resulting state
self.table_log(query.tablename(), "** Simulate: Querying %s", query)
result = th_object.select(query)
self.table_log(query.tablename(), "Result of %s is %s", query,
iterstr(result))
iterstr(result))
# rollback the changes
self.table_log(query.tablename(), "** Simulate: Rolling back")
self.project(undo, theory, action_theory)
@@ -2614,7 +2615,7 @@ class Runtime (object):
computes that rerouting. Returns a Theory object.
"""
self.table_log(None, "Computing route for theory %s and events %s",
theory.name, iterstr(events))
theory.name, iterstr(events))
# Since Enforcement includes Classify and Classify includes Database,
# any operation on data needs to be funneled into Enforcement.
# Enforcement pushes it down to the others and then
@@ -2684,7 +2685,7 @@ class Runtime (object):
# instantiate action using prior results
newth.define(last_results)
self.table_log(tablename, "newth (with prior results) %s",
iterstr(newth.content()))
iterstr(newth.content()))
bindings = actth.top_down_evaluation(
formula.variables(), formula.body, find_all=False)
if len(bindings) == 0:
@@ -2694,8 +2695,8 @@ class Runtime (object):
assert all(not lit.is_negated() for lit in grounds)
newth.define(grounds)
self.table_log(tablename,
"newth contents (after action insertion): %s",
iterstr(newth.content()))
"newth contents (after action insertion): %s",
iterstr(newth.content()))
# self.table_log(tablename, "action contents: %s",
# iterstr(actth.content()))
# self.table_log(tablename, "action.includes[1] contents: %s",

View File

@@ -164,7 +164,7 @@ def binding_str(binding):
"""Handles string conversion of either dictionary or Unifier."""
if isinstance(binding, dict):
s = ",".join(["{}: {}".format(str(var), str(val))
for var, val in binding.iteritems()])
for var, val in binding.iteritems()])
return '{' + s + '}'
else:
return str(binding)

View File

@@ -83,8 +83,8 @@ class TestDatasourceDriver(base.TestCase):
'selector-type': 'DICT_SELECTOR',
'field-translators':
({'fieldname': 'thing', 'translator': self.val_trans},
{'fieldname': 'level3',
'translator': level3_translator})}
{'fieldname': 'level3',
'translator': level3_translator})}
level1_translator = {
'translation-type': 'HDICT',

View File

@@ -67,22 +67,22 @@ class TestSwiftDriver(base.TestCase):
if object_list[0][5] == 'container1':
self.assertEqual(('2200', '2014-11-06T05:40:34.052100',
'9204776814ca62c92c7996de725ecc6b', 'file-1',
'application/octet-stream',
'container1'), object_list[0])
'9204776814ca62c92c7996de725ecc6b', 'file-1',
'application/octet-stream',
'container1'), object_list[0])
self.assertEqual(('2350', '2014-11-06T05:39:57.424800',
'c2b86044dd50a29d60c0e92e23e3ceea', 'file-2',
'application/octet-stream',
'container2'), object_list[1])
'c2b86044dd50a29d60c0e92e23e3ceea', 'file-2',
'application/octet-stream',
'container2'), object_list[1])
if object_list[1][5] == 'container1':
self.assertEqual(('2200', '2014-11-06T05:40:34.052100',
'9204776814ca62c92c7996de725ecc6b', 'file-1',
'application/octet-stream',
'container1'), object_list[1])
'9204776814ca62c92c7996de725ecc6b', 'file-1',
'application/octet-stream',
'container1'), object_list[1])
self.assertEqual(('2350', '2014-11-06T05:39:57.424800',
'c2b86044dd50a29d60c0e92e23e3ceea', 'file-2',
'application/octet-stream',
'container2'), object_list[0])
'c2b86044dd50a29d60c0e92e23e3ceea', 'file-2',
'application/octet-stream',
'container2'), object_list[0])

View File

@@ -161,9 +161,9 @@ class TestRuntime(base.TestCase):
def showdb(self, run):
LOG.debug("Resulting DB: %s",
run.theory[run.CLASSIFY_THEORY].database |
run.theory[run.DATABASE] |
run.theory[run.ENFORCEMENT_THEORY].database)
run.theory[run.CLASSIFY_THEORY].database |
run.theory[run.DATABASE] |
run.theory[run.ENFORCEMENT_THEORY].database)
def insert(self, run, alist, target=None):
if target is None:

View File

@@ -206,8 +206,8 @@ class TestReorder(base.TestCase):
"Cycle")
self.check_err("p(x) :- not q(x, y), r(y)",
["q(x, y)"],
"Partially safe")
["q(x, y)"],
"Partially safe")
def test_reorder_builtins_negation(self):
self.check("p(x) :- not q(z), plus(x, y, z), s(x), s(y)",
@@ -222,8 +222,8 @@ class TestReorder(base.TestCase):
def test_unsafe_builtins_negation(self):
self.check_err("p(x) :- plus(x, y, z), not q(x, y)",
['plus(x,y,z)', 'q(x,y)'],
'Unsafe cycle')
['plus(x,y,z)', 'q(x,y)'],
'Unsafe cycle')
self.check_err("p(x) :- plus(x, y, z), plus(z, w, t), not q(z, t),"
" s(x), t(y)",

View File

@@ -360,8 +360,8 @@ class TestCompiler(base.TestCase):
rule = compile.parse1(code_string)
errs = f(rule, run.theory)
self.assertTrue(any(emsg in str(err) for err in errs),
msg + ":: Failed to find error message '" + emsg +
"' in: " + ";".join(str(e) for e in errs))
msg + ":: Failed to find error message '" + emsg +
"' in: " + ";".join(str(e) for e in errs))
# no errors
rule = compile.parse1('p(x) :- q(x), mod1:p(x, y, z), mod2:q(x, y), '

View File

@@ -64,7 +64,7 @@ class TestRuntime(base.TestCase):
def showdb(self, run):
LOG.debug("Resulting DB: %s",
run.theory[MAT_THEORY].database | run.theory[DB_THEORY])
run.theory[MAT_THEORY].database | run.theory[DB_THEORY])
def test_database(self):
"""Test Database with insert/delete."""

View File

@@ -122,8 +122,8 @@ class TestRuntime(base.TestCase):
self.assertEqual(
run.select('p(x)', 'test1'), 'p(1)', 'Policy creation')
run.delete_policy('test1')
self.assertEqual(
set(run.get_policy_names()), set(original), 'Policy deletion')
self.assertEqual(set(run.get_policy_names()),
set(original), 'Policy deletion')
def test_multi_policy(self):
"""Test ability to create/delete multiple policies."""
@@ -153,25 +153,21 @@ class TestRuntime(base.TestCase):
# policy types
run = runtime.Runtime()
run.create_policy('test1', kind=run.NONRECURSIVE_POLICY_TYPE)
self.assertTrue(
isinstance(run.get_policy('test1'),
runtime.NonrecursiveRuleTheory),
'Nonrecursive policy addition')
self.assertTrue(isinstance(run.get_policy('test1'),
runtime.NonrecursiveRuleTheory),
'Nonrecursive policy addition')
run.create_policy('test2', kind=run.ACTION_POLICY_TYPE)
self.assertTrue(
isinstance(run.get_policy('test2'),
runtime.ActionTheory),
'Action policy addition')
self.assertTrue(isinstance(run.get_policy('test2'),
runtime.ActionTheory),
'Action policy addition')
run.create_policy('test3', kind=run.DATABASE_POLICY_TYPE)
self.assertTrue(
isinstance(run.get_policy('test3'),
runtime.Database),
'Database policy addition')
self.assertTrue(isinstance(run.get_policy('test3'),
runtime.Database),
'Database policy addition')
run.create_policy('test4', kind=run.MATERIALIZED_POLICY_TYPE)
self.assertTrue(
isinstance(run.get_policy('test4'),
runtime.MaterializedViewTheory),
'Materialized policy addition')
self.assertTrue(isinstance(run.get_policy('test4'),
runtime.MaterializedViewTheory),
'Materialized policy addition')
def test_policy_errors(self):
"""Test errors for multiple policies."""

View File

@@ -48,7 +48,7 @@ class TestUnify(base.TestCase):
LOG.debug("unifier2: %s", str_uni(unifier2))
if changes is not None:
LOG.debug("changes: %s",
";".join([str(x) for x in changes]))
";".join([str(x) for x in changes]))
if msg is not None:
self.open(msg)
@@ -68,17 +68,17 @@ class TestUnify(base.TestCase):
print_unifiers(changes)
if not p1p == p2p:
LOG.debug("Failure: bi-unify(%s, %s) produced %s and %s",
p1, p2, str_uni(unifier1), str_uni(unifier2))
p1, p2, str_uni(unifier1), str_uni(unifier2))
LOG.debug("plug(%s, %s) = %s", p1, str_uni(unifier1), p1p)
LOG.debug("plug(%s, %s) = %s", p2, str_uni(unifier2), p2p)
self.fail()
if change_num is not None and len(changes) != change_num:
LOG.debug("Failure: bi-unify(%s, %s) produced %s and %s",
p1, p2, str_uni(unifier1), str_uni(unifier2))
p1, p2, str_uni(unifier1), str_uni(unifier2))
LOG.debug("plug(%s, %s) = %s", p1, str_uni(unifier1), p1p)
LOG.debug("plug(%s, %s) = %s", p2, str_uni(unifier2), p2p)
LOG.debug("Expected %s changes; computed %s changes",
change_num, len(changes))
change_num, len(changes))
self.fail()
LOG.debug("unifier1: %s", str_uni(unifier1))
LOG.debug("unifier2: %s", str_uni(unifier2))
@@ -107,7 +107,7 @@ class TestUnify(base.TestCase):
changes = unify.bi_unify_atoms(p1, unifier1, p2, unifier2)
if changes is not None:
LOG.debug("Failure failure: bi-unify(%s, %s) produced %s and %s",
p1, p2, unifier1, unifier2)
p1, p2, unifier1, unifier2)
LOG.debug("plug(%s, %s) = %s", p1, unifier1, p1.plug(unifier1))
LOG.debug("plug(%s, %s) = %s", p2, unifier2, p2.plug(unifier2))
self.fail()

View File

@@ -34,7 +34,6 @@ commands = python setup.py testr --no-parallel --testr-args='test_benchmark {pos
testr slowest --all
[flake8]
# E128 continuation line under-indented for visual indent
# E129 visually indented line with same indent as next logical line
# F402 import module shadowed by loop variable
# H237 module is removed in Python 3
@@ -46,6 +45,6 @@ commands = python setup.py testr --no-parallel --testr-args='test_benchmark {pos
show-source = True
ignore = E128,E129,F402,H237,H305,H405,H904,H302
ignore = E129,F402,H237,H305,H405,H904,H302
builtins = _
exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build,*thirdparty/*,CongressLexer.py,CongressParser.py