Enable E265 check

E265 block comment should start with '# '

Change-Id: I53a81e7fa0c4bfadb3e5e4d54bda06703583ec9f
Aaron Rosen 2014-10-09 22:13:16 -07:00
parent b1380ebf26
commit dda28fc8af
11 changed files with 67 additions and 76 deletions
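
For reference, E265 is the pycodestyle/hacking rule that a block comment must begin with "# " (a hash followed by a space). The snippet below is only an illustration of what the checker rejects and accepts, not code from this change; running the project's usual pep8 job (e.g. `tox -e pep8` or a plain `flake8`, assumed here) is how a tree would be verified once the check is enabled.

    #bad: flagged by E265 because there is no space after the hash
    # good: block comment starting with "# "
    x = 1  # inline comments like this one are governed by E262, not E265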

View File

@@ -46,7 +46,7 @@ class ApiApplication(object):
             msg = _("Handling request '%(meth)s %(path)s' with %(hndlr)s")
             LOG.debug(msg % {"meth": request.method, "path": request.path,
                              "hndlr": str(handler)})
-            #TODO(pballand): validation
+            # TODO(pballand): validation
             response = handler.handle_request(request)
         else:
             response = NOT_FOUND_RESPONSE

View File

@@ -202,9 +202,9 @@ class CeilometerDriver(DataSourceDriver):
             threshold_rule_id = str(uuid.uuid1())
             for s, t in v.items():
                 if type(t) != type(list()) and type(t) != type(dict()):
-                    #FIXME(madhumohan): Dirty workaround. A cleaner
-                    #approach is required to handled None object in the
-                    #data
+                    # FIXME(madhumohan): Dirty workaround. A cleaner
+                    # approach is required to handled None object in
+                    # the data
                     if t is None:
                         t = 'None'
                     row_thres_tuple = (threshold_rule_id, s, t)
@@ -231,9 +231,9 @@ class CeilometerDriver(DataSourceDriver):
         max_event_index = max(key_to_index.values()) + 1
         t_list = []
         t_trait_list = []
-        #TODO(madhumohan): Need a modular implementation of the below loop for
-        #better readability and maintainability. Also for flexible translation
-        #all types of nested datastructure in the data.
+        # TODO(madhumohan): Need a modular implementation of the below loop for
+        # better readability and maintainability. Also for flexible translation
+        # all types of nested datastructure in the data.
         for k in obj:
             if type(k) != type(dict()):
                 k_dict = k.to_dict()
@@ -247,19 +247,19 @@ class CeilometerDriver(DataSourceDriver):
                 if trait['name'] == 'payload':
                     t_dict = eval(trait['value'])
                     for s, t in t_dict.items():
-                        #FIXME(madhumohan): Dictionary items within the payload are
-                        #handled as additional fields in the payload
-                        #table. Need a better way to handle
-                        #dictionaries or other structures within payload
-                        #Nested dictionaries in the payload are skipped
-                        #Lists within the dictionaries are also ignored
+                        # FIXME(madhumohan): Dictionary items within the payload
+                        # are handled as additional fields in the payload
+                        # table. Need a better way to handle
+                        # dictionaries or other structures within payload
+                        # Nested dictionaries in the payload are skipped
+                        # Lists within the dictionaries are also ignored
                         if type(t) == type(dict()):
                             for n, m in t.items():
                                 if type(m) != type(dict()) and \
                                         type(m) != type(list()):
-                                    #FIXME(madhumohan): Dirty workaround. A cleaner
-                                    #approach is required to handled None object in the
-                                    #data
+                                    # FIXME(madhumohan): Dirty workaround. A cleaner
+                                    # approach is required to handled None object in
+                                    # the data
                                     if m is None:
                                         m = 'None'
                                     row_trait_tuple = \
@@ -268,9 +268,9 @@ class CeilometerDriver(DataSourceDriver):
                                         row_trait_tuple)
                         else:
                             if type(t) != type(list()):
-                                #FIXME(madhumohan): Dirty workaround. A cleaner
-                                #approach is required to handled None object in
-                                #the data
+                                # FIXME(madhumohan): Dirty workaround. A cleaner
+                                # approach is required to handled None object in
+                                # the data
                                 if t is None:
                                     t = 'None'
                                 row_trait_tuple = (trait_id, s, t)
@@ -298,7 +298,7 @@ def main():
     print "Tuple Names : " + str(driver.get_tuple_names())
     print ("Tuple Metadata - : " +
            str(CeilometerDriver.get_schema()))
-    #sync with the ceilometer service
+    # sync with the ceilometer service
    driver.update_from_datasource()
     print "Meters: %s" % driver.get_all(driver.METERS)
     print "Alarms: %s" % driver.get_all(driver.ALARMS)

View File

@@ -56,7 +56,7 @@ class TestCeilometerDriver(base.TestCase):
         self.assertIsNotNone(meter_list)
         self.assertEqual(2, len(meter_list))
-        #Verifying individual tuple data
+        # Verifying individual tuple data
         self.assertEqual(('aW5zdGFuY2UtMDAwMDAwMDEtYTI1N2JhMT',
                           'network.incoming.bytes',
                           'cumulative',
@@ -128,7 +128,7 @@ class TestCeilometerDriver(base.TestCase):
         thresh_rule_id1 = alarm_threshold_rule[0][0]
         thresh_rule_id2 = alarm_threshold_rule[3][0]
-        #Verifying individual tuple data
+        # Verifying individual tuple data
         self.assertEqual(('d1b2b7a7-9512-4290-97ca-2580ed72c375',
                           'cpu_high', 'insufficient data', 'True',
                           thresh_rule_id1,
@@ -175,7 +175,7 @@ class TestCeilometerDriver(base.TestCase):
         event_trait1 = event_trait_list[0][0]
         event_trait2 = event_trait_list[3][0]
-        #check an individual user entry
+        # check an individual user entry
         self.assertEqual(('6834861c-ccb3-4c6f-ac00-fe8fe1ad4ed4',
                           'image.create',
                           '2014-09-29T08:19:45.556301',

View File

@@ -251,7 +251,7 @@ class TestNeutronDriver(base.TestCase):
                          sec_grp[d['id']])
-#### Tests for DataSourceDriver
+# Tests for DataSourceDriver
 # Note: these tests are really testing the functionality of the class
 # DataSourceDriver, but it's useful to use an actual subclass so
 # we can test the functionality end-to-end. We use Neutron for
@@ -656,7 +656,7 @@ network2 = {'networks': [
     'provider:segmentation_id': 4}]}
-## Sample responses from neutron-client, after parsing
+# Sample responses from neutron-client, after parsing
 network_response = \
     {'networks':
      [{'status': 'ACTIVE',

View File

@@ -43,7 +43,7 @@ class CongressException (Exception):
 ##############################################################################
-## Internal representation of policy language
+# Internal representation of policy language
 ##############################################################################
 class Location (object):
@@ -775,7 +775,7 @@ def is_extended_datalog(x):
 ##############################################################################
-## Compiler
+# Compiler
 ##############################################################################
@@ -826,7 +826,7 @@ class Compiler (object):
 ##############################################################################
-## External syntax: datalog
+# External syntax: datalog
 ##############################################################################
 class CongressSyntax(object):
@@ -1011,7 +1011,7 @@ def print_tree(tree, text, kids, ind=0):
 ##############################################################################
-## Mains
+# Mains
 ##############################################################################
 def parse(policy_string):

View File

@@ -21,7 +21,7 @@ from unify import bi_unify_lists
 from builtin.congressbuiltin import CongressBuiltinCategoryMap as cbcmap
 from builtin.congressbuiltin import start_builtin_map as initbuiltin
-#FIXME there is a circular import here because compile.py imports runtime.py
+# FIXME there is a circular import here because compile.py imports runtime.py
 import compile
 from congress.openstack.common import log as logging
 import unify
@@ -87,7 +87,7 @@ class ExecutionLogger(object):
 ##############################################################################
-## Events
+# Events
 ##############################################################################
 class EventQueue(object):
@@ -167,7 +167,7 @@ def string_to_database(string):
 ##############################################################################
-## Logical Building Blocks
+# Logical Building Blocks
 ##############################################################################
 class Proof(object):
@@ -237,7 +237,7 @@ class DeltaRule(object):
 ##############################################################################
-## Abstract Theories
+# Abstract Theories
 ##############################################################################
 class Theory(object):
@@ -390,7 +390,7 @@ class TopDownTheory(Theory):
                 iterstr(self.support)))
     #########################################
-    ## External interface
+    # External interface
     def __init__(self, name=None, abbr=None):
         super(TopDownTheory, self).__init__(name=name, abbr=abbr)
@@ -523,7 +523,7 @@ class TopDownTheory(Theory):
         return list(set(caller.results))
     #########################################
-    ## Internal implementation
+    # Internal implementation
     def top_down_eval(self, context, caller):
         """Compute all instances of LITERALS (from LITERAL_INDEX and above)
@@ -751,7 +751,7 @@ class TopDownTheory(Theory):
         return False
     #########################################
-    ## Routines for specialization
+    # Routines for specialization
     @classmethod
     def new_bi_unifier(cls, dictionary=None):
@@ -811,7 +811,7 @@ class TopDownTheory(Theory):
 ##############################################################################
-## Concrete Theory: Database
+# Concrete Theory: Database
 ##############################################################################
@@ -1120,21 +1120,14 @@ class Database(TopDownTheory):
             self.log(atom.table, "First tuple in table {}".format(table))
             return
         else:
-            #self.log(table, "Not first tuple in table {}".format(table))
             for existingtuple in self.data[table]:
                 assert(existingtuple.proofs is not None)
                 if existingtuple.tuple == dbtuple.tuple:
-                    # self.log(table, "Found existing tuple: {}".format(
-                    #     str(existingtuple)))
                     assert(existingtuple.proofs is not None)
                     existingtuple.proofs |= dbtuple.proofs
-                    # self.log(table,
-                    #     "Updated tuple: {}".format(str(existingtuple)))
                     assert(existingtuple.proofs is not None)
                     return
             self.data[table].append(dbtuple)
-            #self.log(table, "current contents of {}: {}".format(table,
-            #    iterstr(self.data[table])))
     def delete_actual(self, atom, proofs=None):
         """Workhorse for deleting ATOM from the DB, along with the proofs
@@ -1147,7 +1140,6 @@ class Database(TopDownTheory):
             return
         for i in xrange(0, len(self.data[table])):
             existingtuple = self.data[table][i]
-            #self.log(table, "Checking tuple {}".format(str(existingtuple)))
             if existingtuple.tuple == dbtuple.tuple:
                 existingtuple.proofs -= dbtuple.proofs
                 if len(existingtuple.proofs) == 0:
@@ -1175,7 +1167,7 @@ class Database(TopDownTheory):
 ##############################################################################
-## Concrete Theories: other
+# Concrete Theories: other
 ##############################################################################
 class NonrecursiveRuleTheory(TopDownTheory):
@@ -1189,7 +1181,7 @@ class NonrecursiveRuleTheory(TopDownTheory):
         for rule in rules:
             self.insert(rule)
-    ############### External Interface ###############
+    # External Interface
     # SELECT implemented by TopDownTheory
@@ -1267,7 +1259,7 @@ class NonrecursiveRuleTheory(TopDownTheory):
             return None
         return len(self.contents[tablename][0].head.arguments)
-    ############### Internal Interface ###############
+    # Internal Interface
     def insert_actual(self, rule):
         """Insert RULE and return True if there was a change.
@@ -1336,7 +1328,7 @@ class ActionTheory(NonrecursiveRuleTheory):
         # Would like to mark some tables as only being defined
         # for certain bound/free arguments and take that into
         # account when doing error checking.
-        #errors.extend(compile.rule_negation_safety(event.formula))
+        # errors.extend(compile.rule_negation_safety(event.formula))
         if event.insert:
             current.add(event.formula)
         else:
@@ -1613,7 +1605,7 @@ class MaterializedViewTheory(TopDownTheory):
                 'database': self.database.tracer,
                 'delta_rules': self.delta_rules.tracer}
-    ############### External Interface ###############
+    # External Interface
     # SELECT is handled by TopDownTheory
@@ -1682,7 +1674,7 @@ class MaterializedViewTheory(TopDownTheory):
             return result
         return self.delta_rules.get_arity_self(tablename)
-    ############### Interface implementation ###############
+    # Interface implementation
     def explain_aux(self, query, depth):
         self.log(query.table, "Explaining {}".format(str(query)), depth)
@@ -1875,7 +1867,7 @@ class MaterializedViewTheory(TopDownTheory):
 ##############################################################################
-## Runtime
+# Runtime
 ##############################################################################
 class Runtime (object):
@@ -1954,7 +1946,7 @@ class Runtime (object):
         tracer = Tracer()
         self.set_tracer(tracer)
-    ############### External interface ###############
+    # External interface
     def dump_dir(self, path):
         """Dump each theory into its own file within the
         directory PATH. The name of the file is the name of
@@ -2179,13 +2171,13 @@ class Runtime (object):
     def reserved_tablename(self, name):
         return name.startswith('___')
-    ############### Internal interface ###############
-    ## Translate different representations of formulas into
-    ## the compiler's internal representation and then invoke
-    ## appropriate theory's version of the API.
-    ## Arguments that are strings are suffixed with _string.
-    ## All other arguments are instances of Theory, Literal, etc.
+    # Internal interface
+    # Translate different representations of formulas into
+    # the compiler's internal representation and then invoke
+    # appropriate theory's version of the API.
+    # Arguments that are strings are suffixed with _string.
+    # All other arguments are instances of Theory, Literal, etc.
     ###################################
     # Update policies and data.
@@ -2450,7 +2442,7 @@ class Runtime (object):
             return (result, tracer.get_value())
         return result
-    ############### Helpers ###############
+    # Helpers
     def react_to_changes(self, changes):
         """Filters changes and executes actions contained therein."""
@@ -2509,7 +2501,7 @@ class Runtime (object):
         newth = NonrecursiveRuleTheory(abbr="Temp")
         newth.tracer.trace('*')
         actth.includes.append(newth)
-        #TODO(thinrichs): turn 'includes' into an object that guarantees
+        # TODO(thinrichs): turn 'includes' into an object that guarantees
         # there are no cycles through inclusion. Otherwise we get
         # infinite loops
         if actth is not policyth:

View File

@@ -317,7 +317,7 @@ class TestRuntime(unittest.TestCase):
                           "\n".join([str(x) for x in errs])))
                 return
-        #### Ports
+        # Ports
         query = 'neutron:port(x1, x2, x3, x4, x5, x6, x7, x8, x9)'
         acts = 'neutron:create_port("net1", 17), sys:user("tim") :- true'
         correct = ('neutron:port(id, "net1", name, mac, "null",'
@@ -345,7 +345,7 @@ class TestRuntime(unittest.TestCase):
         correct = ''
         check(query, acts, correct, 'Port create, update, delete')
-        #### Networks
+        # Networks
         query = ('neutron:network(id, name, status, admin_state, shared,'
                  'tenenant_id)')
         acts = 'neutron:create_network(17), sys:user("tim") :- true'
@@ -369,7 +369,7 @@ class TestRuntime(unittest.TestCase):
         correct = ''
         check(query, acts, correct, 'Network creation, update')
-        #### Subnets
+        # Subnets
        query = ('neutron:subnet(id, name, network_id, '
                 'gateway_ip, ip_version, cidr, enable_dhcp, tenant_id)')
        acts = ('neutron:create_subnet("net1", "10.0.0.1/24", 17), '

View File

@@ -15,7 +15,7 @@
 import uuid
-#FIXME there is a circular import here because compile.py imports unify.py
+# FIXME there is a circular import here because compile.py imports unify.py
 import compile
 from congress.openstack.common import log as logging

View File

@@ -67,7 +67,7 @@ def create_api_server(conf, name, host, port, workers):
 def serve(*servers):
     if max([server[1].workers for server in servers]) > 1:
-        #TODO(arosen) - need to provide way to communicate with DSE services
+        # TODO(arosen) - need to provide way to communicate with DSE services
         launcher = service.ProcessLauncher()
     else:
         launcher = service.ServiceLauncher()
@@ -164,14 +164,14 @@ def initialize_resources(resource_mgr, cage):
     resource_mgr.register_handler(ds_element_handler)
     # TODO(pballand) register models for schema and status
-    #schema_path = "%s/schema" % ds_path
-    #schema_element_handler = ElementHandler(schema_path, XXX,
-    #                                        "schema")
-    #resource_mgr.register_handler(schema_element_handler)
-    #status_path = "%s/status" % ds_path
-    #status_element_handler = ElementHandler(status_path, XXX,
-    #                                        "status")
-    #resource_mgr.register_handler(status_element_handler)
+    # schema_path = "%s/schema" % ds_path
+    # schema_element_handler = ElementHandler(schema_path, XXX,
+    #                                         "schema")
+    # resource_mgr.register_handler(schema_element_handler)
+    # status_path = "%s/status" % ds_path
+    # status_element_handler = ElementHandler(status_path, XXX,
+    #                                         "status")
+    # resource_mgr.register_handler(status_element_handler)
     tables = cage.service_object('api-table')
     resource_mgr.register_model('tables', tables)

View File

@@ -28,7 +28,7 @@ class ServerTest(testtools.TestCase):
         super(ServerTest, self).setUp()
         self.host = '127.0.0.1'
         self.port = '1234'
-        #FIXME(arosen) - we need to inherit from a base class that does this.
+        # FIXME(arosen) - we need to inherit from a base class that does this.
         config.setup_logging()
     @mock.patch('eventlet.listen')

View File

@@ -28,7 +28,6 @@ commands = python setup.py testr --coverage --testr-args='{posargs}'
 # E128 continuation line under-indented for visual indent
 # E129 visually indented line with same indent as next logical line
 # E251 unexpected spaces around keyword / parameter equals
-# E265 block comment should start with #
 # E713 test for membership should be not in
 # F402 import module shadowed by loop variable
 # F811 redefinition of unused variable
@@ -52,6 +51,6 @@ commands = python setup.py testr --coverage --testr-args='{posargs}'
 show-source = True
-ignore = E125,E126,E128,E129,E251,E265,E713,F402,F811,F812,H237,H305,H307,H401,H402,H404,H405,H904,H302,H231,E122,E113,H301,H233,E112,E303
+ignore = E125,E126,E128,E129,E251,E713,F402,F811,F812,H237,H305,H307,H401,H402,H404,H405,H904,H302,H231,E122,E113,H301,H233,E112,E303
 builtins = _
 exclude=.venv,.git,.tox,dist,doc,*openstack/common*,*lib/python*,*egg,build,*thirdparty/*,CongressLexer.py,CongressParser.py
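
Taken together, the two tox.ini hunks remove both the explanatory comment for E265 and the E265 entry in `ignore`, so the pep8 job now enforces the rule tree-wide. For a quick manual audit of a branch, the rough line scan below approximates the check; it is only a sketch (flake8 remains the authority, and this scan knows nothing about strings, encodings, or multi-hash comments beyond skipping them):

    import re
    import sys

    # Very rough stand-in for pep8's E265: flag block comments whose hash is
    # not followed by a space.  Bare "#", "#!", "#:" and "##..." lines are
    # skipped, roughly mirroring what pep8 exempts or reports under other codes.
    E265ISH = re.compile(r'^\s*#(?![ !:#]|$)')


    def scan(path):
        with open(path) as source:
            for lineno, line in enumerate(source, 1):
                if E265ISH.match(line):
                    print("%s:%d: %s" % (path, lineno, line.rstrip()))


    if __name__ == '__main__':
        for filename in sys.argv[1:]:
            scan(filename)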