Enable pep8 on ./tools directory

This patch fixes the pep8 issues in the tool scripts and
configures tox to run pep8 on the tools directory.

Change-Id: Ifed21e19dd2b382790a1e2a90d5153a8845c4b64
Balazs Gibizer 2014-11-24 16:01:03 +01:00
parent 30824df1a4
commit 062ac3313d
6 changed files with 31 additions and 32 deletions

View File

@@ -17,13 +17,14 @@
"""pylint error checking."""
from __future__ import print_function
import json
import re
import sys
from pylint import lint
from pylint.reporters import text
from six.moves import cStringIO as StringIO
from six.moves import cStringIO as StringIO # noqa
# These variables will be useful if we will need to skip some pylint checks
ignore_codes = []
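
Note: the two import changes above are typical groundwork for this kind of cleanup. `from __future__ import print_function` lets the Python 2 interpreter accept print() calls, and the `# noqa` marker asks flake8 to skip whatever check would otherwise fire on that import line (likely the hacking rule about importing an object rather than a module). A minimal sketch, illustrative only and not part of the patch:

from __future__ import print_function  # makes print a function on Python 2

import sys

# With the future import in place, the same call syntax works on
# Python 2 and Python 3, including keyword arguments such as file=.
print("pep8 ready", file=sys.stderr)
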
@@ -65,8 +66,9 @@ class LintOutput(object):
@classmethod
def from_msg_to_dict(cls, msg):
"""From the output of pylint msg, to a dict, where each key
is a unique error identifier, value is a list of LintOutput
"""From the output of pylint msg, to a dict.
Each key is a unique error identifier, value is a list of LintOutput
"""
result = {}
for line in msg.splitlines():
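
Note: the docstring edit follows the usual pep257/hacking convention: the first line is a short, self-contained summary ending with a period, and the details move to the lines below it. A hedged sketch of the resulting shape (a plain function standing in for the real classmethod):

def from_msg_to_dict(msg):
    """Convert pylint output into a dict.

    Each key is a unique error identifier; each value is a list of
    LintOutput entries for that identifier.
    """
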
@@ -108,9 +110,9 @@ class ErrorKeys(object):
@classmethod
def print_json(cls, errors, output=sys.stdout):
print >>output, "# automatically generated by tools/lintstack.py"
print("# automatically generated by tools/lintstack.py", file=output)
for i in sorted(errors.keys()):
print >>output, json.dumps(i)
print(json.dumps(i), file=output)
@classmethod
def from_file(cls, filename):
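
Note: the `print >>output, ...` chevron syntax is Python 2 only; once print_function is imported, the equivalent is the file= keyword argument. A small sketch of the two forms, assuming output is any writable file-like object:

from __future__ import print_function

import sys

output = sys.stdout
# Python 2 statement form (no longer valid once print is a function):
#     print >>output, "text"
# Function form, valid on both Python 2 and 3:
print("text", file=output)
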
@@ -134,7 +136,7 @@ def run_pylint():
def generate_error_keys(msg=None):
print "Generating", KNOWN_PYLINT_EXCEPTIONS_FILE
print("Generating", KNOWN_PYLINT_EXCEPTIONS_FILE)
if msg is None:
msg = run_pylint()
errors = LintOutput.from_msg_to_dict(msg)
@@ -143,41 +145,41 @@ def generate_error_keys(msg=None):
def validate(newmsg=None):
print "Loading", KNOWN_PYLINT_EXCEPTIONS_FILE
print("Loading", KNOWN_PYLINT_EXCEPTIONS_FILE)
known = ErrorKeys.from_file(KNOWN_PYLINT_EXCEPTIONS_FILE)
if newmsg is None:
print "Running pylint. Be patient..."
print("Running pylint. Be patient...")
newmsg = run_pylint()
errors = LintOutput.from_msg_to_dict(newmsg)
print ("Unique errors reported by pylint: was %d, now %d."
% (len(known), len(errors)))
print("Unique errors reported by pylint: was %d, now %d."
% (len(known), len(errors)))
passed = True
for err_key, err_list in errors.items():
for err in err_list:
if err_key not in known:
print err.lintoutput
print
print(err.lintoutput)
print()
passed = False
if passed:
print "Congrats! pylint check passed."
print("Congrats! pylint check passed.")
redundant = known - set(errors.keys())
if redundant:
print "Extra credit: some known pylint exceptions disappeared."
print("Extra credit: some known pylint exceptions disappeared.")
for i in sorted(redundant):
print json.dumps(i)
print "Consider regenerating the exception file if you will."
print(json.dumps(i))
print("Consider regenerating the exception file if you will.")
else:
print ("Please fix the errors above. If you believe they are false"
" positives, run 'tools/lintstack.py generate' to overwrite.")
print("Please fix the errors above. If you believe they are false"
" positives, run 'tools/lintstack.py generate' to overwrite.")
sys.exit(1)
def usage():
print """Usage: tools/lintstack.py [generate|validate]
print("""Usage: tools/lintstack.py [generate|validate]
To generate pylint_exceptions file: tools/lintstack.py generate
To validate the current commit: tools/lintstack.py
"""
""")
def main():
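
Note: the remaining hunks in this file are the same mechanical print-statement-to-function conversion; the one extra wrinkle is that `print (...)` with a space before the parenthesis becomes `print(...)`, since with print as a function the space reads as whitespace before a call's opening bracket (E211). A sketch of the wrapped-call style used above, with the continuation aligned under the opening parenthesis and illustrative counts only:

from __future__ import print_function

known, errors = 10, 7  # illustrative values, not real pylint output
print("Unique errors reported by pylint: was %d, now %d."
      % (known, errors))
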

View File

@@ -56,7 +56,6 @@ def make_test_data(conn, name, meter_type, unit, volume, random_min,
increment = datetime.timedelta(minutes=interval)
print('Adding new events for meter %s.' % (name))
# Generate events
n = 0
@@ -70,7 +69,6 @@ def make_test_data(conn, name, meter_type, unit, volume, random_min,
else:
total_volume += random.uniform(random_min, random_max)
c = sample.Sample(name=name,
type=meter_type,
unit=unit,
@@ -191,7 +189,7 @@ def main():
start = datetime.datetime.utcnow() - datetime.timedelta(days=args.start)
end = datetime.datetime.utcnow() + datetime.timedelta(days=args.end)
make_test_data(conn = conn,
make_test_data(conn=conn,
name=args.counter,
meter_type=args.type,
unit=args.unit,
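
Note: the only pep8 change in this hunk is dropping the spaces around the keyword argument: `conn = conn` becomes `conn=conn`, per pycodestyle's rule against spaces around '=' in keyword arguments (E251). A tiny sketch, with a hypothetical helper standing in for make_test_data:

def make_data(conn=None, name=None):
    """Hypothetical stand-in for the real make_test_data helper."""
    return conn, name

# Flagged by pep8 (E251):  make_data(conn = "db", name = "cpu")
make_data(conn="db", name="cpu")
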

View File

@@ -56,7 +56,7 @@ def make_test_data(conn, start, end, interval, event_types):
data = []
for i in range(event_types):
traits = [models.Trait('id1_%d' % i, 1, str(uuid.uuid4())),
models.Trait('id2_%d' % i, 2, random.randint(1,10)),
models.Trait('id2_%d' % i, 2, random.randint(1, 10)),
models.Trait('id3_%d' % i, 3, random.random()),
models.Trait('id4_%d' % i, 4, timestamp)]
data.append(models.Event(str(uuid.uuid4()),
@@ -121,8 +121,7 @@ def main():
start=start,
end=end,
interval=args.interval,
event_types=args.event_types
)
event_types=args.event_types)
if __name__ == '__main__':
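
Note: two small fixes in this file: a space after the comma in `random.randint(1, 10)` (pycodestyle E231) and folding the lone closing parenthesis onto the last argument line. A brief sketch of the resulting call style, with placeholder values:

import random

value = random.randint(1, 10)   # was randint(1,10): E231, space needed after ','

# Closing parenthesis kept on the final argument line rather than on its own:
result = dict(start=0,
              end=10,
              interval=1,
              event_types=value)
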

View File

@@ -53,9 +53,8 @@ def show_resources(db, args):
value = totals[0]['max']
else:
value = totals[0]['sum']
print(' %s (%s): %s' % \
(meter['counter_name'], meter['counter_type'],
value))
print(' %s (%s): %s' %
(meter['counter_name'], meter['counter_type'], value))
def show_total_resources(db, args):
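
Note: because the string and its format operands already sit inside the print() parentheses, the trailing backslash is redundant (E502 in pycodestyle terms); the patch drops it and re-wraps the arguments. A sketch with placeholder meter values:

from __future__ import print_function

meter = {'counter_name': 'cpu_util', 'counter_type': 'gauge'}  # placeholders
value = 42
# The call's own parentheses already allow the line break, so no '\' is needed:
print(' %s (%s): %s' %
      (meter['counter_name'], meter['counter_type'], value))
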
@@ -115,7 +114,7 @@ def main(argv):
extra_args = cfg.CONF(
sys.argv[1:],
# NOTE(dhellmann): Read the configuration file(s) for the
#ceilometer collector by default.
# ceilometer collector by default.
default_config_files=['/etc/ceilometer/ceilometer.conf'],
)
db = storage.get_connection_from_config(cfg.CONF)
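
Note: the comment fix here is pycodestyle E265: a block comment must start with '#' followed by a space. A two-line illustration:

# Flagged by pep8 (E265: block comment should start with '# '):
#ceilometer collector by default.

# Accepted:
# ceilometer collector by default.
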

View File

@@ -17,6 +17,7 @@ from oslo.config import cfg
from ceilometer import storage
def main(argv):
cfg.CONF([], project='ceilometer')
if os.getenv("CEILOMETER_TEST_HBASE_URL"):

View File

@@ -70,7 +70,7 @@ commands =
[flake8]
ignore =
builtins = _
exclude=.venv,.git,.tox,dist,doc,./ceilometer/openstack/common,*lib/python*,*egg,tools,nova_tests,build
exclude=.venv,.git,.tox,dist,doc,./ceilometer/openstack/common,*lib/python*,*egg,nova_tests,build
show-source = True
[hacking]