Add tox, pep8, pyflakes, testr support
Also fix the python to comply. This has to be done as a single change otherwise we'll never bootstrap gating. Change-Id: I4a21d57e0341802a1652428dee16c60abb30251d
This commit is contained in:
parent
6b4095a520
commit
54acd090a8
@ -1,4 +1,4 @@
|
||||
[gerrit]
|
||||
host=review.stackforge.org
|
||||
host=review.openstack.org
|
||||
port=29418
|
||||
project=heat-api/heat-jeos.git
|
||||
project=openstack/heat-cfntools.git
|
||||
|
@ -28,17 +28,14 @@ import logging
|
||||
import os
|
||||
import os.path
|
||||
import pwd
|
||||
import re
|
||||
try:
|
||||
import rpmUtils.updates as rpmupdates
|
||||
import rpmUtils.miscutils as rpmutils
|
||||
import rpmUtils.updates as rpmupdates
|
||||
rpmutils_present = True
|
||||
except:
|
||||
except ImportError:
|
||||
rpmutils_present = False
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from urllib2 import urlopen, Request
|
||||
from urlparse import urlparse, urlunparse
|
||||
|
||||
# Override BOTO_CONFIG, which makes boto look only at the specified
|
||||
# config file, instead of the default locations
|
||||
@ -94,7 +91,7 @@ class HupConfig(object):
|
||||
try:
|
||||
with open(self.credential_file) as f:
|
||||
self.credentials = f.read()
|
||||
except:
|
||||
except Exception:
|
||||
raise Exception("invalid credentials file %s" %
|
||||
self.credential_file)
|
||||
|
||||
@ -117,7 +114,7 @@ class HupConfig(object):
|
||||
resources = []
|
||||
for h in self.hooks:
|
||||
r = self.hooks[h].resource_name_get()
|
||||
if not r in resources:
|
||||
if r not in resources:
|
||||
resources.append(self.hooks[h].resource_name_get())
|
||||
return resources
|
||||
|
||||
@ -191,8 +188,8 @@ class CommandRunner(object):
|
||||
self._stdout = output[0]
|
||||
self._stderr = output[1]
|
||||
if self._status:
|
||||
LOG.debug("Return code of %d after executing: '%s'" % \
|
||||
(self._status, cmd))
|
||||
LOG.debug("Return code of %d after executing: '%s'" % (
|
||||
self._status, cmd))
|
||||
if self._next:
|
||||
self._next.run()
|
||||
return self
|
||||
@ -811,12 +808,14 @@ class ConfigsetsHandler(object):
|
||||
" section" % item)
|
||||
self.expand_sets(self.configsets[item], executionlist)
|
||||
if not executionlist:
|
||||
raise Exception("Requested configSet %s empty?" % self.selectedsets)
|
||||
raise Exception(
|
||||
"Requested configSet %s empty?" % self.selectedsets)
|
||||
|
||||
return executionlist
|
||||
|
||||
|
||||
def metadata_server_port(datafile='/var/lib/heat-cfntools/cfn-metadata-server'):
|
||||
def metadata_server_port(
|
||||
datafile='/var/lib/heat-cfntools/cfn-metadata-server'):
|
||||
"""
|
||||
Return the the metadata server port
|
||||
reads the :NNNN from the end of the URL in cfn-metadata-server
|
||||
@ -1069,13 +1068,14 @@ class Metadata(object):
|
||||
# which aligns better with all the existing calls
|
||||
# see https://github.com/boto/boto/pull/857
|
||||
resource_detail = res['DescribeStackResourceResponse'][
|
||||
'DescribeStackResourceResult']['StackResourceDetail']
|
||||
'DescribeStackResourceResult']['StackResourceDetail']
|
||||
return resource_detail['Metadata']
|
||||
|
||||
def retrieve(self,
|
||||
meta_str=None,
|
||||
default_path='/var/lib/heat-cfntools/cfn-init-data',
|
||||
last_path='/tmp/last_metadata'):
|
||||
def retrieve(
|
||||
self,
|
||||
meta_str=None,
|
||||
default_path='/var/lib/heat-cfntools/cfn-init-data',
|
||||
last_path='/tmp/last_metadata'):
|
||||
"""
|
||||
Read the metadata from the given filename
|
||||
"""
|
||||
@ -1129,7 +1129,7 @@ class Metadata(object):
|
||||
om = hashlib.md5()
|
||||
om.update(lm.read())
|
||||
old_md5 = om.hexdigest()
|
||||
except:
|
||||
except Exception:
|
||||
pass
|
||||
if old_md5 != current_md5:
|
||||
self._has_changed = True
|
||||
@ -1148,8 +1148,8 @@ class Metadata(object):
|
||||
Should find the AWS::CloudFormation::Init json key
|
||||
"""
|
||||
is_valid = self._metadata and \
|
||||
self._init_key in self._metadata and \
|
||||
self._metadata[self._init_key]
|
||||
self._init_key in self._metadata and \
|
||||
self._metadata[self._init_key]
|
||||
if is_valid:
|
||||
self._metadata = self._metadata[self._init_key]
|
||||
return is_valid
|
||||
|
@ -14,20 +14,20 @@
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import boto.cloudformation as cfn
|
||||
import json
|
||||
import mox
|
||||
from testtools import TestCase
|
||||
from testtools.matchers import FileContains
|
||||
from tempfile import NamedTemporaryFile
|
||||
from boto.cloudformation import CloudFormationConnection
|
||||
import tempfile
|
||||
import testtools
|
||||
import testtools.matchers as ttm
|
||||
|
||||
from heat_cfntools.cfntools import cfn_helper
|
||||
|
||||
|
||||
class TestCfnHelper(TestCase):
|
||||
class TestCfnHelper(testtools.TestCase):
|
||||
|
||||
def _check_metadata_content(self, content, value):
|
||||
with NamedTemporaryFile() as metadata_info:
|
||||
with tempfile.NamedTemporaryFile() as metadata_info:
|
||||
metadata_info.write(content)
|
||||
metadata_info.flush()
|
||||
port = cfn_helper.metadata_server_port(metadata_info.name)
|
||||
@ -75,7 +75,7 @@ class TestCfnHelper(TestCase):
|
||||
|
||||
def test_parse_creds_file(self):
|
||||
def parse_creds_test(file_contents, creds_match):
|
||||
with NamedTemporaryFile(mode='w') as fcreds:
|
||||
with tempfile.NamedTemporaryFile(mode='w') as fcreds:
|
||||
fcreds.write(file_contents)
|
||||
fcreds.flush()
|
||||
creds = cfn_helper.parse_creds_file(fcreds.name)
|
||||
@ -94,7 +94,7 @@ class TestCfnHelper(TestCase):
|
||||
)
|
||||
|
||||
|
||||
class TestMetadataRetrieve(TestCase):
|
||||
class TestMetadataRetrieve(testtools.TestCase):
|
||||
|
||||
def test_metadata_retrieve_files(self):
|
||||
|
||||
@ -104,19 +104,19 @@ class TestMetadataRetrieve(TestCase):
|
||||
|
||||
md = cfn_helper.Metadata('teststack', None)
|
||||
|
||||
with NamedTemporaryFile() as last_file:
|
||||
with tempfile.NamedTemporaryFile() as last_file:
|
||||
pass
|
||||
|
||||
with NamedTemporaryFile(mode='w+') as default_file:
|
||||
with tempfile.NamedTemporaryFile(mode='w+') as default_file:
|
||||
default_file.write(md_str)
|
||||
default_file.flush()
|
||||
self.assertThat(default_file.name, FileContains(md_str))
|
||||
self.assertThat(default_file.name, ttm.FileContains(md_str))
|
||||
|
||||
md.retrieve(
|
||||
default_path=default_file.name,
|
||||
last_path=last_file.name)
|
||||
|
||||
self.assertThat(last_file.name, FileContains(md_str))
|
||||
self.assertThat(last_file.name, ttm.FileContains(md_str))
|
||||
self.assertDictEqual(md_data, md._metadata)
|
||||
|
||||
md = cfn_helper.Metadata('teststack', None)
|
||||
@ -128,9 +128,9 @@ class TestMetadataRetrieve(TestCase):
|
||||
def test_metadata_retrieve_none(self):
|
||||
|
||||
md = cfn_helper.Metadata('teststack', None)
|
||||
with NamedTemporaryFile() as last_file:
|
||||
with tempfile.NamedTemporaryFile() as last_file:
|
||||
pass
|
||||
with NamedTemporaryFile() as default_file:
|
||||
with tempfile.NamedTemporaryFile() as default_file:
|
||||
pass
|
||||
|
||||
md.retrieve(
|
||||
@ -170,29 +170,30 @@ class TestMetadataRetrieve(TestCase):
|
||||
"/tmp/foo": {"content": "bar"}}}}}
|
||||
|
||||
m = mox.Mox()
|
||||
m.StubOutWithMock(CloudFormationConnection, 'describe_stack_resource')
|
||||
m.StubOutWithMock(
|
||||
cfn.CloudFormationConnection, 'describe_stack_resource')
|
||||
|
||||
CloudFormationConnection.describe_stack_resource(
|
||||
cfn.CloudFormationConnection.describe_stack_resource(
|
||||
'teststack', None).MultipleTimes().AndReturn({
|
||||
'DescribeStackResourceResponse':
|
||||
{'DescribeStackResourceResult':
|
||||
{'StackResourceDetail':
|
||||
{'Metadata': md_data}}}
|
||||
})
|
||||
'DescribeStackResourceResponse': {
|
||||
'DescribeStackResourceResult': {
|
||||
'StackResourceDetail': {'Metadata': md_data}}}})
|
||||
|
||||
m.ReplayAll()
|
||||
try:
|
||||
md = cfn_helper.Metadata('teststack', None,
|
||||
md = cfn_helper.Metadata(
|
||||
'teststack',
|
||||
None,
|
||||
access_key='foo',
|
||||
secret_key='bar')
|
||||
md.retrieve()
|
||||
self.assertDictEqual(md_data, md._metadata)
|
||||
|
||||
with NamedTemporaryFile(mode='w') as fcreds:
|
||||
with tempfile.NamedTemporaryFile(mode='w') as fcreds:
|
||||
fcreds.write('AWSAccessKeyId=foo\nAWSSecretKey=bar\n')
|
||||
fcreds.flush()
|
||||
md = cfn_helper.Metadata('teststack', None,
|
||||
credentials_file=fcreds.name)
|
||||
md = cfn_helper.Metadata(
|
||||
'teststack', None, credentials_file=fcreds.name)
|
||||
md.retrieve()
|
||||
self.assertDictEqual(md_data, md._metadata)
|
||||
|
||||
@ -202,11 +203,11 @@ class TestMetadataRetrieve(TestCase):
|
||||
|
||||
def test_cfn_init(self):
|
||||
|
||||
with NamedTemporaryFile(mode='w+') as foo_file:
|
||||
with tempfile.NamedTemporaryFile(mode='w+') as foo_file:
|
||||
md_data = {"AWS::CloudFormation::Init": {"config": {"files": {
|
||||
foo_file.name: {"content": "bar"}}}}}
|
||||
|
||||
md = cfn_helper.Metadata('teststack', None)
|
||||
md.retrieve(meta_str=md_data)
|
||||
md.cfn_init()
|
||||
self.assertThat(foo_file.name, FileContains('bar'))
|
||||
self.assertThat(foo_file.name, ttm.FileContains('bar'))
|
||||
|
2
setup.py
2
setup.py
@ -24,7 +24,7 @@ setuptools.setup(
|
||||
name='heat-cfntools',
|
||||
version='1.2',
|
||||
description='Tools required to be installed on Heat '
|
||||
'provisioned cloud instances',
|
||||
'provisioned cloud instances',
|
||||
long_description=read('README.rst'),
|
||||
license='Apache License (2.0)',
|
||||
author='Heat API Developers',
|
||||
|
15
tools/flakes.py
Normal file
15
tools/flakes.py
Normal file
@ -0,0 +1,15 @@
|
||||
"""
|
||||
wrapper for pyflakes to ignore gettext based warning:
|
||||
"undefined name '_'"
|
||||
|
||||
Synced in from openstack-common
|
||||
"""
|
||||
import sys
|
||||
|
||||
import pyflakes.checker
|
||||
from pyflakes.scripts import pyflakes
|
||||
|
||||
if __name__ == "__main__":
|
||||
orig_builtins = set(pyflakes.checker._MAGIC_GLOBALS)
|
||||
pyflakes.checker._MAGIC_GLOBALS = orig_builtins | set(['_'])
|
||||
sys.exit(pyflakes.main())
|
637
tools/hacking.py
Executable file
637
tools/hacking.py
Executable file
@ -0,0 +1,637 @@
|
||||
#!/usr/bin/env python
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright (c) 2012, Cloudscaling
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""nova HACKING file compliance testing
|
||||
|
||||
Built on top of pep8.py
|
||||
"""
|
||||
|
||||
import imp
|
||||
import inspect
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import tokenize
|
||||
import warnings
|
||||
|
||||
import pep8
|
||||
|
||||
# Don't need this for testing
|
||||
logging.disable('LOG')
|
||||
|
||||
#N1xx comments
|
||||
#N2xx except
|
||||
#N3xx imports
|
||||
#N4xx docstrings
|
||||
#N5xx dictionaries/lists
|
||||
#N6xx calling methods
|
||||
#N7xx localization
|
||||
#N8xx git commit messages
|
||||
#N9xx other
|
||||
|
||||
IMPORT_EXCEPTIONS = ['sqlalchemy', 'migrate', 'nova.db.sqlalchemy.session',
|
||||
'nova.openstack.common.log.logging',
|
||||
'nova.db.sqlalchemy.migration.versioning_api', 'paste']
|
||||
# Paste is missing a __init__ in top level directory
|
||||
START_DOCSTRING_TRIPLE = ['u"""', 'r"""', '"""', "u'''", "r'''", "'''"]
|
||||
END_DOCSTRING_TRIPLE = ['"""', "'''"]
|
||||
VERBOSE_MISSING_IMPORT = os.getenv('HACKING_VERBOSE_MISSING_IMPORT', 'False')
|
||||
|
||||
_missingImport = set([])
|
||||
|
||||
|
||||
# Monkey patch broken excluded filter in pep8
|
||||
# See https://github.com/jcrocholl/pep8/pull/111
|
||||
def excluded(self, filename):
|
||||
"""Check if options.exclude contains a pattern that matches filename."""
|
||||
basename = os.path.basename(filename)
|
||||
return any((pep8.filename_match(filename, self.options.exclude,
|
||||
default=False),
|
||||
pep8.filename_match(basename, self.options.exclude,
|
||||
default=False)))
|
||||
|
||||
|
||||
def input_dir(self, dirname):
|
||||
"""Check all files in this directory and all subdirectories."""
|
||||
dirname = dirname.rstrip('/')
|
||||
if self.excluded(dirname):
|
||||
return 0
|
||||
counters = self.options.report.counters
|
||||
verbose = self.options.verbose
|
||||
filepatterns = self.options.filename
|
||||
runner = self.runner
|
||||
for root, dirs, files in os.walk(dirname):
|
||||
if verbose:
|
||||
print('directory ' + root)
|
||||
counters['directories'] += 1
|
||||
for subdir in sorted(dirs):
|
||||
if self.excluded(os.path.join(root, subdir)):
|
||||
dirs.remove(subdir)
|
||||
for filename in sorted(files):
|
||||
# contain a pattern that matches?
|
||||
if ((pep8.filename_match(filename, filepatterns) and
|
||||
not self.excluded(filename))):
|
||||
runner(os.path.join(root, filename))
|
||||
|
||||
|
||||
def is_import_exception(mod):
|
||||
return (mod in IMPORT_EXCEPTIONS or
|
||||
any(mod.startswith(m + '.') for m in IMPORT_EXCEPTIONS))
|
||||
|
||||
|
||||
def import_normalize(line):
|
||||
# convert "from x import y" to "import x.y"
|
||||
# handle "from x import y as z" to "import x.y as z"
|
||||
split_line = line.split()
|
||||
if ("import" in line and line.startswith("from ") and "," not in line and
|
||||
split_line[2] == "import" and split_line[3] != "*" and
|
||||
split_line[1] != "__future__" and
|
||||
(len(split_line) == 4 or
|
||||
(len(split_line) == 6 and split_line[4] == "as"))):
|
||||
return "import %s.%s" % (split_line[1], split_line[3])
|
||||
else:
|
||||
return line
|
||||
|
||||
|
||||
def nova_todo_format(physical_line, tokens):
|
||||
"""Check for 'TODO()'.
|
||||
|
||||
nova HACKING guide recommendation for TODO:
|
||||
Include your name with TODOs as in "#TODO(termie)"
|
||||
|
||||
Okay: #TODO(sdague)
|
||||
N101: #TODO fail
|
||||
N101: #TODO (jogo) fail
|
||||
"""
|
||||
# TODO(sdague): TODO check shouldn't fail inside of space
|
||||
pos = physical_line.find('TODO')
|
||||
pos1 = physical_line.find('TODO(')
|
||||
pos2 = physical_line.find('#') # make sure it's a comment
|
||||
if (pos != pos1 and pos2 >= 0 and pos2 < pos and len(tokens) == 0):
|
||||
return pos, "N101: Use TODO(NAME)"
|
||||
|
||||
|
||||
def nova_except_format(logical_line):
|
||||
r"""Check for 'except:'.
|
||||
|
||||
nova HACKING guide recommends not using except:
|
||||
Do not write "except:", use "except Exception:" at the very least
|
||||
|
||||
Okay: except Exception:
|
||||
N201: except:
|
||||
"""
|
||||
if logical_line.startswith("except:"):
|
||||
yield 6, "N201: no 'except:' at least use 'except Exception:'"
|
||||
|
||||
|
||||
def nova_except_format_assert(logical_line):
|
||||
r"""Check for 'assertRaises(Exception'.
|
||||
|
||||
nova HACKING guide recommends not using assertRaises(Exception...):
|
||||
Do not use overly broad Exception type
|
||||
|
||||
Okay: self.assertRaises(NovaException)
|
||||
N202: self.assertRaises(Exception)
|
||||
"""
|
||||
if logical_line.startswith("self.assertRaises(Exception"):
|
||||
yield 1, "N202: assertRaises Exception too broad"
|
||||
|
||||
|
||||
modules_cache = dict((mod, True) for mod in tuple(sys.modules.keys())
|
||||
+ sys.builtin_module_names)
|
||||
|
||||
RE_RELATIVE_IMPORT = re.compile('^from\s*[.]')
|
||||
|
||||
|
||||
def nova_import_rules(logical_line):
|
||||
r"""Check for imports.
|
||||
|
||||
nova HACKING guide recommends one import per line:
|
||||
Do not import more than one module per line
|
||||
|
||||
Examples:
|
||||
Okay: from nova.compute import api
|
||||
N301: from nova.compute import api, utils
|
||||
|
||||
|
||||
Imports should usually be on separate lines.
|
||||
|
||||
nova HACKING guide recommends importing only modules:
|
||||
Do not import objects, only modules
|
||||
|
||||
Examples:
|
||||
Okay: from os import path
|
||||
Okay: import os.path
|
||||
Okay: from nova.compute import rpcapi
|
||||
N302: from os.path import dirname as dirname2
|
||||
N303: from os.path import *
|
||||
N304: from .compute import rpcapi
|
||||
"""
|
||||
#NOTE(afazekas): An old style relative import example will not be able to
|
||||
# pass the doctest, since the relativity depends on the file's locality
|
||||
|
||||
def is_module_for_sure(mod, search_path=sys.path):
|
||||
try:
|
||||
while '.' in mod:
|
||||
pack_name, _sep, mod = mod.partition('.')
|
||||
f, p, d = imp.find_module(pack_name, search_path)
|
||||
search_path = [p]
|
||||
imp.find_module(mod, search_path)
|
||||
except ImportError:
|
||||
return False
|
||||
return True
|
||||
|
||||
def is_module_for_sure_cached(mod):
|
||||
if mod in modules_cache:
|
||||
return modules_cache[mod]
|
||||
res = is_module_for_sure(mod)
|
||||
modules_cache[mod] = res
|
||||
return res
|
||||
|
||||
def is_module(mod):
|
||||
"""Checks for non module imports.
|
||||
|
||||
If can't find module on first try, recursively check for the parent
|
||||
modules.
|
||||
When parsing 'from x import y,' x is the parent.
|
||||
"""
|
||||
if is_module_for_sure_cached(mod):
|
||||
return True
|
||||
parts = mod.split('.')
|
||||
for i in range(len(parts) - 1, 0, -1):
|
||||
path = '.'.join(parts[0:i])
|
||||
if is_module_for_sure_cached(path):
|
||||
return False
|
||||
_missingImport.add(mod)
|
||||
return True
|
||||
|
||||
current_path = os.path.dirname(pep8.current_file)
|
||||
current_mod = os.path.basename(pep8.current_file)
|
||||
if current_mod[-3:] == ".py":
|
||||
current_mod = current_mod[:-3]
|
||||
|
||||
split_line = logical_line.split()
|
||||
split_line_len = len(split_line)
|
||||
if (split_line[0] in ('import', 'from') and split_line_len > 1 and
|
||||
not is_import_exception(split_line[1])):
|
||||
pos = logical_line.find(',')
|
||||
if pos != -1:
|
||||
if split_line[0] == 'from':
|
||||
yield pos, "N301: one import per line"
|
||||
return # ',' is not supported by the N302 checker yet
|
||||
pos = logical_line.find('*')
|
||||
if pos != -1:
|
||||
yield pos, "N303: No wildcard (*) import."
|
||||
return
|
||||
|
||||
if split_line_len in (2, 4, 6) and split_line[1] != "__future__":
|
||||
if 'from' == split_line[0] and split_line_len > 3:
|
||||
mod = '.'.join((split_line[1], split_line[3]))
|
||||
if is_import_exception(mod):
|
||||
return
|
||||
if RE_RELATIVE_IMPORT.search(logical_line):
|
||||
yield logical_line.find('.'), ("N304: No "
|
||||
"relative imports. '%s' is a relative import"
|
||||
% logical_line)
|
||||
return
|
||||
|
||||
if not is_module(mod):
|
||||
yield 0, ("N302: import only modules."
|
||||
"'%s' does not import a module" % logical_line)
|
||||
return
|
||||
|
||||
#NOTE(afazekas): import searches first in the package
|
||||
# The import keyword just imports modules
|
||||
# The guestfs module now imports guestfs
|
||||
mod = split_line[1]
|
||||
if (current_mod != mod and
|
||||
not is_module_for_sure_cached(mod) and
|
||||
is_module_for_sure(mod, [current_path])):
|
||||
yield 0, ("N304: No relative imports."
|
||||
" '%s' is a relative import"
|
||||
% logical_line)
|
||||
|
||||
|
||||
#TODO(jogo): import template: N305
|
||||
|
||||
|
||||
def nova_import_alphabetical(logical_line, blank_lines, previous_logical,
|
||||
indent_level, previous_indent_level):
|
||||
r"""Check for imports in alphabetical order.
|
||||
|
||||
nova HACKING guide recommendation for imports:
|
||||
imports in human alphabetical order
|
||||
|
||||
Okay: import os\nimport sys\n\nimport nova\nfrom nova import test
|
||||
N306: import sys\nimport os
|
||||
"""
|
||||
# handle import x
|
||||
# use .lower since capitalization shouldn't dictate order
|
||||
split_line = import_normalize(logical_line.strip()).lower().split()
|
||||
split_previous = import_normalize(previous_logical.strip()).lower().split()
|
||||
|
||||
if blank_lines < 1 and indent_level == previous_indent_level:
|
||||
length = [2, 4]
|
||||
if (len(split_line) in length and len(split_previous) in length and
|
||||
split_line[0] == "import" and split_previous[0] == "import"):
|
||||
if split_line[1] < split_previous[1]:
|
||||
yield (0, "N306: imports not in alphabetical order (%s, %s)"
|
||||
% (split_previous[1], split_line[1]))
|
||||
|
||||
|
||||
def nova_import_no_db_in_virt(logical_line, filename):
|
||||
"""Check for db calls from nova/virt
|
||||
|
||||
As of grizzly-2 all the database calls have been removed from
|
||||
nova/virt, and we want to keep it that way.
|
||||
|
||||
N307
|
||||
"""
|
||||
if "nova/virt" in filename and not filename.endswith("fake.py"):
|
||||
if logical_line.startswith("from nova import db"):
|
||||
yield (0, "N307: nova.db import not allowed in nova/virt/*")
|
||||
|
||||
|
||||
def in_docstring_position(previous_logical):
|
||||
return (previous_logical.startswith("def ") or
|
||||
previous_logical.startswith("class "))
|
||||
|
||||
|
||||
def nova_docstring_start_space(physical_line, previous_logical):
|
||||
r"""Check for docstring not start with space.
|
||||
|
||||
nova HACKING guide recommendation for docstring:
|
||||
Docstring should not start with space
|
||||
|
||||
Okay: def foo():\n '''This is good.'''
|
||||
N401: def foo():\n ''' This is not.'''
|
||||
"""
|
||||
# short circuit so that we don't fail on our own fail test
|
||||
# when running under external pep8
|
||||
if physical_line.find("N401: def foo()") != -1:
|
||||
return
|
||||
|
||||
# it's important that we determine this is actually a docstring,
|
||||
# and not a doc block used somewhere after the first line of a
|
||||
# function def
|
||||
if in_docstring_position(previous_logical):
|
||||
pos = max([physical_line.find(i) for i in START_DOCSTRING_TRIPLE])
|
||||
if pos != -1 and len(physical_line) > pos + 4:
|
||||
if physical_line[pos + 3] == ' ':
|
||||
return (pos, "N401: docstring should not start with"
|
||||
" a space")
|
||||
|
||||
|
||||
def nova_docstring_one_line(physical_line):
|
||||
r"""Check one line docstring end.
|
||||
|
||||
nova HACKING guide recommendation for one line docstring:
|
||||
A one line docstring looks like this and ends in punctuation.
|
||||
|
||||
Okay: '''This is good.'''
|
||||
Okay: '''This is good too!'''
|
||||
Okay: '''How about this?'''
|
||||
N402: '''This is not'''
|
||||
N402: '''Bad punctuation,'''
|
||||
"""
|
||||
#TODO(jogo) make this apply to multi line docstrings as well
|
||||
line = physical_line.lstrip()
|
||||
|
||||
if line.startswith('"') or line.startswith("'"):
|
||||
pos = max([line.find(i) for i in START_DOCSTRING_TRIPLE]) # start
|
||||
end = max([line[-4:-1] == i for i in END_DOCSTRING_TRIPLE]) # end
|
||||
|
||||
if pos != -1 and end and len(line) > pos + 4:
|
||||
if line[-5] not in ['.', '?', '!']:
|
||||
return pos, "N402: one line docstring needs punctuation."
|
||||
|
||||
|
||||
def nova_docstring_multiline_end(physical_line, previous_logical):
|
||||
r"""Check multi line docstring end.
|
||||
|
||||
nova HACKING guide recommendation for docstring:
|
||||
Docstring should end on a new line
|
||||
|
||||
Okay: '''foobar\nfoo\nbar\n'''
|
||||
N403: def foo():\n'''foobar\nfoo\nbar\n d'''\n\n
|
||||
"""
|
||||
if in_docstring_position(previous_logical):
|
||||
pos = max(physical_line.find(i) for i in END_DOCSTRING_TRIPLE)
|
||||
if pos != -1 and len(physical_line) == pos + 4:
|
||||
if physical_line.strip() not in START_DOCSTRING_TRIPLE:
|
||||
return (pos, "N403: multi line docstring end on new line")
|
||||
|
||||
|
||||
def nova_docstring_multiline_start(physical_line, previous_logical, tokens):
|
||||
r"""Check multi line docstring start with summary.
|
||||
|
||||
nova HACKING guide recommendation for docstring:
|
||||
Docstring should start with A multi line docstring has a one-line summary
|
||||
|
||||
Okay: '''foobar\nfoo\nbar\n'''
|
||||
N404: def foo():\n'''\nfoo\nbar\n''' \n\n
|
||||
"""
|
||||
if in_docstring_position(previous_logical):
|
||||
pos = max([physical_line.find(i) for i in START_DOCSTRING_TRIPLE])
|
||||
# start of docstring when len(tokens)==0
|
||||
if len(tokens) == 0 and pos != -1 and len(physical_line) == pos + 4:
|
||||
if physical_line.strip() in START_DOCSTRING_TRIPLE:
|
||||
return (pos, "N404: multi line docstring "
|
||||
"should start with a summary")
|
||||
|
||||
|
||||
def nova_no_cr(physical_line):
|
||||
r"""Check that we only use newlines not cariage returns.
|
||||
|
||||
Okay: import os\nimport sys
|
||||
# pep8 doesn't yet replace \r in strings, will work on an
|
||||
# upstream fix
|
||||
N901 import os\r\nimport sys
|
||||
"""
|
||||
pos = physical_line.find('\r')
|
||||
if pos != -1 and pos == (len(physical_line) - 2):
|
||||
return (pos, "N901: Windows style line endings not allowed in code")
|
||||
|
||||
|
||||
FORMAT_RE = re.compile("%(?:"
|
||||
"%|" # Ignore plain percents
|
||||
"(\(\w+\))?" # mapping key
|
||||
"([#0 +-]?" # flag
|
||||
"(?:\d+|\*)?" # width
|
||||
"(?:\.\d+)?" # precision
|
||||
"[hlL]?" # length mod
|
||||
"\w))") # type
|
||||
|
||||
|
||||
class LocalizationError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def check_i18n():
|
||||
"""Generator that checks token stream for localization errors.
|
||||
|
||||
Expects tokens to be ``send``ed one by one.
|
||||
Raises LocalizationError if some error is found.
|
||||
"""
|
||||
while True:
|
||||
try:
|
||||
token_type, text, _, _, line = yield
|
||||
except GeneratorExit:
|
||||
return
|
||||
|
||||
if (token_type == tokenize.NAME and text == "_" and
|
||||
not line.startswith('def _(msg):')):
|
||||
|
||||
while True:
|
||||
token_type, text, start, _, _ = yield
|
||||
if token_type != tokenize.NL:
|
||||
break
|
||||
if token_type != tokenize.OP or text != "(":
|
||||
continue # not a localization call
|
||||
|
||||
format_string = ''
|
||||
while True:
|
||||
token_type, text, start, _, _ = yield
|
||||
if token_type == tokenize.STRING:
|
||||
format_string += eval(text)
|
||||
elif token_type == tokenize.NL:
|
||||
pass
|
||||
else:
|
||||
break
|
||||
|
||||
if not format_string:
|
||||
raise LocalizationError(start,
|
||||
"N701: Empty localization string")
|
||||
if token_type != tokenize.OP:
|
||||
raise LocalizationError(start,
|
||||
"N701: Invalid localization call")
|
||||
if text != ")":
|
||||
if text == "%":
|
||||
raise LocalizationError(start,
|
||||
"N702: Formatting operation should be outside"
|
||||
" of localization method call")
|
||||
elif text == "+":
|
||||
raise LocalizationError(start,
|
||||
"N702: Use bare string concatenation instead"
|
||||
" of +")
|
||||
else:
|
||||
raise LocalizationError(start,
|
||||
"N702: Argument to _ must be just a string")
|
||||
|
||||
format_specs = FORMAT_RE.findall(format_string)
|
||||
positional_specs = [(key, spec) for key, spec in format_specs
|
||||
if not key and spec]
|
||||
# not spec means %%, key means %(smth)s
|
||||
if len(positional_specs) > 1:
|
||||
raise LocalizationError(start,
|
||||
"N703: Multiple positional placeholders")
|
||||
|
||||
|
||||
def nova_localization_strings(logical_line, tokens):
|
||||
r"""Check localization in line.
|
||||
|
||||
Okay: _("This is fine")
|
||||
Okay: _("This is also fine %s")
|
||||
N701: _('')
|
||||
N702: _("Bob" + " foo")
|
||||
N702: _("Bob %s" % foo)
|
||||
# N703 check is not quite right, disabled by removing colon
|
||||
N703 _("%s %s" % (foo, bar))
|
||||
"""
|
||||
# TODO(sdague) actually get these tests working
|
||||
gen = check_i18n()
|
||||
next(gen)
|
||||
try:
|
||||
map(gen.send, tokens)
|
||||
gen.close()
|
||||
except LocalizationError as e:
|
||||
yield e.args
|
||||
|
||||
#TODO(jogo) Dict and list objects
|
||||
|
||||
|
||||
def nova_is_not(logical_line):
|
||||
r"""Check localization in line.
|
||||
|
||||
Okay: if x is not y
|
||||
N901: if not X is Y
|
||||
N901: if not X.B is Y
|
||||
"""
|
||||
split_line = logical_line.split()
|
||||
if (len(split_line) == 5 and split_line[0] == 'if' and
|
||||
split_line[1] == 'not' and split_line[3] == 'is'):
|
||||
yield (logical_line.find('not'), "N901: Use the 'is not' "
|
||||
"operator for when testing for unequal identities")
|
||||
|
||||
|
||||
def nova_not_in(logical_line):
|
||||
r"""Check localization in line.
|
||||
|
||||
Okay: if x not in y
|
||||
Okay: if not (X in Y or X is Z)
|
||||
Okay: if not (X in Y)
|
||||
N902: if not X in Y
|
||||
N902: if not X.B in Y
|
||||
"""
|
||||
split_line = logical_line.split()
|
||||
if (len(split_line) == 5 and split_line[0] == 'if' and
|
||||
split_line[1] == 'not' and split_line[3] == 'in' and not
|
||||
split_line[2].startswith('(')):
|
||||
yield (logical_line.find('not'), "N902: Use the 'not in' "
|
||||
"operator for collection membership evaluation")
|
||||
|
||||
current_file = ""
|
||||
|
||||
|
||||
def readlines(filename):
|
||||
"""Record the current file being tested."""
|
||||
pep8.current_file = filename
|
||||
return open(filename).readlines()
|
||||
|
||||
|
||||
def add_nova():
|
||||
"""Monkey patch in nova guidelines.
|
||||
|
||||
Look for functions that start with nova_ and have arguments
|
||||
and add them to pep8 module
|
||||
Assumes you know how to write pep8.py checks
|
||||
"""
|
||||
for name, function in globals().items():
|
||||
if not inspect.isfunction(function):
|
||||
continue
|
||||
args = inspect.getargspec(function)[0]
|
||||
if args and name.startswith("nova"):
|
||||
exec("pep8.%s = %s" % (name, name))
|
||||
|
||||
|
||||
def once_git_check_commit_title():
|
||||
"""Check git commit messages.
|
||||
|
||||
nova HACKING recommends not referencing a bug or blueprint in first line,
|
||||
it should provide an accurate description of the change
|
||||
N801
|
||||
N802 Title limited to 50 chars
|
||||
"""
|
||||
#Get title of most recent commit
|
||||
|
||||
subp = subprocess.Popen(['git', 'log', '--no-merges', '--pretty=%s', '-1'],
|
||||
stdout=subprocess.PIPE)
|
||||
title = subp.communicate()[0]
|
||||
if subp.returncode:
|
||||
raise Exception("git log failed with code %s" % subp.returncode)
|
||||
|
||||
#From https://github.com/openstack/openstack-ci-puppet
|
||||
# /blob/master/modules/gerrit/manifests/init.pp#L74
|
||||
#Changeid|bug|blueprint
|
||||
git_keywords = (r'(I[0-9a-f]{8,40})|'
|
||||
'([Bb]ug|[Ll][Pp])[\s\#:]*(\d+)|'
|
||||
'([Bb]lue[Pp]rint|[Bb][Pp])[\s\#:]*([A-Za-z0-9\\-]+)')
|
||||
GIT_REGEX = re.compile(git_keywords)
|
||||
|
||||
error = False
|
||||
#NOTE(jogo) if match regex but over 3 words, acceptable title
|
||||
if GIT_REGEX.search(title) is not None and len(title.split()) <= 3:
|
||||
print ("N801: git commit title ('%s') should provide an accurate "
|
||||
"description of the change, not just a reference to a bug "
|
||||
"or blueprint" % title.strip())
|
||||
error = True
|
||||
if len(title.decode('utf-8')) > 72:
|
||||
print ("N802: git commit title ('%s') should be under 50 chars"
|
||||
% title.strip())
|
||||
error = True
|
||||
return error
|
||||
|
||||
imports_on_separate_lines_N301_compliant = r"""
|
||||
Imports should usually be on separate lines.
|
||||
|
||||
Okay: import os\nimport sys
|
||||
E401: import sys, os
|
||||
|
||||
N301: from subprocess import Popen, PIPE
|
||||
Okay: from myclas import MyClass
|
||||
Okay: from foo.bar.yourclass import YourClass
|
||||
Okay: import myclass
|
||||
Okay: import foo.bar.yourclass
|
||||
"""
|
||||
|
||||
if __name__ == "__main__":
    #include nova path
    sys.path.append(os.getcwd())
    #Run once tests (not per line)
    once_error = once_git_check_commit_title()
    #NOVA error codes start with an N
    pep8.SELFTEST_REGEX = re.compile(r'(Okay|[EWN]\d{3}):\s(.*)')
    pep8.ERRORCODE_REGEX = re.compile(r'[EWN]\d{3}')
    # add_nova() presumably registers the N-prefixed checks with pep8
    # (defined earlier in this file) -- confirm against its definition.
    add_nova()
    # Take over pep8's file-iteration hooks with the local replacements.
    pep8.current_file = current_file
    pep8.readlines = readlines
    pep8.StyleGuide.excluded = excluded
    pep8.StyleGuide.input_dir = input_dir
    # we need to kill this doctring otherwise the self tests fail
    pep8.imports_on_separate_lines.__doc__ = \
        imports_on_separate_lines_N301_compliant

    try:
        pep8._main()
        # Propagate the once-per-commit check result as the exit status,
        # but only if pep8._main() did not already sys.exit() on errors.
        sys.exit(once_error)
    finally:
        # Report (on stderr, so it is visible even when stdout is consumed)
        # any optional modules that could not be imported in this env.
        if len(_missingImport) > 0:
            print >> sys.stderr, ("%i imports missing in this test environment"
                                  % len(_missingImport))
|
199
tools/lintstack.py
Executable file
199
tools/lintstack.py
Executable file
@ -0,0 +1,199 @@
|
||||
#!/usr/bin/env python
|
||||
# vim: tabstop=4 shiftwidth=4 softtabstop=4
|
||||
|
||||
# Copyright (c) 2012, AT&T Labs, Yun Mao <yunmao@gmail.com>
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""pylint error checking."""
|
||||
|
||||
import cStringIO as StringIO
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
|
||||
from pylint import lint
|
||||
from pylint.reporters import text
|
||||
|
||||
# Note(maoy): E1103 is error code related to partial type inference
ignore_codes = ["E1103"]
# Note(maoy): the error message is the pattern of E0202. It should be ignored
# for nova.tests modules
ignore_messages = ["An attribute affected in nova.tests"]
# Note(maoy): we ignore all errors in openstack.common because it should be
# checked elsewhere. We also ignore nova.tests for now due to high false
# positive rate.
ignore_modules = ["nova/openstack/common/", "nova/tests/"]

# Repo-relative path of the generated known-exceptions file; written by
# generate_error_keys() and read back by validate().
KNOWN_PYLINT_EXCEPTIONS_FILE = "tools/pylint_exceptions"
|
||||
|
||||
|
||||
class LintOutput(object):
    """A single pylint complaint parsed from a parseable-text report line."""

    # Cache of the most recently read source file, so consecutive errors
    # reported against the same file do not re-read it from disk.
    _cached_filename = None
    _cached_content = None

    def __init__(self, filename, lineno, line_content, code, message,
                 lintoutput):
        self.filename = filename
        self.lineno = lineno
        self.line_content = line_content
        self.code = code
        self.message = message
        self.lintoutput = lintoutput

    @classmethod
    def from_line(cls, line):
        """Build a LintOutput from one 'file:lineno: [CODE, sym] msg' line."""
        match = re.search(r"(\S+):(\d+): \[(\S+)(, \S+)?] (.*)", line)
        groups = match.groups()
        filename = groups[0]
        lineno = int(groups[1])
        code = groups[2]
        message = groups[-1]
        if cls._cached_filename != filename:
            with open(filename) as src:
                cls._cached_content = list(src.readlines())
            cls._cached_filename = filename
        line_content = cls._cached_content[lineno - 1].rstrip()
        return cls(filename, lineno, line_content, code, message,
                   line.rstrip())

    @classmethod
    def from_msg_to_dict(cls, msg):
        """From the output of pylint msg, to a dict, where each key
        is a unique error identifier, value is a list of LintOutput
        """
        result = {}
        for report_line in msg.splitlines():
            entry = cls.from_line(report_line)
            if entry.is_ignored():
                continue
            result.setdefault(entry.key(), []).append(entry)
        return result

    def is_ignored(self):
        """Return True when this error matches a module-level ignore rule."""
        return (self.code in ignore_codes
                or any(self.filename.startswith(prefix)
                       for prefix in ignore_modules)
                or any(pattern in self.message
                       for pattern in ignore_messages))

    def key(self):
        """Return a hashable identity used to match errors across runs."""
        if self.code in ["E1101", "E1103"]:
            # These two types of errors are like Foo class has no member bar.
            # We discard the source code so that the error will be ignored
            # next time another Foo.bar is encountered.
            return self.message, ""
        return self.message, self.line_content.strip()

    def json(self):
        """Serialize every attribute of this error as a JSON object."""
        return json.dumps(self.__dict__)

    def review_str(self):
        """Render the error in a human-readable, review-friendly form."""
        return ("File %(filename)s\nLine %(lineno)d:%(line_content)s\n"
                "%(code)s: %(message)s" % self.__dict__)
|
||||
|
||||
|
||||
class ErrorKeys(object):
    """Persistence for the known-exceptions file: one JSON key per line."""

    @classmethod
    def print_json(cls, errors, output=sys.stdout):
        # Write keys sorted so the generated file is stable and diffs cleanly.
        print >>output, "# automatically generated by tools/lintstack.py"
        for i in sorted(errors.keys()):
            print >>output, json.dumps(i)

    @classmethod
    def from_file(cls, filename):
        """Load the set of known error keys, skipping '#' comment lines."""
        keys = set()
        for line in open(filename):
            if line and line[0] != "#":
                d = json.loads(line)
                # json.loads yields a list; convert to tuple so the key is
                # hashable and comparable with LintOutput.key() results.
                keys.add(tuple(d))
        return keys
|
||||
|
||||
|
||||
def run_pylint():
    """Run pylint over the nova tree and return its parseable-text report."""
    report_buffer = StringIO.StringIO()
    reporter = text.ParseableTextReporter(output=report_buffer)
    # -E: errors only; --include-ids=y puts the E**** code in each line,
    # which LintOutput.from_line relies on.
    args = ["--include-ids=y", "-E", "nova"]
    lint.Run(args, reporter=reporter, exit=False)
    report = report_buffer.getvalue()
    report_buffer.close()
    return report
|
||||
|
||||
|
||||
def generate_error_keys(msg=None):
    """Write the current pylint errors out as the known-exceptions file.

    msg may carry a pre-captured pylint report; when None, pylint is run.
    """
    print "Generating", KNOWN_PYLINT_EXCEPTIONS_FILE
    if msg is None:
        msg = run_pylint()
    errors = LintOutput.from_msg_to_dict(msg)
    with open(KNOWN_PYLINT_EXCEPTIONS_FILE, "w") as f:
        ErrorKeys.print_json(errors, output=f)
|
||||
|
||||
|
||||
def validate(newmsg=None):
    """Compare current pylint errors against the known-exceptions file.

    Prints every error whose key is not in the exception file and exits
    with status 1 when any such new error is found.  newmsg may carry a
    pre-captured pylint report; when None, pylint is run.
    """
    print "Loading", KNOWN_PYLINT_EXCEPTIONS_FILE
    known = ErrorKeys.from_file(KNOWN_PYLINT_EXCEPTIONS_FILE)
    if newmsg is None:
        print "Running pylint. Be patient..."
        newmsg = run_pylint()
    errors = LintOutput.from_msg_to_dict(newmsg)

    print "Unique errors reported by pylint: was %d, now %d." \
        % (len(known), len(errors))
    passed = True
    for err_key, err_list in errors.items():
        for err in err_list:
            # Only errors whose key is absent from the exception file fail
            # the check; grandfathered errors are silently tolerated.
            if err_key not in known:
                print err.lintoutput
                print
                passed = False
    if passed:
        print "Congrats! pylint check passed."
        # Known exceptions that no longer occur -- candidates for cleanup.
        redundant = known - set(errors.keys())
        if redundant:
            print "Extra credit: some known pylint exceptions disappeared."
            for i in sorted(redundant):
                print json.dumps(i)
            print "Consider regenerating the exception file if you will."
    else:
        print ("Please fix the errors above. If you believe they are false"
               " positives, run 'tools/lintstack.py generate' to overwrite.")
        sys.exit(1)
|
||||
|
||||
|
||||
def usage():
    # Triple-quoted so the help text keeps its own line layout.
    print """Usage: tools/lintstack.py [generate|validate]
To generate pylint_exceptions file: tools/lintstack.py generate
To validate the current commit: tools/lintstack.py
"""
|
||||
|
||||
|
||||
def main():
    """Dispatch to generate/validate from the first CLI argument."""
    # With no argument we validate, matching lintstack.sh's second run.
    option = sys.argv[1] if len(sys.argv) > 1 else "validate"
    if option == "generate":
        generate_error_keys()
    elif option == "validate":
        validate()
    else:
        usage()


if __name__ == "__main__":
    main()
|
59
tools/lintstack.sh
Executable file
59
tools/lintstack.sh
Executable file
@ -0,0 +1,59 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Copyright (c) 2012-2013, AT&T Labs, Yun Mao <yunmao@gmail.com>
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
# Use lintstack.py to compare pylint errors.
|
||||
# We run pylint twice, once on HEAD, once on the code before the latest
|
||||
# commit for review.
|
||||
# Abort on the first failing command; the git checkouts below must not
# proceed from an inconsistent state.
set -e
TOOLS_DIR=$(cd $(dirname "$0") && pwd)
# Get the current branch name.
GITHEAD=`git rev-parse --abbrev-ref HEAD`
if [[ "$GITHEAD" == "HEAD" ]]; then
    # In detached head mode, get revision number instead
    GITHEAD=`git rev-parse HEAD`
    echo "Currently we are at commit $GITHEAD"
else
    echo "Currently we are at branch $GITHEAD"
fi

# Snapshot lintstack.py so the HEAD version survives the checkout of the
# older tree below.
cp -f $TOOLS_DIR/lintstack.py $TOOLS_DIR/lintstack.head.py

if git rev-parse HEAD^2 2>/dev/null; then
    # The HEAD is a Merge commit. Here, the patch to review is
    # HEAD^2, the master branch is at HEAD^1, and the patch was
    # written based on HEAD^2~1.
    PREV_COMMIT=`git rev-parse HEAD^2~1`
    git checkout HEAD~1
    # The git merge is necessary for reviews with a series of patches.
    # If not, this is a no-op so won't hurt either.
    git merge $PREV_COMMIT
else
    # The HEAD is not a merge commit. This won't happen on gerrit.
    # Most likely you are running against your own patch locally.
    # We assume the patch to examine is HEAD, and we compare it against
    # HEAD~1
    git checkout HEAD~1
fi

# First generate tools/pylint_exceptions from HEAD~1
$TOOLS_DIR/lintstack.head.py generate
# Then use that as a reference to compare against HEAD
git checkout $GITHEAD
$TOOLS_DIR/lintstack.head.py
echo "Check passed. FYI: the pylint exceptions are:"
cat $TOOLS_DIR/pylint_exceptions
|
||||
|
15
tools/pyflakes-bypass.py
Normal file
15
tools/pyflakes-bypass.py
Normal file
@ -0,0 +1,15 @@
|
||||
from pyflakes.scripts import pyflakes
|
||||
from pyflakes.checker import Checker
|
||||
|
||||
|
||||
def report_with_bypass(self, messageClass, *args, **kwargs):
    """Checker.report replacement honouring inline 'pyflakes_bypass' marks.

    A warning raised against a source line containing the string
    'pyflakes_bypass' is dropped; every other warning is recorded exactly
    as the stock reporter would record it.
    """
    # pyflakes line numbers (args[0]) are 1-based; readlines() is 0-based.
    flagged_index = args[0] - 1
    with open(self.filename, 'r') as code:
        source_lines = code.readlines()
    if 'pyflakes_bypass' in source_lines[flagged_index]:
        return
    self.messages.append(messageClass(self.filename, *args, **kwargs))
|
||||
|
||||
# monkey patch checker to support bypass: every Checker instance created by
# pyflakes.main() below routes its reports through report_with_bypass.
Checker.report = report_with_bypass

# Delegate to the stock pyflakes CLI (paths come from sys.argv).
pyflakes.main()
|
19
tools/run_pep8.sh
Executable file
19
tools/run_pep8.sh
Executable file
@ -0,0 +1,19 @@
|
||||
#!/bin/bash

set -e
# This is used by run_tests.sh and tox.ini
python tools/hacking.py --doctest

# Until all these issues get fixed, ignore.
PEP8='python tools/hacking.py --ignore=N101,N201,N202,N301,N302,N303,N304,N305,N306,N401,N402,N403,N404,N702,N703,N801,N902'

EXCLUDE='--exclude=.venv,.git,.tox,dist,doc,*lib/python*'
EXCLUDE+=',*egg,build,*tools*'

# Check all .py files
${PEP8} ${EXCLUDE} .

# Check binaries without py extension
#${PEP8} bin/cfn-create-aws-symlinks bin/cfn-get-metadata bin/cfn-hup bin/cfn-init bin/cfn-push-stats bin/cfn-signal

# Fail when pyflakes reports unused imports or redefinitions: the leading
# '!' inverts grep, so a match (a finding) becomes a nonzero exit status.
! python tools/pyflakes-bypass.py heat_cfntools/ | grep "imported but unused\|redefinition of function"
|
@ -1 +1,5 @@
|
||||
testtools
|
||||
mox==0.5.3
|
||||
pep8==1.3.4
|
||||
pyflakes
|
||||
testrepository>=0.0.13
|
||||
testtools>=0.9.27
|
41
tox.ini
Normal file
41
tox.ini
Normal file
@ -0,0 +1,41 @@
|
||||
[tox]
|
||||
envlist = py26,py27,pep8
|
||||
|
||||
[testenv]
|
||||
setenv = VIRTUAL_ENV={envdir}
|
||||
LANG=en_US.UTF-8
|
||||
LANGUAGE=en_US:en
|
||||
LC_ALL=C
|
||||
deps = -r{toxinidir}/tools/pip-requires
|
||||
-r{toxinidir}/tools/test-requires
|
||||
commands = python setup.py testr --slowest --testr-args='{posargs}'
|
||||
|
||||
[tox:jenkins]
|
||||
sitepackages = True
|
||||
downloadcache = ~/cache/pip
|
||||
|
||||
[testenv:pep8]
|
||||
deps=
|
||||
pep8==1.3.3
|
||||
pyflakes
|
||||
commands = bash tools/run_pep8.sh
|
||||
|
||||
[testenv:pylint]
|
||||
setenv = VIRTUAL_ENV={envdir}
|
||||
deps = -r{toxinidir}/tools/pip-requires
|
||||
pylint==0.26.0
|
||||
commands = bash tools/lintstack.sh
|
||||
|
||||
[testenv:pyflakes]
|
||||
deps = pyflakes
|
||||
commands = python tools/flakes.py heat_cfntools
|
||||
|
||||
[testenv:cover]
|
||||
# Also do not run test_coverage_ext tests while gathering coverage as those
|
||||
# tests conflict with coverage.
|
||||
commands =
|
||||
python setup.py testr --coverage \
|
||||
--testr-args='^(?!.*test.*coverage).*$'
|
||||
|
||||
[testenv:venv]
|
||||
commands = {posargs}
|
Loading…
Reference in New Issue
Block a user