Merge "Update hacking for Python3"
commit 2b39a61ecf
@@ -53,7 +53,6 @@ openstackdocs_bug_project = 'barbican'
 copyright = u'2016, OpenStack contributors'

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
 # language = None
@@ -81,5 +81,6 @@ def main():
     except RuntimeError as e:
         fail(1, e)

+
 if __name__ == '__main__':
     sys.exit(main())
@@ -188,5 +188,6 @@ def main():
     rewrapper.execute(args.dry_run)
     rewrapper.pkcs11.return_session(rewrapper.hsm_session)

+
 if __name__ == '__main__':
     main()
@@ -167,5 +167,6 @@ def main():
     )
     migrator.execute(args.dry_run)

+
 if __name__ == '__main__':
     main()
@@ -73,5 +73,6 @@ def main():
     except RuntimeError as e:
         fail(1, e)

+
 if __name__ == '__main__':
     main()
@@ -352,6 +352,7 @@ def set_middleware_defaults():
                            'PATCH']
     )

+
 CONF = new_config()
 LOG = logging.getLogger(__name__)
 parse_args(CONF)
@@ -88,7 +88,7 @@ def get_base_url_from_request():
     # FIXME: implement SERVER_NAME lookup if HTTP_HOST is not set
     if p_url.path:
         # Remove the version from the path to extract the base path
-        base_path = re.sub('/v[0-9\.]+$', '', p_url.path)
+        base_path = re.sub(r'/v[0-9\.]+$', '', p_url.path)
         base_url = '%s://%s%s' % (scheme, netloc, base_path)
     else:
         base_url = '%s://%s' % (scheme, netloc)
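The only functional change in this hunk is the r'' prefix: under Python 3, an unrecognized backslash escape such as \. in an ordinary string literal raises a DeprecationWarning, which pycodestyle reports as W605, so regex patterns are written as raw strings. A standalone illustration (not barbican code):

    import re

    # As a raw string the backslash reaches the regex engine unmodified,
    # so no W605 / DeprecationWarning is raised for '\.'.
    print(re.sub(r'/v[0-9\.]+$', '', '/key-manager/v1.0'))  # -> /key-manager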
@@ -204,7 +204,7 @@ def is_multiple_backends_enabled():
         secretstore_conf = config.get_module_config('secretstore')
     except KeyError:
         # Ensure module is initialized
-        from barbican.plugin.interface import secret_store  # nopep8
+        from barbican.plugin.interface import secret_store  # noqa: F401
         secretstore_conf = config.get_module_config('secretstore')
     return secretstore_conf.secretstore.enable_multiple_secret_stores

@@ -17,7 +17,8 @@ import ast
 import re
 import six

-import pep8
+from hacking import core
+import pycodestyle


 """
@@ -61,7 +62,7 @@ class BaseASTChecker(ast.NodeVisitor):
     CHECK_DESC = 'No check message specified'

     def __init__(self, tree, filename):
-        """This object is created automatically by pep8.
+        """This object is created automatically by pycodestyle.

         :param tree: an AST tree
         :param filename: name of the file being analyzed
@@ -71,12 +72,13 @@ class BaseASTChecker(ast.NodeVisitor):
         self._errors = []

     def run(self):
-        """Called automatically by pep8."""
+        """Called automatically by pycodestyle."""
         self.visit(self._tree)
         return self._errors

     def add_error(self, node, message=None):
         """Add an error caused by a node to the list of errors for pep8."""
+
         message = message or self.CHECK_DESC
         error = (node.lineno, node.col_offset, message, self.__class__)
         self._errors.append(error)
@@ -98,6 +100,8 @@ class CheckLoggingFormatArgs(BaseASTChecker):
     The format arguments should not be a tuple as it is easy to miss.

     """
+    name = "check_logging_format_args"
+    version = "1.0"

     CHECK_DESC = 'B310 Log method arguments should not be a tuple.'
     LOG_METHODS = [
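The two added attributes are there because hacking 3.x hands registration over to flake8, whose plugin loader expects every check, including class-based AST checks like this one, to expose a name and a version. A rough, self-contained sketch of that shape (the B399 code and the check itself are invented for illustration):

    import ast


    class CheckNoPrintCalls(ast.NodeVisitor):
        """Hypothetical B399 check: flag calls to print()."""

        # Required by flake8's plugin machinery, which hacking 3.x defers to.
        name = "check_no_print_calls"
        version = "1.0"

        def __init__(self, tree, filename):
            self._tree = tree
            self._errors = []

        def run(self):
            # flake8 calls run() and expects an iterable of
            # (line, col, message, check_class) tuples.
            self.visit(self._tree)
            return self._errors

        def visit_Call(self, node):
            if isinstance(node.func, ast.Name) and node.func.id == 'print':
                self._errors.append((node.lineno, node.col_offset,
                                     'B399 do not use print()', self.__class__))
            self.generic_visit(node)

Like CheckLoggingFormatArgs, such a class would be wired up through the [flake8:local-plugins] section added to tox.ini at the end of this diff.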
@@ -154,59 +158,13 @@ class CheckLoggingFormatArgs(BaseASTChecker):
         return super(CheckLoggingFormatArgs, self).generic_visit(node)


-class CheckForStrUnicodeExc(BaseASTChecker):
-    """Checks for the use of str() or unicode() on an exception.
-
-    This currently only handles the case where str() or unicode()
-    is used in the scope of an exception handler. If the exception
-    is passed into a function, returned from an assertRaises, or
-    used on an exception created in the same scope, this does not
-    catch it.
-    """
-
-    CHECK_DESC = ('B314 str() and unicode() cannot be used on an '
-                  'exception. Remove or use six.text_type()')
-
-    def __init__(self, tree, filename):
-        super(CheckForStrUnicodeExc, self).__init__(tree, filename)
-        self.name = []
-        self.already_checked = []
-
-    # Python 2
-    def visit_TryExcept(self, node):
-        for handler in node.handlers:
-            if handler.name:
-                self.name.append(handler.name.id)
-                super(CheckForStrUnicodeExc, self).generic_visit(node)
-                self.name = self.name[:-1]
-            else:
-                super(CheckForStrUnicodeExc, self).generic_visit(node)
-
-    # Python 3
-    def visit_ExceptHandler(self, node):
-        if node.name:
-            self.name.append(node.name)
-            super(CheckForStrUnicodeExc, self).generic_visit(node)
-            self.name = self.name[:-1]
-        else:
-            super(CheckForStrUnicodeExc, self).generic_visit(node)
-
-    def visit_Call(self, node):
-        if self._check_call_names(node, ['str', 'unicode']):
-            if node not in self.already_checked:
-                self.already_checked.append(node)
-                if isinstance(node.args[0], ast.Name):
-                    if node.args[0].id in self.name:
-                        self.add_error(node.args[0])
-        super(CheckForStrUnicodeExc, self).generic_visit(node)
-
-
-def check_oslo_namespace_imports(logical_line, physical_line, filename):
+@core.flake8ext
+def check_oslo_namespace_imports(physical_line, logical_line, filename):
     """'oslo_' should be used instead of 'oslo.'

     B317
     """
-    if pep8.noqa(physical_line):
+    if pycodestyle.noqa(physical_line):
         return
     if re.match(oslo_namespace_imports, logical_line):
         msg = ("B317: '%s' must be used instead of '%s'.") % (
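From hacking 3.x on, line-based checks are plain generator functions tagged with core.flake8ext rather than registered through a factory, and pycodestyle.noqa() takes over from the old pep8.noqa() helper. A minimal sketch of the pattern with an invented B998 check (illustrative only, not barbican code):

    from hacking import core
    import pycodestyle


    @core.flake8ext
    def no_fixme_comments(logical_line, physical_line):
        """Hypothetical B998 check: flag FIXME markers."""
        if pycodestyle.noqa(physical_line):   # honour inline "# noqa" markers
            return
        if 'FIXME' in logical_line:
            yield (0, "B998: track FIXMEs in the bug tracker")


    # Logical-line checks yield (offset, message) pairs and can be called
    # directly, which is how the unit tests later in this diff exercise them.
    print(list(no_fixme_comments("x = 1  # FIXME", "x = 1  # FIXME\n")))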
@@ -215,6 +173,7 @@ def check_oslo_namespace_imports(logical_line, physical_line, filename):
         yield(0, msg)


+@core.flake8ext
 def dict_constructor_with_list_copy(logical_line):
     """Use a dict comprehension instead of a dict constructor

@@ -227,6 +186,7 @@ def dict_constructor_with_list_copy(logical_line):
         yield (0, msg)


+@core.flake8ext
 def no_xrange(logical_line):
     """Do not use 'xrange'

@@ -236,6 +196,7 @@ def no_xrange(logical_line):
         yield(0, "B319: Do not use xrange().")


+@core.flake8ext
 def validate_assertTrue(logical_line):
     """Use 'assertTrue' instead of 'assertEqual'

@@ -247,6 +208,7 @@ def validate_assertTrue(logical_line):
         yield(0, msg)


+@core.flake8ext
 def validate_assertIsNone(logical_line):
     """Use 'assertIsNone' instead of 'assertEqual'

@@ -258,6 +220,7 @@ def validate_assertIsNone(logical_line):
         yield(0, msg)


+@core.flake8ext
 def no_log_warn_check(logical_line):
     """Disallow 'LOG.warn'

@@ -268,6 +231,7 @@ def no_log_warn_check(logical_line):
         yield(0, msg)


+@core.flake8ext
 def validate_assertIsNotNone(logical_line):
     """Use 'assertIsNotNone'

@@ -279,15 +243,3 @@ def validate_assertIsNotNone(logical_line):
                " of using assertNotEqual(None, value) or"
                " assertIsNot(None, value).")
         yield(0, msg)
-
-
-def factory(register):
-    register(CheckForStrUnicodeExc)
-    register(CheckLoggingFormatArgs)
-    register(check_oslo_namespace_imports)
-    register(dict_constructor_with_list_copy)
-    register(no_xrange)
-    register(validate_assertTrue)
-    register(validate_assertIsNone)
-    register(no_log_warn_check)
-    register(validate_assertIsNotNone)
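The deleted factory() hook was the target of the old [hacking] local-check-factory setting in tox.ini; its role is taken over by the [flake8:local-plugins] table added in the tox.ini hunk near the end of this diff, where each code maps to a 'module:object' reference resolved relative to the configured paths. Roughly how such an entry resolves (an illustrative sketch, not flake8's actual loader code):

    import importlib
    import sys

    sys.path.insert(0, "./barbican/hacking")   # the 'paths' value from tox.ini

    module_name, attr = "checks:no_xrange".split(":")
    check = getattr(importlib.import_module(module_name), attr)

    # The resolved object is the decorated generator function defined above;
    # this is expected to yield the B319 message shown earlier.
    print(list(check("for i in xrange(10):")))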
@@ -54,7 +54,7 @@ def cleanup_unassociated_projects():
     for model in project_children_tables:
         sub_query = sub_query.outerjoin(model,
                                         models.Project.id == model.project_id)
-        sub_query = sub_query.filter(model.id == None)  # nopep8
+        sub_query = sub_query.filter(model.id == None)  # noqa
     sub_query = sub_query.subquery()
     sub_query = sa_sql.select([sub_query])
     query = session.query(models.Project)
@@ -89,7 +89,7 @@ def cleanup_parent_with_no_child(parent_model, child_model,
     session = repo.get_session()
     sub_query = session.query(parent_model.id)
     sub_query = sub_query.outerjoin(child_model)
-    sub_query = sub_query.filter(child_model.id == None)  # nopep8
+    sub_query = sub_query.filter(child_model.id == None)  # noqa
     sub_query = sub_query.subquery()
     sub_query = sa_sql.select([sub_query])
     query = session.query(parent_model)
@@ -1395,19 +1395,19 @@ class ContainerRepo(BaseRepo):


 class ContainerSecretRepo(BaseRepo):
-        """Repository for the ContainerSecret entity."""
-        def _do_entity_name(self):
-            """Sub-class hook: return entity name, such as for debugging."""
-            return "ContainerSecret"
+    """Repository for the ContainerSecret entity."""
+    def _do_entity_name(self):
+        """Sub-class hook: return entity name, such as for debugging."""
+        return "ContainerSecret"

-        def _do_build_get_query(self, entity_id, external_project_id, session):
-            """Sub-class hook: build a retrieve query."""
-            return session.query(models.ContainerSecret
-                                 ).filter_by(id=entity_id)
+    def _do_build_get_query(self, entity_id, external_project_id, session):
+        """Sub-class hook: build a retrieve query."""
+        return session.query(models.ContainerSecret
+                             ).filter_by(id=entity_id)

-        def _do_validate(self, values):
-            """Sub-class hook: validate values."""
-            pass
+    def _do_validate(self, values):
+        """Sub-class hook: validate values."""
+        pass


 class ContainerConsumerRepo(BaseRepo):
@@ -133,6 +133,7 @@ def create_connection(conf, subsystem_path):
     connection.set_authentication_cert(pem_path)
     return connection

+
 crypto = _setup_nss_db_services(CONF)
 if crypto:
     crypto.initialize()
@@ -83,7 +83,7 @@ def set_subject_X509Name(target, dn):
         elif name.lower() == 'l':
             target.L = value
         elif name.lower() == 'o':
-            target.O = value
+            target.O = value  # noqa: E741
     return target


@@ -12,23 +12,21 @@
 # License for the specific language governing permissions and limitations
 # under the License.

 import sys
 import textwrap
 from unittest import mock

 import ddt
-import pep8
+import pycodestyle

 from barbican.hacking import checks
-from barbican.tests import utils
+import oslotest


 @ddt.ddt
-class HackingTestCase(utils.BaseTestCase):
+class HackingTestCase(oslotest.base.BaseTestCase):
     """Hacking test cases

     This class tests the hacking checks in barbican.hacking.checks by passing
-    strings to the check methods like the pep8/flake8 parser would. The parser
+    strings to the check methods like the pycodestyle/flake8 parser would. The
+    parser
     loops over each line in the file and then passes the parameters to the
     check method. The parameter names in the check method dictate what type of
     object is passed to the check method. The parameter types are::
@@ -48,7 +46,7 @@ class HackingTestCase(utils.BaseTestCase):
     indent_level: indentation (with tabs expanded to multiples of 8)
     previous_indent_level: indentation on previous line
     previous_logical: previous logical line
-    filename: Path of the file being run through pep8
+    filename: Path of the file being run through pycodestyle

     When running a test on a check method the return will be False/None if
     there is no violation in the sample input. If there is an error a tuple is
@@ -57,16 +55,16 @@ class HackingTestCase(utils.BaseTestCase):
     should pass.
     """

-    # We are patching pep8 so that only the check under test is actually
+    # We are patching pycodestyle so that only the check under test is actually
     # installed.
-    @mock.patch('pep8._checks',
+    @mock.patch('pycodestyle._checks',
                 {'physical_line': {}, 'logical_line': {}, 'tree': {}})
     def _run_check(self, code, checker, filename=None):
-        pep8.register_check(checker)
+        pycodestyle.register_check(checker)

         lines = textwrap.dedent(code).strip().splitlines(True)

-        checker = pep8.Checker(filename=filename, lines=lines)
+        checker = pycodestyle.Checker(filename=filename, lines=lines)
         checker.check_all()
         checker.report._deferred_print.sort()
         return checker.report._deferred_print
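pycodestyle is the renamed continuation of the pep8 module, so register_check() and Checker behave the way this helper expects. The same API can be used outside the test harness; a small sketch with a made-up physical-line check (not barbican code):

    import pycodestyle


    def no_tabs(physical_line):
        """Hypothetical X100 check used only to demonstrate the API."""
        if '\t' in physical_line:
            return 0, "X100 tab character found"


    # register_check() files the function under pycodestyle's physical-line
    # checks, keyed off its parameter name.
    pycodestyle.register_check(no_tabs)

    checker = pycodestyle.Checker(filename='example.py',
                                  lines=['x = 1\t+ 2\n'])
    checker.check_all()   # prints "example.py:1:1: X100 tab character found"
                          # (built-in pycodestyle checks report on the line too)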
@@ -92,95 +90,6 @@
               """
         self._assert_has_no_errors(code, checker)

     @ddt.data(*checks.CheckLoggingFormatArgs.LOG_METHODS)
     def test_logging_with_tuple_argument(self, log_method):
         checker = checks.CheckLoggingFormatArgs
         code = """
               import logging
               LOG = logging.getLogger()
               LOG.{0}("Volume %s caught fire and is at %d degrees C and "
                       "climbing.", ('volume1', 500))
               """
         if (sys.version_info >= (3, 8)):
             exc_errors = [(4, 20, 'B310')]
         else:
             exc_errors = [(4, 21, 'B310')]
         self._assert_has_errors(code.format(log_method), checker,
                                 expected_errors=exc_errors)

-    def test_str_on_exception(self):
-
-        checker = checks.CheckForStrUnicodeExc
-        code = """
-               def f(a, b):
-                   try:
-                       p = str(a) + str(b)
-                   except ValueError as e:
-                       p = str(e)
-                   return p
-               """
-        errors = [(5, 16, 'B314')]
-        self._assert_has_errors(code, checker, expected_errors=errors)
-
-    def test_no_str_unicode_on_exception(self):
-        checker = checks.CheckForStrUnicodeExc
-        code = """
-               def f(a, b):
-                   try:
-                       p = unicode(a) + str(b)
-                   except ValueError as e:
-                       p = e
-                   return p
-               """
-        self._assert_has_no_errors(code, checker)
-
-    def test_unicode_on_exception(self):
-        checker = checks.CheckForStrUnicodeExc
-        code = """
-               def f(a, b):
-                   try:
-                       p = str(a) + str(b)
-                   except ValueError as e:
-                       p = unicode(e)
-                   return p
-               """
-        errors = [(5, 20, 'B314')]
-        self._assert_has_errors(code, checker, expected_errors=errors)
-
-    def test_str_on_multiple_exceptions(self):
-        checker = checks.CheckForStrUnicodeExc
-        code = """
-               def f(a, b):
-                   try:
-                       p = str(a) + str(b)
-                   except ValueError as e:
-                       try:
-                           p = unicode(a) + unicode(b)
-                       except ValueError as ve:
-                           p = str(e) + str(ve)
-                       p = e
-                   return p
-               """
-        errors = [(8, 20, 'B314'), (8, 29, 'B314')]
-        self._assert_has_errors(code, checker, expected_errors=errors)
-
-    def test_str_unicode_on_multiple_exceptions(self):
-        checker = checks.CheckForStrUnicodeExc
-        code = """
-               def f(a, b):
-                   try:
-                       p = str(a) + str(b)
-                   except ValueError as e:
-                       try:
-                           p = unicode(a) + unicode(b)
-                       except ValueError as ve:
-                           p = str(e) + unicode(ve)
-                       p = str(e)
-                   return p
-               """
-        errors = [(8, 20, 'B314'), (8, 33, 'B314'), (9, 16, 'B314')]
-        self._assert_has_errors(code, checker, expected_errors=errors)
-
     def test_dict_constructor_with_list_copy(self):
         self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
             " dict([(i, connect_info[i])"))))
@@ -36,6 +36,7 @@ def get_default_container_create_data(secret):
         ]
     }

+
 create_container_data = {
     "name": "containername",
     "type": "generic",
@@ -3,11 +3,10 @@
 # process, which may cause wedges in the gate later.

 # hacking should appear first in case something else depends on pep8
-hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
+hacking>=3.0,<3.1.0 # Apache-2.0
 pyflakes>=2.1.1

 coverage!=4.4,>=4.0 # Apache-2.0
 ddt>=1.0.1 # MIT
 oslotest>=3.2.0 # Apache-2.0
 pykmip>=0.7.0 # Apache 2.0 License
 stestr>=2.0.0 # Apache-2.0
tox.ini
@@ -69,7 +69,7 @@ commands = oslo_debug_helper -t barbican/tests {posargs}
 # without installing barbican.
 install_command = /bin/echo {packages}
 commands =
-    pip install "hacking>=0.10.0,<0.11"
+    pip install "hacking>=3.0,<3.1.0"
     flake8 barbican setup.py

 [testenv:docs]
@@ -151,6 +151,10 @@ ignore-path = .venv,.git,.tox,.tmp,*barbican/locale*,*lib/python*,barbican.egg*,
 filename = *.py,app.wsgi
 exclude = .git,.idea,.tox,bin,dist,debian,rpmbuild,tools,*.egg-info,*.eggs,contrib,
           *docs/target,*.egg,build
+# E402 module level import not at top of file
+# W503 line break before binary operator
+# W504 line break after binary operator
+ignore = E402,W503,W504,

 [testenv:bandit]
 deps = -r{toxinidir}/test-requirements.txt
@@ -168,5 +172,14 @@ commands = bindep test
 envdir = {toxworkdir}/pep8
 commands = oslopolicy-sample-generator --config-file=etc/oslo-config-generator/policy.conf

-[hacking]
-local-check-factory = barbican.hacking.checks.factory
+[flake8:local-plugins]
+extension =
+    B310 = checks:CheckLoggingFormatArgs
+    B311 = checks:validate_assertIsNone
+    B312 = checks:validate_assertTrue
+    B317 = checks:check_oslo_namespace_imports
+    B318 = checks:dict_constructor_with_list_copy
+    B319 = checks:no_xrange
+    B320 = checks:no_log_warn_check
+    B321 = checks:validate_assertIsNotNone
+paths = ./barbican/hacking