Merge "Update hacking for Python3"
commit a7334a6150
@@ -22,11 +22,9 @@ eventlet==0.20.0
 extras==1.0.0
 fasteners==0.14.1
 fixtures==3.0.0
-flake8==2.5.5
 future==0.16.0
 futurist==1.6.0
 greenlet==0.4.13
-hacking==0.12.0
 idna==2.6
 imagesize==1.0.0
 iso8601==0.1.11
@@ -71,13 +69,11 @@ oslotest==3.2.0
 Paste==2.0.3
 PasteDeploy==1.5.2
 pbr==2.0.0
-pep8==1.5.7
 pika-pool==0.1.3
 pika==0.10.0
 prettytable==0.7.2
 psycopg2==2.7
 pycadf==2.7.0
-pyflakes==0.8.1
 Pygments==2.2.0
 pyinotify==0.9.6
 PyMySQL==0.7.6
@@ -97,7 +97,7 @@ class ProjectMapper(APIMapper):
         # NOTE(abhishekk): project_id parameter is only valid if its hex
         # or hex + dashes (note, integers are a subset of this). This
         # is required to hand our overlaping routes issues.
-        project_id_regex = '[0-9a-f\-]+'
+        project_id_regex = r'[0-9a-f\-]+'
         if CONF.osapi_v1.project_id_regex:
             project_id_regex = CONF.osapi_v1.project_id_regex

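
A note on the r'' change above: from Python 3.6 on, an unrecognized escape sequence such as \- inside a plain string literal produces a DeprecationWarning at compile time (a SyntaxWarning on newer interpreters), and the pycodestyle shipped with the updated linters reports it as W605. The raw-string prefix keeps the pattern byte-for-byte identical while silencing the warning. A minimal illustration, not taken from the commit:

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        compile("'[0-9a-f\\-]+'", "<plain>", "eval")   # old form: warns about '\-'
        compile("r'[0-9a-f\\-]+'", "<raw>", "eval")    # new form: no warning
    print([str(w.message) for w in caught])            # only the plain literal warned
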
@@ -250,7 +250,7 @@ class APIRouterV1(base_wsgi.Router):
                             **kargs)

             if resource.custom_routes_fn:
-                    resource.custom_routes_fn(mapper, wsgi_resource)
+                resource.custom_routes_fn(mapper, wsgi_resource)

     def _register_controllers(self, ext):
         """Register controllers defined by the extensions
@@ -94,7 +94,7 @@ class MasakariManager(manager.Manager):
             self.driver.execute_process_failure(
                 context, process_name, host_name,
                 notification.notification_uuid)
-        except exception.SkipProcessRecoveryException as e:
+        except exception.SkipProcessRecoveryException:
             notification_status = fields.NotificationStatus.FINISHED
         except (exception.MasakariException,
                 exception.ProcessRecoveryFailureException) as e:
@@ -144,7 +144,7 @@ class MasakariManager(manager.Manager):
         except exception.IgnoreInstanceRecoveryException as e:
             notification_status = fields.NotificationStatus.IGNORED
             exception_info = e
-        except exception.SkipInstanceRecoveryException as e:
+        except exception.SkipInstanceRecoveryException:
             notification_status = fields.NotificationStatus.FINISHED
         except (exception.MasakariException,
                 exception.InstanceRecoveryFailureException) as e:
@@ -230,7 +230,7 @@ class MasakariManager(manager.Manager):
                 notification.notification_uuid,
                 update_host_method=update_host_method,
                 reserved_host_list=reserved_host_list)
-        except exception.SkipHostRecoveryException as e:
+        except exception.SkipHostRecoveryException:
            notification_status = fields.NotificationStatus.FINISHED
         except (exception.HostRecoveryFailureException,
                 exception.ReservedHostsUnavailable,
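
The three except-clause edits above share one motivation: the exception was bound to e but never used in those branches, which the pyflakes bundled with the newer flake8/hacking toolchain reports as F841 ("local variable 'e' is assigned to but never used"). A small sketch of the pattern with made-up exception names:

    class SkipError(Exception):
        pass


    class RecoverableError(Exception):
        pass


    def handle(work):
        try:
            work()
        except SkipError:               # nothing from the exception is needed: no "as e"
            return "finished"
        except RecoverableError as e:   # the bound name is actually used, so it stays
            return "error: %s" % e
        return "ok"


    def fail_skip():
        raise SkipError()


    print(handle(fail_skip))            # -> finished
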
@@ -15,6 +15,7 @@

 import re

+from hacking import core

 """
 Guidelines for writing new hacking checks
@@ -39,10 +40,10 @@ rule_default_re = re.compile(r".*RuleDefault\(")
 policy_enforce_re = re.compile(r".*_ENFORCER\.enforce\(")
 asse_trueinst_re = re.compile(
     r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, "
-    "(\w|\.|\'|\"|\[|\])+\)\)")
+    r"(\w|\.|\'|\"|\[|\])+\)\)")
 asse_equal_type_re = re.compile(
     r"(.)*assertEqual\(type\((\w|\.|\'|\"|\[|\])+\), "
-    "(\w|\.|\'|\"|\[|\])+\)")
+    r"(\w|\.|\'|\"|\[|\])+\)")
 asse_equal_in_end_with_true_or_false_re = re.compile(
     r"assertEqual\("r"(\w|[][.'\"])+ in (\w|[][.'\", ])+, (True|False)\)")
 asse_equal_in_start_with_true_or_false_re = re.compile(
@@ -72,7 +73,7 @@ asse_raises_regexp = re.compile(r"assertRaisesRegexp\(")
 conf_attribute_set_re = re.compile(r"CONF\.[a-z0-9_.]+\s*=\s*\w")
 translated_log = re.compile(
     r"(.)*LOG\.(audit|error|info|critical|exception)"
-    "\(\s*_\(\s*('|\")")
+    r"\(\s*_\(\s*('|\")")
 mutable_default_args = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])")
 string_translation = re.compile(r"[^_]*_\(\s*('|\")")
 underscore_import_check = re.compile(r"(.)*import _(.)*")
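
The two regex hunks above finish a job the first fragments already started: a string prefix applies to each literal separately, so in an implicitly concatenated pattern every fragment needs its own r prefix. The compiled pattern is unchanged; only the escape handling of the second fragment is. For example:

    # Each adjacent literal carries its own prefix; the resulting text is identical.
    with_prefix = r"(.)*assertTrue\(" r"isinstance\("
    single = r"(.)*assertTrue\(isinstance\("
    assert with_prefix == single
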
@@ -99,6 +100,7 @@ log_translation_re = re.compile(
     })


+@core.flake8ext
 def no_db_session_in_public_api(logical_line, filename):
     if "db/api.py" in filename:
         if session_check.match(logical_line):
@@ -106,6 +108,7 @@ def no_db_session_in_public_api(logical_line, filename):
                       " session")


+@core.flake8ext
 def use_timeutils_utcnow(logical_line, filename):
     # tools are OK to use the standard datetime module
     if "/tools/" in filename:
@@ -121,6 +124,7 @@ def use_timeutils_utcnow(logical_line, filename):
             yield (pos, msg % f)


+@core.flake8ext
 def capital_cfg_help(logical_line, tokens):
     msg = "M303: capitalize help string"

@@ -132,6 +136,7 @@ def capital_cfg_help(logical_line, tokens):
             yield (0, msg)


+@core.flake8ext
 def assert_true_instance(logical_line):
     """Check for assertTrue(isinstance(a, b)) sentences

@@ -142,6 +147,7 @@ def assert_true_instance(logical_line):
                   "not allowed")


+@core.flake8ext
 def assert_equal_type(logical_line):
     """Check for assertEqual(type(A), B) sentences

@@ -151,6 +157,7 @@ def assert_equal_type(logical_line):
         yield (0, "M306: assertEqual(type(A), B) sentences not allowed")


+@core.flake8ext
 def no_translate_logs(logical_line):
     """Check for 'LOG.*(_*("'

@@ -165,6 +172,7 @@ def no_translate_logs(logical_line):
         yield (0, "M308: Log messages should not be translated")


+@core.flake8ext
 def no_import_translation_in_tests(logical_line, filename):
     """Check for 'from masakari.i18n import _'
     M309
@@ -175,6 +183,7 @@ def no_import_translation_in_tests(logical_line, filename):
             yield (0, "M309 Don't import translation in tests")


+@core.flake8ext
 def no_setting_conf_directly_in_tests(logical_line, filename):
     """Check for setting CONF.* attributes directly in tests

@@ -192,12 +201,14 @@ def no_setting_conf_directly_in_tests(logical_line, filename):
                       "instead")


+@core.flake8ext
 def no_mutable_default_args(logical_line):
     msg = "M315: Method's default argument shouldn't be mutable!"
     if mutable_default_args.match(logical_line):
         yield (0, msg)


+@core.flake8ext
 def check_explicit_underscore_import(logical_line, filename):
     """Check for explicit import of the _ function

@@ -220,6 +231,7 @@ def check_explicit_underscore_import(logical_line, filename):
                   "import of _ !")


+@core.flake8ext
 def use_jsonutils(logical_line, filename):
     # tools are OK to use the standard json module
     if "/tools/" in filename:
@@ -235,6 +247,7 @@ def use_jsonutils(logical_line, filename):
             yield (pos, msg % {'fun': f[:-1]})


+@core.flake8ext
 def assert_true_or_false_with_in(logical_line):
     """Check for assertTrue/False(A in B), assertTrue/False(A not in B),
     assertTrue/False(A in B, message) or assertTrue/False(A not in B, message)
@@ -250,6 +263,7 @@ def assert_true_or_false_with_in(logical_line):
                   "contents.")


+@core.flake8ext
 def assert_raises_regexp(logical_line):
     """Check for usage of deprecated assertRaisesRegexp

@@ -261,6 +275,7 @@ def assert_raises_regexp(logical_line):
                   "of assertRaisesRegexp")


+@core.flake8ext
 def dict_constructor_with_list_copy(logical_line):
     msg = ("M320: Must use a dict comprehension instead of a dict "
            "constructor with a sequence of key-value pairs.")
@@ -268,6 +283,7 @@ def dict_constructor_with_list_copy(logical_line):
         yield (0, msg)


+@core.flake8ext
 def assert_equal_in(logical_line):
     """Check for assertEqual(A in B, True), assertEqual(True, A in B),
     assertEqual(A in B, False) or assertEqual(False, A in B) sentences
@@ -282,7 +298,8 @@ def assert_equal_in(logical_line):
                   "contents.")


-def check_greenthread_spawns(logical_line, physical_line, filename):
+@core.flake8ext
+def check_greenthread_spawns(logical_line, filename):
     """Check for use of greenthread.spawn(), greenthread.spawn_n(),
     eventlet.spawn(), and eventlet.spawn_n()

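
About the check_greenthread_spawns signature change above: pep8/pycodestyle-style frameworks decide what to hand a check by inspecting its parameter names (logical_line, physical_line, filename, and so on), so a parameter the body never reads can simply be dropped from the signature. A quick sketch of that convention, using a throwaway function:

    import inspect


    def sample_check(logical_line, filename):   # each name requests one kind of input
        if "eventlet.spawn(" in logical_line:
            yield (0, "example message")


    # The framework reads the signature to know which arguments to supply.
    print(list(inspect.signature(sample_check).parameters))
    # -> ['logical_line', 'filename']
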
@@ -299,6 +316,7 @@ def check_greenthread_spawns(logical_line, physical_line, filename):
         yield (0, msg % {'spawn': match.group('spawn_part')})


+@core.flake8ext
 def check_no_contextlib_nested(logical_line, filename):
     msg = ("M323: contextlib.nested is deprecated. With Python 2.7"
            "and later the with-statement supports multiple nested objects. "
@@ -310,6 +328,7 @@ def check_no_contextlib_nested(logical_line, filename):
         yield (0, msg)


+@core.flake8ext
 def check_config_option_in_central_place(logical_line, filename):
     msg = ("M324: Config options should be in the central location "
            "'/masakari/conf/*'. Do not declare new config options outside "
@@ -334,6 +353,7 @@ def check_config_option_in_central_place(logical_line, filename):
         yield (0, msg)


+@core.flake8ext
 def check_doubled_words(physical_line, filename):
     """Check for the common doubled-word typos

@@ -347,6 +367,7 @@ def check_doubled_words(physical_line, filename):
         return (0, msg % {'word': match.group(1)})


+@core.flake8ext
 def check_python3_no_iteritems(logical_line):
     msg = ("M326: Use dict.items() instead of dict.iteritems().")

@@ -354,6 +375,7 @@ def check_python3_no_iteritems(logical_line):
         yield (0, msg)


+@core.flake8ext
 def check_python3_no_iterkeys(logical_line):
     msg = ("M327: Use 'for key in dict' instead of 'for key in "
            "dict.iterkeys()'.")
@@ -362,6 +384,7 @@ def check_python3_no_iterkeys(logical_line):
         yield (0, msg)


+@core.flake8ext
 def check_python3_no_itervalues(logical_line):
     msg = ("M328: Use dict.values() instead of dict.itervalues().")

@@ -369,6 +392,7 @@ def check_python3_no_itervalues(logical_line):
         yield (0, msg)


+@core.flake8ext
 def no_os_popen(logical_line):
     """Disallow 'os.popen('

@@ -383,6 +407,7 @@ def no_os_popen(logical_line):
                   'Replace it using subprocess module. ')


+@core.flake8ext
 def no_log_warn(logical_line):
     """Disallow 'LOG.warn('

@@ -397,6 +422,7 @@ def no_log_warn(logical_line):
         yield (0, msg)


+@core.flake8ext
 def yield_followed_by_space(logical_line):
     """Yield should be followed by a space.

@@ -414,6 +440,7 @@ def yield_followed_by_space(logical_line):
                "M332: Yield keyword should be followed by a space.")


+@core.flake8ext
 def check_policy_registration_in_central_place(logical_line, filename):
     msg = ('M333: Policy registration should be in the central location '
            '"/masakari/policies/*".')
@@ -428,6 +455,7 @@ def check_policy_registration_in_central_place(logical_line, filename):
         yield (0, msg)


+@core.flake8ext
 def check_policy_enforce(logical_line, filename):
     """Look for uses of masakari.policy._ENFORCER.enforce()

@@ -436,7 +464,7 @@ def check_policy_enforce(logical_line, filename):
     Uses of _ENFORCER.enforce could allow unregistered policies to be used, so
     this check looks for uses of that method.

-    M333
+    M334
     """

     msg = ('M334: masakari.policy._ENFORCER.enforce() should not be used. '
@@ -444,33 +472,3 @@ def check_policy_enforce(logical_line, filename):

     if policy_enforce_re.match(logical_line):
         yield (0, msg)
-
-
-def factory(register):
-    register(no_db_session_in_public_api)
-    register(use_timeutils_utcnow)
-    register(capital_cfg_help)
-    register(no_import_translation_in_tests)
-    register(assert_true_instance)
-    register(assert_equal_type)
-    register(assert_raises_regexp)
-    register(no_translate_logs)
-    register(no_setting_conf_directly_in_tests)
-    register(no_mutable_default_args)
-    register(check_explicit_underscore_import)
-    register(use_jsonutils)
-    register(assert_true_or_false_with_in)
-    register(dict_constructor_with_list_copy)
-    register(assert_equal_in)
-    register(check_no_contextlib_nested)
-    register(check_greenthread_spawns)
-    register(check_config_option_in_central_place)
-    register(check_doubled_words)
-    register(check_python3_no_iteritems)
-    register(check_python3_no_iterkeys)
-    register(check_python3_no_itervalues)
-    register(no_os_popen)
-    register(no_log_warn)
-    register(yield_followed_by_space)
-    register(check_policy_registration_in_central_place)
-    register(check_policy_enforce)
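
With the factory() hook gone, registration moves entirely to the flake8 plugin mechanism: each check is tagged with @core.flake8ext and mapped to its M-number in the [flake8:local-plugins] section that the tox.ini hunk below adds, instead of being registered at runtime through hacking's local-check-factory. A minimal sketch of the new shape, with an invented check name and code:

    from hacking import core


    @core.flake8ext
    def no_print_statements(logical_line):      # hypothetical local check
        if logical_line.lstrip().startswith("print("):
            yield (0, "X101: use the logging module instead of print()")

    # Matching (hypothetical) tox.ini wiring, mirroring the real M3xx entries:
    #     [flake8:local-plugins]
    #     extension =
    #         X101 = checks:no_print_statements
    #     paths = ./masakari/hacking
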
@@ -71,6 +71,7 @@ def _patch_mock_to_raise_for_invalid_assert_calls():
     mock.Mock.__getattr__ = raise_for_invalid_assert_calls(
         mock.Mock.__getattr__)

+
 # NOTE(abhishekk): needs to be called only once at import time
 # to patch the mock lib
 _patch_mock_to_raise_for_invalid_assert_calls()
@@ -41,6 +41,7 @@ def _make_hosts_list(hosts_list):
     return host_obj.Host(objects=[
         _make_host_obj(a) for a in hosts_list])

+
 HOST_LIST = [
     {"name": "host_1", "id": "1", "reserved": False,
      "on_maintenance": False, "type": "fake",
@@ -52,6 +52,7 @@ def _make_notifications_list(notifications_list):
     return notification_obj.Notification(objects=[
         _make_notification_obj(a) for a in notifications_list])

+
 NOTIFICATION_DATA = {"type": "VM", "id": 1,
                      "payload":
                      {'event': 'STOPPED', 'host_status': 'NORMAL',
@@ -40,6 +40,7 @@ def _make_segments_list(segments_list):
     return segment_obj.FailoverSegment(objects=[
         _make_segment_obj(a) for a in segments_list])

+
 FAILOVER_SEGMENT_LIST = [
     {"name": "segment1", "id": "1", "service_type": "COMPUTE",
      "recovery_method": "auto", "uuid": uuidsentinel.fake_segment,
@@ -62,6 +62,7 @@ def _fake_host(**kwargs):
     fake_host.update(kwargs)
     return fake_host

+
 fake_host = _fake_host()


@@ -66,6 +66,7 @@ def _fake_object_notification(**kwargs):
     fake_notification.update(kwargs)
     return fake_notification

+
 fake_object_notification = _fake_object_notification()

 fake_db_notification = _fake_db_notification()
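
The blank lines added in the hunks above are all the same fix: the pycodestyle pulled in by the newer flake8 expects two blank lines between a top-level def or class and the next top-level statement (E305), and these fake-data modules only had one. A tiny illustration with made-up names:

    def _make_fake(value):
        return {"value": value}


    FAKE = _make_fake(1)   # two blank lines above satisfy E305
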
@@ -16,7 +16,7 @@ import textwrap
 from unittest import mock

 import ddt
-import pep8
+import pycodestyle

 from masakari.hacking import checks
 from masakari import test
@@ -25,7 +25,9 @@ from masakari import test
 @ddt.ddt
 class HackingTestCase(test.NoDBTestCase):
     """This class tests the hacking checks in masakari.hacking.checks by
-    passing strings to the check methods like the pep8/flake8 parser would.
+    passing strings to the check methods like the pycodestyle/flake8 parser
+    would.

     The parser loops over each line in the file and then passes the
     parameters to the check method. The parameter names in the check method
     dictate what type of object is passed to the check method.
@@ -47,7 +49,7 @@ class HackingTestCase(test.NoDBTestCase):
     indent_level: indentation (with tabs expanded to multiples of 8)
     previous_indent_level: indentation on previous line
     previous_logical: previous logical line
-    filename: Path of the file being run through pep8
+    filename: Path of the file being run through pycodestyle

     When running a test on a check method the return will be False/None if
     there is no violation in the sample input. If there is an error a tuple is
@@ -213,16 +215,16 @@ class HackingTestCase(test.NoDBTestCase):
                        "msg = _('My message')",
                        "masakari/tests/other_files3.py"))), 0)

-    # We are patching pep8 so that only the check under test is actually
+    # We are patching pycodestyle so that only the check under test is actually
     # installed.
-    @mock.patch('pep8._checks',
+    @mock.patch('pycodestyle._checks',
                 {'physical_line': {}, 'logical_line': {}, 'tree': {}})
     def _run_check(self, code, checker, filename=None):
-        pep8.register_check(checker)
+        pycodestyle.register_check(checker)

         lines = textwrap.dedent(code).lstrip().splitlines(True)

-        checker = pep8.Checker(filename=filename, lines=lines)
+        checker = pycodestyle.Checker(filename=filename, lines=lines)
         checker.check_all()
         checker.report._deferred_print.sort()
         return checker.report._deferred_print
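
For reference, the pycodestyle module driven by _run_check above is the renamed pep8 project, with the same register_check/Checker entry points. A standalone sketch of the same API running over an in-memory snippet (not part of the test suite):

    import pycodestyle

    lines = ["import os\n", "def f():\n", "    return os.getcwd()\n"]
    checker = pycodestyle.Checker(filename="<snippet>", lines=lines)
    errors = checker.check_all()   # reports e.g. E302 (expected 2 blank lines) and
    print(errors)                  # returns the number of problems found
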
@@ -2,7 +2,7 @@
 # of appearance. Changing the order has an impact on the overall integration
 # process, which may cause wedges in the gate later.

-hacking>=1.1.0,<1.2.0 # Apache-2.0
+hacking>=3.0,<3.1.0 # Apache-2.0

 coverage!=4.4,>=4.0 # Apache-2.0
 ddt>=1.0.1 # MIT
tox.ini
@@ -98,14 +98,45 @@ show-source = True
 # [H203] Use assertIs(Not)None to check for None.
 # [H904] Delay string interpolations at logging calls.
 enable-extensions = H106,H203,H904
-ignore = E123,E125,E128,E731,H405
+# [W504] line break after binary operator (use W503 instead)
+ignore = E123,E125,E128,E731,H405,W504
 builtins = _
 exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build

 [hacking]
-local-check-factory = masakari.hacking.checks.factory
 import_exceptions = masakari.i18n

+[flake8:local-plugins]
+extension =
+    M301 = checks:no_db_session_in_public_api
+    M302 = checks:use_timeutils_utcnow
+    M303 = checks:capital_cfg_help
+    M305 = checks:assert_true_instance
+    M306 = checks:assert_equal_type
+    M308 = checks:no_translate_logs
+    M309 = checks:no_import_translation_in_tests
+    M310 = checks:no_setting_conf_directly_in_tests
+    M315 = checks:no_mutable_default_args
+    M316 = checks:check_explicit_underscore_import
+    M317 = checks:use_jsonutils
+    M318 = checks:assert_true_or_false_with_in
+    M319 = checks:assert_raises_regexp
+    M320 = checks:dict_constructor_with_list_copy
+    M321 = checks:assert_equal_in
+    M322 = checks:check_greenthread_spawns
+    M323 = checks:check_no_contextlib_nested
+    M324 = checks:check_config_option_in_central_place
+    M325 = checks:check_doubled_words
+    M326 = checks:check_python3_no_iteritems
+    M327 = checks:check_python3_no_iterkeys
+    M328 = checks:check_python3_no_itervalues
+    M329 = checks:no_os_popen
+    M331 = checks:no_log_warn
+    M332 = checks:yield_followed_by_space
+    M333 = checks:check_policy_registration_in_central_place
+    M334 = checks:check_policy_enforce
+paths = ./masakari/hacking
+
 [testenv:lower-constraints]
 deps =
     -c{toxinidir}/lower-constraints.txt