Move localization checks into their own file
Move H7xx checks into hacking/checks/localization.py.

Change-Id: Ie795f8db42797a12ca6f8f5bd37a10784c72931a
This commit is contained in:
parent
2c092eedff
commit
316a9e511c
116
hacking/checks/localization.py
Normal file
116
hacking/checks/localization.py
Normal file
@ -0,0 +1,116 @@
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import re
|
||||
import tokenize
|
||||
|
||||
from hacking import core
|
||||
|
||||
|
||||
# Matches printf-style format specifiers in a localization string.
# Raw string avoids invalid escape sequences (\(, \w, \d ...) that raise
# DeprecationWarning/SyntaxWarning in modern Python; the pattern itself
# is unchanged.
FORMAT_RE = re.compile(r"%(?:"
                       r"%|"           # Ignore plain percents
                       r"(\(\w+\))?"   # mapping key
                       r"([#0 +-]?"    # flag
                       r"(?:\d+|\*)?"  # width
                       r"(?:\.\d+)?"   # precision
                       r"[hlL]?"       # length mod
                       r"\w))")        # type
|
||||
|
||||
|
||||
class LocalizationError(Exception):
    """Raised when a localization (H70x) violation is detected.

    Raised with args ``(start, message)`` where ``start`` is the token
    position and ``message`` carries the H70x error text.
    """
|
||||
|
||||
|
||||
def check_i18n():
    """Generator that checks token stream for localization errors.

    Expects tokens to be ``send``ed one by one.
    Raises LocalizationError if some error is found.
    """
    import ast  # local import: used to decode string-literal tokens safely

    while True:
        try:
            token_type, text, _, _, line = yield
        except GeneratorExit:
            return

        # Look for a bare `_` name that is not the definition of `_` itself.
        if (token_type == tokenize.NAME and text == "_" and
                not line.startswith('def _(msg):')):

            # Skip newline tokens until the next significant token.
            while True:
                token_type, text, start, _, _ = yield
                if token_type != tokenize.NL:
                    break
            if token_type != tokenize.OP or text != "(":
                continue  # not a localization call

            # Accumulate the (possibly implicitly concatenated) string
            # argument passed to _().
            format_string = ''
            while True:
                token_type, text, start, _, _ = yield
                if token_type == tokenize.STRING:
                    # literal_eval decodes the string literal (quotes,
                    # escape sequences) without executing arbitrary code
                    # the way the original eval() could.
                    format_string += ast.literal_eval(text)
                elif token_type == tokenize.NL:
                    pass
                else:
                    break

            if not format_string:
                raise LocalizationError(
                    start, "H701: Empty localization string")
            if token_type != tokenize.OP:
                raise LocalizationError(
                    start, "H701: Invalid localization call")
            if text != ")":
                if text == "%":
                    raise LocalizationError(
                        start,
                        "H702: Formatting operation should be outside"
                        " of localization method call")
                elif text == "+":
                    raise LocalizationError(
                        start,
                        "H702: Use bare string concatenation instead of +")
                else:
                    raise LocalizationError(
                        start, "H702: Argument to _ must be just a string")

            format_specs = FORMAT_RE.findall(format_string)
            positional_specs = [(key, spec) for key, spec in format_specs
                                if not key and spec]
            # not spec means %%, key means %(smth)s
            if len(positional_specs) > 1:
                raise LocalizationError(
                    start, "H703: Multiple positional placeholders")
|
||||
|
||||
|
||||
@core.flake8ext
def hacking_localization_strings(logical_line, tokens):
    r"""Check localization in line.

    Okay: _("This is fine")
    Okay: _("This is also fine %s")
    Okay: _("So is this %s, %(foo)s") % {foo: 'foo'}
    H701: _('')
    H702: _("Bob" + " foo")
    H702: _("Bob %s" % foo)
    # H703 check is not quite right, disabled by removing colon
    H703 _("%s %s" % (foo, bar))
    """
    # TODO(sdague) actually get these tests working
    checker = check_i18n()
    next(checker)  # prime the coroutine before sending tokens
    try:
        for token in tokens:
            checker.send(token)
        checker.close()
    except LocalizationError as err:
        yield err.args
|
||||
|
||||
# TODO(jogo) Dict and list objects
|
101
hacking/core.py
101
hacking/core.py
@ -24,7 +24,6 @@ import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import tokenize
|
||||
|
||||
import pbr.util
|
||||
import pep8
|
||||
@ -100,106 +99,6 @@ def import_normalize(line):
|
||||
return line
|
||||
|
||||
|
||||
# Matches printf-style format specifiers in a localization string.
# Raw string avoids invalid escape sequences (\(, \w, \d ...) that raise
# DeprecationWarning/SyntaxWarning in modern Python; the pattern itself
# is unchanged.
FORMAT_RE = re.compile(r"%(?:"
                       r"%|"           # Ignore plain percents
                       r"(\(\w+\))?"   # mapping key
                       r"([#0 +-]?"    # flag
                       r"(?:\d+|\*)?"  # width
                       r"(?:\.\d+)?"   # precision
                       r"[hlL]?"       # length mod
                       r"\w))")        # type
|
||||
|
||||
|
||||
class LocalizationError(Exception):
    """Raised when a localization (H70x) violation is detected.

    Raised with args ``(start, message)`` where ``start`` is the token
    position and ``message`` carries the H70x error text.
    """
|
||||
|
||||
|
||||
def check_i18n():
    """Generator that checks token stream for localization errors.

    Expects tokens to be ``send``ed one by one.
    Raises LocalizationError if some error is found.
    """
    import ast  # local import: used to decode string-literal tokens safely

    while True:
        try:
            token_type, text, _, _, line = yield
        except GeneratorExit:
            return

        # Look for a bare `_` name that is not the definition of `_` itself.
        if (token_type == tokenize.NAME and text == "_" and
                not line.startswith('def _(msg):')):

            # Skip newline tokens until the next significant token.
            while True:
                token_type, text, start, _, _ = yield
                if token_type != tokenize.NL:
                    break
            if token_type != tokenize.OP or text != "(":
                continue  # not a localization call

            # Accumulate the (possibly implicitly concatenated) string
            # argument passed to _().
            format_string = ''
            while True:
                token_type, text, start, _, _ = yield
                if token_type == tokenize.STRING:
                    # literal_eval decodes the string literal (quotes,
                    # escape sequences) without executing arbitrary code
                    # the way the original eval() could.
                    format_string += ast.literal_eval(text)
                elif token_type == tokenize.NL:
                    pass
                else:
                    break

            if not format_string:
                raise LocalizationError(
                    start, "H701: Empty localization string")
            if token_type != tokenize.OP:
                raise LocalizationError(
                    start, "H701: Invalid localization call")
            if text != ")":
                if text == "%":
                    raise LocalizationError(
                        start,
                        "H702: Formatting operation should be outside"
                        " of localization method call")
                elif text == "+":
                    raise LocalizationError(
                        start,
                        "H702: Use bare string concatenation instead of +")
                else:
                    raise LocalizationError(
                        start, "H702: Argument to _ must be just a string")

            format_specs = FORMAT_RE.findall(format_string)
            positional_specs = [(key, spec) for key, spec in format_specs
                                if not key and spec]
            # not spec means %%, key means %(smth)s
            if len(positional_specs) > 1:
                raise LocalizationError(
                    start, "H703: Multiple positional placeholders")
|
||||
|
||||
|
||||
@flake8ext
def hacking_localization_strings(logical_line, tokens):
    r"""Check localization in line.

    Okay: _("This is fine")
    Okay: _("This is also fine %s")
    Okay: _("So is this %s, %(foo)s") % {foo: 'foo'}
    H701: _('')
    H702: _("Bob" + " foo")
    H702: _("Bob %s" % foo)
    # H703 check is not quite right, disabled by removing colon
    H703 _("%s %s" % (foo, bar))
    """
    # TODO(sdague) actually get these tests working
    checker = check_i18n()
    next(checker)  # prime the coroutine before sending tokens
    try:
        for token in tokens:
            checker.send(token)
        checker.close()
    except LocalizationError as err:
        yield err.args
|
||||
|
||||
# TODO(jogo) Dict and list objects
|
||||
|
||||
|
||||
@flake8ext
|
||||
def hacking_is_not(logical_line):
|
||||
r"""Check for use of 'is not' for testing unequal identities.
|
||||
|
@ -51,7 +51,7 @@ flake8.extension =
|
||||
H404 = hacking.checks.docstrings:hacking_docstring_multiline_start
|
||||
H405 = hacking.checks.docstrings:hacking_docstring_summary
|
||||
H501 = hacking.checks.dictlist:hacking_no_locals
|
||||
H700 = hacking.core:hacking_localization_strings
|
||||
H700 = hacking.checks.localization:hacking_localization_strings
|
||||
H801 = hacking.core:OnceGitCheckCommitTitleBug
|
||||
H802 = hacking.core:OnceGitCheckCommitTitleLength
|
||||
H803 = hacking.core:OnceGitCheckCommitTitlePeriodEnding
|
||||
|
Loading…
Reference in New Issue
Block a user