Merge "Improved localization testing."

Jenkins
2012-04-10 19:14:02 +00:00
committed by Gerrit Code Review
2 changed files with 94 additions and 2 deletions

View File

@@ -350,8 +350,9 @@ def _set_ubuntu_networking(network_details=None):
interface_file.write('\ngateway %s' % gateway)
interface_file.write('\nnetmask %s' % subnet_mask)
interface_file.write('\naddress %s\n' % ip_address)
logging.debug(_("Successfully configured NIC %d with "
"NIC info %s") % (device, network_detail))
logging.debug(_("Successfully configured NIC %(device)d with "
"NIC info %(detail)s"), {'device': device,
'detail': network_detail))
interface_file.close()
if all_dns_servers:

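For reference, a minimal standalone sketch (not part of this commit) of the logging pattern the hunk above moves to: the translatable string stays a single literal with named placeholders, and interpolation is deferred to the logging call. The `_` alias, `LOG` name, and sample values below are assumptions, not code from this change.

    from gettext import gettext as _
    import logging

    LOG = logging.getLogger(__name__)

    device = 0
    network_detail = {'ip': '10.0.0.2'}

    # Passing the mapping as the logging argument lets translators reorder
    # %(device)d and %(detail)s, and skips interpolation when DEBUG is off.
    LOG.debug(_("Successfully configured NIC %(device)d with "
                "NIC info %(detail)s"),
              {'device': device, 'detail': network_detail})
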
View File

@@ -25,6 +25,8 @@ import inspect
import os
import re
import sys
import tokenize
import traceback
import pep8
@@ -35,6 +37,7 @@ import pep8
#N4xx docstrings
#N5xx dictionaries/lists
#N6xx Calling methods
#N7xx localization
IMPORT_EXCEPTIONS = ['sqlalchemy', 'migrate', 'nova.db.sqlalchemy.session']
@@ -167,6 +170,94 @@ def nova_import_module_only(logical_line):
# TODO(jogo) handle "from x import *"
FORMAT_RE = re.compile("%(?:"
                       "%|"            # Ignore plain percents
                       "(\(\w+\))?"    # mapping key
                       "([#0 +-]?"     # flag
                       "(?:\d+|\*)?"   # width
                       "(?:\.\d+)?"    # precision
                       "[hlL]?"        # length mod
                       "\w))")         # type


class LocalizationError(Exception):
    pass


def check_l18n():
    """Generator that checks token stream for localization errors.

    Expects tokens to be ``send``ed one by one.
    Raises LocalizationError if some error is found.
    """
    while True:
        try:
            token_type, text, _, _, _ = yield
        except GeneratorExit:
            return
        if token_type == tokenize.NAME and text == "_":
            while True:
                token_type, text, start, _, _ = yield
                if token_type != tokenize.NL:
                    break
            if token_type != tokenize.OP or text != "(":
                continue  # not a localization call
            format_string = ''
            while True:
                token_type, text, start, _, _ = yield
                if token_type == tokenize.STRING:
                    format_string += eval(text)
                elif token_type == tokenize.NL:
                    pass
                else:
                    break
            if not format_string:
                raise LocalizationError(start,
                    "NOVA N701: Empty localization string")
            if token_type != tokenize.OP:
                raise LocalizationError(start,
                    "NOVA N701: Invalid localization call")
            if text != ")":
                if text == "%":
                    raise LocalizationError(start,
                        "NOVA N702: Formatting operation should be outside"
                        " of localization method call")
                elif text == "+":
                    raise LocalizationError(start,
                        "NOVA N702: Use bare string concatenation instead"
                        " of +")
                else:
                    raise LocalizationError(start,
                        "NOVA N702: Argument to _ must be just a string")
            format_specs = FORMAT_RE.findall(format_string)
            positional_specs = [(key, spec) for key, spec in format_specs
                                if not key and spec]
            # not spec means %%, key means %(smth)s
            if len(positional_specs) > 1:
                raise LocalizationError(start,
                    "NOVA N703: Multiple positional placeholders")


def nova_localization_strings(logical_line, tokens):
    """Check localization in line.

    N701: bad localization call
    N702: complex expression instead of string as argument to _()
    N703: multiple positional placeholders
    """
    gen = check_l18n()
    next(gen)
    try:
        map(gen.send, tokens)
        gen.close()
    except LocalizationError as e:
        return e.args
#TODO(jogo) Dict and list objects
current_file = ""
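
For context, an illustrative sketch (not part of this commit) of the call patterns the new N701-N703 checks are meant to reject or accept; all identifiers below are made up for the example.

    from gettext import gettext as _

    instance = "vm-1"
    host = "node-2"

    # Rejected by the checker above:
    _("")                          # N701: empty localization string
    _("Booting %s" % instance)     # N702: formatting belongs outside _()
    _("Booting " + "instance")     # N702: use implicit literal concatenation
    _("Moved %s to %s")            # N703: multiple positional placeholders

    # Accepted: one string literal, mapping keys, formatting outside the call.
    _("Moved %(instance)s to %(host)s") % {'instance': instance, 'host': host}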