From 427cb3bee6d2958888aa76c233c2c90ac3879ac4 Mon Sep 17 00:00:00 2001
From: Yuriy Taraday
Date: Fri, 24 Feb 2012 16:13:44 +0400
Subject: [PATCH] Improved localization testing.

Moved localization tests to tools/hacking.py.

Change-Id: I903b90dfb09a46a72b1c64c30301f90661999f5b
---
 esx/guest_tool.py |  5 +--
 hacking.py        | 91 +++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 94 insertions(+), 2 deletions(-)

diff --git a/esx/guest_tool.py b/esx/guest_tool.py
index 8c8b4df..9cda7e3 100644
--- a/esx/guest_tool.py
+++ b/esx/guest_tool.py
@@ -350,8 +350,9 @@ def _set_ubuntu_networking(network_details=None):
             interface_file.write('\ngateway %s' % gateway)
             interface_file.write('\nnetmask %s' % subnet_mask)
             interface_file.write('\naddress %s\n' % ip_address)
-            logging.debug(_("Successfully configured NIC %d with "
-                            "NIC info %s") % (device, network_detail))
+            logging.debug(_("Successfully configured NIC %(device)d with "
+                            "NIC info %(detail)s"), {'device': device,
+                                                     'detail': network_detail})
             interface_file.close()

     if all_dns_servers:
diff --git a/hacking.py b/hacking.py
index c3c1d71..187fba8 100755
--- a/hacking.py
+++ b/hacking.py
@@ -25,6 +25,8 @@
 import inspect
 import os
 import re
 import sys
+import tokenize
+import traceback

 import pep8
@@ -35,6 +37,7 @@
 #N4xx docstrings
 #N5xx dictionaries/lists
 #N6xx Calling methods
+#N7xx localization

 IMPORT_EXCEPTIONS = ['sqlalchemy', 'migrate', 'nova.db.sqlalchemy.session']
@@ -167,6 +170,94 @@ def nova_import_module_only(logical_line):
     # TODO(jogo) handle "from x import *"


+
+FORMAT_RE = re.compile("%(?:"
+                       "%|"            # Ignore plain percents
+                       "(\(\w+\))?"    # mapping key
+                       "([#0 +-]?"     # flag
+                       "(?:\d+|\*)?"   # width
+                       "(?:\.\d+)?"    # precision
+                       "[hlL]?"        # length mod
+                       "\w))")         # type
+
+
+class LocalizationError(Exception):
+    pass
+
+
+def check_l18n():
+    """Generator that checks token stream for localization errors.
+
+    Expects tokens to be ``send``ed one by one.
+    Raises LocalizationError if some error is found.
+    """
+    while True:
+        try:
+            token_type, text, _, _, _ = yield
+        except GeneratorExit:
+            return
+        if token_type == tokenize.NAME and text == "_":
+            while True:
+                token_type, text, start, _, _ = yield
+                if token_type != tokenize.NL:
+                    break
+            if token_type != tokenize.OP or text != "(":
+                continue  # not a localization call
+
+            format_string = ''
+            while True:
+                token_type, text, start, _, _ = yield
+                if token_type == tokenize.STRING:
+                    format_string += eval(text)
+                elif token_type == tokenize.NL:
+                    pass
+                else:
+                    break
+
+            if not format_string:
+                raise LocalizationError(start,
+                    "NOVA N701: Empty localization string")
+            if token_type != tokenize.OP:
+                raise LocalizationError(start,
+                    "NOVA N701: Invalid localization call")
+            if text != ")":
+                if text == "%":
+                    raise LocalizationError(start,
+                        "NOVA N702: Formatting operation should be outside"
+                        " of localization method call")
+                elif text == "+":
+                    raise LocalizationError(start,
+                        "NOVA N702: Use bare string concatenation instead"
+                        " of +")
+                else:
+                    raise LocalizationError(start,
+                        "NOVA N702: Argument to _ must be just a string")
+
+            format_specs = FORMAT_RE.findall(format_string)
+            positional_specs = [(key, spec) for key, spec in format_specs
+                                if not key and spec]
+            # not spec means %%, key means %(smth)s
+            if len(positional_specs) > 1:
+                raise LocalizationError(start,
+                    "NOVA N703: Multiple positional placeholders")
+
+
+def nova_localization_strings(logical_line, tokens):
+    """Check localization in line.
+
+    N701: bad localization call
+    N702: complex expression instead of string as argument to _()
+    N703: multiple positional placeholders
+    """
+
+    gen = check_l18n()
+    next(gen)
+    try:
+        map(gen.send, tokens)
+        gen.close()
+    except LocalizationError as e:
+        return e.args
+
 #TODO(jogo) Dict and list objects

 current_file = ""
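
For reference, and not part of the patch itself: a minimal sketch of what the new N701/N702/N703 checks report, assuming the patched hacking.py (and its pep8 dependency) is importable from sys.path and runs under the Python 2 tooling this change targets. The driver function below is illustrative only.

    # Illustrative driver, not part of this change: tokenize single source
    # lines and feed them to the new localization check.
    import StringIO
    import tokenize

    import hacking  # the module patched above; import path assumed


    def check(line):
        readline = StringIO.StringIO(line).readline
        tokens = list(tokenize.generate_tokens(readline))
        return hacking.nova_localization_strings(line, tokens)

    print check('msg = _("")\n')                        # N701: empty localization string
    print check('msg = _("found %s" % name)\n')         # N702: "%" formatting inside _()
    print check('msg = _("%s of %s")\n')                # N703: two positional placeholders
    print check('msg = _("%(done)d of %(total)d")\n')   # None: named placeholders pass

The checker returns the pep8-style (offset, message) pair taken from LocalizationError.args, or None when the localized string passes.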