Clone sahara hacking checks to saharaclient
The cloned rules are: - [S366, S367] Organize your imports according to the ``Import order``. - [S368] Must use a dict comprehension instead of a dict constructor with a sequence of key-value pairs. - [S373] Don't translate logs. - [S375] Use jsonutils from oslo_serialization instead of json. Change-Id: I909da6772f733e8282a8f304b829aab9b25d6203
This commit is contained in:
parent
586a306358
commit
6b2fa6adb6
37
HACKING.rst
37
HACKING.rst
|
@ -2,11 +2,44 @@ Sahara Style Commandments
|
|||
=========================
|
||||
|
||||
- Step 1: Read the OpenStack Style Commandments
|
||||
http://docs.openstack.org/developer/hacking/
|
||||
https://docs.openstack.org/hacking/latest/
|
||||
- Step 2: Read on
|
||||
|
||||
Sahara Specific Commandments
|
||||
----------------------------
|
||||
|
||||
None so far
|
||||
Commit Messages
|
||||
---------------
|
||||
Using a common format for commit messages will help keep our git history
|
||||
readable. Follow these guidelines:
|
||||
|
||||
- [S365] First, provide a brief summary of 50 characters or less. Summaries
|
||||
of greater than 72 characters will be rejected by the gate.
|
||||
|
||||
- [S364] The first line of the commit message should provide an accurate
|
||||
description of the change, not just a reference to a bug or blueprint.
|
||||
|
||||
Imports
|
||||
-------
|
||||
- [S366, S367] Organize your imports according to the ``Import order``
|
||||
|
||||
Dictionaries/Lists
|
||||
------------------
|
||||
|
||||
- [S360] Ensure default arguments are not mutable.
|
||||
- [S368] Must use a dict comprehension instead of a dict constructor with a
|
||||
sequence of key-value pairs. For more information, please refer to
|
||||
http://legacy.python.org/dev/peps/pep-0274/
|
||||
|
||||
Logs
|
||||
----
|
||||
|
||||
- [S373] Don't translate logs
|
||||
|
||||
- [S374] You used a deprecated log level
|
||||
|
||||
Importing json
|
||||
--------------
|
||||
|
||||
- [S375] It's more preferable to use ``jsonutils`` from ``oslo_serialization``
|
||||
instead of ``json`` for operating with ``json`` objects.
|
||||
|
|
|
@ -14,8 +14,8 @@
|
|||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
import json
|
||||
|
||||
from oslo_serialization import jsonutils
|
||||
from six.moves.urllib import parse
|
||||
|
||||
from saharaclient._i18n import _
|
||||
|
@ -236,7 +236,7 @@ def get_json(response):
|
|||
if callable(json_field_or_function):
|
||||
return response.json()
|
||||
else:
|
||||
return json.loads(response.content)
|
||||
return jsonutils.loads(response.content)
|
||||
|
||||
|
||||
class APIException(Exception):
|
||||
|
|
|
@ -13,13 +13,13 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import sys
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import exceptions
|
||||
from osc_lib import utils as osc_utils
|
||||
from oslo_log import log as logging
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
from saharaclient.osc.v1 import utils
|
||||
|
||||
|
|
|
@ -13,7 +13,6 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import sys
|
||||
|
||||
from osc_lib.command import command
|
||||
|
@ -149,7 +148,7 @@ class CreateCluster(command.ShowOne):
|
|||
if parsed_args.json:
|
||||
blob = osc_utils.read_blob_file_contents(parsed_args.json)
|
||||
try:
|
||||
template = json.loads(blob)
|
||||
template = jsonutils.loads(blob)
|
||||
except ValueError as e:
|
||||
raise exceptions.CommandError(
|
||||
'An error occurred when reading '
|
||||
|
@ -489,7 +488,7 @@ class UpdateCluster(command.ShowOne):
|
|||
if parsed_args.shares:
|
||||
blob = osc_utils.read_blob_file_contents(parsed_args.shares)
|
||||
try:
|
||||
shares = json.loads(blob)
|
||||
shares = jsonutils.loads(blob)
|
||||
except ValueError as e:
|
||||
raise exceptions.CommandError(
|
||||
'An error occurred when reading '
|
||||
|
@ -556,7 +555,7 @@ class ScaleCluster(command.ShowOne):
|
|||
if parsed_args.json:
|
||||
blob = osc_utils.read_blob_file_contents(parsed_args.json)
|
||||
try:
|
||||
template = json.loads(blob)
|
||||
template = jsonutils.loads(blob)
|
||||
except ValueError as e:
|
||||
raise exceptions.CommandError(
|
||||
'An error occurred when reading '
|
||||
|
|
|
@ -13,13 +13,13 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import sys
|
||||
|
||||
from osc_lib.command import command
|
||||
from osc_lib import exceptions
|
||||
from osc_lib import utils as osc_utils
|
||||
from oslo_log import log as logging
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
from saharaclient.osc.v1 import utils
|
||||
|
||||
|
|
|
@ -13,7 +13,6 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
from os import path
|
||||
import sys
|
||||
|
||||
|
@ -206,7 +205,7 @@ class UpdatePlugin(command.ShowOne):
|
|||
client = self.app.client_manager.data_processing
|
||||
blob = osc_utils.read_blob_file_contents(parsed_args.json)
|
||||
try:
|
||||
update_dict = json.loads(blob)
|
||||
update_dict = jsonutils.loads(blob)
|
||||
except ValueError as e:
|
||||
raise exceptions.CommandError(
|
||||
'An error occurred when reading '
|
||||
|
|
|
@ -52,7 +52,7 @@ def get_resource_id(manager, name_or_id):
|
|||
|
||||
|
||||
def create_dict_from_kwargs(**kwargs):
|
||||
return dict((k, v) for (k, v) in kwargs.items() if v is not None)
|
||||
return {k: v for (k, v) in kwargs.items() if v is not None}
|
||||
|
||||
|
||||
def prepare_data(data, fields):
|
||||
|
|
|
@ -0,0 +1,138 @@
|
|||
# Copyright (c) 2013 Mirantis Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import re
|
||||
import tokenize
|
||||
|
||||
import pep8
|
||||
|
||||
from saharaclient.tests.hacking import commit_message
|
||||
from saharaclient.tests.hacking import import_checks
|
||||
from saharaclient.tests.hacking import logging_checks
|
||||
|
||||
# Old-style "oslo." namespace imports, in both the "import oslo.x" and
# the "from oslo import x" forms -- flagged by S363.
RE_OSLO_IMPORTS = (re.compile(r"(((from)|(import))\s+oslo\.)"),
                   re.compile(r"(from\s+oslo\s+import)"))
# dict() applied to a literal sequence of key-value pairs -- flagged by S368.
RE_DICT_CONSTRUCTOR_WITH_LIST_COPY = re.compile(r".*\bdict\((\[)?(\(|\[)")
# "import json" is flagged by S375; "import jsonschema" also matches the
# first pattern, so it gets its own allow-pattern below.
RE_USE_JSONUTILS_INVALID_LINE = re.compile(r"(import\s+json)")
RE_USE_JSONUTILS_VALID_LINE = re.compile(r"(import\s+jsonschema)")
# A def whose parameter list contains a mutable ({} or []) default
# argument -- flagged by S360.
RE_MUTABLE_DEFAULT_ARGS = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])")
||||
|
||||
|
||||
def _starts_with_any(line, *prefixes):
|
||||
for prefix in prefixes:
|
||||
if line.startswith(prefix):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _any_in(line, *sublines):
|
||||
for subline in sublines:
|
||||
if subline in line:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def import_db_only_in_conductor(logical_line, filename):
    """Check that db calls are only in conductor module and in tests.

    S361
    """
    # db access is allowed from the conductor, the db package itself and
    # the test suite; skip those files entirely.
    if any(part in filename
           for part in ("sahara/conductor", "sahara/tests", "sahara/db")):
        return

    # str.startswith accepts a tuple of prefixes, replacing the manual
    # prefix loop.
    if logical_line.startswith(("from sahara import db",
                                "from sahara.db",
                                "import sahara.db")):
        yield (0, "S361: sahara.db import only allowed in "
                  "sahara/conductor/*")
|
||||
|
||||
|
||||
def hacking_no_author_attr(logical_line, tokens):
    """__author__ should not be used.

    S362: __author__ = slukjanov
    """
    # Walk the token stream rather than the raw line so that the check
    # only fires on a real NAME token, not e.g. a string containing the word.
    for token_type, text, start_index, _end, _line in tokens:
        if token_type == tokenize.NAME and text == "__author__":
            yield (start_index[1],
                   "S362: __author__ should not be used")
|
||||
|
||||
|
||||
def check_oslo_namespace_imports(logical_line):
    """Check to prevent old oslo namespace usage.

    S363
    """
    # "import oslo.x" / "from oslo.x import y" -> suggest oslo_x.
    # re.match caches compiled patterns, so inlining them keeps the
    # function self-contained at no extra cost.
    if re.match(r"(((from)|(import))\s+oslo\.)", logical_line):
        yield (0, "S363: '%s' must be used instead of '%s'." % (
            logical_line.replace('oslo.', 'oslo_'),
            logical_line))

    # "from oslo import x" -> suggest "import oslo_x".
    if re.match(r"(from\s+oslo\s+import)", logical_line):
        yield (0, "S363: '%s' must be used instead of '%s'" % (
            'import oslo_%s' % logical_line.split()[-1],
            logical_line))
|
||||
|
||||
|
||||
def dict_constructor_with_list_copy(logical_line):
    """Check to prevent dict constructor with a sequence of key-value pairs.

    S368
    """
    # Matches dict( followed by a ( or [ -- i.e. a literal sequence of
    # pairs -- which should be a dict comprehension instead (PEP 274).
    if re.match(r".*\bdict\((\[)?(\(|\[)", logical_line):
        yield (0, 'S368: Must use a dict comprehension instead of a dict '
                  'constructor with a sequence of key-value pairs.')
|
||||
|
||||
|
||||
def use_jsonutils(logical_line, filename):
    """Check to prevent importing json in sahara code.

    S375
    """
    # Honour explicit "# noqa" markers.
    if pep8.noqa(logical_line):
        return
    imports_json = re.match(r"(import\s+json)", logical_line)
    # "import jsonschema" also matches the pattern above but is allowed.
    imports_jsonschema = re.match(r"(import\s+jsonschema)", logical_line)
    if imports_json and not imports_jsonschema:
        yield (0, "S375: Use jsonutils from oslo_serialization instead"
                  " of json")
|
||||
|
||||
|
||||
def no_mutable_default_args(logical_line):
    """Check to prevent mutable default argument in sahara code.

    S360
    """
    # A {} or [] default is shared between calls, which is almost never
    # the intent; flag any def whose parameter list contains one.
    if re.match(r"^\s*def .+\((.+=\{\}|.+=\[\])", logical_line):
        yield (0, "S360: Method's default argument shouldn't be mutable!")
|
||||
|
||||
|
||||
def factory(register):
    """Register every saharaclient hacking check with the style checker.

    Called by the flake8/hacking framework; *register* is the callback
    that installs one check. Registration order is preserved.
    """
    all_checks = (
        import_db_only_in_conductor,
        hacking_no_author_attr,
        check_oslo_namespace_imports,
        commit_message.OnceGitCheckCommitTitleBug,
        commit_message.OnceGitCheckCommitTitleLength,
        import_checks.hacking_import_groups,
        import_checks.hacking_import_groups_together,
        dict_constructor_with_list_copy,
        logging_checks.no_translate_logs,
        logging_checks.accepted_log_levels,
        use_jsonutils,
        no_mutable_default_args,
    )
    for check in all_checks:
        register(check)
|
|
@ -0,0 +1,95 @@
|
|||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import os
|
||||
import re
|
||||
import subprocess # nosec
|
||||
|
||||
from hacking import core
|
||||
|
||||
|
||||
class GitCheck(core.GlobalCheck):
    """Base-class for Git related checks."""

    def _get_commit_title(self):
        """Return the most recent commit's title, or None.

        Returns None when the "git" binary is missing or when the current
        directory is not inside a git checkout. Raises Exception if
        ``git log`` itself exits non-zero.
        """
        # Check if we're inside a git checkout
        try:
            proc = subprocess.Popen(  # nosec
                ['git', 'rev-parse', '--show-toplevel'],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            gitdir = proc.communicate()[0].rstrip()
        except OSError:
            # "git" was not found
            return None

        if not os.path.exists(gitdir):
            return None

        # Get title of most recent commit (merges excluded).
        proc = subprocess.Popen(  # nosec
            ['git', 'log', '--no-merges', '--pretty=%s', '-1'],
            stdout=subprocess.PIPE)
        title = proc.communicate()[0]

        if proc.returncode:
            raise Exception("git log failed with code %s" % proc.returncode)
        # git emits bytes; callers want text.
        return title.decode('utf-8')
|
||||
|
||||
|
||||
class OnceGitCheckCommitTitleBug(GitCheck):
    """Check git commit messages for bugs.

    OpenStack HACKING recommends not referencing a bug or blueprint in first
    line. It should provide an accurate description of the change
    S364
    """
    name = "GitCheckCommitTitleBug"

    # From https://github.com/openstack/openstack-ci-puppet
    #       /blob/master/modules/gerrit/manifests/init.pp#L74
    # Changeid|bug|blueprint
    GIT_REGEX = re.compile(
        r'(I[0-9a-f]{8,40})|'
        '([Bb]ug|[Ll][Pp])[\s\#:]*(\d+)|'
        '([Bb]lue[Pp]rint|[Bb][Pp])[\s\#:]*([A-Za-z0-9\\-]+)')

    def run_once(self):
        # Guard clauses replace the single compound condition; the
        # implicit None return for an acceptable title is unchanged.
        title = self._get_commit_title()
        if not title:
            return
        if self.GIT_REGEX.search(title) is None:
            return
        # NOTE(jogo) if match regex but over 3 words, acceptable title
        if len(title.split()) > 3:
            return
        return (1, 0,
                "S364: git commit title ('%s') should provide an accurate "
                "description of the change, not just a reference to a bug "
                "or blueprint" % title.strip(), self.name)
|
||||
|
||||
|
||||
class OnceGitCheckCommitTitleLength(GitCheck):
    """Check git commit message length.

    HACKING recommends commit titles 50 chars or less, but enforces
    a 72 character limit

    S365 Title limited to 72 chars
    """
    name = "GitCheckCommitTitleLength"

    def run_once(self):
        title = self._get_commit_title()

        # Only the hard 72-char limit is enforced; the message still
        # advertises the recommended 50-char soft limit.
        if title and len(title) > 72:
            return (
                1, 0,
                "S365: git commit title ('%s') should be under 50 chars"
                % title.strip(),
                self.name)
|
|
@ -0,0 +1,450 @@
|
|||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import imp
|
||||
|
||||
from hacking import core
|
||||
|
||||
|
||||
# NOTE(Kezar): This checks a good enough if we have only py2.7 supported.
|
||||
# As soon as we'll get py3.x we need to drop it or rewrite. You can read more
|
||||
# about it in dev-list archive, topic: "[hacking]proposed rules drop for 1.0"
|
||||
# NOTE(Kezar): This checks a good enough if we have only py2.7 supported.
# As soon as we'll get py3.x we need to drop it or rewrite. You can read more
# about it in dev-list archive, topic: "[hacking]proposed rules drop for 1.0"
def _find_module(module, path=None):
    """Locate *module* via imp.find_module, descending dotted names.

    Returns the search path in which the final component was found, or
    the parent package's path for non-module imports. Re-raises
    ImportError when the module genuinely cannot be found.
    """
    mod_base = module
    parent_path = None
    # imp.find_module only understands single, undotted names, so peel
    # off one package level per iteration.
    while '.' in mod_base:
        first, _, mod_base = mod_base.partition('.')
        parent_path = path
        _, path, _ = imp.find_module(first, path)
        path = [path]
    try:
        _, path, _ = imp.find_module(mod_base, path)
    except ImportError:
        # NOTE(bnemec): There are two reasons we might get here: 1) A
        # non-module import and 2) an import of a namespace module that is
        # in the same namespace as the current project, which caused us to
        # recurse into the project namespace but fail to find the third-party
        # module. For 1), we won't be able to import it as a module, so we
        # return the parent module's path, but for 2) the import below should
        # succeed, so we re-raise the ImportError because the module was
        # legitimately not found in this path.
        try:
            __import__(module)
        except ImportError:
            # Non-module import, return the parent path if we have it
            if parent_path:
                return parent_path
            raise
        raise
    return path
|
||||
|
||||
# Memoization for _get_import_type: module name -> classification
# ('stdlib', 'project' or 'third-party'). The {} literal is the
# idiomatic (and marginally faster) spelling of dict().
module_cache = {}
|
||||
|
||||
# List of all Python 2 stdlib modules - anything not in this list will be
|
||||
# allowed in either the stdlib or third-party groups to allow for Python 3
|
||||
# stdlib additions.
|
||||
# The list was generated via the following script, which is a variation on
|
||||
# the one found here:
|
||||
# http://stackoverflow.com/questions/6463918/how-can-i-get-a-list-of-all-the-python-standard-library-modules
|
||||
"""
|
||||
from distutils import sysconfig
|
||||
import os
|
||||
import sys
|
||||
|
||||
std_lib = sysconfig.get_python_lib(standard_lib=True)
|
||||
prefix_len = len(std_lib) + 1
|
||||
modules = ''
|
||||
line = '['
|
||||
mod_list = []
|
||||
for top, dirs, files in os.walk(std_lib):
|
||||
for name in files:
|
||||
if 'site-packages' not in top:
|
||||
if name == '__init__.py':
|
||||
full_name = top[prefix_len:].replace('/', '.')
|
||||
mod_list.append(full_name)
|
||||
elif name.endswith('.py'):
|
||||
full_name = top.replace('/', '.') + '.'
|
||||
full_name += name[:-3]
|
||||
full_name = full_name[prefix_len:]
|
||||
mod_list.append(full_name)
|
||||
elif name.endswith('.so') and top.endswith('lib-dynload'):
|
||||
full_name = name[:-3]
|
||||
if full_name.endswith('module'):
|
||||
full_name = full_name[:-6]
|
||||
mod_list.append(full_name)
|
||||
for name in sys.builtin_module_names:
|
||||
mod_list.append(name)
|
||||
mod_list.sort()
|
||||
for mod in mod_list:
|
||||
if len(line + mod) + 8 > 79:
|
||||
modules += '\n' + line
|
||||
line = ' '
|
||||
line += "'%s', " % mod
|
||||
print modules + ']'
|
||||
"""
|
||||
py2_stdlib = [
|
||||
'BaseHTTPServer', 'Bastion', 'CGIHTTPServer', 'ConfigParser', 'Cookie',
|
||||
'DocXMLRPCServer', 'HTMLParser', 'MimeWriter', 'Queue',
|
||||
'SimpleHTTPServer', 'SimpleXMLRPCServer', 'SocketServer', 'StringIO',
|
||||
'UserDict', 'UserList', 'UserString', '_LWPCookieJar',
|
||||
'_MozillaCookieJar', '__builtin__', '__future__', '__main__',
|
||||
'__phello__.foo', '_abcoll', '_ast', '_bisect', '_bsddb', '_codecs',
|
||||
'_codecs_cn', '_codecs_hk', '_codecs_iso2022', '_codecs_jp',
|
||||
'_codecs_kr', '_codecs_tw', '_collections', '_crypt', '_csv',
|
||||
'_ctypes', '_curses', '_curses_panel', '_elementtree', '_functools',
|
||||
'_hashlib', '_heapq', '_hotshot', '_io', '_json', '_locale',
|
||||
'_lsprof', '_multibytecodec', '_multiprocessing', '_osx_support',
|
||||
'_pyio', '_random', '_socket', '_sqlite3', '_sre', '_ssl',
|
||||
'_strptime', '_struct', '_symtable', '_sysconfigdata',
|
||||
'_threading_local', '_warnings', '_weakref', '_weakrefset', 'abc',
|
||||
'aifc', 'antigravity', 'anydbm', 'argparse', 'array', 'ast',
|
||||
'asynchat', 'asyncore', 'atexit', 'audiodev', 'audioop', 'base64',
|
||||
'bdb', 'binascii', 'binhex', 'bisect', 'bsddb', 'bsddb.db',
|
||||
'bsddb.dbobj', 'bsddb.dbrecio', 'bsddb.dbshelve', 'bsddb.dbtables',
|
||||
'bsddb.dbutils', 'bz2', 'cPickle', 'cProfile', 'cStringIO',
|
||||
'calendar', 'cgi', 'cgitb', 'chunk', 'cmath', 'cmd', 'code', 'codecs',
|
||||
'codeop', 'collections', 'colorsys', 'commands', 'compileall',
|
||||
'compiler', 'compiler.ast', 'compiler.consts', 'compiler.future',
|
||||
'compiler.misc', 'compiler.pyassem', 'compiler.pycodegen',
|
||||
'compiler.symbols', 'compiler.syntax', 'compiler.transformer',
|
||||
'compiler.visitor', 'contextlib', 'cookielib', 'copy', 'copy_reg',
|
||||
'crypt', 'csv', 'ctypes', 'ctypes._endian', 'ctypes.macholib',
|
||||
'ctypes.macholib.dyld', 'ctypes.macholib.dylib',
|
||||
'ctypes.macholib.framework', 'ctypes.util', 'ctypes.wintypes',
|
||||
'curses', 'curses.ascii', 'curses.has_key', 'curses.panel',
|
||||
'curses.textpad', 'curses.wrapper', 'datetime', 'dbhash', 'dbm',
|
||||
'decimal', 'difflib', 'dircache', 'dis', 'distutils',
|
||||
'distutils.archive_util', 'distutils.bcppcompiler',
|
||||
'distutils.ccompiler', 'distutils.cmd', 'distutils.command',
|
||||
'distutils.command.bdist', 'distutils.command.bdist_dumb',
|
||||
'distutils.command.bdist_msi', 'distutils.command.bdist_rpm',
|
||||
'distutils.command.bdist_wininst', 'distutils.command.build',
|
||||
'distutils.command.build_clib', 'distutils.command.build_ext',
|
||||
'distutils.command.build_py', 'distutils.command.build_scripts',
|
||||
'distutils.command.check', 'distutils.command.clean',
|
||||
'distutils.command.config', 'distutils.command.install',
|
||||
'distutils.command.install_data',
|
||||
'distutils.command.install_egg_info',
|
||||
'distutils.command.install_headers', 'distutils.command.install_lib',
|
||||
'distutils.command.install_scripts', 'distutils.command.register',
|
||||
'distutils.command.sdist', 'distutils.command.upload',
|
||||
'distutils.config', 'distutils.core', 'distutils.cygwinccompiler',
|
||||
'distutils.debug', 'distutils.dep_util', 'distutils.dir_util',
|
||||
'distutils.dist', 'distutils.emxccompiler', 'distutils.errors',
|
||||
'distutils.extension', 'distutils.fancy_getopt',
|
||||
'distutils.file_util', 'distutils.filelist', 'distutils.log',
|
||||
'distutils.msvc9compiler', 'distutils.msvccompiler',
|
||||
'distutils.spawn', 'distutils.sysconfig', 'distutils.text_file',
|
||||
'distutils.unixccompiler', 'distutils.util', 'distutils.version',
|
||||
'distutils.versionpredicate', 'dl', 'doctest', 'dumbdbm',
|
||||
'dummy_thread', 'dummy_threading', 'email', 'email._parseaddr',
|
||||
'email.base64mime', 'email.charset', 'email.encoders', 'email.errors',
|
||||
'email.feedparser', 'email.generator', 'email.header',
|
||||
'email.iterators', 'email.message', 'email.mime',
|
||||
'email.mime.application', 'email.mime.audio', 'email.mime.base',
|
||||
'email.mime.image', 'email.mime.message', 'email.mime.multipart',
|
||||
'email.mime.nonmultipart', 'email.mime.text', 'email.parser',
|
||||
'email.quoprimime', 'email.utils', 'encodings', 'encodings.aliases',
|
||||
'encodings.ascii', 'encodings.base64_codec', 'encodings.big5',
|
||||
'encodings.big5hkscs', 'encodings.bz2_codec', 'encodings.charmap',
|
||||
'encodings.cp037', 'encodings.cp1006', 'encodings.cp1026',
|
||||
'encodings.cp1140', 'encodings.cp1250', 'encodings.cp1251',
|
||||
'encodings.cp1252', 'encodings.cp1253', 'encodings.cp1254',
|
||||
'encodings.cp1255', 'encodings.cp1256', 'encodings.cp1257',
|
||||
'encodings.cp1258', 'encodings.cp424', 'encodings.cp437',
|
||||
'encodings.cp500', 'encodings.cp720', 'encodings.cp737',
|
||||
'encodings.cp775', 'encodings.cp850', 'encodings.cp852',
|
||||
'encodings.cp855', 'encodings.cp856', 'encodings.cp857',
|
||||
'encodings.cp858', 'encodings.cp860', 'encodings.cp861',
|
||||
'encodings.cp862', 'encodings.cp863', 'encodings.cp864',
|
||||
'encodings.cp865', 'encodings.cp866', 'encodings.cp869',
|
||||
'encodings.cp874', 'encodings.cp875', 'encodings.cp932',
|
||||
'encodings.cp949', 'encodings.cp950', 'encodings.euc_jis_2004',
|
||||
'encodings.euc_jisx0213', 'encodings.euc_jp', 'encodings.euc_kr',
|
||||
'encodings.gb18030', 'encodings.gb2312', 'encodings.gbk',
|
||||
'encodings.hex_codec', 'encodings.hp_roman8', 'encodings.hz',
|
||||
'encodings.idna', 'encodings.iso2022_jp', 'encodings.iso2022_jp_1',
|
||||
'encodings.iso2022_jp_2', 'encodings.iso2022_jp_2004',
|
||||
'encodings.iso2022_jp_3', 'encodings.iso2022_jp_ext',
|
||||
'encodings.iso2022_kr', 'encodings.iso8859_1', 'encodings.iso8859_10',
|
||||
'encodings.iso8859_11', 'encodings.iso8859_13',
|
||||
'encodings.iso8859_14', 'encodings.iso8859_15',
|
||||
'encodings.iso8859_16', 'encodings.iso8859_2', 'encodings.iso8859_3',
|
||||
'encodings.iso8859_4', 'encodings.iso8859_5', 'encodings.iso8859_6',
|
||||
'encodings.iso8859_7', 'encodings.iso8859_8', 'encodings.iso8859_9',
|
||||
'encodings.johab', 'encodings.koi8_r', 'encodings.koi8_u',
|
||||
'encodings.latin_1', 'encodings.mac_arabic', 'encodings.mac_centeuro',
|
||||
'encodings.mac_croatian', 'encodings.mac_cyrillic',
|
||||
'encodings.mac_farsi', 'encodings.mac_greek', 'encodings.mac_iceland',
|
||||
'encodings.mac_latin2', 'encodings.mac_roman',
|
||||
'encodings.mac_romanian', 'encodings.mac_turkish', 'encodings.mbcs',
|
||||
'encodings.palmos', 'encodings.ptcp154', 'encodings.punycode',
|
||||
'encodings.quopri_codec', 'encodings.raw_unicode_escape',
|
||||
'encodings.rot_13', 'encodings.shift_jis', 'encodings.shift_jis_2004',
|
||||
'encodings.shift_jisx0213', 'encodings.string_escape',
|
||||
'encodings.tis_620', 'encodings.undefined',
|
||||
'encodings.unicode_escape', 'encodings.unicode_internal',
|
||||
'encodings.utf_16', 'encodings.utf_16_be', 'encodings.utf_16_le',
|
||||
'encodings.utf_32', 'encodings.utf_32_be', 'encodings.utf_32_le',
|
||||
'encodings.utf_7', 'encodings.utf_8', 'encodings.utf_8_sig',
|
||||
'encodings.uu_codec', 'encodings.zlib_codec', 'errno', 'exceptions',
|
||||
'fcntl', 'filecmp', 'fileinput', 'fnmatch', 'formatter', 'fpformat',
|
||||
'fractions', 'ftplib', 'functools', 'future_builtins', 'gc', 'gdbm',
|
||||
'genericpath', 'getopt', 'getpass', 'gettext', 'glob', 'grp', 'gzip',
|
||||
'hashlib', 'heapq', 'hmac', 'hotshot', 'hotshot.log', 'hotshot.stats',
|
||||
'hotshot.stones', 'htmlentitydefs', 'htmllib', 'httplib', 'idlelib',
|
||||
'idlelib.AutoComplete', 'idlelib.AutoCompleteWindow',
|
||||
'idlelib.AutoExpand', 'idlelib.Bindings', 'idlelib.CallTipWindow',
|
||||
'idlelib.CallTips', 'idlelib.ClassBrowser', 'idlelib.CodeContext',
|
||||
'idlelib.ColorDelegator', 'idlelib.Debugger', 'idlelib.Delegator',
|
||||
'idlelib.EditorWindow', 'idlelib.FileList', 'idlelib.FormatParagraph',
|
||||
'idlelib.GrepDialog', 'idlelib.HyperParser', 'idlelib.IOBinding',
|
||||
'idlelib.IdleHistory', 'idlelib.MultiCall', 'idlelib.MultiStatusBar',
|
||||
'idlelib.ObjectBrowser', 'idlelib.OutputWindow', 'idlelib.ParenMatch',
|
||||
'idlelib.PathBrowser', 'idlelib.Percolator', 'idlelib.PyParse',
|
||||
'idlelib.PyShell', 'idlelib.RemoteDebugger',
|
||||
'idlelib.RemoteObjectBrowser', 'idlelib.ReplaceDialog',
|
||||
'idlelib.RstripExtension', 'idlelib.ScriptBinding',
|
||||
'idlelib.ScrolledList', 'idlelib.SearchDialog',
|
||||
'idlelib.SearchDialogBase', 'idlelib.SearchEngine',
|
||||
'idlelib.StackViewer', 'idlelib.ToolTip', 'idlelib.TreeWidget',
|
||||
'idlelib.UndoDelegator', 'idlelib.WidgetRedirector',
|
||||
'idlelib.WindowList', 'idlelib.ZoomHeight', 'idlelib.aboutDialog',
|
||||
'idlelib.configDialog', 'idlelib.configHandler',
|
||||
'idlelib.configHelpSourceEdit', 'idlelib.configSectionNameDialog',
|
||||
'idlelib.dynOptionMenuWidget', 'idlelib.idle', 'idlelib.idlever',
|
||||
'idlelib.keybindingDialog', 'idlelib.macosxSupport', 'idlelib.rpc',
|
||||
'idlelib.run', 'idlelib.tabbedpages', 'idlelib.textView', 'ihooks',
|
||||
'imageop', 'imaplib', 'imghdr', 'imp', 'importlib', 'imputil',
|
||||
'inspect', 'io', 'itertools', 'json', 'json.decoder', 'json.encoder',
|
||||
'json.scanner', 'json.tool', 'keyword', 'lib2to3', 'lib2to3.__main__',
|
||||
'lib2to3.btm_matcher', 'lib2to3.btm_utils', 'lib2to3.fixer_base',
|
||||
'lib2to3.fixer_util', 'lib2to3.fixes', 'lib2to3.fixes.fix_apply',
|
||||
'lib2to3.fixes.fix_basestring', 'lib2to3.fixes.fix_buffer',
|
||||
'lib2to3.fixes.fix_callable', 'lib2to3.fixes.fix_dict',
|
||||
'lib2to3.fixes.fix_except', 'lib2to3.fixes.fix_exec',
|
||||
'lib2to3.fixes.fix_execfile', 'lib2to3.fixes.fix_exitfunc',
|
||||
'lib2to3.fixes.fix_filter', 'lib2to3.fixes.fix_funcattrs',
|
||||
'lib2to3.fixes.fix_future', 'lib2to3.fixes.fix_getcwdu',
|
||||
'lib2to3.fixes.fix_has_key', 'lib2to3.fixes.fix_idioms',
|
||||
'lib2to3.fixes.fix_import', 'lib2to3.fixes.fix_imports',
|
||||
'lib2to3.fixes.fix_imports2', 'lib2to3.fixes.fix_input',
|
||||
'lib2to3.fixes.fix_intern', 'lib2to3.fixes.fix_isinstance',
|
||||
'lib2to3.fixes.fix_itertools', 'lib2to3.fixes.fix_itertools_imports',
|
||||
'lib2to3.fixes.fix_long', 'lib2to3.fixes.fix_map',
|
||||
'lib2to3.fixes.fix_metaclass', 'lib2to3.fixes.fix_methodattrs',
|
||||
'lib2to3.fixes.fix_ne', 'lib2to3.fixes.fix_next',
|
||||
'lib2to3.fixes.fix_nonzero', 'lib2to3.fixes.fix_numliterals',
|
||||
'lib2to3.fixes.fix_operator', 'lib2to3.fixes.fix_paren',
|
||||
'lib2to3.fixes.fix_print', 'lib2to3.fixes.fix_raise',
|
||||
'lib2to3.fixes.fix_raw_input', 'lib2to3.fixes.fix_reduce',
|
||||
'lib2to3.fixes.fix_renames', 'lib2to3.fixes.fix_repr',
|
||||
'lib2to3.fixes.fix_set_literal', 'lib2to3.fixes.fix_standarderror',
|
||||
'lib2to3.fixes.fix_sys_exc', 'lib2to3.fixes.fix_throw',
|
||||
'lib2to3.fixes.fix_tuple_params', 'lib2to3.fixes.fix_types',
|
||||
'lib2to3.fixes.fix_unicode', 'lib2to3.fixes.fix_urllib',
|
||||
'lib2to3.fixes.fix_ws_comma', 'lib2to3.fixes.fix_xrange',
|
||||
'lib2to3.fixes.fix_xreadlines', 'lib2to3.fixes.fix_zip',
|
||||
'lib2to3.main', 'lib2to3.patcomp', 'lib2to3.pgen2',
|
||||
'lib2to3.pgen2.conv', 'lib2to3.pgen2.driver', 'lib2to3.pgen2.grammar',
|
||||
'lib2to3.pgen2.literals', 'lib2to3.pgen2.parse', 'lib2to3.pgen2.pgen',
|
||||
'lib2to3.pgen2.token', 'lib2to3.pgen2.tokenize', 'lib2to3.pygram',
|
||||
'lib2to3.pytree', 'lib2to3.refactor', 'linecache', 'linuxaudiodev',
|
||||
'locale', 'logging', 'logging.config', 'logging.handlers', 'macpath',
|
||||
'macurl2path', 'mailbox', 'mailcap', 'markupbase', 'marshal', 'math',
|
||||
'md5', 'mhlib', 'mimetools', 'mimetypes', 'mimify', 'mmap',
|
||||
'modulefinder', 'multifile', 'multiprocessing',
|
||||
'multiprocessing.connection', 'multiprocessing.dummy',
|
||||
'multiprocessing.dummy.connection', 'multiprocessing.forking',
|
||||
'multiprocessing.heap', 'multiprocessing.managers',
|
||||
'multiprocessing.pool', 'multiprocessing.process',
|
||||
'multiprocessing.queues', 'multiprocessing.reduction',
|
||||
'multiprocessing.sharedctypes', 'multiprocessing.synchronize',
|
||||
'multiprocessing.util', 'mutex', 'netrc', 'new', 'nis', 'nntplib',
|
||||
'ntpath', 'nturl2path', 'numbers', 'opcode', 'operator', 'optparse',
|
||||
'os', 'os2emxpath', 'ossaudiodev', 'parser', 'pdb', 'pickle',
|
||||
'pickletools', 'pipes', 'pkgutil', 'plat-linux2.CDROM',
|
||||
'plat-linux2.DLFCN', 'plat-linux2.IN', 'plat-linux2.TYPES',
|
||||
'platform', 'plistlib', 'popen2', 'poplib', 'posix', 'posixfile',
|
||||
'posixpath', 'pprint', 'profile', 'pstats', 'pty', 'pwd',
|
||||
'py_compile', 'pyclbr', 'pydoc', 'pydoc_data', 'pydoc_data.topics',
|
||||
'pyexpat', 'quopri', 'random', 're', 'readline', 'repr', 'resource',
|
||||
'rexec', 'rfc822', 'rlcompleter', 'robotparser', 'runpy', 'sched',
|
||||
'select', 'sets', 'sgmllib', 'sha', 'shelve', 'shlex', 'shutil',
|
||||
'signal', 'site', 'smtpd', 'smtplib', 'sndhdr', 'socket', 'spwd',
|
||||
'sqlite3', 'sqlite3.dbapi2', 'sqlite3.dump', 'sre', 'sre_compile',
|
||||
'sre_constants', 'sre_parse', 'ssl', 'stat', 'statvfs', 'string',
|
||||
'stringold', 'stringprep', 'strop', 'struct', 'subprocess', 'sunau',
|
||||
'sunaudio', 'symbol', 'symtable', 'sys', 'sysconfig', 'syslog',
|
||||
'tabnanny', 'tarfile', 'telnetlib', 'tempfile', 'termios', 'test',
|
||||
'test.test_support', 'textwrap', 'this', 'thread', 'threading',
|
||||
'time', 'timeit', 'timing', 'toaiff', 'token', 'tokenize', 'trace',
|
||||
'traceback', 'tty', 'types', 'unicodedata', 'unittest',
|
||||
'unittest.__main__', 'unittest.case', 'unittest.loader',
|
||||
'unittest.main', 'unittest.result', 'unittest.runner',
|
||||
'unittest.signals', 'unittest.suite', 'unittest.test',
|
||||
'unittest.test.dummy', 'unittest.test.support',
|
||||
'unittest.test.test_assertions', 'unittest.test.test_break',
|
||||
'unittest.test.test_case', 'unittest.test.test_discovery',
|
||||
'unittest.test.test_functiontestcase', 'unittest.test.test_loader',
|
||||
'unittest.test.test_program', 'unittest.test.test_result',
|
||||
'unittest.test.test_runner', 'unittest.test.test_setups',
|
||||
'unittest.test.test_skipping', 'unittest.test.test_suite',
|
||||
'unittest.util', 'urllib', 'urllib2', 'urlparse', 'user', 'uu',
|
||||
'uuid', 'warnings', 'wave', 'weakref', 'webbrowser', 'whichdb',
|
||||
'wsgiref', 'wsgiref.handlers', 'wsgiref.headers',
|
||||
'wsgiref.simple_server', 'wsgiref.util', 'wsgiref.validate', 'xdrlib',
|
||||
'xml', 'xml.dom', 'xml.dom.NodeFilter', 'xml.dom.domreg',
|
||||
'xml.dom.expatbuilder', 'xml.dom.minicompat', 'xml.dom.minidom',
|
||||
'xml.dom.pulldom', 'xml.dom.xmlbuilder', 'xml.etree',
|
||||
'xml.etree.ElementInclude', 'xml.etree.ElementPath',
|
||||
'xml.etree.ElementTree', 'xml.etree.cElementTree', 'xml.parsers',
|
||||
'xml.parsers.expat', 'xml.sax', 'xml.sax._exceptions',
|
||||
'xml.sax.expatreader', 'xml.sax.handler', 'xml.sax.saxutils',
|
||||
'xml.sax.xmlreader', 'xmllib', 'xmlrpclib', 'xxsubtype', 'zipfile', ]
|
||||
# Dynamic modules that can't be auto-discovered by the script above
|
||||
manual_stdlib = ['os.path', ]
|
||||
py2_stdlib.extend(manual_stdlib)
|
||||
|
||||
|
||||
def _get_import_type(module):
    """Classify *module* as ``'stdlib'``, ``'project'`` or ``'third-party'``.

    Results are memoized in the module-level ``module_cache`` dict so that
    repeated classifications of the same module name are cheap.
    """
    cached = module_cache.get(module)
    if cached is not None:
        return cached

    def remember(kind):
        # Store the classification before handing it back.
        module_cache[module] = kind
        return kind

    # Known (Python 2) standard-library modules.
    if module in py2_stdlib:
        return remember('stdlib')

    # A module importable from the current directory belongs to the project.
    try:
        _find_module(module, ['.'])
    except ImportError:
        pass
    else:
        # The lookup succeeded, so it must be a project module.
        return remember('project')

    # Otherwise treat it as third-party - this means we may treat some stdlib
    # modules as third-party, but that's okay because we are allowing
    # third-party libs in the stdlib section.
    return remember('third-party')
|
||||
|
||||
|
||||
@core.flake8ext
def hacking_import_groups(logical_line, blank_before, previous_logical,
                          indent_level, previous_indent_level, physical_line,
                          noqa):
    r"""Check that imports are grouped correctly.

    OpenStack HACKING guide recommendation for imports:
    imports grouped such that Python standard library imports are together,
    third party library imports are together, and project imports are
    together

    Okay: import os\nimport sys\n\nimport six\n\nimport hacking
    Okay: import six\nimport znon_existent_package
    Okay: import os\nimport threading
    S366: import mock\nimport os
    S366: import hacking\nimport os
    S366: import hacking\nimport nonexistent
    S366: import hacking\nimport mock
    """
    # Only adjacent imports at the same indent are comparable; a blank line
    # (or a noqa marker) means the author started a new group on purpose.
    if noqa or blank_before > 0 or indent_level != previous_indent_level:
        return

    current = core.import_normalize(logical_line.strip()).split()
    if not current or current[0] != 'import':
        return

    previous = core.import_normalize(previous_logical.strip()).split()
    if not previous or previous[0] != 'import':
        return

    current_type = _get_import_type(current[1])
    previous_type = _get_import_type(previous[1])
    if previous_type != current_type:
        yield (0, 'S366: imports not grouped correctly '
               '(%s: %s, %s: %s)' %
               (previous[1], previous_type, current[1], current_type))
|
||||
|
||||
|
||||
class ImportGroupData(object):
    """Persistent state holder for the import-group checks.

    Verifying import grouping needs the current group of the file being
    checked, which can not always be derived from only the current and
    previous lines, so an instance of this class tracks it between calls.
    """

    # NOTE(bnemec): *args is needed because the test code tries to run this
    # as a flake8 check and passes an argument to it.
    def __init__(self, *args):
        # Nothing has been seen yet: no group, no file, no import.
        self.current_group = self.current_filename = self.current_import = None
|
||||
|
||||
|
||||
# Module-level state shared by hacking_import_groups_together across
# successive logical lines of the same file.
together_data = ImportGroupData()
|
||||
|
||||
|
||||
@core.flake8ext
def hacking_import_groups_together(logical_line, blank_lines, indent_level,
                                   previous_indent_level, line_number,
                                   physical_line, filename, noqa):
    r"""Check that like imports are grouped together.

    OpenStack HACKING guide recommendation for imports:
    Imports should be grouped together by type.

    Okay: import os\nimport sys
    Okay: try:\n    import foo\nexcept ImportError:\n    pass\n\nimport six
    Okay: import abc\nimport mock\n\nimport six
    Okay: import eventlet\neventlet.monkey_patch()\n\nimport copy
    S367: import mock\n\nimport six
    S367: import os\n\nimport sys
    S367: import mock\nimport os\n\nimport sys
    """
    # A new file (or a re-check from line 1) invalidates any group state
    # carried over in the shared together_data instance.
    if line_number == 1 or filename != together_data.current_filename:
        together_data.current_group = None
        together_data.current_filename = filename

    if noqa:
        return

    def update_current_group(current):
        # Remember the import type of the line just processed.
        together_data.current_group = current

    normalized_line = core.import_normalize(logical_line.strip()).split()
    if normalized_line:
        if normalized_line[0] == 'import':
            current_type = _get_import_type(normalized_line[1])
            previous_import = together_data.current_import
            together_data.current_import = normalized_line[1]
            # matched: this import has the same type as the open group, so
            # separating it from that group with blank line(s) is an error.
            matched = current_type == together_data.current_group
            update_current_group(current_type)
            if (matched and indent_level == previous_indent_level and
                    blank_lines >= 1):
                yield(0, 'S367: like imports should be grouped together (%s '
                      'and %s from %s are separated by whitespace)' %
                      (previous_import,
                       together_data.current_import,
                       current_type))
        else:
            # Reset on non-import code
            together_data.current_group = None
|
|
@ -0,0 +1,64 @@
|
|||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import re
|
||||
|
||||
|
||||
# Log methods accepted by these checks; interpolated into the regexes below.
ALL_LOG_LEVELS = "info|exception|warning|critical|error|debug"

# Matches any line containing a LOG.<level>( call with an accepted level.
RE_ACCEPTED_LOG_LEVELS = re.compile(
    r"(.)*LOG\.(%(levels)s)\(" % {'levels': ALL_LOG_LEVELS})

# Since _Lx() have been removed, we just need to check _()
# Matches LOG.<level>(_( -- i.e. a translated log message.
RE_TRANSLATED_LOG = re.compile(
    r"(.)*LOG\.(%(levels)s)\(\s*_\(" % {'levels': ALL_LOG_LEVELS})
|
||||
|
||||
|
||||
def no_translate_logs(logical_line, filename):
    """Check for 'LOG.*(_('

    Translators don't provide translations for log messages, and operators
    asked not to translate them.

    * This check assumes that 'LOG' is a logger.
    * Use filename so we can start enforcing this in specific folders instead
      of needing to do so all at once.
    S373
    """
    # Flag any accepted-level LOG call whose first argument is wrapped
    # in the _() translation marker.
    if RE_TRANSLATED_LOG.match(logical_line) is not None:
        yield 0, "S373 Don't translate logs"
|
||||
|
||||
|
||||
def accepted_log_levels(logical_line, filename):
    """In Sahara we use only 5 log levels.

    This check is needed because we don't want new contributors to
    use deprecated log levels.
    S374
    """
    # NOTE(Kezar): sahara/tests included because we don't require translations
    # in tests. sahara/db/templates provide separate cli interface so we don't
    # want to translate it.
    if any(ignored in filename
           for ignored in ("sahara/db/templates", "sahara/tests")):
        return

    # Only lines that start a LOG call are candidates; anything not using
    # one of the accepted levels is flagged.
    if (logical_line.startswith("LOG.") and
            not RE_ACCEPTED_LOG_LEVELS.search(logical_line)):
        yield 0, ("S374 You used deprecated log level. Accepted log levels "
                  "are %(levels)s" % {'levels': ALL_LOG_LEVELS})
|
|
@ -14,7 +14,6 @@
|
|||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from osc_lib.tests import utils as osc_utils
|
||||
|
||||
from saharaclient.api import cluster_templates as api_ct
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from osc_lib.tests import utils as osc_utils
|
||||
|
||||
from saharaclient.api import data_sources as api_ds
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from osc_lib.tests import utils as osc_utils
|
||||
|
||||
from saharaclient.api import images as api_images
|
||||
|
|
|
@ -17,9 +17,9 @@ import mock
|
|||
|
||||
from saharaclient.api import job_binaries as api_jb
|
||||
from saharaclient.osc.v1 import job_binaries as osc_jb
|
||||
|
||||
from saharaclient.tests.unit.osc.v1 import fakes
|
||||
|
||||
|
||||
JOB_BINARY_INFO = {
|
||||
"name": 'job-binary',
|
||||
"description": 'descr',
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from osc_lib.tests import utils as osc_utils
|
||||
|
||||
from saharaclient.api import jobs as api_j
|
||||
|
|
|
@ -18,7 +18,6 @@ import mock
|
|||
from saharaclient.api import job_types as api_jt
|
||||
from saharaclient.api import jobs as api_j
|
||||
from saharaclient.osc.v1 import job_types as osc_jt
|
||||
|
||||
from saharaclient.tests.unit.osc.v1 import fakes
|
||||
|
||||
JOB_TYPE_INFO = {
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from osc_lib.tests import utils as osc_utils
|
||||
|
||||
from saharaclient.api import job_executions as api_je
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
# limitations under the License.
|
||||
|
||||
import mock
|
||||
|
||||
from osc_lib.tests import utils as osc_utils
|
||||
|
||||
from saharaclient.api import node_group_templates as api_ngt
|
||||
|
|
|
@ -13,8 +13,8 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import mock
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
from saharaclient.api import plugins as api_plugins
|
||||
from saharaclient.osc.v1 import plugins as osc_plugins
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
from saharaclient.api import cluster_templates as ct
|
||||
from saharaclient.tests.unit import base
|
||||
|
||||
import json
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
|
||||
class ClusterTemplateTest(base.BaseTestCase):
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
from saharaclient.api import clusters as cl
|
||||
from saharaclient.tests.unit import base
|
||||
|
||||
import json
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
|
||||
class ClusterTest(base.BaseTestCase):
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
from saharaclient.api import data_sources as ds
|
||||
from saharaclient.tests.unit import base
|
||||
|
||||
import json
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
|
||||
class DataSourceTest(base.BaseTestCase):
|
||||
|
|
|
@ -0,0 +1,71 @@
|
|||
# Copyright 2015 EasyStack Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import testtools
|
||||
|
||||
from saharaclient.tests.hacking import checks
|
||||
|
||||
|
||||
class HackingTestCase(testtools.TestCase):
    """Unit tests for the local saharaclient hacking checks.

    Each check is a generator yielding one (offset, message) tuple per
    violation, so ``len(list(...))`` counts the violations on a line.
    """

    def test_dict_constructor_with_list_copy(self):
        """S368: dict() over a key-value sequence must be flagged."""
        # Following checks for code-lines with pep8 error
        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            " dict([(i, connect_info[i])"))))

        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            " attrs = dict([(k, _from_json(v))"))))

        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            " type_names = dict((value, key) for key, value in"))))

        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            " dict((value, key) for key, value in"))))

        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            "foo(param=dict((k, v) for k, v in bar.items()))"))))

        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            " dict([[i,i] for i in range(3)])"))))

        self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy(
            " dd = dict([i,i] for i in range(3))"))))
        # Following checks for ok code-lines
        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
            " dict()"))))

        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
            " create_kwargs = dict(snapshot=snapshot,"))))

        self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy(
            " self._render_dict(xml, data_el, data.__dict__)"))))

    def test_use_jsonutils(self):
        """S375: plain ``import json`` must be flagged; noqa, jsonutils and
        other json* modules must not."""
        self.assertEqual(0, len(list(checks.use_jsonutils(
            "import json # noqa", "path"))))
        self.assertEqual(0, len(list(checks.use_jsonutils(
            "from oslo_serialization import jsonutils as json", "path"))))
        self.assertEqual(0, len(list(checks.use_jsonutils(
            "import jsonschema", "path"))))
        self.assertEqual(1, len(list(checks.use_jsonutils(
            "import json", "path"))))
        self.assertEqual(1, len(list(checks.use_jsonutils(
            "import json as jsonutils", "path"))))

    def test_no_mutable_default_args(self):
        """S360: list/dict default argument values must be flagged."""
        self.assertEqual(0, len(list(checks.no_mutable_default_args(
            "def foo (bar):"))))
        self.assertEqual(1, len(list(checks.no_mutable_default_args(
            "def foo (bar=[]):"))))
        self.assertEqual(1, len(list(checks.no_mutable_default_args(
            "def foo (bar={}):"))))
|
|
@ -15,7 +15,7 @@
|
|||
from saharaclient.api import images
|
||||
from saharaclient.tests.unit import base
|
||||
|
||||
import json
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
|
||||
class ImageTest(base.BaseTestCase):
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
from saharaclient.api import job_binaries as jb
|
||||
from saharaclient.tests.unit import base
|
||||
|
||||
import json
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
|
||||
class JobBinaryTest(base.BaseTestCase):
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import json
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
from saharaclient.api import job_binary_internals as jbi
|
||||
from saharaclient.tests.unit import base
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
from saharaclient.api import job_executions as je
|
||||
from saharaclient.tests.unit import base
|
||||
|
||||
import json
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
|
||||
class JobExecutionTest(base.BaseTestCase):
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
from saharaclient.api import jobs
|
||||
from saharaclient.tests.unit import base
|
||||
|
||||
import json
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
|
||||
class JobTest(base.BaseTestCase):
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
from saharaclient.api import node_group_templates as ng
|
||||
from saharaclient.tests.unit import base
|
||||
|
||||
import json
|
||||
from oslo_serialization import jsonutils as json
|
||||
|
||||
|
||||
class NodeGroupTemplateTest(base.BaseTestCase):
|
||||
|
|
Loading…
Reference in New Issue