Add pylint support

This update adds pylint to tox and zuul for stx-updates and resolves
most warnings and errors, with some disabled for future resolution.

Change-Id: Ic853202e4793bf89d687b50cbd740d364db85189
Story: 2003371
Task: 24832
Signed-off-by: Don Penney <don.penney@windriver.com>
changes/40/596140/4
Don Penney 4 years ago
parent 9554c0609f
commit 2885d09ed2
  1. 10
      .zuul.yaml
  2. 2
      cgcs-patch/centos/build_srpm.data
  3. 3
      cgcs-patch/cgcs-patch/cgcs_patch/api/controllers/root.py
  4. 3
      cgcs-patch/cgcs-patch/cgcs_patch/app.py
  5. 6
      cgcs-patch/cgcs-patch/cgcs_patch/authapi/app.py
  6. 9
      cgcs-patch/cgcs-patch/cgcs_patch/authapi/auth_token.py
  7. 1
      cgcs-patch/cgcs-patch/cgcs_patch/config.py
  8. 1
      cgcs-patch/cgcs-patch/cgcs_patch/exceptions.py
  9. 3
      cgcs-patch/cgcs-patch/cgcs_patch/patch_agent.py
  10. 8
      cgcs-patch/cgcs-patch/cgcs_patch/patch_client.py
  11. 21
      cgcs-patch/cgcs-patch/cgcs_patch/patch_controller.py
  12. 59
      cgcs-patch/cgcs-patch/cgcs_patch/patch_functions.py
  13. 6
      cgcs-patch/cgcs-patch/cgcs_patch/patch_signing.py
  14. 8
      cgcs-patch/cgcs-patch/cgcs_patch/patch_verify.py
  15. 3
      patch-alarm/patch-alarm/patch_alarm/patch_alarm_manager.py
  16. 218
      pylint.rc
  17. 42
      tox.ini
  18. 2
      tsconfig/scripts/tsconfig
  19. 8
      tsconfig/tsconfig/tsconfig/tsconfig.py

@@ -4,7 +4,17 @@
jobs:
- openstack-tox-linters
- openstack-tox-pep8
- stx-updates-pylint
gate:
jobs:
- openstack-tox-linters
- openstack-tox-pep8
- stx-updates-pylint
- job:
name: stx-updates-pylint
parent: openstack-tox-pylint
required-projects:
- openstack/stx-config
- openstack/stx-fault
- openstack/stx-integ

@@ -1 +1 @@
TIS_PATCH_VER=25
TIS_PATCH_VER=26

@@ -13,7 +13,6 @@ import glob
from cgcs_patch.exceptions import PatchError
from cgcs_patch.patch_controller import pc
import logging
from cgcs_patch.patch_functions import LOG
@@ -206,7 +205,7 @@ class PatchAPIController(object):
force = True
try:
result = pc.patch_host_install(list(args)[0], force, async=True)
result = pc.patch_host_install(list(args)[0], force, async_req=True)
except PatchError as e:
return dict(error="Error: %s" % e.message)

@@ -6,13 +6,10 @@ SPDX-License-Identifier: Apache-2.0
"""
from pecan import make_app
from cgcs_patch import model
def setup_app(config):
model.init_model()
return make_app(
config.app.root,
static_root=config.app.static_root,

@@ -32,8 +32,8 @@ def get_pecan_config():
def setup_app(pecan_config=None, extra_hooks=None):
config = ConfigParser.RawConfigParser()
config.read('/etc/patching/patching.conf')
config_parser = ConfigParser.RawConfigParser()
config_parser.read('/etc/patching/patching.conf')
policy.init()
@@ -62,7 +62,7 @@ def setup_app(pecan_config=None, extra_hooks=None):
)
if pecan_config.app.enable_acl:
return acl.install(app, config, pecan_config.app.acl_public_routes)
return acl.install(app, config_parser, pecan_config.app.acl_public_routes)
return app

@@ -24,7 +24,10 @@ class AuthTokenMiddleware(auth_token.AuthProtocol):
for public routes in the API.
"""
def __init__(self, app, conf, public_api_routes=[]):
def __init__(self, app, conf, public_api_routes=None):
if public_api_routes is None:
public_api_routes = []
self.public_api_routes = set(public_api_routes)
super(AuthTokenMiddleware, self).__init__(app, conf)
@@ -33,6 +36,6 @@ class AuthTokenMiddleware(auth_token.AuthProtocol):
path = utils.safe_rstrip(env.get('PATH_INFO'), '/')
if path in self.public_api_routes:
return self.app(env, start_response)
return self.app(env, start_response) # pylint: disable=no-member
return super(AuthTokenMiddleware, self).__call__(env, start_response)
return super(AuthTokenMiddleware, self).__call__(env, start_response) # pylint: disable=too-many-function-args

@@ -8,7 +8,6 @@ SPDX-License-Identifier: Apache-2.0
import os
import ConfigParser
import StringIO
import subprocess
import logging
import socket
import cgcs_patch.utils as utils

@@ -10,6 +10,7 @@ class PatchError(Exception):
"""Base class for patching exceptions."""
def __init__(self, message=None):
super(PatchError, self).__init__(message)
self.message = message
def __str__(self):

@@ -19,7 +19,7 @@ import sys
import yaml
import shutil
from rpmUtils.miscutils import stringToVersion
from rpmUtils.miscutils import stringToVersion # pylint: disable=import-error
from cgcs_patch.patch_functions import (configure_logging, LOG)
import cgcs_patch.config as cfg
@@ -300,6 +300,7 @@ class PatchAgent(PatchService):
self.query_id = 0
self.state = constants.PATCH_AGENT_STATE_IDLE
self.last_config_audit = 0
self.rejection_timestamp = 0
# Check state flags
if os.path.exists(patch_installing_file):

@@ -503,9 +503,9 @@ def patch_commit_req(debug, args):
dry_run = True
args.remove(constants.CLI_OPT_DRY_RUN)
all = False
all_patches = False
if constants.CLI_OPT_ALL in args:
all = True
all_patches = True
args.remove(constants.CLI_OPT_ALL)
# Default to running release
@@ -526,11 +526,11 @@ def patch_commit_req(debug, args):
headers = {}
append_auth_token_if_required(headers)
if release and not all:
if release and not all_patches:
# Disallow
print "Use of --release option requires --all"
return 1
elif all:
elif all_patches:
# Get a list of all patches
extra_opts = "&release=%s" % relopt
url = "http://%s/patch/query?show=all%s" % (api_addr, extra_opts)

@@ -15,7 +15,7 @@ import ConfigParser
import rpm
import os
from rpmUtils.miscutils import stringToVersion
from rpmUtils.miscutils import stringToVersion # pylint: disable=import-error
from wsgiref import simple_server
from cgcs_patch.api import app
@@ -25,7 +25,7 @@ from cgcs_patch.patch_functions import \
avail_dir, applied_dir, committed_dir, \
PatchFile, parse_rpm_filename, \
package_dir, repo_dir, SW_VERSION, root_package_dir
from cgcs_patch.exceptions import MetadataFail, RpmFail, PatchFail, PatchValidationFailure, PatchMismatchFailure
from cgcs_patch.exceptions import MetadataFail, RpmFail, PatchError, PatchFail, PatchValidationFailure, PatchMismatchFailure
from cgcs_patch.patch_functions import LOG
from cgcs_patch.patch_functions import audit_log_info
from cgcs_patch.patch_functions import patch_dir, repo_root_dir
@@ -1733,7 +1733,6 @@ class PatchController(PatchService):
raise PatchFail(msg)
release = None
all = False
patch_added = False
failure = False
recursive = True
@@ -1945,7 +1944,7 @@ class PatchController(PatchService):
return rc
def patch_host_install(self, host_ip, force, async=False):
def patch_host_install(self, host_ip, force, async_req=False):
msg_info = ""
msg_warning = ""
msg_error = ""
@@ -1960,7 +1959,7 @@ class PatchController(PatchService):
if ip not in self.hosts:
# Translated successfully, but IP isn't in the table.
# Raise an exception to drop out to the failure handling
raise
raise PatchError("Host IP (%s) not in table" % ip)
except:
self.hosts_lock.release()
msg = "Unknown host specified: %s" % host_ip
@@ -1968,7 +1967,7 @@ class PatchController(PatchService):
LOG.error("Error in host-install: " + msg)
return dict(info=msg_info, warning=msg_warning, error=msg_error)
msg = "Running host-install for %s (%s), force=%s, async=%s" % (host_ip, ip, force, async)
msg = "Running host-install for %s (%s), force=%s, async_req=%s" % (host_ip, ip, force, async_req)
LOG.info(msg)
audit_log_info(msg)
@@ -1989,11 +1988,11 @@ class PatchController(PatchService):
installreq.send(self.sock_out)
self.socket_lock.release()
if async:
# async install requested, so return now
if async_req:
# async_req install requested, so return now
msg = "Patch installation request sent to %s." % self.hosts[ip].hostname
msg_info += msg + "\n"
LOG.info("host-install async: " + msg)
LOG.info("host-install async_req: " + msg)
return dict(info=msg_info, warning=msg_warning, error=msg_error)
# Now we wait, up to ten mins... TODO: Wait on a condition
@@ -2063,7 +2062,7 @@ class PatchController(PatchService):
if ip not in self.hosts:
# Translated successfully, but IP isn't in the table.
# Raise an exception to drop out to the failure handling
raise
raise PatchError("Host IP (%s) not in table" % ip)
except:
self.hosts_lock.release()
msg = "Unknown host specified: %s" % host_ip
@@ -2132,7 +2131,7 @@ def get_handler_cls():
handler = MyServerHandler(
self.rfile, self.wfile, self.get_stderr(), self.get_environ()
)
handler.request_handler = self # backpointer for logging
handler.request_handler = self # pylint: disable=attribute-defined-outside-init
handler.run(self.server.get_app())
return MyHandler

@@ -147,7 +147,7 @@ def parse_rpm_filename(filename):
# RPM name format is:
# [<epoch>:]<pkgname>-<version>-<release>.<arch>
#
pattern = re.compile('((([^:]):)?)(.*)-([^-]+)-(.*)\.([^\.]*)$')
pattern = re.compile(r'((([^:]):)?)(.*)-([^-]+)-(.*)\.([^\.]*)$')
m = pattern.match(basename)
@@ -424,25 +424,26 @@ class PatchData:
tree = ElementTree.parse(filename)
root = tree.getroot()
"""
<patch>
<id>PATCH_0001</id>
<summary>Brief description</summary>
<description>Longer description</description>
<install_instructions/>
<warnings/>
<status>Dev</status>
<unremovable/>
<reboot_required/>
<personality type="compute">
<package>pkgA</package>
<package>pkgB</package>
</personality>
<personality type="controller">
<package>pkgB</package>
</personality>
</patch>
"""
#
# <patch>
# <id>PATCH_0001</id>
# <summary>Brief description</summary>
# <description>Longer description</description>
# <install_instructions/>
# <warnings/>
# <status>Dev</status>
# <unremovable/>
# <reboot_required/>
# <personality type="compute">
# <package>pkgA</package>
# <package>pkgB</package>
# </personality>
# <personality type="controller">
# <package>pkgB</package>
# </personality>
# </patch>
#
patch_id = root.findtext("id")
if patch_id is None:
LOG.error("Patch metadata contains no id tag")
@@ -554,17 +555,17 @@ class PatchData:
self.load_all_metadata(avail_dir, repostate=constants.AVAILABLE)
self.load_all_metadata(committed_dir, repostate=constants.COMMITTED)
def gen_release_groups_xml(self, sw_version, dir=None):
def gen_release_groups_xml(self, sw_version, output_dir=None):
"""
Generate the groups configuration file for the patching repo
"""
if dir is None:
dir = repo_dir[sw_version]
if output_dir is None:
output_dir = repo_dir[sw_version]
if not os.path.exists(dir):
os.makedirs(dir)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
fname = "%s/comps.xml" % dir
fname = "%s/comps.xml" % output_dir
top = ElementTree.Element('comps')
if sw_version in self.groups:
for groupname in sorted(self.groups[sw_version].keys()):
@@ -993,7 +994,8 @@ class PatchFile:
# Change back to original working dir
os.chdir(orig_wd)
shutil.rmtree(tmpdir)
return r
return r
@staticmethod
def modify_patch(patch,
@@ -1039,7 +1041,8 @@ class PatchFile:
# Change back to original working dir
os.chdir(orig_wd)
shutil.rmtree(tmpdir)
return rc
return rc
@staticmethod
def extract_patch(patch,

@@ -8,9 +8,9 @@ SPDX-License-Identifier: Apache-2.0
import os
from Crypto.Signature import PKCS1_PSS
from Crypto.Hash import SHA256
from Crypto.PublicKey import RSA
from Crypto.Util.asn1 import DerSequence
from binascii import a2b_base64
from Crypto.PublicKey import RSA # pylint: disable=unused-import
from Crypto.Util.asn1 import DerSequence # pylint: disable=unused-import
from binascii import a2b_base64 # pylint: disable=unused-import
from cgcs_patch.patch_verify import read_RSA_key, cert_type_formal_str, cert_type_dev_str
# To save memory, read and hash 1M of files at a time

@@ -59,7 +59,6 @@ def verify_hash(data_hash, signature_bytes, certificate_list):
verified = verifier.verify(data_hash, signature_bytes)
except ValueError as e:
verified = False
pass
if not verified:
verifier = PKCS1_v1_5.new(pub_key)
@@ -67,11 +66,11 @@ def verify_hash(data_hash, signature_bytes, certificate_list):
verified = verifier.verify(data_hash, signature_bytes)
except ValueError as e:
verified = False
pass
return verified
def get_public_certificates_by_type(cert_type=cert_type_all):
def get_public_certificates_by_type(cert_type=None):
"""
Builds a list of accepted certificates which can be used to validate
further things. This list may contain multiple certificates depending on
@@ -83,6 +82,9 @@ def get_public_certificates_by_type(cert_type=cert_type_all):
:return: A list of certificates in PEM format
"""
if cert_type is None:
cert_type = cert_type_all
cert_list = []
if cert_type_formal_str in cert_type:

@@ -12,9 +12,8 @@ import logging
import time
import requests
import json
import os
from daemon import runner
from daemon import runner # pylint: disable=no-name-in-module
from fm_api import fm_api
from fm_api import constants as fm_constants

@@ -0,0 +1,218 @@
[MASTER]
# Specify a configuration file.
rcfile=pylint.rc
# Python code to execute, usually for sys.path manipulation such as pygtk.require().
#init-hook=
# Add files or directories to the blacklist. They should be base names, not paths.
ignore=unit_test.py
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
[MESSAGES CONTROL]
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once).
# W0603 Using the global statement warning
# W0612 Unused variable warning
# W0613 Unused argument warning
# W0702 bare-except
# W0703 broad except warning
# W1201 logging-not-lazy
disable=C, R, W0603, W0612, W0613, W0702, W0703, W1201
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html
output-format=text
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no
# Tells whether to display a full report or only the messages
reports=no
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=85
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 tab).
indent-string=' '
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=rpm
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
generated-members=REQUEST,acl_users,aq_parent
[BASIC]
# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,apply,input
# Regular expression which should only match correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression which should only match correct module level names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression which should only match correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Regular expression which should only match correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct instance attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match correct list comprehension /
# generator expression variable names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Regular expression which should only match functions or classes name which do
# not require a docstring
no-docstring-rgx=__.*__
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the beginning of the name of dummy variables
# (i.e. not used).
dummy-variables-rgx=_|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branchs=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception

@@ -1,5 +1,15 @@
#
# Copyright (c) 2018 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# Tox (http://tox.testrun.org/) is a tool for running tests
# in multiple virtualenvs. This configuration file will run the
# test suite on all supported python versions. To use it, "pip install tox"
# and then run "tox" from this directory.
[tox]
envlist = linters,pep8
envlist = linters,pep8,py27,py35,pylint
minversion = 2.3
skipsdist = True
@@ -11,9 +21,18 @@ setenv = VIRTUAL_ENV={envdir}
OS_STDERR_CAPTURE=1
OS_TEST_TIMEOUT=60
PYTHONDONTWRITEBYTECODE=True
cgcs_patch_dir = {toxinidir}/cgcs-patch/cgcs-patch
cgcs_patch_src_dir = {[testenv]cgcs_patch_dir}/cgcs_patch
patch_alarm_dir = {toxinidir}/patch-alarm/patch-alarm
patch_alarm_src_dir = {[testenv]patch_alarm_dir}/patch_alarm
tsconfig_dir = {toxinidir}/tsconfig/tsconfig
tsconfig_src_dir = {[testenv]tsconfig_dir}/tsconfig
commands = find {toxinidir} -type f -not -path '{toxinidir}/.tox/*' -not -path '*/__pycache__/*' -name '*.py[c|o]' -delete
deps = -r{toxinidir}/requirements.txt
-r{toxinidir}/test-requirements.txt
{[testenv]cgcs_patch_dir}
{[testenv]patch_alarm_dir}
{[testenv]tsconfig_dir}
[testenv:linters]
whitelist_externals = bash
@@ -66,3 +85,24 @@ whitelist_externals = cp
recreate = True
commands = {[testenv]commands}
[testenv:pylint]
deps = {[testenv]deps}
pylint
-e{toxinidir}/../stx-config/sysinv/sysinv/sysinv
-e{toxinidir}/../stx-fault/fm-api
pyCrypto
daemon
keystoneauth1
keystonemiddleware
netaddr
oslo_config
pecan
requests
requests_toolbelt
rpm
basepython = python2.7
commands = pylint {[testenv]cgcs_patch_src_dir} \
{[testenv]patch_alarm_src_dir} \
{[testenv]tsconfig_src_dir} \
--rcfile=./pylint.rc

@@ -13,8 +13,6 @@ PLATFORM_SIMPLEX_FLAG=${PLATFORM_CONF_PATH}/simplex
VOLATILE_PATH=/var/run
PLATFORM_PATH=/opt/platform
CONFIG_PATH=${PLATFORM_PATH}/config/${SW_VERSION}
# TODO(mpeters) remove the PACKSTACK_PATH
PACKSTACK_PATH=${PLATFORM_PATH}/packstack/${SW_VERSION}
PUPPET_PATH=${PLATFORM_PATH}/puppet/${SW_VERSION}
CGCS_PATH=/opt/cgcs

@@ -154,17 +154,15 @@ def _load():
_load()
''' Keep the following path and flag declarations in sync with the tsconfig
bash script.
'''
# Keep the following path and flag declarations in sync with the tsconfig
# bash script.
#
# Platform configuration paths and files
VOLATILE_PATH = "/var/run"
PLATFORM_PATH = "/opt/platform"
CONFIG_PATH = PLATFORM_PATH + "/config/" + SW_VERSION + "/"
# TODO(mpeters) remove the PACKSTACK_PATH
PACKSTACK_PATH = PLATFORM_PATH + "/packstack/" + SW_VERSION + "/"
PUPPET_PATH = PLATFORM_PATH + "/puppet/" + SW_VERSION + "/"
CGCS_PATH = "/opt/cgcs"
KEYRING_PATH = PLATFORM_PATH + "/.keyring/" + SW_VERSION

Loading…
Cancel
Save