Update pre-commit hooks configuration

Update pre-commit hooks configuration:
- use pre-commit 2.16.0
- use python pylint 2.12.2

Change-Id: Ic91148ba5cec708341b02fead011947f01b375b5
This commit is contained in:
Federico Ressi 2021-12-21 10:10:42 +01:00
parent 45729ca7ec
commit e5008111ad
8 changed files with 14 additions and 14 deletions

View File

@@ -51,13 +51,12 @@ repos:
- repo: local - repo: local
hooks: hooks:
- id: pylint - id: pylint
files: ^tobiko/
name: pylint name: pylint
entry: .tox/pep8/bin/pylint --max-line-length=80 -E -e W,E -d unused-import,broad-except,fixme entry: pylint --max-line-length=88 -E -e W,E -d unused-import,broad-except,fixme,unspecified-encoding,redundant-u-string-prefix
files: ^tobiko/
language: system language: system
types: [python] types: [python]
# - repo: https://github.com/adrienverge/yamllint.git # - repo: https://github.com/adrienverge/yamllint.git
# rev: v1.23.0 # rev: v1.23.0
# hooks: # hooks:

View File

@@ -1,4 +1,4 @@
# pep8 and flake8 requirements # pep8 and flake8 requirements
pre-commit # MIT pre-commit >= 2.16.0 # MIT
pylint >= 2.8.3, < 2.9.0 # GPLv2 pylint===2.12.2 # GPL2

View File

@@ -55,7 +55,6 @@ class TobikoException(Exception):
message = "unknown reason" message = "unknown reason"
def __init__(self, message=None, **properties): def __init__(self, message=None, **properties):
# pylint: disable=exception-message-attribute
message = message or self.message message = message or self.message
if properties: if properties:
message = message.format(**properties) message = message.format(**properties)

View File

@@ -227,7 +227,6 @@ def wait_for_systemd_units_state(
if bad_units: if bad_units:
raise UnexpectedSystemctlUnitState(matcher=match_unit, raise UnexpectedSystemctlUnitState(matcher=match_unit,
units=bad_units) units=bad_units)
# pylint: disable=dict-values-not-iterating
return tobiko.Selection(all_units.values()) return tobiko.Selection(all_units.values())

View File

@@ -228,7 +228,6 @@ class BaseAgentTest(testtools.TestCase):
self.assertGreater(min_pids_per_host, 0) self.assertGreater(min_pids_per_host, 0)
self.assertGreater(max_pids_per_host, 0) self.assertGreater(max_pids_per_host, 0)
self.assertGreaterEqual(max_pids_per_host, min_pids_per_host) self.assertGreaterEqual(max_pids_per_host, min_pids_per_host)
# pylint: disable=range-builtin-not-iterating
pids_count_range = range(min_pids_per_host, max_pids_per_host + 1) pids_count_range = range(min_pids_per_host, max_pids_per_host + 1)
pids_per_host = {} pids_per_host = {}
for host in hosts: for host in hosts:

View File

@@ -2,6 +2,7 @@ from __future__ import absolute_import
import io import io
import time import time
import typing
from oslo_log import log from oslo_log import log
import pandas import pandas
@@ -26,7 +27,7 @@ def get_random_controller_ssh_client():
return controller_node.ssh_client return controller_node.ssh_client
def get_pcs_resources_table(timeout=720, interval=2): def get_pcs_resources_table(timeout=720, interval=2) -> pandas.DataFrame:
""" """
get pcs status from a controller and parse it get pcs status from a controller and parse it
to have its resources' states in check to have its resources' states in check
@@ -39,7 +40,7 @@ def get_pcs_resources_table(timeout=720, interval=2):
:return: dataframe of pcs resources stats table :return: dataframe of pcs resources stats table
""" """
failures = [] failures: typing.List[str] = []
start = time.time() start = time.time()
ssh_client = get_random_controller_ssh_client() ssh_client = get_random_controller_ssh_client()
@@ -54,7 +55,8 @@ def get_pcs_resources_table(timeout=720, interval=2):
# remove the first column when it only includes '*' characters # remove the first column when it only includes '*' characters
output = output.replace('*', '').strip() output = output.replace('*', '').strip()
stream = io.StringIO(output) stream = io.StringIO(output)
table = pandas.read_csv(stream, delim_whitespace=True, header=None) table: pandas.DataFrame = pandas.read_csv(
stream, delim_whitespace=True, header=None)
table.columns = ['resource', 'resource_type', 'resource_state', table.columns = ['resource', 'resource_type', 'resource_state',
'overcloud_node'] 'overcloud_node']
except ValueError: except ValueError:
@@ -233,6 +235,7 @@ def get_overcloud_nodes_running_pcs_resource(resource=None,
resource/type/state: exact str of a resource name as seen in pcs status resource/type/state: exact str of a resource name as seen in pcs status
:return: list of overcloud nodes :return: list of overcloud nodes
""" """
# pylint: disable=no-member
pcs_df = get_pcs_resources_table() pcs_df = get_pcs_resources_table()
if resource: if resource:
pcs_df_query_resource = pcs_df.query('resource=="{}"'.format( pcs_df_query_resource = pcs_df.query('resource=="{}"'.format(
@@ -279,6 +282,7 @@ def get_overcloud_resource(resource_type=None,
'resource'].unique().tolist() 'resource'].unique().tolist()
if resource_type and not resource_state: if resource_type and not resource_state:
# pylint: disable=no-member
pcs_df_query_resource_type = pcs_df.query( pcs_df_query_resource_type = pcs_df.query(
'resource_type=="{}"'.format(resource_type)) 'resource_type=="{}"'.format(resource_type))
return pcs_df_query_resource_type['resource'].unique().tolist() return pcs_df_query_resource_type['resource'].unique().tolist()

View File

@@ -54,10 +54,12 @@ root | 11| 2| 0.0| 0|00:00:05|migration/0 |[migration/0]
"ps|sed 's/\"/''/g'", "ps|sed 's/\"/''/g'",
ssh_client=ssh_client).stdout ssh_client=ssh_client).stdout
stream = io.StringIO(output) stream = io.StringIO(output)
table = pandas.read_csv(stream, sep='DELIM', header=None, skiprows=1) table: pandas.DataFrame = pandas.read_csv(
stream, sep='DELIM', header=None, skiprows=1)
table.replace(to_replace=' ', value="", regex=True, inplace=True) table.replace(to_replace=' ', value="", regex=True, inplace=True)
table.columns = ['USER', 'PID', 'PPID', 'CPU', 'VSZ', 'TIME', 'PROCESS', table.columns = ['USER', 'PID', 'PPID', 'CPU', 'VSZ', 'TIME', 'PROCESS',
'PROCESS_ARGS'] 'PROCESS_ARGS']
# pylint: disable=unsupported-assignment-operation
table['overcloud_node'] = hostname table['overcloud_node'] = hostname
LOG.debug("Successfully got overcloud nodes processes status table") LOG.debug("Successfully got overcloud nodes processes status table")

View File

@@ -77,8 +77,6 @@ commands =
pre-commit run -a flake8 pre-commit run -a flake8
setenv = setenv =
{[testenv]setenv} {[testenv]setenv}
#fressi: There is a conflict between the last global constrains file and pylint
# requirements
TOX_CONSTRAINTS = -c{toxinidir}/upper-constraints.txt TOX_CONSTRAINTS = -c{toxinidir}/upper-constraints.txt
[testenv:mypy] [testenv:mypy]