Update controllerconfig tox environment for debian
The flake8 version was very old and has been updated.
The py27 and py36 targets are removed.
The pylint file has been cleaned up for python3.
Zuul nodesets for this component are now debian.

Minor fixes clean up simple flake8 and pylint warnings.
Uses of 'file' and 'stat' as variable names, which mask builtins
or top-level imports, have been cleaned up.

The main change was to replace yaml.load with yaml.safe_load.
This is because yaml.safe_load exists in older and newer versions
of pyyaml, whereas the yaml.load signature changes between versions,
and not all Loader types exist in both.

Test Plan:
PASS: tox

Story: 2010642
Task: 48156

Signed-off-by: Al Bailey <al.bailey@windriver.com>
Change-Id: Ibdac877f40dc32140121724f19751aff6052e9db
parent 04f7c0d36d
commit f7ccd93a79
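To illustrate the yaml.safe_load rationale described in the commit message, here is a minimal sketch (not part of this change set; the file path and helper name are made up for illustration):

import yaml

# Hypothetical hieradata path, used only for illustration.
STATIC_YAML = "/opt/platform/puppet/21.12/hieradata/static.yaml"


def load_hieradata(path=STATIC_YAML):
    # yaml.safe_load() takes only the stream and behaves the same across
    # old and new PyYAML releases.  yaml.load() expects an explicit Loader
    # argument on newer versions, and not every Loader class exists in
    # every release, so it is harder to call portably.
    # Naming the handle s_file (rather than 'file') also avoids shadowing
    # the builtin that pylint warns about.
    with open(path, 'r') as s_file:
        return yaml.safe_load(s_file)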
@@ -116,7 +116,7 @@
     name: controllerconfig-tox-flake8
     parent: tox
     description: Run flake8 tests for controllerconfig
-    nodeset: ubuntu-xenial
+    nodeset: debian-bullseye
     files:
       - controllerconfig/*
     vars:
@@ -127,7 +127,7 @@
     name: controllerconfig-tox-pylint
     parent: tox
    description: Run pylint tests for controllerconfig
-    nodeset: ubuntu-xenial
+    nodeset: debian-bullseye
     required-projects:
       - starlingx/fault
     files:
@@ -79,7 +79,7 @@ def configure_management():
     try:
         lldp_interfaces = json.loads(
             lldpcli_show_output)['lldp'][0]['interface']
-    except Exception as e:
+    except Exception:
         lldp_interfaces = {}
     print("DONE")

@@ -56,7 +56,7 @@ class OpenStack(object):
         try:
             with open(openstack_config_file_path) as f:
                 openstack_config_data = \
-                    yaml.load(f)['clouds']['openstack_helm']
+                    yaml.safe_load(f)['clouds']['openstack_helm']
                 self.conf['admin_user'] = \
                     openstack_config_data['auth']['username']
                 self.conf['admin_pwd'] = \
@@ -459,7 +459,7 @@ def tidy_storage(result_file):
                 c_client.volumes.reset_state(
                     c_utils.find_volume(c_client, vol), state='error')
                 print("Setting state to error for volume %s \n" % vol)
-            except Exception as e:
+            except Exception:
                 LOG.error("Failed to update volume to error state for %s"
                           % vol)
                 raise TidyStorageFail("Failed to update volume to error state")
@@ -620,6 +620,5 @@ def main():
         open(result_file, 'w')
     except IOError:
         raise TidyStorageFail("Failed to open file: %s" % result_file)
-        exit(1)

     tidy_storage(result_file)
@@ -107,8 +107,8 @@ def get_db_credentials(shared_services, from_release, role=None):
     hiera_path = os.path.join(PLATFORM_PATH, "puppet", from_release,
                               "hieradata")
     static_file = os.path.join(hiera_path, "static.yaml")
-    with open(static_file, 'r') as file:
-        static_config = yaml.load(file)
+    with open(static_file, 'r') as s_file:
+        static_config = yaml.safe_load(s_file)

     db_credentials = dict()
     for database, values in db_credential_keys.items():
@@ -411,8 +411,8 @@ def migrate_sysinv_data(from_release, to_release):
     hiera_path = os.path.join(PLATFORM_PATH, "puppet", from_release,
                               "hieradata")
     static_file = os.path.join(hiera_path, "static.yaml")
-    with open(static_file, 'r') as file:
-        static_config = yaml.load(file)
+    with open(static_file, 'r') as s_file:
+        static_config = yaml.safe_load(s_file)

     username = static_config["sysinv::db::postgresql::user"]
     password = utils.get_password_from_keyring("sysinv", "database")
@@ -448,8 +448,8 @@ def prepare_postgres_filesystems():

     # Create a temporary filesystem for the dumped database
     from_dir = os.path.join(POSTGRES_MOUNT_PATH, "upgrade")
-    stat = os.statvfs(from_dir)
-    db_dump_filesystem_size = str(stat.f_frsize * stat.f_blocks) + "B"
+    statvfs = os.statvfs(from_dir)
+    db_dump_filesystem_size = str(statvfs.f_frsize * statvfs.f_blocks) + "B"

     # Move the dumped files to a temporary filesystem.
     os.mkdir(POSTGRES_DUMP_MOUNT_PATH)
@@ -459,8 +459,8 @@ def prepare_postgres_filesystems():
     shutil.move(from_dir, POSTGRES_DUMP_MOUNT_PATH)

     # Create a temporary filesystem for the migrated database
-    stat = os.statvfs(POSTGRES_MOUNT_PATH)
-    db_filesystem_size = str(stat.f_frsize * stat.f_blocks) + "B"
+    statvfs = os.statvfs(POSTGRES_MOUNT_PATH)
+    db_filesystem_size = str(statvfs.f_frsize * statvfs.f_blocks) + "B"
     if not os.path.isdir(utils.POSTGRES_PATH):
         os.mkdir(utils.POSTGRES_PATH)
     create_temp_filesystem("cgts-vg", "postgres-temp-lv", utils.POSTGRES_PATH,
@@ -808,7 +808,7 @@ def migrate_hiera_data(from_release, to_release, role=None):
     # Update the static.yaml file
     static_file = os.path.join(constants.HIERADATA_PERMDIR, "static.yaml")
     with open(static_file, 'r') as yaml_file:
-        static_config = yaml.load(yaml_file)
+        static_config = yaml.safe_load(yaml_file)
     static_config.update({
         'platform::params::software_version': SW_VERSION,
         'platform::client::credentials::params::keyring_directory':
@@ -823,7 +823,7 @@ def migrate_hiera_data(from_release, to_release, role=None):
     secure_static_file = os.path.join(
         constants.HIERADATA_PERMDIR, "secure_static.yaml")
     with open(secure_static_file, 'r') as yaml_file:
-        secure_static_config = yaml.load(yaml_file)
+        secure_static_config = yaml.safe_load(yaml_file)

     with open(secure_static_file, 'w') as yaml_file:
         yaml.dump(secure_static_config, yaml_file, default_flow_style=False)
@@ -179,7 +179,7 @@ def prepare_upgrade(from_load, to_load, i_system, mgmt_address):
     admin_conf = os.path.join(tsc.PLATFORM_PATH, "config", to_load,
                               "kubernetes", utils.KUBERNETES_ADMIN_CONF_FILE)
     with open(admin_conf, 'r') as yaml_file:
-        config = yaml.load(yaml_file)
+        config = yaml.safe_load(yaml_file)

     for item, values in config.items():
         # update server address in cluster
@@ -216,8 +216,8 @@ def get_upgrade_token(from_release,
     from_hiera_path = os.path.join(PLATFORM_PATH, "puppet", from_release,
                                    "hieradata")
     system_file = os.path.join(from_hiera_path, "system.yaml")
-    with open(system_file, 'r') as file:
-        system_config = yaml.load(file)
+    with open(system_file, 'r') as s_file:
+        system_config = yaml.safe_load(s_file)

     # during a controller-1 upgrade, keystone is running
     # on the controller UNIT IP, however the service catalog
@@ -297,8 +297,8 @@ def get_upgrade_data(from_release,
     from_hiera_path = os.path.join(PLATFORM_PATH, "puppet", from_release,
                                    "hieradata")
     system_file = os.path.join(from_hiera_path, "system.yaml")
-    with open(system_file, 'r') as file:
-        system_config_from_release = yaml.load(file)
+    with open(system_file, 'r') as s_file:
+        system_config_from_release = yaml.safe_load(s_file)

     # Get keystone region
     keystone_region = system_config_from_release.get(
@@ -329,11 +329,11 @@ def add_upgrade_entries_to_hiera_data(from_release):

         # Get the hiera data for this release
         filepath = os.path.join(path, filename)
-        with open(filepath, 'r') as file:
-            config = yaml.load(file)
+        with open(filepath, 'r') as c_file:
+            config = yaml.safe_load(c_file)
         secure_filepath = os.path.join(path, secure_filename)
-        with open(secure_filepath, 'r') as file:
-            secure_config = yaml.load(file)
+        with open(secure_filepath, 'r') as s_file:
+            secure_config = yaml.safe_load(s_file)

         # File for system.yaml
         # This is needed for adding new service account and endpoints
@@ -264,7 +264,7 @@ def persist_config():

 def apply_banner_customization():
     """ Apply and Install banners provided by the user """
-    """ execute: /usr/sbin/apply_banner_customization """
+    # execute: /usr/sbin/apply_banner_customization
     logfile = "/tmp/apply_banner_customization.log"
     try:
         with open(logfile, "w") as blog:
@@ -17,91 +17,6 @@ load-plugins=


 [MESSAGES CONTROL]
-# Enable the message, report, category or checker with the given id(s). You can
-# either give multiple identifier separated by comma (,) or put this option
-# multiple time.
-#
-# Python3 checker:
-#
-# E1601: print-statement
-# E1602: parameter-unpacking
-# E1603: unpacking-in-except
-# E1604: old-raise-syntax
-# E1605: backtick
-# E1606: long-suffix
-# E1607: old-ne-operator
-# E1608: old-octal-literal
-# E1609: import-star-module-level
-# E1610: non-ascii-bytes-literal
-# E1611: invalid-unicode-literal
-# W1601: apply-builtin
-# W1602: basestring-builtin
-# W1603: buffer-builtin
-# W1604: cmp-builtin
-# W1605: coerce-builtin
-# W1606: execfile-builtin
-# W1607: file-builtin
-# W1608: long-builtin
-# W1609: raw_input-builtin
-# W1610: reduce-builtin
-# W1611: standarderror-builtin
-# W1612: unicode-builtin
-# W1613: xrange-builtin
-# W1614: coerce-method
-# W1615: delslice-method
-# W1616: getslice-method
-# W1617: setslice-method
-# W1618: no-absolute-import
-# W1619: old-division
-# W1620: dict-iter-method
-# W1621: dict-view-method
-# W1622: next-method-called
-# W1623: metaclass-assignment
-# W1624: indexing-exception
-# W1625: raising-string
-# W1626: reload-builtin
-# W1627: oct-method
-# W1628: hex-method
-# W1629: nonzero-method
-# W1630: cmp-method
-# W1632: input-builtin
-# W1633: round-builtin
-# W1634: intern-builtin
-# W1635: unichr-builtin
-# W1636: map-builtin-not-iterating
-# W1637: zip-builtin-not-iterating
-# W1638: range-builtin-not-iterating
-# W1639: filter-builtin-not-iterating
-# W1640: using-cmp-argument
-# W1641: eq-without-hash
-# W1642: div-method
-# W1643: idiv-method
-# W1644: rdiv-method
-# W1645: exception-message-attribute
-# W1646: invalid-str-codec
-# W1647: sys-max-int
-# W1648: bad-python3-import
-# W1649: deprecated-string-function
-# W1650: deprecated-str-translate-call
-# W1651: deprecated-itertools-function
-# W1652: deprecated-types-field
-# W1653: next-method-defined
-# W1654: dict-items-not-iterating
-# W1655: dict-keys-not-iterating
-# W1656: dict-values-not-iterating
-# W1657: deprecated-operator-function
-# W1658: deprecated-urllib-function
-# W1659: xreadlines-attribute
-# W1660: deprecated-sys-function
-# W1661: exception-escape
-# W1662: comprehension-escape
-enable=E1603,E1609,E1610,E1602,E1606,E1608,E1607,E1605,E1604,E1601,E1611,W1652,
-       W1651,W1649,W1657,W1660,W1658,W1659,W1623,W1622,W1620,W1621,W1645,W1641,
-       W1624,W1648,W1625,W1611,W1662,W1661,W1650,W1640,W1630,W1614,W1615,W1642,
-       W1616,W1628,W1643,W1629,W1627,W1644,W1617,W1601,W1602,W1603,W1604,W1605,
-       W1654,W1655,W1656,W1619,W1606,W1607,W1639,W1618,W1632,W1634,W1608,W1636,
-       W1653,W1646,W1638,W1609,W1610,W1626,W1633,W1647,W1635,W1612,W1613,W1637
-
 # Disable the message, report, category or checker with the given id(s). You
 # can either give multiple identifier separated by comma (,) or put this option
 # multiple time (only on the command line, not in the configuration file where
@@ -113,17 +28,10 @@ enable=E1603,E1609,E1610,E1602,E1606,E1608,E1607,E1605,E1604,E1601,E1611,W1652,
 # We are not disabling (F)atal, (E)rror
 # The following suppressed warnings should be fixed:
 # fixme: (notes, xxx, fixme)
-# W0101: unreachable-code
-# W0105: pointless-string-statement
-# W0311: bad-indentation
-# W0403: relative-import
 # W0613: unused-argument
-# W0621: redefined-outer-name
 # W0622: redefined-builtin
 # W0703: broad-except
-# W1501: bad-open-mode
-# W1618: no-absolute-import
-disable=C, R, fixme, W0101, W0105, W0311, W0403, W0613, W0621, W0622, W0703, W1501, W1618
+disable=C, R, fixme, W0613, W0622, W0703


 [REPORTS]
@@ -1,12 +1,15 @@
-hacking!=0.13.0,<0.14,>=0.12.0 # Apache-2.0
-pytest
-mockproc>= 0.3.1 # BSD
+# The order of packages is significant, because pip processes them in the order
+# of appearance. Changing the order has an impact on the overall integration
+# process, which may cause wedges in the gate later.
+
+hacking>=1.1.0,<=2.0.0 # Apache-2.0
 coverage>=3.6
-PyYAML>=3.10.0 # MIT
+isort<5
+mockproc>= 0.3.1 # BSD
 os-testr>=0.8.0 # Apache-2.0
+pylint<2.4.0 # GPLv2
+pytest
+PyYAML>=3.10.0 # MIT
 stestr>=1.0.0 # Apache-2.0
 testresources>=0.2.4 # Apache-2.0/BSD
 testrepository>=0.0.18 # Apache-2.0/BSD
-isort<5;python_version>="3.0"
-pylint<2.1.0;python_version<"3.0" # GPLv2
-pylint<2.4.0;python_version>="3.0" # GPLv2
@@ -4,17 +4,16 @@
 # and then run "tox" from this directory.

 [tox]
-envlist = flake8, pylint, py27, py36, py39
+envlist = flake8, pylint, py39
 # Tox does not work if the path to the workdir is too long, so move it to /tmp
 toxworkdir = /tmp/{env:USER}_cctox
 stxdir = {toxinidir}/../../..

 [testenv]
 allowlist_externals = find
-install_command = pip install \
-    --no-cache-dir \
-    -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/stable/stein/upper-constraints.txt} \
-    {opts} {packages}
+basepython = python3
+commands =
+    find . -type f -name "*.pyc" -delete

 deps = -r{toxinidir}/requirements.txt
        -r{toxinidir}/test-requirements.txt
@@ -22,52 +21,28 @@ deps = -r{toxinidir}/requirements.txt
        -e{[tox]stxdir}/config/tsconfig/tsconfig
        -e{[tox]stxdir}/config/sysinv/sysinv/sysinv

-commands =
-    find . -type f -name "*.pyc" -delete
+install_command = pip install \
+    --no-cache-dir \
+    -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/starlingx/root/raw/branch/master/build-tools/requirements/debian/upper-constraints.txt} \
+    {opts} {packages}

 [testenv:venv]
 commands = {posargs}

-[testenv:py27]
-basepython = python2.7
-deps = {[testenv]deps}
-passenv = CURL_CA_BUNDLE
-commands =
-    {[testenv]commands}
-    stestr run {posargs}
-    stestr slowest
+[testenv:flake8]
+deps = -r{toxinidir}/test-requirements.txt
+commands = flake8 {posargs}

-[testenv:py36]
-basepython = python3.6
-deps = {[testenv]deps}
+[testenv:py39]
+basepython = python3.9
 commands =
     {[testenv]commands}
     stestr run {posargs}
     stestr slowest

 [testenv:pylint]
-basepython = python3
-deps = {[testenv]deps}
-    pylint
-
 commands = pylint {posargs} controllerconfig --rcfile=./pylint.rc --extension-pkg-whitelist=netifaces

-[testenv:flake8]
-basepython = python3
-deps = -r{toxinidir}/test-requirements.txt
-commands = flake8 {posargs}
-
-[testenv:py39]
-basepython = python3.9
-install_command = pip install \
-    --no-cache-dir \
-    -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/starlingx/root/raw/branch/master/build-tools/requirements/debian/upper-constraints.txt} \
-    {opts} {packages}
-commands =
-    {[testenv]commands}
-    stestr run {posargs}
-    stestr slowest
-
 [flake8]
 # H series are hacking
 # H101: Use TODO(NAME)
@@ -77,16 +52,17 @@ commands =
 # H401: docstring should not start with a space
 # H404: multi line docstring should start without a leading new line
 # H405: multi line docstring summary not separated with an empty line
-ignore = H101,H102,H104,H306,H401,H404,H405
+#
+# W504 line break after binary operator
+# W605 invalid escape sequence
+ignore = H101,H102,H104,H306,H401,H404,H405,
+         W504,W605
 exclude = build

 [testenv:cover]
-basepython = python3
-deps = {[testenv]deps}
+basepython = python3.9

 commands =
     coverage erase
     python setup.py testr --coverage --testr-args='{posargs}'
     coverage xml
     coverage report

@@ -38,8 +38,8 @@ def main():
     )
     res = 0
     if action == "migrate" and (
-        from_release == "21.12"
-        and to_release == "22.12"
+        from_release == "21.12" and
+        to_release == "22.12"
     ):
         if not is_multipath():
             LOG.info("Multipath not detected, nothing to do")
@@ -230,5 +230,6 @@ def transform_part_device_path(path):
         return "{}wwn-0x{}-{}".format(result[1], result[3], result[2])
     return path

+
 if __name__ == "__main__":
     sys.exit(main())
@@ -45,7 +45,7 @@ def add_pod_security_admission_controller_labels():

         namespaces_output = subprocess.check_output(cmd).decode("utf-8")

-    except Exception as exc:
+    except Exception:
         LOG.error('Command failed:\n %s' % (cmd))
         raise Exception('Cannot get namespaces for pod security labels')
