lint: enable black

Ensures consistent formatting of our Python codebase without relying
on humans to do it, or debating style during reviews.

Change-Id: I1e62cc755fa60e453dea865f436241ecae330771
Sorin Sbarnea 2020-01-23 12:49:42 +00:00
parent 885459e9ab
commit f78da2885a
15 changed files with 1460 additions and 1139 deletions


@@ -1,5 +1,17 @@
---
repos:
- repo: https://github.com/python/black.git
rev: 19.10b0
hooks:
- id: black
language_version: python3
- repo: https://gitlab.com/pycqa/flake8.git
rev: 3.7.9
hooks:
- id: flake8
additional_dependencies:
- flake8-black>=0.1.1
language_version: python3
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.4.0
hooks:

pyproject.toml (new file)

@@ -0,0 +1,2 @@
[tool.black]
skip-string-normalization = true
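
This single option disables black's string normalization: by default black
rewrites single-quoted string literals to double quotes, so skipping it means
only layout changes while the repository's existing quote style survives. A
minimal illustration (hypothetical snippet, not from this repo):

release = 'master'  # kept as-is when skip-string-normalization = true
# black's default behaviour would instead rewrite the literal to:
# release = "master"
print(release)

That is why the reformatted hunks below move code around but never touch
quote characters.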


@@ -53,7 +53,8 @@ def get_last_jobs(change):
sanitized_content = "\n".join(response.content.split("\n")[1:])
detail = json.loads(sanitized_content)
zuul_messages = [
message for message in detail['messages']
message
for message in detail['messages']
if message['author']['username'] == GERRIT_USER_NAME
and "({} pipeline)".format(ZUUL_PIPELINE) in message['message']
]
@@ -62,8 +63,9 @@ def get_last_jobs(change):
patchset = "Patch Set {}".format(patchset)
filtered = [m for m in zuul_messages if patchset in m['message']]
if len(filtered) == 0:
raise RuntimeError("{} not found for review {}".format(
patchset, change))
raise RuntimeError(
"{} not found for review {}".format(patchset, change)
)
last_message = filtered[0]
else:
last_message = zuul_messages[-1]
@@ -97,18 +99,15 @@ def is_equal(lho_jobs, rho_jobs, file_path):
rho_files = download(rho_jobs, file_path)
print(">>>>>>> Comparing {}".format(file_path))
if lho_files != rho_files:
diffkeys = [
k for k in lho_files
if lho_files[k] != rho_files.get(k, None)
]
diffkeys = [k for k in lho_files if lho_files[k] != rho_files.get(k, None)]
print("{} are different at the following jobs:".format(file_path))
for key in diffkeys:
print(Fore.BLUE + key)
print(Fore.BLUE + lho + ": " + lho_jobs[key])
print(Fore.BLUE + rho + ": " + rho_jobs[key])
for line in difflib.unified_diff(
lho_files[key].splitlines(),
rho_files.get(key, '').splitlines()):
lho_files[key].splitlines(), rho_files.get(key, '').splitlines()
):
print(colors.get(line[0], Fore.BLACK) + line)
return False
print("{} files are the same".format(file_path))
@@ -117,22 +116,24 @@ def is_equal(lho_jobs, rho_jobs, file_path):
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Compares files at logs.o.o from two reviews')
description='Compares files at logs.o.o from two reviews'
)
parser.add_argument(
'reviews',
metavar='review',
nargs=2,
help='left-side and right-side review numbers to compare it can'
'include the specific patchset, examples:610491 or 610491/1')
'include the specific patchset, examples:610491 or 610491/1',
)
parser.add_argument(
'--files',
type=str,
default='playbook_executions.log,reproducer-quickstart.sh,'
'collect_logs.sh',
default='playbook_executions.log,reproducer-quickstart.sh,' 'collect_logs.sh',
help='Comma separated list of files to compare at logs.o.o '
'(default: %(default)s)')
'(default: %(default)s)',
)
args = parser.parse_args()


@@ -39,8 +39,7 @@ import requests
import yaml
# Define releases
RELEASES = ['newton', 'ocata', 'pike', 'queens',
'rocky', 'stein', 'train', 'master']
RELEASES = ['newton', 'ocata', 'pike', 'queens', 'rocky', 'stein', 'train', 'master']
# Define long term releases
LONG_TERM_SUPPORT_RELEASES = ['queens']
UNSUPPORTED_STANDALONE = ['newton', 'ocata', 'pike', 'queens', 'rocky']
@@ -63,8 +62,7 @@ def setup_logging(log_file):
'''Setup logging for the script'''
logger = logging.getLogger('emit-releases')
logger.setLevel(logging.DEBUG)
log_handler = logging.handlers.WatchedFileHandler(
os.path.expanduser(log_file))
log_handler = logging.handlers.WatchedFileHandler(os.path.expanduser(log_file))
logger.addHandler(log_handler)
@@ -74,15 +72,17 @@ def load_featureset_file(featureset_file):
with open(featureset_file, 'r') as stream:
featureset = yaml.safe_load(stream)
except Exception as e:
logger.error("The featureset file: {} can not be "
"opened.".format(featureset_file))
logger.error(
"The featureset file: {} can not be " "opened.".format(featureset_file)
)
logger.exception(e)
raise e
return featureset
def get_dlrn_hash(release, hash_name, distro_name='centos', distro_version='7',
retries=20, timeout=8):
def get_dlrn_hash(
release, hash_name, distro_name='centos', distro_version='7', retries=20, timeout=8
):
"""Get the dlrn hash for the release and hash name
Retrieves the delorean.repo for the provided release and hash name, e.g.
@@ -97,20 +97,22 @@ def get_dlrn_hash(release, hash_name, distro_name='centos', distro_version='7',
rdo_url = os.getenv('NODEPOOL_RDO_PROXY', 'https://trunk.rdoproject.org')
logger.error("distro %s version %s", distro_name, distro_version)
if distro_name == 'centos' and distro_version == '7':
repo_url = ('%s/centos7-%s/%s/delorean.repo' %
(rdo_url, release, hash_name))
repo_url = '%s/centos7-%s/%s/delorean.repo' % (rdo_url, release, hash_name)
elif distro_name == 'centos' and distro_version == '8':
repo_url = ('%s/centos8-%s/%s/delorean.repo.md5' %
(rdo_url, release, hash_name))
logger.debug("distro_name {} distro_version {} repo_url {}"
"".format(distro_name, distro_version, repo_url))
repo_url = '%s/centos8-%s/%s/delorean.repo.md5' % (rdo_url, release, hash_name)
logger.debug(
"distro_name {} distro_version {} repo_url {}"
"".format(distro_name, distro_version, repo_url)
)
for retry_num in range(retries):
repo_file = None
try:
repo_file = requests.get(repo_url, timeout=timeout)
except Exception as e:
logger.warning("Attempt {} of {} to get DLRN hash threw an "
"exception.".format(retry_num + 1, retries))
logger.warning(
"Attempt {} of {} to get DLRN hash threw an "
"exception.".format(retry_num + 1, retries)
)
logger.exception(e)
continue
if repo_file is not None and repo_file.ok:
@@ -122,27 +124,37 @@ def get_dlrn_hash(release, hash_name, distro_name='centos', distro_version='7',
break
elif repo_file:
logger.warning("Attempt {} of {} to get DLRN hash returned "
"status code {}.".format(retry_num + 1,
retries,
repo_file.status_code))
logger.warning(
"Attempt {} of {} to get DLRN hash returned "
"status code {}.".format(retry_num + 1, retries, repo_file.status_code)
)
else:
logger.warning("Attempt {} of {} to get DLRN hash failed to "
"get a response.".format(retry_num + 1,
retries))
logger.warning(
"Attempt {} of {} to get DLRN hash failed to "
"get a response.".format(retry_num + 1, retries)
)
if repo_file is None or not repo_file.ok:
raise RuntimeError("Failed to retrieve repo file from {} after "
"{} retries".format(repo_url, retries))
raise RuntimeError(
"Failed to retrieve repo file from {} after "
"{} retries".format(repo_url, retries)
)
logger.info("Got DLRN hash: {} for the named hash: {} on the {} "
"release".format(full_hash, hash_name, release))
logger.info(
"Got DLRN hash: {} for the named hash: {} on the {} "
"release".format(full_hash, hash_name, release)
)
return full_hash
def compose_releases_dictionary(stable_release, featureset, upgrade_from,
is_periodic=False,
distro_name='centos', distro_version='7'):
def compose_releases_dictionary(
stable_release,
featureset,
upgrade_from,
is_periodic=False,
distro_name='centos',
distro_version='7',
):
"""Compose the release dictionary for stable_release and featureset
This contains the main logic determining the contents of the release file.
@@ -167,56 +179,65 @@ def compose_releases_dictionary(stable_release, featureset, upgrade_from,
"""
logger = logging.getLogger('emit-releases')
if stable_release not in RELEASES:
raise RuntimeError("The {} release is not supported by this tool"
"Supported releases: {}".format(
stable_release, RELEASES))
raise RuntimeError(
"The {} release is not supported by this tool"
"Supported releases: {}".format(stable_release, RELEASES)
)
if (featureset.get('overcloud_upgrade') or
featureset.get('undercloud_upgrade')) and \
stable_release == RELEASES[0]:
if (
featureset.get('overcloud_upgrade') or featureset.get('undercloud_upgrade')
) and stable_release == RELEASES[0]:
raise RuntimeError("Cannot upgrade to {}".format(RELEASES[0]))
if featureset.get('undercloud_upgrade') and stable_release == 'ocata':
raise RuntimeError("Undercloud upgrades are not supported from "
"newton to ocata")
raise RuntimeError(
"Undercloud upgrades are not supported from " "newton to ocata"
)
if featureset.get('overcloud_upgrade') and \
featureset.get('undercloud_upgrade'):
raise RuntimeError("This tool currently only supports upgrading the "
"undercloud OR the overcloud NOT both.")
if featureset.get('overcloud_upgrade') and featureset.get('undercloud_upgrade'):
raise RuntimeError(
"This tool currently only supports upgrading the "
"undercloud OR the overcloud NOT both."
)
if (featureset.get('overcloud_upgrade') or
featureset.get('ffu_overcloud_upgrade')) and \
not featureset.get('mixed_upgrade'):
if (
featureset.get('overcloud_upgrade') or featureset.get('ffu_overcloud_upgrade')
) and not featureset.get('mixed_upgrade'):
raise RuntimeError("Overcloud upgrade has to be mixed upgrades")
if featureset.get('standalone_upgrade') and \
stable_release in UNSUPPORTED_STANDALONE:
if (
featureset.get('standalone_upgrade')
and stable_release in UNSUPPORTED_STANDALONE
):
raise RuntimeError(
"Standalone upgrade doesn't support {}".format(stable_release))
"Standalone upgrade doesn't support {}".format(stable_release)
)
if featureset.get('ffu_overcloud_upgrade') and \
stable_release not in LONG_TERM_SUPPORT_RELEASES:
if (
featureset.get('ffu_overcloud_upgrade')
and stable_release not in LONG_TERM_SUPPORT_RELEASES
):
raise RuntimeError(
"{} is not a long-term support release, and cannot be "
"used in a fast forward upgrade. Current long-term support "
"releases: {}".format(stable_release, LONG_TERM_SUPPORT_RELEASES))
"releases: {}".format(stable_release, LONG_TERM_SUPPORT_RELEASES)
)
newest_hash = get_dlrn_hash(stable_release, NEWEST_HASH_NAME,
distro_name,
distro_version)
newest_hash = get_dlrn_hash(
stable_release, NEWEST_HASH_NAME, distro_name, distro_version
)
if stable_release == 'newton':
current_hash = get_dlrn_hash(stable_release, NEWTON_HASH_NAME,
distro_name,
distro_version)
current_hash = get_dlrn_hash(
stable_release, NEWTON_HASH_NAME, distro_name, distro_version
)
elif is_periodic:
current_hash = get_dlrn_hash(stable_release, PROMOTION_HASH_NAME,
distro_name,
distro_version)
current_hash = get_dlrn_hash(
stable_release, PROMOTION_HASH_NAME, distro_name, distro_version
)
else:
current_hash = get_dlrn_hash(stable_release, CURRENT_HASH_NAME,
distro_name,
distro_version)
current_hash = get_dlrn_hash(
stable_release, CURRENT_HASH_NAME, distro_name, distro_version
)
releases_dictionary = {
'undercloud_install_release': stable_release,
@@ -240,13 +261,13 @@ def compose_releases_dictionary(stable_release, featureset, upgrade_from,
logger.info('Doing an overcloud upgrade')
deploy_release = get_relative_release(stable_release, -1)
if deploy_release == 'newton':
deploy_hash = get_dlrn_hash(deploy_release, NEWTON_HASH_NAME,
distro_name,
distro_version)
deploy_hash = get_dlrn_hash(
deploy_release, NEWTON_HASH_NAME, distro_name, distro_version
)
else:
deploy_hash = get_dlrn_hash(deploy_release, CURRENT_HASH_NAME,
distro_name,
distro_version)
deploy_hash = get_dlrn_hash(
deploy_release, CURRENT_HASH_NAME, distro_name, distro_version
)
releases_dictionary['overcloud_deploy_release'] = deploy_release
releases_dictionary['overcloud_deploy_hash'] = deploy_hash
@@ -254,48 +275,46 @@ def compose_releases_dictionary(stable_release, featureset, upgrade_from,
logger.info('Doing an overcloud fast forward upgrade')
deploy_release = get_relative_release(stable_release, -3)
if deploy_release == 'newton':
deploy_hash = get_dlrn_hash(deploy_release, NEWTON_HASH_NAME,
distro_name,
distro_version)
deploy_hash = get_dlrn_hash(
deploy_release, NEWTON_HASH_NAME, distro_name, distro_version
)
else:
deploy_hash = get_dlrn_hash(deploy_release, CURRENT_HASH_NAME,
distro_name,
distro_version)
deploy_hash = get_dlrn_hash(
deploy_release, CURRENT_HASH_NAME, distro_name, distro_version
)
releases_dictionary['overcloud_deploy_release'] = deploy_release
releases_dictionary['overcloud_deploy_hash'] = deploy_hash
elif featureset.get('undercloud_upgrade'):
logger.info('Doing an undercloud upgrade')
install_release = get_relative_release(stable_release, -1)
install_hash = get_dlrn_hash(install_release, CURRENT_HASH_NAME,
distro_name,
distro_version)
install_hash = get_dlrn_hash(
install_release, CURRENT_HASH_NAME, distro_name, distro_version
)
releases_dictionary['undercloud_install_release'] = install_release
releases_dictionary['undercloud_install_hash'] = install_hash
elif featureset.get('standalone_upgrade'):
logger.info('Doing an standalone upgrade')
install_release = get_relative_release(stable_release, -1)
install_hash = get_dlrn_hash(install_release, CURRENT_HASH_NAME,
distro_name,
distro_version)
install_newest_hash = get_dlrn_hash(install_release, NEWEST_HASH_NAME,
distro_name,
distro_version)
install_hash = get_dlrn_hash(
install_release, CURRENT_HASH_NAME, distro_name, distro_version
)
install_newest_hash = get_dlrn_hash(
install_release, NEWEST_HASH_NAME, distro_name, distro_version
)
releases_dictionary['standalone_deploy_release'] = install_release
releases_dictionary['standalone_deploy_newest_hash'] = \
install_newest_hash
releases_dictionary['standalone_deploy_newest_hash'] = install_newest_hash
releases_dictionary['standalone_deploy_hash'] = install_hash
elif featureset.get('overcloud_update'):
logger.info('Doing an overcloud update')
previous_hash = get_dlrn_hash(stable_release, PREVIOUS_HASH_NAME,
distro_name,
distro_version)
previous_hash = get_dlrn_hash(
stable_release, PREVIOUS_HASH_NAME, distro_name, distro_version
)
releases_dictionary['overcloud_deploy_hash'] = previous_hash
logger.debug("stable_release: %s, featureset: %s", stable_release,
featureset)
logger.debug("stable_release: %s, featureset: %s", stable_release, featureset)
logger.info('output releases: %s', releases_dictionary)
@@ -326,19 +345,24 @@ def shim_convert_old_release_names(releases_names, is_periodic):
if oc_deploy_release != oc_target_release:
release_file = "{}-undercloud-{}-overcloud".format(
uc_install_release, oc_deploy_release)
uc_install_release, oc_deploy_release
)
modified_releases_name['undercloud_install_release'] = release_file
modified_releases_name['undercloud_target_release'] = release_file
modified_releases_name['overcloud_deploy_release'] = release_file
modified_releases_name['overcloud_target_release'] = release_file
elif is_periodic:
for key in [
'undercloud_install_release', 'undercloud_target_release',
'overcloud_deploy_release', 'overcloud_target_release',
'standalone_deploy_release', 'standalone_target_release'
'undercloud_install_release',
'undercloud_target_release',
'overcloud_deploy_release',
'overcloud_target_release',
'standalone_deploy_release',
'standalone_target_release',
]:
modified_releases_name[
key] = "promotion-testing-hash-" + releases_names[key]
modified_releases_name[key] = (
"promotion-testing-hash-" + releases_names[key]
)
return modified_releases_name
@@ -362,7 +386,9 @@ export STANDALONE_DEPLOY_NEWEST_HASH="{standalone_deploy_newest_hash}"
export STANDALONE_TARGET_RELEASE="{standalone_target_release}"
export STANDALONE_TARGET_NEWEST_HASH="{standalone_target_newest_hash}"
export STANDALONE_TARGET_HASH="{standalone_target_hash}"
'''.format(**releases_dictionary)
'''.format(
**releases_dictionary
)
with open(bash_file_name, 'w') as bash_file:
bash_file.write(bash_script)
except Exception:
@@ -379,47 +405,59 @@ if __name__ == '__main__':
parser = argparse.ArgumentParser(
formatter_class=argparse.RawTextHelpFormatter,
description='Get a dictionary of releases from a release '
'and a featureset file.')
'and a featureset file.',
)
parser.add_argument(
'--stable-release',
choices=RELEASES,
required=True,
help='Release that the change being tested is from.\n'
'All other releases are calculated from this\n'
'basis.')
'All other releases are calculated from this\n'
'basis.',
)
parser.add_argument(
'--distro-name',
choices=['centos'],
required=True,
help='Distribution name')
'--distro-name', choices=['centos'], required=True, help='Distribution name'
)
parser.add_argument(
'--distro-version',
choices=['7', '8'],
required=True,
help='Distribution version')
help='Distribution version',
)
parser.add_argument(
'--featureset-file',
required=True,
help='Featureset file which will be introspected to\n'
'infer what type of upgrade is being performed\n'
'(if any).')
'infer what type of upgrade is being performed\n'
'(if any).',
)
parser.add_argument(
'--output-file', default=default_output_file,
'--output-file',
default=default_output_file,
help='Output file containing dictionary of releases\n'
'for the provided featureset and release.\n'
'(default: %(default)s)')
'for the provided featureset and release.\n'
'(default: %(default)s)',
)
parser.add_argument(
'--log-file', default=default_log_file,
'--log-file',
default=default_log_file,
help='log file to print debug information from\n'
'running the script.\n'
'(default: %(default)s)')
parser.add_argument('--upgrade-from', action='store_false',
help='Upgrade FROM the change under test instead\n'
'of the default of upgrading TO the change\n'
'under test.')
'running the script.\n'
'(default: %(default)s)',
)
parser.add_argument(
'--upgrade-from',
action='store_false',
help='Upgrade FROM the change under test instead\n'
'of the default of upgrading TO the change\n'
'under test.',
)
parser.add_argument('--is-periodic', action='store_true',
help='Specify if the current running job is periodic')
parser.add_argument(
'--is-periodic',
action='store_true',
help='Specify if the current running job is periodic',
)
args = parser.parse_args()
@@ -428,16 +466,18 @@ if __name__ == '__main__':
featureset = load_featureset_file(args.featureset_file)
releases_dictionary = compose_releases_dictionary(args.stable_release,
featureset,
args.upgrade_from,
args.is_periodic,
args.distro_name,
args.distro_version)
releases_dictionary = compose_releases_dictionary(
args.stable_release,
featureset,
args.upgrade_from,
args.is_periodic,
args.distro_name,
args.distro_version,
)
releases_dictionary = shim_convert_old_release_names(releases_dictionary,
args.is_periodic)
releases_dictionary = shim_convert_old_release_names(
releases_dictionary, args.is_periodic
)
if not write_releases_dictionary_to_bash(
releases_dictionary, args.output_file):
if not write_releases_dictionary_to_bash(releases_dictionary, args.output_file):
exit(1)


@@ -22,21 +22,26 @@ def test_get_dlrn_hash_ok(mock_get, mock_logging):
'7e8e0fc03b54164921f49fdb4103202c\nbaseurl=https:/'
'/trunk.rdoproject.org/centos7/81/c2/81c23c047e8e0'
'fc03b54164921f49fdb4103202c_b333f915\nenabled=1\n'
'gpgcheck=0\npriority=1')
'gpgcheck=0\npriority=1'
)
mock_get.return_value = mock_response
release = 'master'
hash_name = 'current-tripleo'
dlrn_hash = '81c23c047e8e0fc03b54164921f49fdb4103202c_b333f915'
repo_url = ('https://trunk.rdoproject.org/centos7-%s/%s/delorean.repo' %
(release, hash_name))
assert get_dlrn_hash(release, hash_name, distro_name='centos',
distro_version='7') == dlrn_hash
repo_url = 'https://trunk.rdoproject.org/centos7-%s/%s/delorean.repo' % (
release,
hash_name,
)
assert (
get_dlrn_hash(release, hash_name, distro_name='centos', distro_version='7')
== dlrn_hash
)
mock_get.assert_called_once_with(repo_url, timeout=8)
mock_log_info.assert_called_once_with("Got DLRN hash: {} for the named "
"hash: {} on the {} "
"release".format(dlrn_hash,
hash_name,
release))
mock_log_info.assert_called_once_with(
"Got DLRN hash: {} for the named "
"hash: {} on the {} "
"release".format(dlrn_hash, hash_name, release)
)
mock_log_warning.assert_not_called()
mock_log_exception.assert_not_called()
@@ -47,16 +52,20 @@ def test_get_dlrn_hash_ok(mock_get, mock_logging):
release = 'master'
hash_name = 'current-tripleo'
dlrn_hash = '7e8e0fc03b54164921f49fdb4103202c'
repo_url = ('https://trunk.rdoproject.org/centos8-%s/%s/delorean.repo.md5' %
(release, hash_name))
assert get_dlrn_hash(release, hash_name, distro_name='centos',
distro_version='8') == dlrn_hash
repo_url = 'https://trunk.rdoproject.org/centos8-%s/%s/delorean.repo.md5' % (
release,
hash_name,
)
assert (
get_dlrn_hash(release, hash_name, distro_name='centos', distro_version='8')
== dlrn_hash
)
mock_get.assert_called_once_with(repo_url, timeout=8)
mock_log_info.assert_called_once_with("Got DLRN hash: {} for the named "
"hash: {} on the {} "
"release".format(dlrn_hash,
hash_name,
release))
mock_log_info.assert_called_once_with(
"Got DLRN hash: {} for the named "
"hash: {} on the {} "
"release".format(dlrn_hash, hash_name, release)
)
mock_log_warning.assert_not_called()
mock_log_exception.assert_not_called()
@@ -74,16 +83,19 @@ def test_null_response_raises_runtimeerror(mock_get, mock_logging):
mock_logger.info = mock_log_info
release = 'master'
hash_name = 'current-tripleo'
repo_url = ('https://trunk.rdoproject.org/centos7-%s/%s/delorean.repo' %
(release, hash_name))
repo_url = 'https://trunk.rdoproject.org/centos7-%s/%s/delorean.repo' % (
release,
hash_name,
)
mock_get.return_value = None
with pytest.raises(RuntimeError):
get_dlrn_hash(release, hash_name)
mock_get.assert_called_with(repo_url, timeout=8)
assert mock_get.call_count == 20
mock_log_info.assert_not_called()
mock_log_warning.assert_called_with("Attempt 20 of 20 to get DLRN hash "
"failed to get a response.")
mock_log_warning.assert_called_with(
"Attempt 20 of 20 to get DLRN hash " "failed to get a response."
)
assert mock_log_warning.call_count == 20
mock_log_exception.assert_not_called()
@@ -106,25 +118,29 @@ def test_get_dlrn_hash_500_then_200(mock_get, mock_logging):
'7e8e0fc03b54164921f49fdb4103202c\nbaseurl=https:/'
'/trunk.rdoproject.org/centos7/81/c2/81c23c047e8e0'
'fc03b54164921f49fdb4103202c_b333f915\nenabled=1\n'
'gpgcheck=0\npriority=1')
'gpgcheck=0\npriority=1'
)
mock_response_bad = mock.Mock()
mock_response_bad.ok = False
mock_response_bad.status_code = 500
release = 'master'
hash_name = 'current-tripleo'
dlrn_hash = '81c23c047e8e0fc03b54164921f49fdb4103202c_b333f915'
repo_url = ('https://trunk.rdoproject.org/centos7-%s/%s/delorean.repo' %
(release, hash_name))
repo_url = 'https://trunk.rdoproject.org/centos7-%s/%s/delorean.repo' % (
release,
hash_name,
)
mock_get.side_effect = [mock_response_bad, mock_response_ok]
assert get_dlrn_hash(release, hash_name, retries=20) == dlrn_hash
mock_get.assert_called_with(repo_url, timeout=8)
mock_log_info.assert_called_once_with("Got DLRN hash: {} for the named "
"hash: {} on the {} "
"release".format(dlrn_hash,
hash_name,
release))
mock_log_warning.assert_called_once_with("Attempt 1 of 20 to get DLRN "
"hash returned status code 500.")
mock_log_info.assert_called_once_with(
"Got DLRN hash: {} for the named "
"hash: {} on the {} "
"release".format(dlrn_hash, hash_name, release)
)
mock_log_warning.assert_called_once_with(
"Attempt 1 of 20 to get DLRN " "hash returned status code 500."
)
mock_log_exception.assert_not_called()
@@ -141,16 +157,19 @@ def test_get_dlrn_hash_timeout(mock_get, mock_logging):
mock_logger.info = mock_log_info
release = 'master'
hash_name = 'current-tripleo'
repo_url = ('https://trunk.rdoproject.org/centos7-%s/%s/delorean.repo' %
(release, hash_name))
repo_url = 'https://trunk.rdoproject.org/centos7-%s/%s/delorean.repo' % (
release,
hash_name,
)
mock_get_exception = Exception("We need more power!")
mock_get.side_effect = mock_get_exception
with pytest.raises(RuntimeError):
get_dlrn_hash(release, hash_name, retries=20)
mock_get.assert_called_with(repo_url, timeout=8)
mock_log_info.assert_not_called()
mock_log_warning.assert_called_with("Attempt 20 of 20 to get DLRN hash "
"threw an exception.")
mock_log_warning.assert_called_with(
"Attempt 20 of 20 to get DLRN hash " "threw an exception."
)
assert mock_log_warning.call_count == 20
mock_log_exception.assert_called_with(mock_get_exception)
assert mock_log_exception.call_count == 20
@@ -172,14 +191,17 @@ def test_get_dlrn_hash_500_10_times(mock_get, mock_logging):
mock_response.status_code = 500
release = 'master'
hash_name = 'current-tripleo'
repo_url = ('https://trunk.rdoproject.org/centos7-%s/%s/delorean.repo' %
(release, hash_name))
repo_url = 'https://trunk.rdoproject.org/centos7-%s/%s/delorean.repo' % (
release,
hash_name,
)
mock_get.return_value = mock_response
with pytest.raises(RuntimeError):
get_dlrn_hash(release, hash_name, retries=20)
mock_get.assert_called_with(repo_url, timeout=8)
mock_log_info.assert_not_called()
mock_log_warning.assert_called_with("Attempt 20 of 20 to get DLRN hash "
"returned status code 500.")
mock_log_warning.assert_called_with(
"Attempt 20 of 20 to get DLRN hash " "returned status code 500."
)
assert mock_log_warning.call_count == 20
mock_log_exception.assert_not_called()


@@ -23,12 +23,16 @@ def test_converting_from_oc_upgrade_has_double_release():
'overcloud_target_hash': 'current-tripleo',
}
assert (shim_convert_old_release_names(releases_name, is_periodic=False) ==
expected_releases_file)
assert (
shim_convert_old_release_names(releases_name, is_periodic=False)
== expected_releases_file
)
# Also periodic jobs use the same release files
assert (shim_convert_old_release_names(releases_name, is_periodic=True) ==
expected_releases_file)
assert (
shim_convert_old_release_names(releases_name, is_periodic=True)
== expected_releases_file
)
def test_converting_from_uc_upgrade_has_single_release():
@@ -52,8 +56,10 @@ def test_converting_from_uc_upgrade_has_single_release():
'overcloud_target_release': 'master',
'overcloud_target_hash': 'current-tripleo',
}
assert (shim_convert_old_release_names(releases_name, is_periodic=False) ==
expected_releases_file)
assert (
shim_convert_old_release_names(releases_name, is_periodic=False)
== expected_releases_file
)
def test_converting_from_periodic_uc_upgrade_has_single_release_with_sufix():
@@ -85,8 +91,10 @@ def test_converting_from_periodic_uc_upgrade_has_single_release_with_sufix():
'standalone_target_release': 'promotion-testing-hash-master',
'standalone_target_hash': 'current-tripleo',
}
assert (shim_convert_old_release_names(releases_name, is_periodic=True) ==
expected_releases_file)
assert (
shim_convert_old_release_names(releases_name, is_periodic=True)
== expected_releases_file
)
def test_converting_from_noop_has_single_release():
@@ -110,8 +118,10 @@ def test_converting_from_noop_has_single_release():
'overcloud_target_release': 'master',
'overcloud_target_hash': 'current-tripleo',
}
assert (shim_convert_old_release_names(releases_name, is_periodic=False) ==
expected_releases_file)
assert (
shim_convert_old_release_names(releases_name, is_periodic=False)
== expected_releases_file
)
def test_converting_from_periodic_noop_has_single_release_with_sufix():
@@ -143,5 +153,7 @@ def test_converting_from_periodic_noop_has_single_release_with_sufix():
'standalone_target_release': 'promotion-testing-hash-master',
'standalone_target_hash': 'current-tripleo',
}
assert (shim_convert_old_release_names(releases_name, is_periodic=True) ==
expected_releases_file)
assert (
shim_convert_old_release_names(releases_name, is_periodic=True)
== expected_releases_file
)

File diff suppressed because it is too large.


@@ -2,12 +2,10 @@ from emit_releases_file import compose_releases_dictionary
import pytest
@pytest.mark.parametrize('featureset', [{
'mixed_upgrade': True,
'overcloud_upgrade': True
}, {
'undercloud_upgrade': True
}])
@pytest.mark.parametrize(
'featureset',
[{'mixed_upgrade': True, 'overcloud_upgrade': True}, {'undercloud_upgrade': True}],
)
def test_upgrade_to_newton_is_unsupported(featureset):
stable_release = 'newton'
upgrade_from = False
@@ -38,8 +36,7 @@ def test_undercloud_upgrades_from_newton_to_ocata_are_unsupported():
compose_releases_dictionary(stable_release, featureset, upgrade_from)
@pytest.mark.parametrize('upgrade_type',
['ffu_overcloud_upgrade', 'overcloud_upgrade'])
@pytest.mark.parametrize('upgrade_type', ['ffu_overcloud_upgrade', 'overcloud_upgrade'])
def test_overcloud_upgrades_has_to_be_mixed(upgrade_type):
featureset = {
upgrade_type: True,
@@ -50,9 +47,9 @@ def test_overcloud_upgrades_has_to_be_mixed(upgrade_type):
compose_releases_dictionary(stable_release, featureset, upgrade_from)
@pytest.mark.parametrize('stable_release',
['ocata', 'pike', 'newton',
'rocky', 'stein', 'master'])
@pytest.mark.parametrize(
'stable_release', ['ocata', 'pike', 'newton', 'rocky', 'stein', 'master']
)
def test_ffu_overcloud_upgrade_only_supported_from_newton(stable_release):
featureset = {
'mixed_upgrade': True,
@@ -63,8 +60,9 @@ def test_ffu_overcloud_upgrade_only_supported_from_newton(stable_release):
compose_releases_dictionary(stable_release, featureset, upgrade_from)
@pytest.mark.parametrize('stable_release',
['newton', 'ocata', 'pike', 'queens', 'rocky'])
@pytest.mark.parametrize(
'stable_release', ['newton', 'ocata', 'pike', 'queens', 'rocky']
)
def test_standalone_upgrade_only_supported_from_stein(stable_release):
featureset = {
'standalone_upgrade': True,


@@ -14,7 +14,7 @@ else:
def test_empty_releases_dictionary_fails():
assert (not write_releases_dictionary_to_bash({}, ""))
assert not write_releases_dictionary_to_bash({}, "")
@pytest.fixture
@@ -37,44 +37,44 @@ def releases_dictionary():
}
@pytest.mark.parametrize('deleted_key', [
'undercloud_install_release',
'undercloud_install_hash',
'undercloud_target_release',
'undercloud_target_hash',
'overcloud_deploy_release',
'overcloud_deploy_hash',
'overcloud_target_release',
'overcloud_target_hash',
'standalone_deploy_release',
'standalone_deploy_newest_hash',
'standalone_deploy_hash',
'standalone_target_release',
'standalone_target_newest_hash',
'standalone_target_hash',
])
@pytest.mark.parametrize(
'deleted_key',
[
'undercloud_install_release',
'undercloud_install_hash',
'undercloud_target_release',
'undercloud_target_hash',
'overcloud_deploy_release',
'overcloud_deploy_hash',
'overcloud_target_release',
'overcloud_target_hash',
'standalone_deploy_release',
'standalone_deploy_newest_hash',
'standalone_deploy_hash',
'standalone_target_release',
'standalone_target_newest_hash',
'standalone_target_hash',
],
)
def test_missing_key_fails(releases_dictionary, deleted_key):
wrong_releases_dictionary = releases_dictionary.pop(deleted_key)
assert (not write_releases_dictionary_to_bash(wrong_releases_dictionary,
""))
assert not write_releases_dictionary_to_bash(wrong_releases_dictionary, "")
@mock.patch(BUILTINS_OPEN, new_callable=mock_open)
def test_open_exception_fails(mock, releases_dictionary):
bash_script = '/foo/bar.sh'
mock.side_effect = IOError
assert (not write_releases_dictionary_to_bash(releases_dictionary,
bash_script))
assert not write_releases_dictionary_to_bash(releases_dictionary, bash_script)
@mock.patch(BUILTINS_OPEN, new_callable=mock_open)
def test_output_is_sourceable(mock, releases_dictionary):
bash_script = '/foo/bar.sh'
assert (write_releases_dictionary_to_bash(releases_dictionary,
bash_script))
assert write_releases_dictionary_to_bash(releases_dictionary, bash_script)
mock.assert_called_once_with(bash_script, 'w')
handle = mock()
args, _ = handle.write.call_args
written_content = args[0]
# TODO(Llorente): check environment variables
assert (0 == os.system(written_content))
assert 0 == os.system(written_content)


@@ -30,20 +30,18 @@ def process_events(all_events, events):
# Older clients return timestamps in the first format, newer ones
# append a Z. This way we can handle both formats.
try:
strptime = time.strptime(event['event_time'],
'%Y-%m-%dT%H:%M:%S')
strptime = time.strptime(event['event_time'], '%Y-%m-%dT%H:%M:%S')
except ValueError:
strptime = time.strptime(event['event_time'],
'%Y-%m-%dT%H:%M:%SZ')
strptime = time.strptime(event['event_time'], '%Y-%m-%dT%H:%M:%SZ')
etime = time.mktime(strptime)
if name in events:
if status == 'CREATE_IN_PROGRESS':
times[name] = {'start': etime, 'elapsed': None}
elif status == 'CREATE_COMPLETE' or status == 'CREATE_FAILED':
times[name]['elapsed'] = etime - times[name]['start']
for name, data in sorted(times.items(),
key=lambda x: x[1]['elapsed'],
reverse=True):
for name, data in sorted(
times.items(), key=lambda x: x[1]['elapsed'], reverse=True
):
elapsed = 'Still in progress'
if times[name]['elapsed'] is not None:
elapsed = times[name]['elapsed']


@@ -1,5 +1,6 @@
#!/usr/bin/env python
from __future__ import print_function
# Example usage:
# python move_bugs.py --no-dry-run --priority-less-than High tripleo stein-3 train-1
# python move_bugs.py --no-dry-run tripleo stein-3 stein-rc1
@@ -11,6 +12,7 @@ import argparse
import lazr.restfulclient.errors
import os
import sys
# import sqlite3
from launchpadlib import launchpad
@@ -43,11 +45,13 @@ def no_creds():
def login():
lp = launchpad.Launchpad.login_with(application_name='tripleo-bugs',
service_root='production',
launchpadlib_dir=LP_CACHE_DIR,
credential_save_failed=no_creds,
version='devel')
lp = launchpad.Launchpad.login_with(
application_name='tripleo-bugs',
service_root='production',
launchpadlib_dir=LP_CACHE_DIR,
credential_save_failed=no_creds,
version='devel',
)
return lp
@@ -60,16 +64,19 @@ def validate_milestone(project, milestone):
def validate_importance(importance):
if importance not in LP_IMPORTANCE:
parser.error('Provided importance {} is not one of: {}'.format(
importance, ', '.join(LP_IMPORTANCE)))
parser.error(
'Provided importance {} is not one of: {}'.format(
importance, ', '.join(LP_IMPORTANCE)
)
)
return importance
def get_importance_from_input(args):
if args.priority_less_than:
return LP_IMPORTANCE[LP_IMPORTANCE.index(args.priority_less_than) + 1:]
return LP_IMPORTANCE[LP_IMPORTANCE.index(args.priority_less_than) + 1 :]
if args.priority_greater_than:
return LP_IMPORTANCE[:LP_IMPORTANCE.index(args.priority_greater_than)]
return LP_IMPORTANCE[: LP_IMPORTANCE.index(args.priority_greater_than)]
return args.priority
@@ -83,12 +90,11 @@ def main(args):
# bug_status = ['New', 'Incomplete', 'Confirmed', 'Triaged']
bug_status = LP_OPEN_STATUS
from_importance = get_importance_from_input(args)
print("Moving bugs from {} to {}".format(from_milestone.name,
to_milestone.name))
print("Moving bugs from {} to {}".format(from_milestone.name, to_milestone.name))
print("Limiting to importance: {}".format(from_importance))
bugs = project.searchTasks(status=bug_status,
milestone=from_milestone,
importance=from_importance)
bugs = project.searchTasks(
status=bug_status, milestone=from_milestone, importance=from_importance
)
failed = set()
success = set()
@@ -96,8 +102,12 @@ def main(args):
bug = b.bug
# print("{}\t{}\t{}".format(b.bug.id, b.importance, b.status))
print("Moving {} from {} to {} ...".format(bug.id, from_milestone.name,
to_milestone.name), end='')
print(
"Moving {} from {} to {} ...".format(
bug.id, from_milestone.name, to_milestone.name
),
end='',
)
b.milestone = to_milestone
try:
if args.no_dry_run:
@@ -135,32 +145,34 @@ def main(args):
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Change Move bugs")
parser.add_argument('projectname',
default='tripleo',
help='The project to act on')
parser.add_argument('projectname', default='tripleo', help='The project to act on')
limiting = parser.add_mutually_exclusive_group()
limiting.add_argument('--priority-less-than',
type=validate_importance,
dest='priority_less_than',
help='All bugs with with importance less than '
'the provided value')
limiting.add_argument('--priority-greater-than',
type=validate_importance,
dest='priority_greater_than',
help='All bugs with with importance greater than '
'the provided value')
limiting.add_argument('--priority',
type=validate_importance,
dest='priority',
help='All bugs with with the provided importance')
parser.add_argument('from_milestone',
help='Milestone to move from (queens-1)')
parser.add_argument('to_milestone',
help='Milestone to move to (queens-2)')
parser.add_argument('--no-dry-run',
dest='no_dry_run',
help='Execute the move for real.',
action='store_true')
limiting.add_argument(
'--priority-less-than',
type=validate_importance,
dest='priority_less_than',
help='All bugs with with importance less than ' 'the provided value',
)
limiting.add_argument(
'--priority-greater-than',
type=validate_importance,
dest='priority_greater_than',
help='All bugs with with importance greater than ' 'the provided value',
)
limiting.add_argument(
'--priority',
type=validate_importance,
dest='priority',
help='All bugs with with the provided importance',
)
parser.add_argument('from_milestone', help='Milestone to move from (queens-1)')
parser.add_argument('to_milestone', help='Milestone to move to (queens-2)')
parser.add_argument(
'--no-dry-run',
dest='no_dry_run',
help='Execute the move for real.',
action='store_true',
)
args = parser.parse_args()
main(args)


@@ -51,12 +51,19 @@ def get_gerrit_reviews(project, status="open", branch="master", limit="30"):
status_query = ''
if status:
status_query = 'status: %s' % status
cmd = 'ssh review.opendev.org -p29418 gerrit' \
' query "%s project: %s branch: %s" --comments' \
' --format JSON limit: %s --patch-sets --current-patch-set'\
% (status_query, project, branch, limit)
p = subprocess.Popen([cmd], shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
cmd = (
'ssh review.opendev.org -p29418 gerrit'
' query "%s project: %s branch: %s" --comments'
' --format JSON limit: %s --patch-sets --current-patch-set'
% (status_query, project, branch, limit)
)
p = subprocess.Popen(
[cmd],
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout = p.stdout
for line in stdout.readlines():
review = json.loads(line)
@@ -74,7 +81,7 @@ def get_jenkins_comment_message(review):
continue
# NOTE(bnemec): For some reason the experimental-tripleo
# message does not include "pipeline".
if ('tripleo-ci' not in comment['message']):
if 'tripleo-ci' not in comment['message']:
continue
jenkins_messages[comment['timestamp']] = comment['message']
return jenkins_messages
@@ -93,9 +100,11 @@ def process_jenkins_comment_message(message, job_names):
duration = " ".join(split[6].split()[:2])
else:
duration = ''
job_results[voting_job_name] = {'log_url': split[2],
'status': split[4],
'duration': duration}
job_results[voting_job_name] = {
'log_url': split[2],
'status': split[4],
'duration': duration,
}
return job_results
@@ -104,14 +113,17 @@ def gen_html(data, html_file, table_file, stats_hours, job_names, options):
fp.write('<table border="1" cellspacing="0">')
fp.write("<tr class='headers'><td>&nbsp;</td>")
for job_name in job_names:
fp.write("<td class='headers'><b>%s</b></td>" %
job_name.replace("tripleo-ci-centos-7-", ""))
fp.write(
"<td class='headers'><b>%s</b></td>"
% job_name.replace("tripleo-ci-centos-7-", "")
)
fp.write("</tr>")
count = 0
reversed_sorted_keys = [(x['id'], x['patchset']) for x in
reversed(sorted(data.values(),
key=lambda y: y['ts']))]
reversed_sorted_keys = [
(x['id'], x['patchset'])
for x in reversed(sorted(data.values(), key=lambda y: y['ts']))
]
passed_jobs = 0
partial_jobs = 0
failed_jobs = 0
@@ -148,15 +160,18 @@ def gen_html(data, html_file, table_file, stats_hours, job_names, options):
result_types.add(ci_result['status'])
job_columns += '<font color="%s">' % color
gerrit_href = 'https://review.opendev.org/#/c/%s/%s"' % (
result['url'].split('/')[-1], result['patchset']
result['url'].split('/')[-1],
result['patchset'],
)
job_columns += '<a STYLE="color : %s" href="%s">%s,%s</a>' % (
color,
gerrit_href,
result['url'].split('/')[-1],
result['patchset'],
)
job_columns += '<a STYLE="color : %s" href="%s">%s,%s</a>' % \
(color, gerrit_href, result['url'].split('/')[-1],
result['patchset'])
job_columns += '<br/>%s ' % (ci_result['duration'])
job_columns += '<a STYLE="text-decoration:none" '
job_columns += 'href="%s">log</a>' %\
ci_result['log_url']
job_columns += 'href="%s">log</a>' % ci_result['log_url']
job_columns += '</font><br/>'
job_columns += "</td>"
else:
@@ -179,21 +194,18 @@ def gen_html(data, html_file, table_file, stats_hours, job_names, options):
total = passed_jobs + partial_jobs + failed_jobs
fp.write("<p>Overall</p>")
fp.write("Passed: %d/%d (%d %%)<br/>" % (
passed_jobs,
total,
float(passed_jobs) / float(total) * 100
))
fp.write("Partial Failures: %d/%d (%d %%)<br/>" % (
partial_jobs,
total,
float(partial_jobs) / float(total) * 100
))
fp.write("Complete Failures: %d/%d (%d %%)<br/>" % (
failed_jobs,
total,
float(failed_jobs) / float(total) * 100
))
fp.write(
"Passed: %d/%d (%d %%)<br/>"
% (passed_jobs, total, float(passed_jobs) / float(total) * 100)
)
fp.write(
"Partial Failures: %d/%d (%d %%)<br/>"
% (partial_jobs, total, float(partial_jobs) / float(total) * 100)
)
fp.write(
"Complete Failures: %d/%d (%d %%)<br/>"
% (failed_jobs, total, float(failed_jobs) / float(total) * 100)
)
fp.close()
@@ -205,13 +217,19 @@ def gen_html(data, html_file, table_file, stats_hours, job_names, options):
def main(args=sys.argv[1:]):
parser = argparse.ArgumentParser(
description=("Get details of tripleo ci jobs and generates a html "
"report."))
description=("Get details of tripleo ci jobs and generates a html " "report.")
)
parser.add_argument('-o', default="tripleo-jobs.html", help="html file")
parser.add_argument('-p', default=",".join(DEFAULT_PROJECTS),
help='comma separated list of projects to use.')
parser.add_argument('-j', default=",".join(DEFAULT_JOB_NAMES),
help='comma separated list of jobs to monitor.')
parser.add_argument(
'-p',
default=",".join(DEFAULT_PROJECTS),
help='comma separated list of projects to use.',
)
parser.add_argument(
'-j',
default=",".join(DEFAULT_JOB_NAMES),
help='comma separated list of jobs to monitor.',
)
parser.add_argument('-s', default="", help="status")
parser.add_argument('-b', default="master", help="branch")
parser.add_argument('-l', default="30", help="limit")
@@ -222,29 +240,30 @@ def main(args=sys.argv[1:]):
# project reviews
proj_reviews = []
for proj in opts.p.split(","):
proj_reviews.extend(get_gerrit_reviews(proj,
status=opts.s,
branch=opts.b,
limit=opts.l))
proj_reviews.extend(
get_gerrit_reviews(proj, status=opts.s, branch=opts.b, limit=opts.l)
)
results = {}
for review in proj_reviews:
for ts, message in get_jenkins_comment_message(review).iteritems():
ci_results = process_jenkins_comment_message(message,
job_names)
ci_results = process_jenkins_comment_message(message, job_names)
patchset = str(re.search('Patch Set (.+?):', message).group(1))
key = (review['id'], patchset)
results.setdefault(key, {}).update({
'id': review['id'],
'ts': ts,
'status': review['status'],
'timestamp': datetime.datetime.fromtimestamp(
int(ts)).strftime('%Y-%m-%d %H:%M:%S'),
'url': review['url'],
'patchset': patchset,
'project': re.sub(r'.*/', '', review['project']),
'branch': review['branch'],
})
results.setdefault(key, {}).update(
{
'id': review['id'],
'ts': ts,
'status': review['status'],
'timestamp': datetime.datetime.fromtimestamp(int(ts)).strftime(
'%Y-%m-%d %H:%M:%S'
),
'url': review['url'],
'patchset': patchset,
'project': re.sub(r'.*/', '', review['project']),
'branch': review['branch'],
}
)
results[key].setdefault('ci_results', {}).update(ci_results)
gen_html(results, opts.o, "%s-table" % opts.o, 24, job_names, opts)

View File

@@ -17,6 +17,4 @@
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
setuptools.setup(
setup_requires=['pbr'],
pbr=True)
setuptools.setup(setup_requires=['pbr'], pbr=True)


@@ -32,8 +32,7 @@ import uuid
import gear
logging.basicConfig(
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger('testenv-client')
logger.setLevel(logging.INFO)
@@ -65,8 +64,10 @@ class TestCallback(object):
if time_waiting > 90:
logger.warn('%.1f seconds waiting for a worker.' % (time_waiting))
if "Couldn't retrieve env" in job.arguments or \
"Failed creating OVB stack" in job.arguments:
if (
"Couldn't retrieve env" in job.arguments
or "Failed creating OVB stack" in job.arguments
):
logger.error(job.arguments)
self.rv = 2
job.sendWorkComplete("")
@@ -126,7 +127,9 @@ def add_servers(client, servers):
def main(args=sys.argv[1:]):
parser = argparse.ArgumentParser(
description=(textwrap.dedent("""
description=(
textwrap.dedent(
"""
Starts up a gearman worker and then calls the job "lockenv" over
gearman, then waits for the worker to be called, once the worker
is called it will place the provided data in a datafile (indicated
@@ -136,49 +139,74 @@ def main(args=sys.argv[1:]):
worker is holding a test environment in a locked state e.g. to
simply output the data provided one could run the command:
$ echo 'cat $TE_DATAFILE' | %s -- bash
""" % sys.argv[0])),
formatter_class=argparse.RawTextHelpFormatter
"""
% sys.argv[0]
)
),
formatter_class=argparse.RawTextHelpFormatter,
)
parser.add_argument('command', nargs="+",
help='A command to run once the test env is locked')
parser.add_argument('--geard', '-b', default='127.0.0.1:4730',
help='A comma separated list of gearman brokers to '
'connect to.')
parser.add_argument('--jobnum', '-n', default=uuid.uuid4().hex,
help='A unique identifier identifing this job.')
parser.add_argument('--timeout', '-t', default='10800',
help='Set a timeout, after which the command will '
'be killed.')
parser.add_argument('--envsize', default="2",
help='Number of baremetal nodes to request')
parser.add_argument('--compute-envsize', default='0',
help='Number of compute baremetal nodes to request. '
'When this is set to a value > 0, the primary '
'nodes will be tagged with the controller '
'profile and the extra nodes with compute. The '
'compute nodes will be a smaller flavor in order '
'to use less resources.')
parser.add_argument('--ucinstance',
help='uuid for the undercloud instance (where an '
'interface on the provisioning net is attached')
parser.add_argument('--create-undercloud', action='store_true',
help='deploy the undercloud node.')
parser.add_argument('--ssh-key', default='',
help='ssh key for the ovb nodes to be deployed.')
parser.add_argument('--net-iso',
default="multi-nic",
choices=['none', 'multi-nic', 'public-bond'],
help='"none" requests an environment without network '
'isolation, "multi-nic" requests one with a '
'basic multiple nic configuration, and '
'"public-bond" requests one like "multi-nic" '
'but with two public nics for use with bonded '
'nic-configs.')
parser.add_argument('--extra-nodes', default='0',
help='Number of extra undercloud-like nodes to '
'request')
parser.add_argument('--debug', '-d', action='store_true',
help='Set to debug mode.')
parser.add_argument(
'command', nargs="+", help='A command to run once the test env is locked'
)
parser.add_argument(
'--geard',
'-b',
default='127.0.0.1:4730',
help='A comma separated list of gearman brokers to ' 'connect to.',
)
parser.add_argument(
'--jobnum',
'-n',
default=uuid.uuid4().hex,
help='A unique identifier identifing this job.',
)
parser.add_argument(
'--timeout',
'-t',
default='10800',
help='Set a timeout, after which the command will ' 'be killed.',
)
parser.add_argument(
'--envsize', default="2", help='Number of baremetal nodes to request'
)
parser.add_argument(
'--compute-envsize',
default='0',
help='Number of compute baremetal nodes to request. '
'When this is set to a value > 0, the primary '
'nodes will be tagged with the controller '
'profile and the extra nodes with compute. The '
'compute nodes will be a smaller flavor in order '
'to use less resources.',
)
parser.add_argument(
'--ucinstance',
help='uuid for the undercloud instance (where an '
'interface on the provisioning net is attached',
)
parser.add_argument(
'--create-undercloud', action='store_true', help='deploy the undercloud node.'
)
parser.add_argument(
'--ssh-key', default='', help='ssh key for the ovb nodes to be deployed.'
)
parser.add_argument(
'--net-iso',
default="multi-nic",
choices=['none', 'multi-nic', 'public-bond'],
help='"none" requests an environment without network '
'isolation, "multi-nic" requests one with a '
'basic multiple nic configuration, and '
'"public-bond" requests one like "multi-nic" '
'but with two public nics for use with bonded '
'nic-configs.',
)
parser.add_argument(
'--extra-nodes',
default='0',
help='Number of extra undercloud-like nodes to ' 'request',
)
parser.add_argument('--debug', '-d', action='store_true', help='Set to debug mode.')
opts = parser.parse_args(args)
if opts.debug:
logger.setLevel(logging.DEBUG)
@@ -191,8 +219,10 @@ def main(args=sys.argv[1:]):
add_servers(client, opts.geard)
client.waitForServer()
job_identifier = '%s: %s' % (os.environ.get('ZUUL_CHANGE', 'No change'),
os.environ['TOCI_JOBTYPE'])
job_identifier = '%s: %s' % (
os.environ.get('ZUUL_CHANGE', 'No change'),
os.environ['TOCI_JOBTYPE'],
)
job_params = {
"callback_name": callback_name,
"timeout": opts.timeout,
@@ -222,8 +252,10 @@ def main(args=sys.argv[1:]):
# completes we kill this process and all its children, to immediately
# stop the running job
if cb.rv is None:
logger.error("The command hasn't completed but the testenv worker has "
"released the environment. Killing all processes.")
logger.error(
"The command hasn't completed but the testenv worker has "
"released the environment. Killing all processes."
)
subprocess.call(["sudo", "kill", "-9", "-%d" % os.getpgrp()])
logger.debug("Exiting with status : %d", cb.rv)
return cb.rv


@@ -20,6 +20,7 @@ docker_skip = False
docker_reason = ''
try:
import docker
client = docker.from_env(timeout=5)
if not client.ping():
raise Exception("Failed to ping docker server.")
@@ -36,8 +37,9 @@ def pytest_generate_tests(metafunc):
role_path = os.path.abspath('./roles/%s' % role)
for _, dirnames, _ in os.walk(role_path + '/molecule'):
for scenario in dirnames:
if os.path.isfile('%s/molecule/%s/molecule.yml' %
(role_path, scenario)):
if os.path.isfile(
'%s/molecule/%s/molecule.yml' % (role_path, scenario)
):
matches.append([role_path, scenario])
metafunc.parametrize('testdata', matches)
@@ -46,6 +48,6 @@ def pytest_generate_tests(metafunc):
def test_molecule(testdata):
cwd, scenario = testdata
cmd = ['python', '-m', 'molecule', 'test', '-s', scenario]
print("running: %s (from %s)" % (" " .join(cmd), cwd))
print("running: %s (from %s)" % (" ".join(cmd), cwd))
r = subprocess.call(cmd, cwd=cwd)
assert r == 0