Bump ansible-core to 2.18/2.19

Bumped collections in requirements-core.yml to Ansible 12 major versions,
see [1].

[1]: https://github.com/ansible-community/ansible-build-data/blob/main/12/ansible-12.1.0.yaml

Depends-On: https://review.opendev.org/c/openstack/kolla/+/960307

Co-authored-by: Doug Szumski <doug@stackhpc.com>
Change-Id: If8a29052d8a43ffc0fef11514adabb5e4ab9d977
Signed-off-by: Michal Nasiadka <mnasiadka@gmail.com>
This commit is contained in:
Michal Nasiadka
2025-09-10 07:44:04 +02:00
parent 011034a141
commit 3380770aee
50 changed files with 208 additions and 119 deletions

View File

@@ -20,10 +20,21 @@ import tempfile
from ansible import constants
from ansible.plugins import action
# TODO(dougszu): From Ansible 12 onwards we must explicitly trust templates.
# Since this feature is not supported in previous releases, we define a
# noop method here for backwards compatibility. This can be removed in the
# G cycle.
try:
from ansible.template import trust_as_template
except ImportError:
def trust_as_template(template):
return template
from io import StringIO
from oslo_config import iniparser
_ORPHAN_SECTION = 'TEMPORARY_ORPHAN_VARIABLE_SECTION'
DOCUMENTATION = '''
@@ -150,7 +161,7 @@ class ActionModule(action.ActionBase):
# Only use config if present
if os.access(source, os.R_OK):
with open(source, 'r') as f:
template_data = f.read()
template_data = trust_as_template(f.read())
# set search path to mimic 'template' module behavior
searchpath = [

View File

@@ -23,6 +23,16 @@ from ansible import constants
from ansible import errors as ansible_errors
from ansible.plugins import action
# TODO(dougszu): From Ansible 12 onwards we must explicitly trust templates.
# Since this feature is not supported in previous releases, we define a
# noop method here for backwards compatibility. This can be removed in the
# G cycle.
try:
from ansible.template import trust_as_template
except ImportError:
def trust_as_template(template):
return template
DOCUMENTATION = '''
---
module: merge_yaml
@@ -91,7 +101,7 @@ class ActionModule(action.ActionBase):
# Only use config if present
if source and os.access(source, os.R_OK):
with open(source, 'r') as f:
template_data = f.read()
template_data = trust_as_template(f.read())
# set search path to mimic 'template' module behavior
searchpath = [

View File

@@ -93,7 +93,7 @@ run_default_volumes_docker: []
# Dimension options for Docker Containers
# NOTE(mnasiadka): Lower 1073741816 nofile limit on EL9 (RHEL9/CentOS Stream 9/Rocky Linux 9)
# fixes at least rabbitmq and mariadb
default_container_dimensions: "{{ default_container_dimensions_el9 if ansible_facts.os_family == 'RedHat' else '{}' }}"
default_container_dimensions: "{{ default_container_dimensions_el9 if ansible_facts.os_family == 'RedHat' else {} }}"
default_container_dimensions_el9: "{{ default_docker_dimensions_el9 if kolla_container_engine == 'docker' else default_podman_dimensions_el9 }}"
default_docker_dimensions_el9:
ulimits:

View File

@@ -410,7 +410,7 @@ class ContainerWorker(ABC):
vol_dict = dict()
for vol in volumes:
if len(vol) == 0:
if not vol:
continue
if ':' not in vol:

View File

@@ -469,8 +469,11 @@ class DockerWorker(ContainerWorker):
labels={'kolla_managed': 'true'})
def create_container_volumes(self):
volumes = self.params.get("volumes", [])
volumes = self.params.get('volumes')
if not volumes:
return
# Filter out null / empty string volumes
volumes = [v for v in volumes if v]
for volume in volumes:
volume_name = volume.split(":")[0]
if "/" in volume_name:

View File

@@ -85,7 +85,7 @@ class PodmanWorker(ContainerWorker):
# functionality is broken
mounts = []
filtered_volumes = {}
volumes = self.params.get('volumes', [])
volumes = self.params.get('volumes')
if volumes:
self.parse_volumes(volumes, mounts, filtered_volumes)
# we can delete original volumes so it won't raise error later
@@ -149,10 +149,10 @@ class PodmanWorker(ContainerWorker):
# Therefore, we must parse them and set the permissions ourselves
# and send them to API separately.
def parse_volumes(self, volumes, mounts, filtered_volumes):
# we can ignore empty strings
volumes = [item for item in volumes if item.strip()]
for item in volumes:
if not item or not item.strip():
# we can ignore empty strings or null volumes
continue
# if it starts with / it is bind not volume
if item[0] == '/':
mode = None
@@ -642,7 +642,11 @@ class PodmanWorker(ContainerWorker):
self.result = vol.attrs
def create_container_volumes(self):
volumes = self.params.get("volumes", []) or []
volumes = self.params.get('volumes')
if not volumes:
return
# Filter out null / empty string volumes
volumes = [v for v in volumes if v]
for volume in volumes:
volume_name = volume.split(":")[0]

View File

@@ -26,7 +26,7 @@
aodh_policy_file: "{{ aodh_policy.results.0.stat.path | basename }}"
aodh_policy_file_path: "{{ aodh_policy.results.0.stat.path }}"
when:
- aodh_policy.results
- aodh_policy.results | length > 0
- name: Copying over existing policy file
template:

View File

@@ -40,7 +40,7 @@
barbican_policy_file: "{{ barbican_policy.results.0.stat.path | basename }}"
barbican_policy_file_path: "{{ barbican_policy.results.0.stat.path }}"
when:
- barbican_policy.results
- barbican_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -26,7 +26,7 @@
blazar_policy_file: "{{ blazar_policy.results.0.stat.path | basename }}"
blazar_policy_file_path: "{{ blazar_policy.results.0.stat.path }}"
when:
- blazar_policy.results
- blazar_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -155,7 +155,7 @@
ceilometer_policy_file: "{{ ceilometer_policy.results.0.stat.path | basename }}"
ceilometer_policy_file_path: "{{ ceilometer_policy.results.0.stat.path }}"
when:
- ceilometer_policy.results
- ceilometer_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -38,7 +38,7 @@
cinder_policy_file: "{{ cinder_policy.results.0.stat.path | basename }}"
cinder_policy_file_path: "{{ cinder_policy.results.0.stat.path }}"
when:
- cinder_policy.results
- cinder_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -26,7 +26,7 @@
cloudkitty_policy_file: "{{ cloudkitty_policy.results.0.stat.path | basename }}"
cloudkitty_policy_file_path: "{{ cloudkitty_policy.results.0.stat.path }}"
when:
- cloudkitty_policy.results
- cloudkitty_policy.results | length > 0
- name: Check if custom {{ cloudkitty_custom_metrics_yaml_file }} exists
stat:

View File

@@ -26,7 +26,7 @@
cyborg_policy_file: "{{ cyborg_policy.results.0.stat.path | basename }}"
cyborg_policy_file_path: "{{ cyborg_policy.results.0.stat.path }}"
when:
- cyborg_policy.results
- cyborg_policy.results | length > 0
- name: Copying over existing policy file
template:

View File

@@ -26,7 +26,7 @@
designate_policy_file: "{{ designate_policy.results.0.stat.path | basename }}"
designate_policy_file_path: "{{ designate_policy.results.0.stat.path }}"
when:
- designate_policy.results
- designate_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -30,7 +30,7 @@
glance_policy_file: "{{ glance_policy.results.0.stat.path | basename }}"
glance_policy_file_path: "{{ glance_policy.results.0.stat.path }}"
when:
- glance_policy.results
- glance_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -30,7 +30,7 @@
gnocchi_policy_file: "{{ gnocchi_policy.results.0.stat.path | basename }}"
gnocchi_policy_file_path: "{{ gnocchi_policy.results.0.stat.path }}"
when:
- gnocchi_policy.results
- gnocchi_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -26,7 +26,7 @@
heat_policy_file: "{{ heat_policy.results.0.stat.path | basename }}"
heat_policy_file_path: "{{ heat_policy.results.0.stat.path }}"
when:
- heat_policy.results
- heat_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -22,4 +22,4 @@
set_fact:
custom_policy: "{{ custom_policy + [overwritten_files.results.0.stat.path] }}"
when:
- overwritten_files.results
- overwritten_files.results | length > 0

View File

@@ -26,7 +26,7 @@
ironic_policy_file: "{{ ironic_policy.results.0.stat.path | basename }}"
ironic_policy_file_path: "{{ ironic_policy.results.0.stat.path }}"
when:
- ironic_policy.results
- ironic_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -26,7 +26,7 @@
keystone_policy_file: "{{ keystone_policy.results.0.stat.path | basename }}"
keystone_policy_file_path: "{{ keystone_policy.results.0.stat.path }}"
when:
- keystone_policy.results
- keystone_policy.results | length > 0
- name: Check if Keystone domain-specific config is supplied
stat:

View File

@@ -26,7 +26,7 @@
kuryr_policy_file: "{{ kuryr_policy.results.0.stat.path | basename }}"
kuryr_policy_file_path: "{{ kuryr_policy.results.0.stat.path }}"
when:
- kuryr_policy.results
- kuryr_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -26,7 +26,7 @@
magnum_policy_file: "{{ magnum_policy.results.0.stat.path | basename }}"
magnum_policy_file_path: "{{ magnum_policy.results.0.stat.path }}"
when:
- magnum_policy.results
- magnum_policy.results | length > 0
- name: Check if kubeconfig file is supplied
stat:

View File

@@ -31,7 +31,7 @@
manila_policy_file: "{{ manila_policy.results.0.stat.path | basename }}"
manila_policy_file_path: "{{ manila_policy.results.0.stat.path }}"
when:
- manila_policy.results
- manila_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -26,7 +26,7 @@
masakari_policy_file: "{{ masakari_policy.results.0.stat.path | basename }}"
masakari_policy_file_path: "{{ masakari_policy.results.0.stat.path }}"
when:
- masakari_policy.results
- masakari_policy.results | length > 0
- name: Copying over existing policy file
template:

View File

@@ -26,7 +26,7 @@
mistral_policy_file: "{{ mistral_policy.results.0.stat.path | basename }}"
mistral_policy_file_path: "{{ mistral_policy.results.0.stat.path }}"
when:
- mistral_policy.results
- mistral_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -529,9 +529,9 @@ neutron_dhcp_agent_default_volumes:
- "kolla_logs:/var/log/kolla/"
- "{{ '/dev/shm:/dev/shm' }}"
- "{{ kolla_dev_repos_directory ~ '/neutron:/dev-mode/neutron' if neutron_dev_mode | bool else '' }}"
- "{{ '/var/run/docker.sock:/var/run/docker.sock:ro' if neutron_agents_wrappers | bool and kolla_container_engine == 'docker' }}"
- "{{ '/run/podman/podman.sock:/run/podman/podman.sock' if neutron_agents_wrappers | bool and kolla_container_engine == 'podman' }}"
- "{{ '/var/lib/containers:/var/lib/containers' if neutron_agents_wrappers | bool and kolla_container_engine == 'podman' }}"
- "{{ '/var/run/docker.sock:/var/run/docker.sock:ro' if neutron_agents_wrappers | bool and kolla_container_engine == 'docker' else '' }}"
- "{{ '/run/podman/podman.sock:/run/podman/podman.sock' if neutron_agents_wrappers | bool and kolla_container_engine == 'podman' else '' }}"
- "{{ '/var/lib/containers:/var/lib/containers' if neutron_agents_wrappers | bool and kolla_container_engine == 'podman' else '' }}"
neutron_l3_agent_default_volumes:
- "{{ node_config_directory }}/neutron-l3-agent/:{{ container_config_directory }}/:ro"
- "/etc/localtime:/etc/localtime:ro"
@@ -542,9 +542,9 @@ neutron_l3_agent_default_volumes:
- "kolla_logs:/var/log/kolla/"
- "{{ '/dev/shm:/dev/shm' }}"
- "{{ kolla_dev_repos_directory ~ '/neutron:/dev-mode/neutron' if neutron_dev_mode | bool else '' }}"
- "{{ '/var/run/docker.sock:/var/run/docker.sock:ro' if neutron_agents_wrappers | bool and kolla_container_engine == 'docker' }}"
- "{{ '/run/podman/podman.sock:/run/podman/podman.sock' if neutron_agents_wrappers | bool and kolla_container_engine == 'podman' }}"
- "{{ '/var/lib/containers:/var/lib/containers' if neutron_agents_wrappers | bool and kolla_container_engine == 'podman' }}"
- "{{ '/var/run/docker.sock:/var/run/docker.sock:ro' if neutron_agents_wrappers | bool and kolla_container_engine == 'docker' else '' }}"
- "{{ '/run/podman/podman.sock:/run/podman/podman.sock' if neutron_agents_wrappers | bool and kolla_container_engine == 'podman' else '' }}"
- "{{ '/var/lib/containers:/var/lib/containers' if neutron_agents_wrappers | bool and kolla_container_engine == 'podman' else '' }}"
neutron_sriov_agent_default_volumes:
- "{{ node_config_directory }}/neutron-sriov-agent/:{{ container_config_directory }}/:ro"
- "/etc/localtime:/etc/localtime:ro"

View File

@@ -38,7 +38,7 @@
neutron_policy_file: "{{ neutron_policy.results.0.stat.path | basename }}"
neutron_policy_file_path: "{{ neutron_policy.results.0.stat.path }}"
when:
- neutron_policy.results
- neutron_policy.results | length > 0
- name: Copying over existing policy file
template:

View File

@@ -35,7 +35,7 @@
nova_policy_file: "{{ nova_policy.results.0.stat.path | basename }}"
nova_policy_file_path: "{{ nova_policy.results.0.stat.path }}"
when:
- nova_policy.results
- nova_policy.results | length > 0
- name: Check for vendordata file
stat:

View File

@@ -26,7 +26,7 @@
- '"already exists" not in nova_cell_create.stdout'
when:
- inventory_hostname == groups[nova_conductor.group][0] | default(None)
- nova_cell_settings | length == 0
- not nova_cell_settings | bool
- name: Update cell
vars:
@@ -51,5 +51,5 @@
- nova_cell_updated.rc != 0
when:
- inventory_hostname == groups[nova_conductor.group][0] | default(None)
- nova_cell_settings | length > 0
- nova_cell_settings | bool
- nova_cell_settings.cell_message_queue != nova_cell_rpc_transport_url or nova_cell_settings.cell_database != nova_cell_database_url

View File

@@ -26,7 +26,7 @@
nova_policy_file: "{{ nova_policy.results.0.stat.path | basename }}"
nova_policy_file_path: "{{ nova_policy.results.0.stat.path }}"
when:
- nova_policy.results
- nova_policy.results | length > 0
- name: Check for vendordata file
stat:

View File

@@ -59,7 +59,7 @@
failed_when:
- nova_cell0_updated.rc != 0
when:
- nova_cell_settings | length > 0
- nova_cell_settings | bool
- nova_cell_settings.cell_database != nova_cell0_connection
run_once: True
delegate_to: "{{ groups[nova_api.group][0] }}"

View File

@@ -29,7 +29,7 @@
octavia_policy_file: "{{ octavia_policy.results.0.stat.path | basename }}"
octavia_policy_file_path: "{{ octavia_policy.results.0.stat.path }}"
when:
- octavia_policy.results
- octavia_policy.results | length > 0
- name: Copying over existing policy file
template:

View File

@@ -96,4 +96,4 @@
delay: 6
when:
- enable_ovn_sb_db_relay | bool
loop: "{{ range(1, (ovn_sb_db_relay_count | int) +1) }}"
loop: "{{ range(1, (ovn_sb_db_relay_count | int) +1) | list }}"

View File

@@ -20,7 +20,7 @@
changed_when: false
register: ovn_nb_db_cluster_status
when: groups['ovn-nb-db_leader'] is defined and inventory_hostname in groups.get('ovn-nb-db_had_volume_False', '')
delegate_to: "{{ groups['ovn-nb-db_leader'][0] }}"
delegate_to: "{{ groups['ovn-nb-db_leader'][0] if groups['ovn-nb-db_leader'] is defined else omit }}"
- name: Check SB cluster status
command: >
@@ -30,7 +30,7 @@
changed_when: false
register: ovn_sb_db_cluster_status
when: groups['ovn-sb-db_leader'] is defined and inventory_hostname in groups.get('ovn-sb-db_had_volume_False', '')
delegate_to: "{{ groups['ovn-sb-db_leader'][0] }}"
delegate_to: "{{ groups['ovn-sb-db_leader'][0] if groups['ovn-sb-db_leader'] is defined else omit }}"
- name: Remove an old node with the same ip address as the new node in NB DB
vars:
@@ -42,7 +42,7 @@
when:
- ovn_nb_db_cluster_status.stdout is defined
- (ovn_nb_db_cluster_status.stdout is search('at tcp:' + api_interface_address)) and inventory_hostname in groups.get('ovn-nb-db_had_volume_False', '')
delegate_to: "{{ groups['ovn-nb-db_leader'][0] }}"
delegate_to: "{{ groups['ovn-nb-db_leader'][0] if groups['ovn-nb-db_leader'] is defined else omit }}"
- name: Remove an old node with the same ip address as the new node in SB DB
vars:
@@ -54,7 +54,7 @@
when:
- ovn_sb_db_cluster_status.stdout is defined
- (ovn_sb_db_cluster_status.stdout is search('at tcp:' + api_interface_address)) and inventory_hostname in groups.get('ovn-sb-db_had_volume_False', '')
delegate_to: "{{ groups['ovn-sb-db_leader'][0] }}"
delegate_to: "{{ groups['ovn-sb-db_leader'][0] if groups['ovn-sb-db_leader'] is defined else omit }}"
- name: Set bootstrap args fact for NB (new member)
set_fact:

View File

@@ -26,7 +26,7 @@
placement_policy_file: "{{ placement_policy.results.0.stat.path | basename }}"
placement_policy_file_path: "{{ placement_policy.results.0.stat.path }}"
when:
- placement_policy.results
- placement_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -1,8 +1,8 @@
---
docker_version_min: '18.09'
docker_py_version_min: '3.4.1'
ansible_version_min: '2.17'
ansible_version_max: '2.18'
ansible_version_min: '2.18'
ansible_version_max: '2.19'
# Top level keys should match ansible_facts.distribution.
# These map to lists of supported releases (ansible_facts.distribution_release) or

View File

@@ -26,7 +26,7 @@
tacker_policy_file: "{{ tacker_policy.results.0.stat.path | basename }}"
tacker_policy_file_path: "{{ tacker_policy.results.0.stat.path }}"
when:
- tacker_policy.results
- tacker_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -26,7 +26,7 @@
trove_policy_file: "{{ trove_policy.results.0.stat.path | basename }}"
trove_policy_file_path: "{{ trove_policy.results.0.stat.path }}"
when:
- trove_policy.results
- trove_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -26,7 +26,7 @@
venus_policy_file: "{{ venus_policy.results.0.stat.path | basename }}"
venus_policy_file_path: "{{ venus_policy.results.0.stat.path }}"
when:
- venus_policy.results
- venus_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -26,7 +26,7 @@
watcher_policy_file: "{{ watcher_policy.results.0.stat.path | basename }}"
watcher_policy_file_path: "{{ watcher_policy.results.0.stat.path }}"
when:
- watcher_policy.results
- watcher_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -31,7 +31,7 @@
zun_policy_file: "{{ zun_policy.results.0.stat.path | basename }}"
zun_policy_file_path: "{{ zun_policy.results.0.stat.path }}"
when:
- zun_policy.results
- zun_policy.results | length > 0
- include_tasks: copy-certs.yml
when:

View File

@@ -142,10 +142,10 @@ else:
TESTED_RUNTIMES_GOVERNANCE_URL =\
'https://governance.openstack.org/tc/reference/runtimes/{}.html'.format(KOLLA_OPENSTACK_RELEASE)
ANSIBLE_CORE_VERSION_MIN = '2.17'
ANSIBLE_CORE_VERSION_MAX = '2.18'
ANSIBLE_VERSION_MIN = '10'
ANSIBLE_VERSION_MAX = '11'
ANSIBLE_CORE_VERSION_MIN = '2.18'
ANSIBLE_CORE_VERSION_MAX = '2.19'
ANSIBLE_VERSION_MIN = '11'
ANSIBLE_VERSION_MAX = '12'
GLOBAL_VARIABLE_MAP = {
'|ANSIBLE_CORE_VERSION_MIN|': ANSIBLE_CORE_VERSION_MIN,

View File

@@ -0,0 +1,5 @@
---
upgrade:
- |
Minimum supported Ansible version is now ``11`` (ansible-core 2.18)
and maximum supported is ``12`` (ansible-core 2.19).

View File

@@ -2,19 +2,19 @@
collections:
- name: ansible.netcommon
source: https://galaxy.ansible.com
version: <8
version: <9
- name: ansible.posix
source: https://galaxy.ansible.com
version: <2
version: <3
- name: ansible.utils
source: https://galaxy.ansible.com
version: <6
version: <7
- name: community.crypto
source: https://galaxy.ansible.com
version: <3
version: <4
- name: community.general
source: https://galaxy.ansible.com
version: <11
version: <12
- name: community.docker
source: https://galaxy.ansible.com
version: <5

View File

@@ -11,7 +11,7 @@ hvac>=0.10.1 # Apache-2.0
Jinja2>=3 # BSD License (3 clause)
# Ansible and ansible's json_query
ansible-core>=2.17,<2.19 # GPLv3
ansible-core>=2.18,!=2.19.0,<2.20; python_version >= '3.11' # GPLv3
jmespath>=0.9.3 # MIT
# ini parsing

View File

@@ -1,3 +1,31 @@
# Password hashing
bcrypt>=3.0.0 # Apache-2.0
# password generation
cryptography>=2.1 # BSD/Apache-2.0
# Hashicorp Vault
hvac>=0.10.1 # Apache-2.0
# templating
Jinja2>=3 # BSD License (3 clause)
# Ansible and ansible's json_query
ansible-core>=2.18,!=2.19.0,<2.20; python_version >= '3.11' # GPLv3
jmespath>=0.9.3 # MIT
# ini parsing
oslo.config>=5.2.0 # Apache-2.0
# password generation
oslo.utils>=3.33.0 # Apache-2.0
# Password hashing
passlib[bcrypt]>=1.0.0 # BSD
# CLI
cliff>=4.7.0 # Apache-2.0
# coverage testing
coverage!=4.4,>=4.0 # Apache-2.0

View File

@@ -13,6 +13,7 @@
# limitations under the License.
import builtins
import contextlib
import json
import os
import sys
@@ -20,6 +21,18 @@ import sys
from ansible.module_utils import basic
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.text.converters import to_bytes
try:
from ansible.module_utils.testing import patch_module_args
except ImportError:
# TODO(dougszu): Remove this exception handler when Python 3.10 support
# is not required. Python 3.10 isn't supported by Ansible Core 2.18 which
# provides patch_module_args
@contextlib.contextmanager
def patch_module_args(args):
serialized_args = to_bytes(json.dumps({'ANSIBLE_MODULE_ARGS': args}))
with mock.patch.object(basic, '_ANSIBLE_ARGS', serialized_args):
yield
from importlib.machinery import SourceFileLoader
from oslotest import base
from unittest import mock
@@ -33,13 +46,6 @@ kolla_toolbox = SourceFileLoader('kolla_toolbox',
kolla_toolbox_file).load_module()
def set_module_args(args):
"""Prepare arguments so they will be picked up during module creation."""
args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
basic._ANSIBLE_ARGS = to_bytes(args)
class AnsibleExitJson(BaseException):
"""Exception to be raised by module.exit_json and caught by a test case."""
@@ -307,40 +313,40 @@ class TestModuleInteraction(TestKollaToolboxModule):
"""Class focused on testing user input data from playbook."""
def test_create_ansible_module_missing_required_module_name(self):
set_module_args({
ansible_module_args = {
'container_engine': 'docker'
})
error = self.assertRaises(AnsibleFailJson,
kolla_toolbox.create_ansible_module)
}
with patch_module_args(ansible_module_args):
error = self.assertRaises(AnsibleFailJson,
kolla_toolbox.create_ansible_module)
self.assertIn('missing required arguments: module_name',
error.result['msg'])
def test_create_ansible_module_missing_required_container_engine(self):
set_module_args({
ansible_module_args = {
'module_name': 'url'
})
error = self.assertRaises(AnsibleFailJson,
kolla_toolbox.create_ansible_module)
}
with patch_module_args(ansible_module_args):
error = self.assertRaises(AnsibleFailJson,
kolla_toolbox.create_ansible_module)
self.assertIn('missing required arguments: container_engine',
error.result['msg'])
def test_create_ansible_module_invalid_container_engine(self):
set_module_args({
ansible_module_args = {
'module_name': 'url',
'container_engine': 'podmano'
})
error = self.assertRaises(AnsibleFailJson,
kolla_toolbox.create_ansible_module)
}
with patch_module_args(ansible_module_args):
error = self.assertRaises(AnsibleFailJson,
kolla_toolbox.create_ansible_module)
self.assertIn(
'value of container_engine must be one of: podman, docker',
error.result['msg']
)
def test_create_ansible_module_success(self):
args = {
ansible_module_args = {
'container_engine': 'docker',
'module_name': 'file',
'module_args': {
@@ -357,12 +363,10 @@ class TestModuleInteraction(TestKollaToolboxModule):
'timeout': 180,
'api_version': '1.5'
}
set_module_args(args)
module = kolla_toolbox.create_ansible_module()
with patch_module_args(ansible_module_args):
module = kolla_toolbox.create_ansible_module()
self.assertIsInstance(module, AnsibleModule)
self.assertEqual(args, module.params)
self.assertEqual(ansible_module_args, module.params)
class TestContainerEngineClientIntraction(TestKollaToolboxModule):
@@ -381,14 +385,14 @@ class TestContainerEngineClientIntraction(TestKollaToolboxModule):
return self.original_import(name, globals, locals, fromlist, level)
def test_podman_client_params(self):
set_module_args({
ansible_module_args = {
'module_name': 'ping',
'container_engine': 'podman',
'api_version': '1.47',
'timeout': 155
})
module = kolla_toolbox.create_ansible_module()
}
with patch_module_args(ansible_module_args):
module = kolla_toolbox.create_ansible_module()
mock_podman = mock.MagicMock()
mock_podman_errors = mock.MagicMock()
import_dict = {'podman': mock_podman,
@@ -403,14 +407,14 @@ class TestContainerEngineClientIntraction(TestKollaToolboxModule):
)
def test_docker_client_params(self):
set_module_args({
ansible_module_args = {
'module_name': 'ping',
'container_engine': 'docker',
'api_version': '1.47',
'timeout': 155
})
module = kolla_toolbox.create_ansible_module()
}
with patch_module_args(ansible_module_args):
module = kolla_toolbox.create_ansible_module()
mock_docker = mock.MagicMock()
mock_docker_errors = mock.MagicMock()
import_dict = {'docker': mock_docker,
@@ -425,14 +429,14 @@ class TestContainerEngineClientIntraction(TestKollaToolboxModule):
)
def test_create_container_client_podman_not_called_with_auto(self):
set_module_args({
ansible_module_args = {
'module_name': 'ping',
'container_engine': 'podman',
'api_version': 'auto',
'timeout': 90
})
module = kolla_toolbox.create_ansible_module()
}
with patch_module_args(ansible_module_args):
module = kolla_toolbox.create_ansible_module()
mock_podman = mock.MagicMock()
mock_podman_errors = mock.MagicMock()
import_dict = {'podman': mock_podman,
@@ -446,12 +450,13 @@ class TestContainerEngineClientIntraction(TestKollaToolboxModule):
)
def test_create_container_client_podman_importerror(self):
set_module_args({
ansible_module_args = {
'module_name': 'ping',
'container_engine': 'podman'
})
}
self.module_to_mock_import = 'podman'
module = kolla_toolbox.create_ansible_module()
with patch_module_args(ansible_module_args):
module = kolla_toolbox.create_ansible_module()
with mock.patch('builtins.__import__',
side_effect=self.mock_import_error):
@@ -462,13 +467,13 @@ class TestContainerEngineClientIntraction(TestKollaToolboxModule):
error.result['msg'])
def test_create_container_client_docker_importerror(self):
set_module_args({
ansible_module_args = {
'module_name': 'ping',
'container_engine': 'docker'
})
}
self.module_to_mock_import = 'docker'
module = kolla_toolbox.create_ansible_module()
with patch_module_args(ansible_module_args):
module = kolla_toolbox.create_ansible_module()
with mock.patch('builtins.__import__',
side_effect=self.mock_import_error):

View File

@@ -46,11 +46,11 @@
# Test latest ansible-core version on Ubuntu, minimum supported on others.
# Use SLURP version (two releases back) on SLURP upgrades.
ansible_core_version_constraint: >-
{{ ansible_core_version_slurp if is_slurp else ansible_core_version_min if
(is_upgrade or ansible_facts.distribution != "Ubuntu") else ansible_core_version_max }}
ansible_core_version_slurp: "==2.16.*"
ansible_core_version_max: "==2.18.*"
ansible_core_version_min: "==2.17.*"
{{ ansible_core_version_slurp if is_slurp else ansible_core_version_min if is_upgrade
or ansible_facts.distribution != "Ubuntu" else ansible_core_version_max }}
ansible_core_version_slurp: "==2.17.*"
ansible_core_version_max: "==2.19.*"
ansible_core_version_min: "==2.18.*"
# NOTE(mgoddard): Test the use of interface names with dashes.
api_interface_name: "vxlan-0"
api_network_prefix: "192.0.2."

View File

@@ -31,7 +31,7 @@
- kolla-ansible-scenario-telemetry
- kolla-ansible-scenario-venus
- openstack-cover-jobs
- openstack-python3-jobs
- openstack-python3-jobs-kolla-ansible
- periodic-stable-jobs
- publish-openstack-docs-pti
- release-notes-jobs-python3

23
zuul.d/python3-jobs.yaml Normal file
View File

@@ -0,0 +1,23 @@
- project-template:
name: openstack-python3-jobs-kolla-ansible
# NOTE(mnasiadka): Local definition to skip py310 jobs on Flamingo
description: |
Runs unit tests for an OpenStack Python project under the CPython
version 3 releases designated for testing the latest release.
check:
jobs:
- openstack-tox-pep8
# py3.12 testing is added as mandatory from 2025.1 release onwards.
# From 2026.1, we run it as periodic only (assuming py3.10 and py3.13
# will be enough coverage to run on every change)
- openstack-tox-py312
gate:
jobs:
- openstack-tox-pep8
# py3.12 testing is added as mandatory from 2025.1 release onwards.
# From 2026.1, we run it as periodic only (assuming py3.10 and py3.13
# will be enough coverage to run on every change)
- openstack-tox-py312
post:
jobs:
- publish-openstack-python-branch-tarball