Add 2023.2 Bobcat support

* sync charm-helpers to classic charms
* change openstack-origin/source default to bobcat
* add mantic to metadata series
* align testing with bobcat
* add new bobcat bundles
* add bobcat bundles to tests.yaml
* add bobcat tests to osci.yaml and .zuul.yaml
* update build-on and run-on bases

Func-Test-Pr: https://github.com/openstack-charmers/zaza-openstack-tests/pull/1062
Change-Id: Ie2fdfae9a2304fd6cb55edd4c1b93f1fdbe4014d

This commit is contained in:
parent 0628e02983
commit b6b29ce894
@@ -1,4 +1,5 @@
 - project:
     templates:
       - openstack-python3-charm-yoga-jobs
+      - openstack-python3-charm-jobs
       - openstack-cover-jobs
@@ -34,5 +34,5 @@ bases:
       channel: "22.04"
       architectures: [amd64, s390x, ppc64el, arm64]
     - name: ubuntu
-      channel: "22.10"
+      channel: "23.10"
       architectures: [amd64, s390x, ppc64el, arm64]
@@ -221,6 +221,13 @@ def https():
         return True
     if config_get('ssl_cert') and config_get('ssl_key'):
         return True
+    # Local import to avoid circular dependency.
+    import charmhelpers.contrib.openstack.cert_utils as cert_utils
+    if (
+        cert_utils.get_certificate_request() and not
+        cert_utils.get_requests_for_local_unit("certificates")
+    ):
+        return False
     for r_id in relation_ids('certificates'):
         for unit in relation_list(r_id):
             ca = relation_get('ca', rid=r_id, unit=unit)
@@ -127,7 +127,9 @@ def deferred_events():
     """
     events = []
     for defer_file in deferred_events_files():
-        events.append((defer_file, read_event_file(defer_file)))
+        event = read_event_file(defer_file)
+        if event.policy_requestor_name == hookenv.service_name():
+            events.append((defer_file, event))
     return events

charmhelpers/contrib/openstack/files/check_deferred_restarts.py (new executable file, 128 lines)
@@ -0,0 +1,128 @@
+#!/usr/bin/python3
+
+# Copyright 2014-2022 Canonical Limited.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Checks for services with deferred restarts.
+
+This Nagios check will parse /var/lib/policy-rc.d/
+to find any restarts that are currently deferred.
+"""
+
+import argparse
+import glob
+import sys
+import yaml
+
+
+DEFERRED_EVENTS_DIR = '/var/lib/policy-rc.d'
+
+
+def get_deferred_events():
+    """Return a list of deferred events dicts from policy-rc.d files.
+
+    Events are read from DEFERRED_EVENTS_DIR and are of the form:
+    {
+        action: restart,
+        policy_requestor_name: rabbitmq-server,
+        policy_requestor_type: charm,
+        reason: 'Pkg update',
+        service: rabbitmq-server,
+        time: 1614328743
+    }
+
+    :raises OSError: Raised in case of a system error while reading a policy file
+    :raises yaml.YAMLError: Raised if parsing a policy file fails
+
+    :returns: List of deferred event dictionaries
+    :rtype: list
+    """
+    deferred_events_files = glob.glob(
+        '{}/*.deferred'.format(DEFERRED_EVENTS_DIR))
+
+    deferred_events = []
+    for event_file in deferred_events_files:
+        with open(event_file, 'r') as f:
+            event = yaml.safe_load(f)
+            deferred_events.append(event)
+
+    return deferred_events
+
+
+def get_deferred_restart_services(application=None):
+    """Returns a list of services with deferred restarts.
+
+    :param str application: Name of the application that blocked the service restart.
+                            If application is None, all services with deferred restarts
+                            are returned. Services which are blocked by a non-charm
+                            requestor are always returned.
+
+    :raises OSError: Raised in case of a system error while reading a policy file
+    :raises yaml.YAMLError: Raised if parsing a policy file fails
+
+    :returns: List of services with deferred restarts belonging to application.
+    :rtype: list
+    """
+
+    deferred_restart_events = filter(
+        lambda e: e['action'] == 'restart', get_deferred_events())
+
+    deferred_restart_services = set()
+    for restart_event in deferred_restart_events:
+        if application:
+            if (
+                restart_event['policy_requestor_type'] != 'charm' or
+                restart_event['policy_requestor_type'] == 'charm' and
+                restart_event['policy_requestor_name'] == application
+            ):
+                deferred_restart_services.add(restart_event['service'])
+        else:
+            deferred_restart_services.add(restart_event['service'])
+
+    return list(deferred_restart_services)
+
+
+def main():
+    """Check for services with deferred restarts."""
+    parser = argparse.ArgumentParser(
+        description='Check for services with deferred restarts')
+    parser.add_argument(
+        '--application', help='Check services belonging to this application only')
+
+    args = parser.parse_args()
+
+    services = set(get_deferred_restart_services(args.application))
+
+    if len(services) == 0:
+        print('OK: No deferred service restarts.')
+        sys.exit(0)
+    else:
+        print(
+            'CRITICAL: Restarts are deferred for services: {}.'.format(', '.join(services)))
+        sys.exit(1)
+
+
+if __name__ == '__main__':
+    try:
+        main()
+    except OSError as e:
+        print('CRITICAL: A system error occurred: {} ({})'.format(e.errno, e.strerror))
+        sys.exit(1)
+    except yaml.YAMLError as e:
+        print('CRITICAL: Failed to parse a policy file: {}'.format(str(e)))
+        sys.exit(1)
+    except Exception as e:
+        print('CRITICAL: An unknown error occurred: {}'.format(str(e)))
+        sys.exit(1)
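Note: to exercise the new Nagios plugin locally, the following is a minimal sketch (not part of the commit). It loads the script by path from a checkout, writes one hypothetical *.deferred event into a scratch directory, and runs the filter against it; the event values and the checkout-relative path are assumptions.

import importlib.util
import tempfile

import yaml

# Load the plugin straight from the repository checkout (path assumed).
spec = importlib.util.spec_from_file_location(
    'check_deferred_restarts',
    'charmhelpers/contrib/openstack/files/check_deferred_restarts.py')
check = importlib.util.module_from_spec(spec)
spec.loader.exec_module(check)

with tempfile.TemporaryDirectory() as tmp:
    # Hypothetical deferred restart as the hacluster charm would record it.
    event = {
        'action': 'restart',
        'policy_requestor_name': 'hacluster',
        'policy_requestor_type': 'charm',
        'reason': 'Pkg update',
        'service': 'corosync',
        'time': 1614328743,
    }
    with open('{}/corosync.deferred'.format(tmp), 'w') as f:
        yaml.safe_dump(event, f)

    check.DEFERRED_EVENTS_DIR = tmp   # point the check at the scratch dir
    print(check.get_deferred_restart_services('hacluster'))   # ['corosync']

On a deployed unit the script is simply run as ./check_deferred_restarts.py --application hacluster and exits 0 or 1 with the OK/CRITICAL messages shown in main() above.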
@@ -160,6 +160,7 @@ OPENSTACK_CODENAMES = OrderedDict([
     ('2022.1', 'yoga'),
     ('2022.2', 'zed'),
     ('2023.1', 'antelope'),
+    ('2023.2', 'bobcat'),
 ])

 # The ugly duckling - must list releases oldest to newest
@@ -957,7 +958,7 @@ def os_requires_version(ostack_release, pkg):
     def wrap(f):
         @wraps(f)
         def wrapped_f(*args):
-            if os_release(pkg) < ostack_release:
+            if CompareOpenStackReleases(os_release(pkg)) < ostack_release:
                 raise Exception("This hook is not supported on releases"
                                 " before %s" % ostack_release)
             f(*args)
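Note: the hunk above is a drive-by fix picked up with the sync. Comparing release names as plain strings orders them alphabetically, which misorders codenames after the z-wrap; CompareOpenStackReleases compares by position in OPENSTACK_RELEASES instead. A quick sketch of the difference, assuming charm-helpers is importable:

from charmhelpers.contrib.openstack.utils import CompareOpenStackReleases

print('zed' < 'antelope')                            # False: alphabetical order only
print(CompareOpenStackReleases('zed') < 'antelope')  # True: zed (2022.2) predates antelope (2023.1)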
@@ -28,7 +28,6 @@ import os
 import shutil
 import json
 import time
-import uuid

 from subprocess import (
     check_call,
@@ -1677,6 +1676,10 @@ class CephBrokerRq(object):
     The API is versioned and defaults to version 1.
     """

+    # The below hash is the result of running
+    # `hashlib.sha1('[]'.encode()).hexdigest()`
+    EMPTY_LIST_SHA = '97d170e1550eee4afc0af065b78cda302a97674c'
+
     def __init__(self, api_version=1, request_id=None, raw_request_data=None):
         """Initialize CephBrokerRq object.

@@ -1685,8 +1688,12 @@ class CephBrokerRq(object):

         :param api_version: API version for request (default: 1).
         :type api_version: Optional[int]
-        :param request_id: Unique identifier for request.
-                           (default: string representation of generated UUID)
+        :param request_id: Unique identifier for request. The identifier will
+                           be updated as ops are added or removed from the
+                           broker request. This ensures that Ceph will
+                           correctly process requests where operations are
+                           added after the initial request is processed.
+                           (default: sha1 of operations)
         :type request_id: Optional[str]
         :param raw_request_data: JSON-encoded string to build request from.
         :type raw_request_data: Optional[str]
@@ -1695,16 +1702,20 @@ class CephBrokerRq(object):
         if raw_request_data:
             request_data = json.loads(raw_request_data)
             self.api_version = request_data['api-version']
-            self.request_id = request_data['request-id']
             self.set_ops(request_data['ops'])
+            self.request_id = request_data['request-id']
         else:
             self.api_version = api_version
             if request_id:
                 self.request_id = request_id
             else:
-                self.request_id = str(uuid.uuid1())
+                self.request_id = CephBrokerRq.EMPTY_LIST_SHA
             self.ops = []

+    def _hash_ops(self):
+        """Return the sha1 of the requested Broker ops."""
+        return hashlib.sha1(json.dumps(self.ops, sort_keys=True).encode()).hexdigest()
+
     def add_op(self, op):
         """Add an op if it is not already in the list.

@@ -1713,6 +1724,7 @@ class CephBrokerRq(object):
         """
         if op not in self.ops:
             self.ops.append(op)
+            self.request_id = self._hash_ops()

     def add_op_request_access_to_group(self, name, namespace=None,
                                        permission=None, key_name=None,
@@ -1991,6 +2003,7 @@ class CephBrokerRq(object):
         to allow comparisons to ensure validity.
         """
         self.ops = ops
+        self.request_id = self._hash_ops()

     @property
     def request(self):
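Note: the net effect of the CephBrokerRq changes above is that the request id is now a deterministic sha1 of the ops list instead of a random UUID, so amending a request changes its id and Ceph reprocesses it. A small illustrative sketch (not part of the commit; the example op is hypothetical):

import hashlib
import json

ops = []
print(hashlib.sha1(json.dumps(ops, sort_keys=True).encode()).hexdigest())
# 97d170e1550eee4afc0af065b78cda302a97674c, i.e. the EMPTY_LIST_SHA constant above

ops.append({'op': 'create-pool', 'name': 'glance', 'replicas': 3})  # hypothetical op
print(hashlib.sha1(json.dumps(ops, sort_keys=True).encode()).hexdigest())
# a different but reproducible id, mirroring what _hash_ops() now returns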
@@ -32,6 +32,7 @@ UBUNTU_RELEASES = (
     'jammy',
     'kinetic',
     'lunar',
+    'mantic',
 )

@@ -238,6 +238,14 @@ CLOUD_ARCHIVE_POCKETS = {
     'antelope/proposed': 'jammy-proposed/antelope',
     'jammy-antelope/proposed': 'jammy-proposed/antelope',
     'jammy-proposed/antelope': 'jammy-proposed/antelope',
+    # bobcat
+    'bobcat': 'jammy-updates/bobcat',
+    'jammy-bobcat': 'jammy-updates/bobcat',
+    'jammy-bobcat/updates': 'jammy-updates/bobcat',
+    'jammy-updates/bobcat': 'jammy-updates/bobcat',
+    'bobcat/proposed': 'jammy-proposed/bobcat',
+    'jammy-bobcat/proposed': 'jammy-proposed/bobcat',
+    'jammy-proposed/bobcat': 'jammy-proposed/bobcat',
     # OVN
     'focal-ovn-22.03': 'focal-updates/ovn-22.03',
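Note: these aliases are what let a charm config value such as openstack-origin=cloud:jammy-bobcat resolve to the matching Ubuntu Cloud Archive pocket. A simplified sketch of that lookup follows; it is illustrative only, pocket_for and the trimmed map below are not the real charm-helpers add_source code path.

# Trimmed copy of the new bobcat entries plus a toy resolver for
# 'cloud:<pocket>' style values.
CLOUD_ARCHIVE_POCKETS = {
    'bobcat': 'jammy-updates/bobcat',
    'jammy-bobcat': 'jammy-updates/bobcat',
    'bobcat/proposed': 'jammy-proposed/bobcat',
}


def pocket_for(source):
    """Map a value like 'cloud:jammy-bobcat' to a UCA pocket."""
    return CLOUD_ARCHIVE_POCKETS[source.split(':', 1)[1]]


print(pocket_for('cloud:jammy-bobcat'))     # jammy-updates/bobcat
print(pocket_for('cloud:bobcat/proposed'))  # jammy-proposed/bobcat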
@@ -270,6 +278,7 @@ OPENSTACK_RELEASES = (
     'yoga',
     'zed',
     'antelope',
+    'bobcat',
 )

@@ -298,6 +307,7 @@ UBUNTU_OPENSTACK_RELEASE = OrderedDict([
     ('jammy', 'yoga'),
     ('kinetic', 'zed'),
     ('lunar', 'antelope'),
+    ('mantic', 'bobcat'),
 ])

@@ -591,7 +601,7 @@ def _get_key_by_keyid(keyid):
     curl_cmd = ['curl', keyserver_url.format(keyid)]
     # use proxy server settings in order to retrieve the key
     return subprocess.check_output(curl_cmd,
-                                   env=env_proxy_settings(['https']))
+                                   env=env_proxy_settings(['https', 'no_proxy']))


 def _dearmor_gpg_key(key_asc):
@@ -122,13 +122,12 @@ class Cache(object):
         :raises: subprocess.CalledProcessError
         """
         pkgs = {}
-        cmd = ['dpkg-query', '--list']
+        cmd = [
+            'dpkg-query', '--show',
+            '--showformat',
+            r'${db:Status-Abbrev}\t${Package}\t${Version}\t${Architecture}\t${binary:Summary}\n'
+        ]
         cmd.extend(packages)
-        if locale.getlocale() == (None, None):
-            # subprocess calls out to locale.getpreferredencoding(False) to
-            # determine encoding. Workaround for Trusty where the
-            # environment appears to not be set up correctly.
-            locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
         try:
             output = subprocess.check_output(cmd,
                                              stderr=subprocess.STDOUT,
@@ -140,24 +139,17 @@ class Cache(object):
             if cp.returncode != 1:
                 raise
             output = cp.output
-        headings = []
         for line in output.splitlines():
-            if line.startswith('||/'):
-                headings = line.split()
-                headings.pop(0)
+            # only process lines for successfully installed packages
+            if not (line.startswith('ii ') or line.startswith('hi ')):
                 continue
-            elif (line.startswith('|') or line.startswith('+') or
-                  line.startswith('dpkg-query:')):
-                continue
-            else:
-                data = line.split(None, 4)
-                status = data.pop(0)
-                if status not in ('ii', 'hi'):
-                    continue
-                pkg = {}
-                pkg.update({k.lower(): v for k, v in zip(headings, data)})
-                if 'name' in pkg:
-                    pkgs.update({pkg['name']: pkg})
+            status, name, version, arch, desc = line.split('\t', 4)
+            pkgs[name] = {
+                'name': name,
+                'version': version,
+                'architecture': arch,
+                'description': desc,
+            }
         return pkgs

     def _apt_cache_show(self, packages):
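Note: the reworked dpkg_list() now parses dpkg-query's machine-readable, tab-separated output rather than scraping the human-oriented --list table. A tiny sketch of the new line format (the package details are made up for illustration):

# One line as produced by the --showformat above: status abbreviation,
# package, version, architecture and summary, separated by tabs.
sample = 'ii \tpython3-yaml\t5.4.1-1ubuntu1\tamd64\tYAML parser and emitter for Python3'

status, name, version, arch, desc = sample.split('\t', 4)
print(name, version, arch)   # python3-yaml 5.4.1-1ubuntu1 amd64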
@@ -9,7 +9,7 @@ tags:
 series:
   - focal
   - jammy
-  - kinetic
+  - mantic
 requires:
   juju-info:
     interface: juju-info
@@ -3,9 +3,6 @@
       - charm-unit-jobs-py38
       - charm-unit-jobs-py310
      - charm-yoga-functional-jobs
-    check:
-      jobs:
-        - kinetic-zed
    vars:
      needs_charm_build: true
      charm_build_name: hacluster
@@ -43,7 +43,7 @@ applications:
       - '5'
     channel: yoga/edge

-  hacluster:
+  keystone-hacluster:
     charm: ../../hacluster.charm
     subordinate-to:
       - keystone
|
|||||||
relations:
|
relations:
|
||||||
- - 'keystone:shared-db'
|
- - 'keystone:shared-db'
|
||||||
- 'keystone-mysql-router:shared-db'
|
- 'keystone-mysql-router:shared-db'
|
||||||
- - 'hacluster:ha'
|
- - 'keystone-hacluster:ha'
|
||||||
- 'keystone:ha'
|
- 'keystone:ha'
|
||||||
- - "keystone-mysql-router:db-router"
|
- - "keystone-mysql-router:db-router"
|
||||||
- "mysql-innodb-cluster:db-router"
|
- "mysql-innodb-cluster:db-router"
|
||||||
|
tests/bundles/jammy-bobcat.yaml (new file, 57 lines)
@@ -0,0 +1,57 @@
+variables:
+  openstack-origin: &openstack-origin cloud:jammy-bobcat
+
+series: jammy
+
+machines:
+  '0':
+    constraints: mem=3072M
+  '1':
+    constraints: mem=3072M
+  '2':
+    constraints: mem=3072M
+  '3':
+  '4':
+  '5':
+
+applications:
+
+  keystone-mysql-router:
+    charm: ch:mysql-router
+    channel: latest/edge
+
+  mysql-innodb-cluster:
+    charm: ch:mysql-innodb-cluster
+    num_units: 3
+    options:
+      source: *openstack-origin
+    to:
+      - '0'
+      - '1'
+      - '2'
+    channel: latest/edge
+
+  keystone:
+    charm: ch:keystone
+    num_units: 3
+    options:
+      token-expiration: 60
+      openstack-origin: *openstack-origin
+    to:
+      - '3'
+      - '4'
+      - '5'
+    channel: yoga/edge
+
+  keystone-hacluster:
+    charm: ../../hacluster.charm
+    subordinate-to:
+      - keystone
+
+relations:
+  - - 'keystone:shared-db'
+    - 'keystone-mysql-router:shared-db'
+  - - 'keystone-hacluster:ha'
+    - 'keystone:ha'
+  - - "keystone-mysql-router:db-router"
+    - "mysql-innodb-cluster:db-router"
@@ -43,7 +43,7 @@ applications:
       - '5'
     channel: yoga/edge

-  hacluster:
+  keystone-hacluster:
     charm: ../../hacluster.charm
     subordinate-to:
       - keystone
|
|||||||
relations:
|
relations:
|
||||||
- - 'keystone:shared-db'
|
- - 'keystone:shared-db'
|
||||||
- 'keystone-mysql-router:shared-db'
|
- 'keystone-mysql-router:shared-db'
|
||||||
- - 'hacluster:ha'
|
- - 'keystone-hacluster:ha'
|
||||||
- 'keystone:ha'
|
- 'keystone:ha'
|
||||||
- - "keystone-mysql-router:db-router"
|
- - "keystone-mysql-router:db-router"
|
||||||
- "mysql-innodb-cluster:db-router"
|
- "mysql-innodb-cluster:db-router"
|
||||||
|
@@ -1,7 +1,7 @@
 variables:
   openstack-origin: &openstack-origin distro

-series: kinetic
+series: mantic

 machines:
   '0':
@@ -43,7 +43,7 @@ applications:
       - '5'
     channel: zed/edge

-  hacluster:
+  keystone-hacluster:
     charm: ../../hacluster.charm
     subordinate-to:
       - keystone
@@ -51,7 +51,7 @@ applications:
 relations:
   - - 'keystone:shared-db'
     - 'keystone-mysql-router:shared-db'
-  - - 'hacluster:ha'
+  - - 'keystone-hacluster:ha'
     - 'keystone:ha'
   - - "keystone-mysql-router:db-router"
     - "mysql-innodb-cluster:db-router"
@@ -8,7 +8,8 @@ gate_bundles:

 dev_bundles:
   - jammy-yoga
-  - kinetic-zed
+  - jammy-bobcat
+  - mantic-bobcat

 configure:
   - zaza.openstack.charm_tests.keystone.setup.add_demo_user
@@ -23,4 +24,4 @@ tests_options:
     hacluster-charm-name: hacluster
 force_deploy:
   - jammy-yoga
-  - kinetic-zed
+  - mantic-bobcat