Sync charmhelpers

A recent charmhelpers change forwards the broker requests to the
ceph-rbd-mirror charm with information about the RBD mirroring mode.
This is needed for the Cinder Ceph Replication spec.

Change-Id: I1d2b5351574a8741e55a8e6482d0c4a168562050
Co-authored-by: Ionut Balutoiu <ibalutoiu@cloudbasesolutions.com>
Marius Oprin 2020-11-19 13:48:24 +02:00
parent 31662405cc
commit 00e7129d87
5 changed files with 81 additions and 11 deletions
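
For context, a minimal sketch of how a consuming charm can use the synced helper to request a replicated pool with an explicit mirroring mode. The pool name, replica count and mode value below are illustrative, not taken from this commit, and it assumes the add_op_create_replicated_pool helper from the same module, which passes extra keyword arguments through to the common create-pool op builder:

    # Illustrative only: pool name, replica count and mode are assumptions.
    from charmhelpers.contrib.storage.linux.ceph import CephBrokerRq

    rq = CephBrokerRq()
    rq.add_op_create_replicated_pool(name='cinder-ceph',
                                     replica_count=3,
                                     rbd_mirroring_mode='image')
    # The resulting broker request now carries 'rbd-mirroring-mode', which
    # ceph-mon can forward to related ceph-rbd-mirror units.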

charmhelpers/contrib/network/ip.py

@@ -396,7 +396,8 @@ def get_ipv6_addr(iface=None, inc_aliases=False, fatal=True, exc_list=None,
         if global_addrs:
             # Make sure any found global addresses are not temporary
             cmd = ['ip', 'addr', 'show', iface]
-            out = subprocess.check_output(cmd).decode('UTF-8')
+            out = subprocess.check_output(
+                cmd).decode('UTF-8', errors='replace')
             if dynamic_only:
                 key = re.compile("inet6 (.+)/[0-9]+ scope global.* dynamic.*")
             else:
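
The errors='replace' additions throughout this sync make decoding of `ip` command output tolerant of stray non-UTF-8 bytes instead of raising. A standalone illustration, not part of the diff:

    # Standalone illustration of the decode behaviour used above.
    raw = b'inet6 fe80::1/64 \xff scope link'
    # raw.decode('UTF-8') would raise UnicodeDecodeError on the stray byte;
    # errors='replace' substitutes U+FFFD and keeps going.
    text = raw.decode('UTF-8', errors='replace')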

charmhelpers/contrib/openstack/utils.py

@@ -18,6 +18,7 @@ from functools import wraps
 import subprocess
 import json
+import operator
 import os
 import sys
 import re
@@ -33,7 +34,7 @@ from charmhelpers import deprecate
 from charmhelpers.contrib.network import ip
-from charmhelpers.core import unitdata
+from charmhelpers.core import decorators, unitdata
 from charmhelpers.core.hookenv import (
     WORKLOAD_STATES,
@@ -1295,7 +1296,7 @@ def _check_listening_on_ports_list(ports):
     Returns a list of ports being listened to and a list of the
     booleans.

-    @param ports: LIST or port numbers.
+    @param ports: LIST of port numbers.
     @returns [(port_num, boolean), ...], [boolean]
     """
     ports_open = [port_has_listener('0.0.0.0', p) for p in ports]
@@ -1564,6 +1565,21 @@ def manage_payload_services(action, services=None, charm_func=None):
     return success, messages


+def make_wait_for_ports_barrier(ports, retry_count=5):
+    """Make a function to wait for port shutdowns.
+
+    Create a function which closes over the provided ports. The function will
+    retry probing ports until they are closed or the retry count has been reached.
+
+    """
+    @decorators.retry_on_predicate(retry_count, operator.not_, base_delay=0.1)
+    def retry_port_check():
+        _, ports_states = _check_listening_on_ports_list(ports)
+        juju_log("Probe ports {}, result: {}".format(ports, ports_states), level="DEBUG")
+        return any(ports_states)
+    return retry_port_check
+
+
 def pause_unit(assess_status_func, services=None, ports=None,
                charm_func=None):
     """Pause a unit by stopping the services and setting 'unit-paused'
@@ -1599,6 +1615,7 @@ def pause_unit(assess_status_func, services=None, ports=None,
         services=services,
         charm_func=charm_func)
     set_unit_paused()
     if assess_status_func:
         message = assess_status_func()
         if message:
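
A minimal usage sketch for the new barrier helper; the port number is an assumption for illustration:

    # Illustrative only: 8776 is a hypothetical port.
    wait_for_shutdown = make_wait_for_ports_barrier([8776], retry_count=5)
    # Probes the ports with a short exponential backoff and returns the last
    # probe result: False once nothing is listening any more, True if a port
    # is still open after the retries are exhausted.
    still_listening = wait_for_shutdown()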

charmhelpers/contrib/storage/linux/ceph.py

@@ -268,6 +268,7 @@ class BasePool(object):
         'compression-max-blob-size': (int, None),
         'compression-max-blob-size-hdd': (int, None),
         'compression-max-blob-size-ssd': (int, None),
+        'rbd-mirroring-mode': (str, ('image', 'pool'))
     }

     def __init__(self, service, name=None, percent_data=None, app_name=None,
@@ -1767,6 +1768,7 @@ class CephBrokerRq(object):
                                       max_bytes=None,
                                       max_objects=None,
                                       namespace=None,
+                                      rbd_mirroring_mode='pool',
                                       weight=None):
         """Build common part of a create pool operation.
@@ -1825,6 +1827,9 @@ class CephBrokerRq(object):
         :type max_objects: Optional[int]
         :param namespace: Group namespace
         :type namespace: Optional[str]
+        :param rbd_mirroring_mode: Pool mirroring mode used when Ceph RBD
+                                   mirroring is enabled.
+        :type rbd_mirroring_mode: Optional[str]
         :param weight: The percentage of data that is expected to be contained
                        in the pool from the total available space on the OSDs.
                        Used to calculate number of Placement Groups to create
@@ -1849,6 +1854,7 @@ class CephBrokerRq(object):
             'max-bytes': max_bytes,
             'max-objects': max_objects,
             'group-namespace': namespace,
+            'rbd-mirroring-mode': rbd_mirroring_mode,
             'weight': weight,
         }
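
With the hunks above, the create-pool op embedded in the broker request carries the mirroring mode alongside the existing keys. Roughly, with values illustrative and unrelated keys elided:

    # Rough shape of the op dict in the broker request; values illustrative.
    {
        'op': 'create-pool',
        'name': 'cinder-ceph',
        'rbd-mirroring-mode': 'image',
        # ... plus 'max-bytes', 'max-objects', 'group-namespace', 'weight', ...
    }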

charmhelpers/core/decorators.py

@@ -53,3 +53,41 @@ def retry_on_exception(num_retries, base_delay=0, exc_type=Exception):
         return _retry_on_exception_inner_2

     return _retry_on_exception_inner_1
+
+
+def retry_on_predicate(num_retries, predicate_fun, base_delay=0):
+    """Retry based on return value
+
+    The return value of the decorated function is passed to the given predicate_fun. If the
+    result of the predicate is False, retry the decorated function up to num_retries times
+
+    An exponential backoff up to base_delay^num_retries seconds can be introduced by setting
+    base_delay to a nonzero value. The default is to run with a zero (i.e. no) delay
+
+    :param num_retries: Max. number of retries to perform
+    :type num_retries: int
+    :param predicate_fun: Predicate function to determine if a retry is necessary
+    :type predicate_fun: callable
+    :param base_delay: Starting value in seconds for exponential delay, defaults to 0 (no delay)
+    :type base_delay: float
+    """
+    def _retry_on_pred_inner_1(f):
+        def _retry_on_pred_inner_2(*args, **kwargs):
+            retries = num_retries
+            multiplier = 1
+            delay = base_delay
+            while True:
+                result = f(*args, **kwargs)
+                if predicate_fun(result) or retries <= 0:
+                    return result
+                delay *= multiplier
+                multiplier += 1
+                log("Result {}, retrying '{}' {} more times (delay={})".format(
+                    result, f.__name__, retries, delay), level=INFO)
+                retries -= 1
+                if delay:
+                    time.sleep(delay)
+
+        return _retry_on_pred_inner_2
+
+    return _retry_on_pred_inner_1
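
A small usage sketch for the new decorator; the pid-file check below is a hypothetical example, not from charmhelpers:

    import operator
    import os

    from charmhelpers.core.decorators import retry_on_predicate

    # Hypothetical example: re-run until the pid file is gone (truthy result)
    # or five retries are exhausted, backing off from a 0.1 second delay.
    @retry_on_predicate(5, operator.truth, base_delay=0.1)
    def pid_file_removed():
        return not os.path.exists('/var/run/myservice.pid')

    stopped = pid_file_removed()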

charmhelpers/core/host.py

@@ -19,6 +19,7 @@
 # Nick Moffitt <nick.moffitt@canonical.com>
 # Matthew Wedgwood <matthew.wedgwood@canonical.com>

+import errno
 import os
 import re
 import pwd
@@ -677,7 +678,7 @@ def check_hash(path, checksum, hash_type='md5'):
     :param str checksum: Value of the checksum used to validate the file.
     :param str hash_type: Hash algorithm used to generate `checksum`.
-        Can be any hash alrgorithm supported by :mod:`hashlib`,
+        Can be any hash algorithm supported by :mod:`hashlib`,
         such as md5, sha1, sha256, sha512, etc.

     :raises ChecksumError: If the file fails the checksum
@@ -825,7 +826,8 @@ def list_nics(nic_type=None):
     if nic_type:
         for int_type in int_types:
             cmd = ['ip', 'addr', 'show', 'label', int_type + '*']
-            ip_output = subprocess.check_output(cmd).decode('UTF-8')
+            ip_output = subprocess.check_output(
+                cmd).decode('UTF-8', errors='replace')
             ip_output = ip_output.split('\n')
             ip_output = (line for line in ip_output if line)
             for line in ip_output:
@@ -841,7 +843,8 @@ def list_nics(nic_type=None):
                 interfaces.append(iface)
     else:
         cmd = ['ip', 'a']
-        ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
+        ip_output = subprocess.check_output(
+            cmd).decode('UTF-8', errors='replace').split('\n')
         ip_output = (line.strip() for line in ip_output if line)
         key = re.compile(r'^[0-9]+:\s+(.+):')
@@ -865,7 +868,8 @@ def set_nic_mtu(nic, mtu):
 def get_nic_mtu(nic):
     """Return the Maximum Transmission Unit (MTU) for a network interface."""
     cmd = ['ip', 'addr', 'show', nic]
-    ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
+    ip_output = subprocess.check_output(
+        cmd).decode('UTF-8', errors='replace').split('\n')
     mtu = ""
     for line in ip_output:
         words = line.split()
@@ -877,7 +881,7 @@ def get_nic_mtu(nic):
 def get_nic_hwaddr(nic):
     """Return the Media Access Control (MAC) for a network interface."""
     cmd = ['ip', '-o', '-0', 'addr', 'show', nic]
-    ip_output = subprocess.check_output(cmd).decode('UTF-8')
+    ip_output = subprocess.check_output(cmd).decode('UTF-8', errors='replace')
     hwaddr = ""
     words = ip_output.split()
     if 'link/ether' in words:
@@ -889,7 +893,7 @@ def get_nic_hwaddr(nic):
 def chdir(directory):
     """Change the current working directory to a different directory for a code
     block and return the previous directory after the block exits. Useful to
-    run commands from a specificed directory.
+    run commands from a specified directory.

     :param str directory: The directory path to change to for this context.
     """
@@ -924,9 +928,13 @@ def chownr(path, owner, group, follow_links=True, chowntopdir=False):
     for root, dirs, files in os.walk(path, followlinks=follow_links):
         for name in dirs + files:
             full = os.path.join(root, name)
-            broken_symlink = os.path.lexists(full) and not os.path.exists(full)
-            if not broken_symlink:
-                chown(full, uid, gid)
+            try:
+                chown(full, uid, gid)
+            except (IOError, OSError) as e:
+                # Intended to ignore "file not found". Catching both to be
+                # compatible with both Python 2.7 and 3.x.
+                if e.errno == errno.ENOENT:
+                    pass


 def lchownr(path, owner, group):