Sync charm-helpers
Change-Id: I0ee707fb54896ff8053a9d16cf75f553c6e26c5e
commit 4aed9892fc
parent 880e0441c5
@@ -23,22 +23,22 @@ import subprocess
 import sys
 
 try:
-    import six  # flake8: noqa
+    import six  # NOQA:F401
 except ImportError:
     if sys.version_info.major == 2:
         subprocess.check_call(['apt-get', 'install', '-y', 'python-six'])
     else:
         subprocess.check_call(['apt-get', 'install', '-y', 'python3-six'])
-    import six  # flake8: noqa
+    import six  # NOQA:F401
 
 try:
-    import yaml  # flake8: noqa
+    import yaml  # NOQA:F401
 except ImportError:
     if sys.version_info.major == 2:
         subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml'])
     else:
         subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml'])
-    import yaml  # flake8: noqa
+    import yaml  # NOQA:F401
 
 
 # Holds a list of mapping of mangled function names that have been deprecated
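Editorial note on the pragma swap repeated throughout this sync: flake8 treats a bare "# flake8: noqa" comment as a file-level directive that disables every check in the module, while the per-line "# noqa: F401" form (case-insensitive, so "# NOQA:F401" works too) silences only the unused-import warning on that one line. A minimal illustration, not taken from the commit:

import json  # noqa: F401  - deliberately unused re-export; only F401 is hushed here
import os

print(os.getcwd())  # 'os' is genuinely used, so it needs no pragma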
@@ -25,7 +25,9 @@ from charmhelpers.core.hookenv import (
     local_unit,
     network_get_primary_address,
     config,
+    related_units,
     relation_get,
+    relation_ids,
     unit_get,
     NoNetworkBinding,
     log,
@@ -225,3 +227,49 @@ def process_certificates(service_name, relation_id, unit,
         create_ip_cert_links(
             ssl_dir,
             custom_hostname_link=custom_hostname_link)
+
+
+def get_requests_for_local_unit(relation_name=None):
+    """Extract any certificates data targeted at this unit down relation_name.
+
+    :param relation_name: str Name of relation to check for data.
+    :returns: List of bundles of certificates.
+    :rtype: List of dicts
+    """
+    local_name = local_unit().replace('/', '_')
+    raw_certs_key = '{}.processed_requests'.format(local_name)
+    relation_name = relation_name or 'certificates'
+    bundles = []
+    for rid in relation_ids(relation_name):
+        for unit in related_units(rid):
+            data = relation_get(rid=rid, unit=unit)
+            if data.get(raw_certs_key):
+                bundles.append({
+                    'ca': data['ca'],
+                    'chain': data.get('chain'),
+                    'certs': json.loads(data[raw_certs_key])})
+    return bundles
+
+
+def get_bundle_for_cn(cn, relation_name=None):
+    """Extract certificates for the given cn.
+
+    :param cn: str Canonical Name on certificate.
+    :param relation_name: str Relation to check for certificates down.
+    :returns: Dictionary of certificate data,
+    :rtype: dict.
+    """
+    entries = get_requests_for_local_unit(relation_name)
+    cert_bundle = {}
+    for entry in entries:
+        for _cn, bundle in entry['certs'].items():
+            if _cn == cn:
+                cert_bundle = {
+                    'cert': bundle['cert'],
+                    'key': bundle['key'],
+                    'chain': entry['chain'],
+                    'ca': entry['ca']}
+                break
+        if cert_bundle:
+            break
+    return cert_bundle
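A sketch of how a charm might consume the two new helpers. The module path, CN value and file locations below are assumptions for illustration; only the function names and the returned keys ('cert', 'key', 'chain', 'ca') come from the hunk above.

# Hypothetical consumer: write out the bundle issued for one CN.
from charmhelpers.contrib.openstack.cert_utils import get_bundle_for_cn  # assumed path

bundle = get_bundle_for_cn('keystone.internal.example')  # CN is illustrative
if bundle:
    with open('/etc/apache2/ssl/cert.pem', 'w') as f:    # path is illustrative
        f.write(bundle['cert'])
    with open('/etc/apache2/ssl/key.pem', 'w') as f:     # path is illustrative
        f.write(bundle['key'])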
@@ -642,7 +642,7 @@ class HAProxyContext(OSContextGenerator):
             return {}
 
         l_unit = local_unit().replace('/', '-')
-        cluster_hosts = {}
+        cluster_hosts = collections.OrderedDict()
 
         # NOTE(jamespage): build out map of configured network endpoints
         # and associated backends
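The switch to collections.OrderedDict() here is presumably about deterministic rendering: on Python versions before 3.7 a plain dict does not guarantee insertion order, so a mapping that later feeds the haproxy template could come out in a different order on otherwise identical hook runs. A small sketch; the keys and values are illustrative, not the real context structure:

import collections

cluster_hosts = collections.OrderedDict()
cluster_hosts['10.0.0.2'] = {'backends': {'unit-0': '10.0.0.2'}}
cluster_hosts['10.0.0.3'] = {'backends': {'unit-1': '10.0.0.3'}}

# Iteration order now always matches insertion order, so output rendered
# from this mapping is stable across hook invocations.
for addr, data in cluster_hosts.items():
    print(addr, sorted(data['backends']))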
@@ -28,6 +28,7 @@ import json
 import re
 
 from charmhelpers.core.hookenv import (
+    expected_related_units,
     log,
     relation_set,
     charm_name,
@@ -110,12 +111,17 @@ def assert_charm_supports_dns_ha():
 def expect_ha():
     """ Determine if the unit expects to be in HA
 
-    Check for VIP or dns-ha settings which indicate the unit should expect to
-    be related to hacluster.
+    Check juju goal-state if ha relation is expected, check for VIP or dns-ha
+    settings which indicate the unit should expect to be related to hacluster.
 
     @returns boolean
     """
-    return config('vip') or config('dns-ha')
+    ha_related_units = []
+    try:
+        ha_related_units = list(expected_related_units(reltype='ha'))
+    except (NotImplementedError, KeyError):
+        pass
+    return len(ha_related_units) > 0 or config('vip') or config('dns-ha')
 
 
 def generate_ha_relation_data(service):
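A hedged usage sketch for the revised expect_ha(): the status message, the 'ha' relation check and the assess function name are assumptions; only expect_ha() itself, with its new goal-state awareness, comes from the hunk above.

from charmhelpers.contrib.hahelpers.cluster import expect_ha  # assumed module path
from charmhelpers.core.hookenv import relation_ids, status_set


def assess_ha_readiness():
    # With this change expect_ha() also returns True when juju goal-state
    # shows hacluster units on their way, even before vip/dns-ha is set.
    if expect_ha() and not relation_ids('ha'):
        status_set('blocked', 'hacluster relation expected but not yet joined')
    else:
        status_set('active', 'Unit is ready')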
@@ -375,7 +375,7 @@ def get_swift_codename(version):
         return codenames[0]
 
     # NOTE: fallback - attempt to match with just major.minor version
-    match = re.match('^(\d+)\.(\d+)', version)
+    match = re.match(r'^(\d+)\.(\d+)', version)
     if match:
         major_minor_version = match.group(0)
         for codename, versions in six.iteritems(SWIFT_CODENAMES):
@@ -395,7 +395,7 @@ def get_os_codename_package(package, fatal=True):
             out = subprocess.check_output(cmd)
             if six.PY3:
                 out = out.decode('UTF-8')
-        except subprocess.CalledProcessError as e:
+        except subprocess.CalledProcessError:
             return None
         lines = out.split('\n')
         for line in lines:
@@ -427,11 +427,11 @@ def get_os_codename_package(package, fatal=True):
     vers = apt.upstream_version(pkg.current_ver.ver_str)
     if 'swift' in pkg.name:
         # Fully x.y.z match for swift versions
-        match = re.match('^(\d+)\.(\d+)\.(\d+)', vers)
+        match = re.match(r'^(\d+)\.(\d+)\.(\d+)', vers)
     else:
         # x.y match only for 20XX.X
         # and ignore patch level for other packages
-        match = re.match('^(\d+)\.(\d+)', vers)
+        match = re.match(r'^(\d+)\.(\d+)', vers)
 
     if match:
         vers = match.group(0)
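The regex changes in the hunks above (and the similar ones later in the diff) only add the r prefix: since Python 3.6 an unrecognised escape such as \d inside a normal string literal emits a DeprecationWarning at compile time, and raw strings are the idiomatic way to hand backslashes to the re module unchanged. Illustrative snippet, not from the commit:

import re

pattern = r'^(\d+)\.(\d+)'           # raw string: backslashes reach re intact
match = re.match(pattern, '13.0.1')
if match:
    print(match.group(0))            # -> 13.0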
@@ -1450,20 +1450,33 @@ def pausable_restart_on_change(restart_map, stopstart=False,
 
     see core.utils.restart_on_change() for more details.
 
+    Note restart_map can be a callable, in which case, restart_map is only
+    evaluated at runtime. This means that it is lazy and the underlying
+    function won't be called if the decorated function is never called. Note,
+    retains backwards compatibility for passing a non-callable dictionary.
+
     @param f: the function to decorate
-    @param restart_map: the restart map {conf_file: [services]}
+    @param restart_map: (optionally callable, which then returns the
+        restart_map) the restart map {conf_file: [services]}
     @param stopstart: DEFAULT false; whether to stop, start or just restart
     @returns decorator to use a restart_on_change with pausability
     """
     def wrap(f):
+        # py27 compatible nonlocal variable. When py3 only, replace with
+        # nonlocal keyword
+        __restart_map_cache = {'cache': None}
+
         @functools.wraps(f)
         def wrapped_f(*args, **kwargs):
             if is_unit_paused_set():
                 return f(*args, **kwargs)
+            if __restart_map_cache['cache'] is None:
+                __restart_map_cache['cache'] = restart_map() \
+                    if callable(restart_map) else restart_map
             # otherwise, normal restart_on_change functionality
             return restart_on_change_helper(
-                (lambda: f(*args, **kwargs)), restart_map, stopstart,
-                restart_functions)
+                (lambda: f(*args, **kwargs)), __restart_map_cache['cache'],
+                stopstart, restart_functions)
         return wrapped_f
     return wrap
 
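A sketch of the lazy restart_map usage this decorator change enables. The module path is inferred from the surrounding hunks, and the config file, service name and render_configs() helper are assumptions:

from charmhelpers.contrib.openstack.utils import pausable_restart_on_change  # assumed path


def restart_map():
    # Passed as a callable, so it is only evaluated the first time the
    # decorated hook actually runs (and then cached by the decorator).
    return {'/etc/mysvc/mysvc.conf': ['mysvc']}  # illustrative map


@pausable_restart_on_change(restart_map, stopstart=True)
def config_changed():
    render_configs()  # hypothetical charm-specific helper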
@@ -39,7 +39,7 @@ def loopback_devices():
     devs = [d.strip().split(' ') for d in
             check_output(cmd).splitlines() if d != '']
     for dev, _, f in devs:
-        loopbacks[dev.replace(':', '')] = re.search('\((\S+)\)', f).groups()[0]
+        loopbacks[dev.replace(':', '')] = re.search(r'\((\S+)\)', f).groups()[0]
     return loopbacks
 
 
|
@ -510,6 +510,67 @@ def related_units(relid=None):
|
|||||||
subprocess.check_output(units_cmd_line).decode('UTF-8')) or []
|
subprocess.check_output(units_cmd_line).decode('UTF-8')) or []
|
||||||
|
|
||||||
|
|
||||||
|
def expected_peer_units():
|
||||||
|
"""Get a generator for units we expect to join peer relation based on
|
||||||
|
goal-state.
|
||||||
|
|
||||||
|
The local unit is excluded from the result to make it easy to gauge
|
||||||
|
completion of all peers joining the relation with existing hook tools.
|
||||||
|
|
||||||
|
Example usage:
|
||||||
|
log('peer {} of {} joined peer relation'
|
||||||
|
.format(len(related_units()),
|
||||||
|
len(list(expected_peer_units()))))
|
||||||
|
|
||||||
|
This function will raise NotImplementedError if used with juju versions
|
||||||
|
without goal-state support.
|
||||||
|
|
||||||
|
:returns: iterator
|
||||||
|
:rtype: types.GeneratorType
|
||||||
|
:raises: NotImplementedError
|
||||||
|
"""
|
||||||
|
if not has_juju_version("2.4.0"):
|
||||||
|
# goal-state first appeared in 2.4.0.
|
||||||
|
raise NotImplementedError("goal-state")
|
||||||
|
_goal_state = goal_state()
|
||||||
|
return (key for key in _goal_state['units']
|
||||||
|
if '/' in key and key != local_unit())
|
||||||
|
|
||||||
|
|
||||||
|
def expected_related_units(reltype=None):
|
||||||
|
"""Get a generator for units we expect to join relation based on
|
||||||
|
goal-state.
|
||||||
|
|
||||||
|
Note that you can not use this function for the peer relation, take a look
|
||||||
|
at expected_peer_units() for that.
|
||||||
|
|
||||||
|
This function will raise KeyError if you request information for a
|
||||||
|
relation type for which juju goal-state does not have information. It will
|
||||||
|
raise NotImplementedError if used with juju versions without goal-state
|
||||||
|
support.
|
||||||
|
|
||||||
|
Example usage:
|
||||||
|
log('participant {} of {} joined relation {}'
|
||||||
|
.format(len(related_units()),
|
||||||
|
len(list(expected_related_units())),
|
||||||
|
relation_type()))
|
||||||
|
|
||||||
|
:param reltype: Relation type to list data for, default is to list data for
|
||||||
|
the realtion type we are currently executing a hook for.
|
||||||
|
:type reltype: str
|
||||||
|
:returns: iterator
|
||||||
|
:rtype: types.GeneratorType
|
||||||
|
:raises: KeyError, NotImplementedError
|
||||||
|
"""
|
||||||
|
if not has_juju_version("2.4.4"):
|
||||||
|
# goal-state existed in 2.4.0, but did not list individual units to
|
||||||
|
# join a relation in 2.4.1 through 2.4.3. (LP: #1794739)
|
||||||
|
raise NotImplementedError("goal-state relation unit count")
|
||||||
|
reltype = reltype or relation_type()
|
||||||
|
_goal_state = goal_state()
|
||||||
|
return (key for key in _goal_state['relations'][reltype] if '/' in key)
|
||||||
|
|
||||||
|
|
||||||
@cached
|
@cached
|
||||||
def relation_for_unit(unit=None, rid=None):
|
def relation_for_unit(unit=None, rid=None):
|
||||||
"""Get the json represenation of a unit's relation"""
|
"""Get the json represenation of a unit's relation"""
|
||||||
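A hedged sketch building on the new goal-state helpers: the function name and the fallback-to-True behaviour on older Juju are assumptions, while expected_peer_units(), related_units() and log() are the real helpers shown in this file.

from charmhelpers.core.hookenv import expected_peer_units, related_units, log


def all_peers_joined(peer_rid=None):
    try:
        expected = len(list(expected_peer_units()))
    except NotImplementedError:
        # Juju < 2.4.0 has no goal-state; treat whoever is related as everyone.
        return True
    joined = len(related_units(peer_rid))
    log('peer {} of {} joined peer relation'.format(joined, expected))
    return joined >= expected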
@@ -998,6 +1059,7 @@ def application_version_set(version):
 
 
 @translate_exc(from_exc=OSError, to_exc=NotImplementedError)
+@cached
 def goal_state():
     """Juju goal state values"""
     cmd = ['goal-state', '--format=json']
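Adding @cached to goal_state() memoizes the hook-tool output for the duration of a single hook execution, so the new helpers above that each call goal_state() do not shell out to goal-state repeatedly. Illustrative only:

from charmhelpers.core.hookenv import goal_state

units = goal_state()['units']          # first call runs `goal-state --format=json`
relations = goal_state()['relations']  # second call is served from the cache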
@@ -40,7 +40,7 @@ from charmhelpers.osplatform import get_platform
 
 __platform__ = get_platform()
 if __platform__ == "ubuntu":
-    from charmhelpers.core.host_factory.ubuntu import (
+    from charmhelpers.core.host_factory.ubuntu import (  # NOQA:F401
         service_available,
         add_new_group,
         lsb_release,
@@ -48,7 +48,7 @@ if __platform__ == "ubuntu":
         CompareHostReleases,
     )  # flake8: noqa -- ignore F401 for this import
 elif __platform__ == "centos":
-    from charmhelpers.core.host_factory.centos import (
+    from charmhelpers.core.host_factory.centos import (  # NOQA:F401
         service_available,
         add_new_group,
         lsb_release,
@@ -58,6 +58,7 @@ elif __platform__ == "centos":
 
 UPDATEDB_PATH = '/etc/updatedb.conf'
 
+
 def service_start(service_name, **kwargs):
     """Start a system service.
 
@@ -287,8 +288,8 @@ def service_running(service_name, **kwargs):
                 for key, value in six.iteritems(kwargs):
                     parameter = '%s=%s' % (key, value)
                     cmd.append(parameter)
-                output = subprocess.check_output(cmd,
-                                                 stderr=subprocess.STDOUT).decode('UTF-8')
+                output = subprocess.check_output(
+                    cmd, stderr=subprocess.STDOUT).decode('UTF-8')
             except subprocess.CalledProcessError:
                 return False
             else:
@@ -482,8 +483,10 @@ def chage(username, lastday=None, expiredate=None, inactive=None,
     cmd.append(username)
     subprocess.check_call(cmd)
 
+
 remove_password_expiry = functools.partial(chage, expiredate='-1', inactive='-1', mindays='0', maxdays='-1')
 
+
 def rsync(from_path, to_path, flags='-r', options=None, timeout=None):
     """Replicate the contents of a path"""
     options = options or ['--delete', '--executability']
@@ -535,13 +538,15 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
     # lets see if we can grab the file and compare the context, to avoid doing
     # a write.
     existing_content = None
-    existing_uid, existing_gid = None, None
+    existing_uid, existing_gid, existing_perms = None, None, None
     try:
         with open(path, 'rb') as target:
             existing_content = target.read()
         stat = os.stat(path)
-        existing_uid, existing_gid = stat.st_uid, stat.st_gid
-    except:
+        existing_uid, existing_gid, existing_perms = (
+            stat.st_uid, stat.st_gid, stat.st_mode
+        )
+    except Exception:
         pass
     if content != existing_content:
         log("Writing file {} {}:{} {:o}".format(path, owner, group, perms),
@@ -554,7 +559,7 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
             target.write(content)
         return
     # the contents were the same, but we might still need to change the
-    # ownership.
+    # ownership or permissions.
     if existing_uid != uid:
         log("Changing uid on already existing content: {} -> {}"
             .format(existing_uid, uid), level=DEBUG)
@@ -563,6 +568,10 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
         log("Changing gid on already existing content: {} -> {}"
             .format(existing_gid, gid), level=DEBUG)
         os.chown(path, -1, gid)
+    if existing_perms != perms:
+        log("Changing permissions on existing content: {} -> {}"
+            .format(existing_perms, perms), level=DEBUG)
+        os.chmod(path, perms)
 
 
 def fstab_remove(mp):
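With the write_file() changes above, a repeat call whose only difference is perms now results in a chmod instead of silently keeping the old mode. Usage sketch; the path and payload are illustrative:

from charmhelpers.core.host import write_file

write_file('/etc/mysvc/mysvc.conf', b'listen 8080\n', perms=0o644)
# A later hook run with identical content but tighter permissions: the file
# is not rewritten, but its mode is now adjusted to 0o640.
write_file('/etc/mysvc/mysvc.conf', b'listen 8080\n', perms=0o640)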
@@ -827,7 +836,7 @@ def list_nics(nic_type=None):
         ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
         ip_output = (line.strip() for line in ip_output if line)
 
-        key = re.compile('^[0-9]+:\s+(.+):')
+        key = re.compile(r'^[0-9]+:\s+(.+):')
         for line in ip_output:
             matched = re.search(key, line)
             if matched:
@@ -26,12 +26,12 @@ from charmhelpers.core.hookenv import (
 
 __platform__ = get_platform()
 if __platform__ == "ubuntu":
-    from charmhelpers.core.kernel_factory.ubuntu import (
+    from charmhelpers.core.kernel_factory.ubuntu import (  # NOQA:F401
         persistent_modprobe,
         update_initramfs,
     )  # flake8: noqa -- ignore F401 for this import
 elif __platform__ == "centos":
-    from charmhelpers.core.kernel_factory.centos import (
+    from charmhelpers.core.kernel_factory.centos import (  # NOQA:F401
         persistent_modprobe,
         update_initramfs,
     )  # flake8: noqa -- ignore F401 for this import
@@ -294,7 +294,7 @@ def apt_unhold(packages, fatal=False):
 def import_key(key):
     """Import an ASCII Armor key.
 
-    /!\ A Radix64 format keyid is also supported for backwards
+    A Radix64 format keyid is also supported for backwards
     compatibility, but should never be used; the key retrieval
     mechanism is insecure and subject to man-in-the-middle attacks
     voiding all signature checks using that key.
@@ -454,6 +454,9 @@ def _add_apt_repository(spec):
 
     :param spec: the parameter to pass to add_apt_repository
     """
+    if '{series}' in spec:
+        series = lsb_release()['DISTRIB_CODENAME']
+        spec = spec.replace('{series}', series)
     _run_with_retries(['add-apt-repository', '--yes', spec])
 
 
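A minimal sketch of the new {series} substitution in _add_apt_repository(); the source line is an assumption, and lsb_release() is the same helper the hunk uses:

from charmhelpers.core.host import lsb_release

spec = 'deb http://archive.example.com/ubuntu {series} main'  # illustrative
if '{series}' in spec:
    series = lsb_release()['DISTRIB_CODENAME']  # e.g. 'bionic'
    spec = spec.replace('{series}', series)
print(spec)  # -> deb http://archive.example.com/ubuntu bionic main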