Sync charm-helpers

Syncing updated code from charm helpers to fix a
problem introduced by PR #324, fixed in PR #332.

Change-Id: Ie7a880dd0b9064e6e99c023b2ee9a1fc332b9def
This commit is contained in:
Rodrigo Barbieri 2019-05-31 10:17:47 -03:00
parent 206970c6c1
commit 93dbc7350c
7 changed files with 78 additions and 22 deletions

View File

@@ -294,8 +294,10 @@ class OpenStackAmuletDeployment(AmuletDeployment):
('bionic', None): self.bionic_queens,
('bionic', 'cloud:bionic-rocky'): self.bionic_rocky,
('bionic', 'cloud:bionic-stein'): self.bionic_stein,
('bionic', 'cloud:bionic-train'): self.bionic_train,
('cosmic', None): self.cosmic_rocky,
('disco', None): self.disco_stein,
('eoan', None): self.eoan_train,
}
return releases[(self.series, self.openstack)]
@@ -313,6 +315,7 @@ class OpenStackAmuletDeployment(AmuletDeployment):
('bionic', 'queens'),
('cosmic', 'rocky'),
('disco', 'stein'),
('eoan', 'train'),
])
if self.openstack:
os_origin = self.openstack.split(':')[1]

View File

@@ -54,11 +54,15 @@ NOVA_CLIENT_VERSION = "2"
OPENSTACK_RELEASES_PAIRS = [
'trusty_icehouse', 'trusty_kilo', 'trusty_liberty',
'trusty_mitaka', 'xenial_mitaka', 'xenial_newton',
'yakkety_newton', 'xenial_ocata', 'zesty_ocata',
'xenial_pike', 'artful_pike', 'xenial_queens',
'bionic_queens', 'bionic_rocky', 'cosmic_rocky',
'bionic_stein', 'disco_stein']
'trusty_mitaka', 'xenial_mitaka',
'xenial_newton', 'yakkety_newton',
'xenial_ocata', 'zesty_ocata',
'xenial_pike', 'artful_pike',
'xenial_queens', 'bionic_queens',
'bionic_rocky', 'cosmic_rocky',
'bionic_stein', 'disco_stein',
'bionic_train', 'eoan_train',
]
class OpenStackAmuletUtils(AmuletUtils):

View File

@@ -114,11 +114,6 @@ except ImportError:
apt_install('python3-psutil', fatal=True)
import psutil
if six.PY3:
json_error = json.decoder.JSONDecodeError
else:
json_error = ValueError
CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt'
ADDRESS_TYPES = ['admin', 'internal', 'public']
HAPROXY_RUN_DIR = '/var/run/haproxy/'
@@ -546,13 +541,19 @@ class NovaVendorMetadataContext(OSContextGenerator):
def __call__(self):
cmp_os_release = CompareOpenStackReleases(
os_release(self.os_release_pkg))
ctxt = {}
ctxt = {'vendor_data': False}
vdata_providers = []
vdata = config('vendor-data')
vdata_url = config('vendor-data-url')
if vdata:
try:
# validate the JSON. If invalid, we do not set anything here
json.loads(vdata)
except (TypeError, ValueError) as e:
log('Error decoding vendor-data. {}'.format(e), level=ERROR)
else:
ctxt['vendor_data'] = True
# Mitaka does not support DynamicJSON
# so vendordata_providers is not needed
@@ -592,7 +593,7 @@ class NovaVendorMetadataJSONContext(OSContextGenerator):
try:
# validate the JSON. If invalid, we return empty.
json.loads(vdata)
except (TypeError, json_error) as e:
except (TypeError, ValueError) as e:
log('Error decoding vendor-data. {}'.format(e), level=ERROR)
else:
ctxt['vendor_data_json'] = vdata
@@ -781,6 +782,25 @@ class CephContext(OSContextGenerator):
ensure_packages(['ceph-common'])
return ctxt
def context_complete(self, ctxt):
"""Overridden here to ensure the context is actually complete.
We set `key` and `auth` to None here, by default, to ensure
that the context will always evaluate to incomplete until the
Ceph relation has actually sent these details; otherwise,
there is a potential race condition between the relation
appearing and the first unit actually setting this data on the
relation.
:param ctxt: The current context members
:type ctxt: Dict[str, ANY]
:returns: True if the context is complete
:rtype: bool
"""
if 'auth' not in ctxt or 'key' not in ctxt:
return False
return super(CephContext, self).context_complete(ctxt)
class HAProxyContext(OSContextGenerator):
"""Provides half a context for the haproxy template, which describes

View File

@@ -217,6 +217,11 @@ def neutron_plugins():
plugins['nsx']['config'] = '/etc/neutron/nsx.ini'
plugins['vsp']['driver'] = (
'nuage_neutron.plugins.nuage.plugin.NuagePlugin')
if CompareOpenStackReleases(release) >= 'newton':
plugins['vsp']['config'] = '/etc/neutron/plugins/ml2/ml2_conf.ini'
plugins['vsp']['driver'] = 'neutron.plugins.ml2.plugin.Ml2Plugin'
plugins['vsp']['server_packages'] = ['neutron-server',
'neutron-plugin-ml2']
return plugins

View File

@@ -120,6 +120,7 @@ OPENSTACK_RELEASES = (
'queens',
'rocky',
'stein',
'train',
)
UBUNTU_OPENSTACK_RELEASE = OrderedDict([
@@ -139,6 +140,7 @@ UBUNTU_OPENSTACK_RELEASE = OrderedDict([
('bionic', 'queens'),
('cosmic', 'rocky'),
('disco', 'stein'),
('eoan', 'train'),
])
@@ -159,6 +161,7 @@ OPENSTACK_CODENAMES = OrderedDict([
('2018.1', 'queens'),
('2018.2', 'rocky'),
('2019.1', 'stein'),
('2019.2', 'train'),
])
# The ugly duckling - must list releases oldest to newest
@@ -195,6 +198,8 @@ SWIFT_CODENAMES = OrderedDict([
['2.18.0', '2.19.0']),
('stein',
['2.20.0', '2.21.0']),
('train',
['2.22.0']),
])
# >= Liberty version->codename mapping
@@ -208,6 +213,7 @@ PACKAGE_CODENAMES = {
('17', 'queens'),
('18', 'rocky'),
('19', 'stein'),
('20', 'train'),
]),
'neutron-common': OrderedDict([
('7', 'liberty'),
@@ -218,6 +224,7 @@ PACKAGE_CODENAMES = {
('12', 'queens'),
('13', 'rocky'),
('14', 'stein'),
('15', 'train'),
]),
'cinder-common': OrderedDict([
('7', 'liberty'),
@@ -228,6 +235,7 @@ PACKAGE_CODENAMES = {
('12', 'queens'),
('13', 'rocky'),
('14', 'stein'),
('15', 'train'),
]),
'keystone': OrderedDict([
('8', 'liberty'),
@@ -238,6 +246,7 @@ PACKAGE_CODENAMES = {
('13', 'queens'),
('14', 'rocky'),
('15', 'stein'),
('16', 'train'),
]),
'horizon-common': OrderedDict([
('8', 'liberty'),
@@ -248,6 +257,7 @@ PACKAGE_CODENAMES = {
('13', 'queens'),
('14', 'rocky'),
('15', 'stein'),
('16', 'train'),
]),
'ceilometer-common': OrderedDict([
('5', 'liberty'),
@@ -258,6 +268,7 @@ PACKAGE_CODENAMES = {
('10', 'queens'),
('11', 'rocky'),
('12', 'stein'),
('13', 'train'),
]),
'heat-common': OrderedDict([
('5', 'liberty'),
@@ -268,6 +279,7 @@ PACKAGE_CODENAMES = {
('10', 'queens'),
('11', 'rocky'),
('12', 'stein'),
('13', 'train'),
]),
'glance-common': OrderedDict([
('11', 'liberty'),
@@ -278,6 +290,7 @@ PACKAGE_CODENAMES = {
('16', 'queens'),
('17', 'rocky'),
('18', 'stein'),
('19', 'train'),
]),
'openstack-dashboard': OrderedDict([
('8', 'liberty'),
@@ -288,6 +301,7 @@ PACKAGE_CODENAMES = {
('13', 'queens'),
('14', 'rocky'),
('15', 'stein'),
('16', 'train'),
]),
}

View File

@@ -1488,7 +1488,7 @@ def is_broker_action_done(action, rid=None, unit=None):
@param action: name of action to be performed
@returns True if action complete otherwise False
"""
rdata = relation_get(rid, unit) or {}
rdata = relation_get(rid=rid, unit=unit) or {}
broker_rsp = rdata.get(get_broker_rsp_key())
if not broker_rsp:
return False
@@ -1510,7 +1510,7 @@ def mark_broker_action_done(action, rid=None, unit=None):
@param action: name of action to be performed
@returns None
"""
rdata = relation_get(rid, unit) or {}
rdata = relation_get(rid=rid, unit=unit) or {}
broker_rsp = rdata.get(get_broker_rsp_key())
if not broker_rsp:
return

View File

@@ -173,6 +173,14 @@ CLOUD_ARCHIVE_POCKETS = {
'stein/proposed': 'bionic-proposed/stein',
'bionic-stein/proposed': 'bionic-proposed/stein',
'bionic-proposed/stein': 'bionic-proposed/stein',
# Train
'train': 'bionic-updates/train',
'bionic-train': 'bionic-updates/train',
'bionic-train/updates': 'bionic-updates/train',
'bionic-updates/train': 'bionic-updates/train',
'train/proposed': 'bionic-proposed/train',
'bionic-train/proposed': 'bionic-proposed/train',
'bionic-proposed/train': 'bionic-proposed/train',
}
@@ -522,14 +530,16 @@ def add_source(source, key=None, fail_invalid=False):
for r, fn in six.iteritems(_mapping):
m = re.match(r, source)
if m:
# call the assoicated function with the captured groups
# raises SourceConfigError on error.
fn(*m.groups())
if key:
# Import key before adding the source which depends on it,
# as refreshing packages could fail otherwise.
try:
import_key(key)
except GPGKeyError as e:
raise SourceConfigError(str(e))
# call the associated function with the captured groups
# raises SourceConfigError on error.
fn(*m.groups())
break
else:
# nothing matched. log an error and maybe sys.exit