[gnuoy,r=james-page] Add support for different AMQP brokers for nova and neutron.

Commit 589c60c2c4
@@ -50,6 +50,14 @@ options:
     type: string
     description: RabbitMQ Virtual Host
     default: openstack
+  nova-rabbit-user:
+    type: string
+    description: RabbitMQ Nova user
+    default: nova
+  nova-rabbit-vhost:
+    type: string
+    description: RabbitMQ Nova Virtual Host
+    default: openstack
   debug:
     default: False
     type: boolean

hooks/amqp-nova-relation-changed (new symbolic link)
@@ -0,0 +1 @@
+quantum_hooks.py

hooks/amqp-nova-relation-departed (new symbolic link)
@@ -0,0 +1 @@
+quantum_hooks.py

hooks/amqp-nova-relation-joined (new symbolic link)
@@ -0,0 +1 @@
+quantum_hooks.py

hooks/charmhelpers/contrib/openstack/amulet/deployment.py (new file, 38 lines)
@@ -0,0 +1,38 @@
+from charmhelpers.contrib.amulet.deployment import (
+    AmuletDeployment
+)
+
+
+class OpenStackAmuletDeployment(AmuletDeployment):
+    """This class inherits from AmuletDeployment and has additional support
+       that is specifically for use by OpenStack charms."""
+
+    def __init__(self, series=None, openstack=None):
+        """Initialize the deployment environment."""
+        self.openstack = None
+        super(OpenStackAmuletDeployment, self).__init__(series)
+
+        if openstack:
+            self.openstack = openstack
+
+    def _configure_services(self, configs):
+        """Configure all of the services."""
+        for service, config in configs.iteritems():
+            if service == self.this_service:
+                config['openstack-origin'] = self.openstack
+            self.d.configure(service, config)
+
+    def _get_openstack_release(self):
+        """Return an integer representing the enum value of the openstack
+           release."""
+        self.precise_essex, self.precise_folsom, self.precise_grizzly, \
+            self.precise_havana, self.precise_icehouse, \
+            self.trusty_icehouse = range(6)
+        releases = {
+            ('precise', None): self.precise_essex,
+            ('precise', 'cloud:precise-folsom'): self.precise_folsom,
+            ('precise', 'cloud:precise-grizzly'): self.precise_grizzly,
+            ('precise', 'cloud:precise-havana'): self.precise_havana,
+            ('precise', 'cloud:precise-icehouse'): self.precise_icehouse,
+            ('trusty', None): self.trusty_icehouse}
+        return releases[(self.series, self.openstack)]
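Not part of the diff: a minimal sketch of how the release mapping above resolves, assuming the amulet package is available and that the parent AmuletDeployment stores the series passed to its constructor.

from charmhelpers.contrib.openstack.amulet.deployment import (
    OpenStackAmuletDeployment
)

# Resolve the release enum for a precise deployment pulling from the
# havana cloud archive; per the mapping above this is precise_havana.
d = OpenStackAmuletDeployment(series='precise',
                              openstack='cloud:precise-havana')
release = d._get_openstack_release()
assert release == d.precise_havana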

hooks/charmhelpers/contrib/openstack/amulet/utils.py (new file, 151 lines)
@@ -0,0 +1,151 @@
+import logging
+
+import glanceclient.v1.client as glance_client
+import keystoneclient.v2_0 as keystone_client
+import novaclient.v1_1.client as nova_client
+
+from charmhelpers.contrib.amulet.utils import (
+    AmuletUtils
+)
+
+DEBUG = logging.DEBUG
+ERROR = logging.ERROR
+
+
+class OpenStackAmuletUtils(AmuletUtils):
+    """This class inherits from AmuletUtils and has additional support
+       that is specifically for use by OpenStack charms."""
+
+    def __init__(self, log_level=ERROR):
+        """Initialize the deployment environment."""
+        super(OpenStackAmuletUtils, self).__init__(log_level)
+
+    def validate_endpoint_data(self, endpoints, admin_port, internal_port,
+                               public_port, expected):
+        """Validate actual endpoint data vs expected endpoint data. The ports
+           are used to find the matching endpoint."""
+        found = False
+        for ep in endpoints:
+            self.log.debug('endpoint: {}'.format(repr(ep)))
+            if admin_port in ep.adminurl and internal_port in ep.internalurl \
+               and public_port in ep.publicurl:
+                found = True
+                actual = {'id': ep.id,
+                          'region': ep.region,
+                          'adminurl': ep.adminurl,
+                          'internalurl': ep.internalurl,
+                          'publicurl': ep.publicurl,
+                          'service_id': ep.service_id}
+                ret = self._validate_dict_data(expected, actual)
+                if ret:
+                    return 'unexpected endpoint data - {}'.format(ret)
+
+        if not found:
+            return 'endpoint not found'
+
+    def validate_svc_catalog_endpoint_data(self, expected, actual):
+        """Validate a list of actual service catalog endpoints vs a list of
+           expected service catalog endpoints."""
+        self.log.debug('actual: {}'.format(repr(actual)))
+        for k, v in expected.iteritems():
+            if k in actual:
+                ret = self._validate_dict_data(expected[k][0], actual[k][0])
+                if ret:
+                    return self.endpoint_error(k, ret)
+            else:
+                return "endpoint {} does not exist".format(k)
+        return ret
+
+    def validate_tenant_data(self, expected, actual):
+        """Validate a list of actual tenant data vs list of expected tenant
+           data."""
+        self.log.debug('actual: {}'.format(repr(actual)))
+        for e in expected:
+            found = False
+            for act in actual:
+                a = {'enabled': act.enabled, 'description': act.description,
+                     'name': act.name, 'id': act.id}
+                if e['name'] == a['name']:
+                    found = True
+                    ret = self._validate_dict_data(e, a)
+                    if ret:
+                        return "unexpected tenant data - {}".format(ret)
+            if not found:
+                return "tenant {} does not exist".format(e.name)
+        return ret
+
+    def validate_role_data(self, expected, actual):
+        """Validate a list of actual role data vs a list of expected role
+           data."""
+        self.log.debug('actual: {}'.format(repr(actual)))
+        for e in expected:
+            found = False
+            for act in actual:
+                a = {'name': act.name, 'id': act.id}
+                if e['name'] == a['name']:
+                    found = True
+                    ret = self._validate_dict_data(e, a)
+                    if ret:
+                        return "unexpected role data - {}".format(ret)
+            if not found:
+                return "role {} does not exist".format(e.name)
+        return ret
+
+    def validate_user_data(self, expected, actual):
+        """Validate a list of actual user data vs a list of expected user
+           data."""
+        self.log.debug('actual: {}'.format(repr(actual)))
+        for e in expected:
+            found = False
+            for act in actual:
+                a = {'enabled': act.enabled, 'name': act.name,
+                     'email': act.email, 'tenantId': act.tenantId,
+                     'id': act.id}
+                if e['name'] == a['name']:
+                    found = True
+                    ret = self._validate_dict_data(e, a)
+                    if ret:
+                        return "unexpected user data - {}".format(ret)
+            if not found:
+                return "user {} does not exist".format(e.name)
+        return ret
+
+    def validate_flavor_data(self, expected, actual):
+        """Validate a list of actual flavors vs a list of expected flavors."""
+        self.log.debug('actual: {}'.format(repr(actual)))
+        act = [a.name for a in actual]
+        return self._validate_list_data(expected, act)
+
+    def tenant_exists(self, keystone, tenant):
+        """Return True if tenant exists"""
+        return tenant in [t.name for t in keystone.tenants.list()]
+
+    def authenticate_keystone_admin(self, keystone_sentry, user, password,
+                                    tenant):
+        """Authenticates admin user with the keystone admin endpoint."""
+        service_ip = \
+            keystone_sentry.relation('shared-db',
+                                     'mysql:shared-db')['private-address']
+        ep = "http://{}:35357/v2.0".format(service_ip.strip().decode('utf-8'))
+        return keystone_client.Client(username=user, password=password,
+                                      tenant_name=tenant, auth_url=ep)
+
+    def authenticate_keystone_user(self, keystone, user, password, tenant):
+        """Authenticates a regular user with the keystone public endpoint."""
+        ep = keystone.service_catalog.url_for(service_type='identity',
+                                              endpoint_type='publicURL')
+        return keystone_client.Client(username=user, password=password,
+                                      tenant_name=tenant, auth_url=ep)
+
+    def authenticate_glance_admin(self, keystone):
+        """Authenticates admin user with glance."""
+        ep = keystone.service_catalog.url_for(service_type='image',
+                                              endpoint_type='adminURL')
+        return glance_client.Client(ep, token=keystone.auth_token)
+
+    def authenticate_nova_user(self, keystone, user, password, tenant):
+        """Authenticates a regular user with nova-api."""
+        ep = keystone.service_catalog.url_for(service_type='identity',
+                                              endpoint_type='publicURL')
+        return nova_client.Client(username=user, api_key=password,
+                                  project_id=tenant, auth_url=ep)
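Not part of the diff: a sketch of how a charm's amulet tests might chain the helpers above. The sentry object, credentials and flavour name are illustrative assumptions, not values taken from this commit.

from charmhelpers.contrib.openstack.amulet.utils import (
    OpenStackAmuletUtils, DEBUG
)

u = OpenStackAmuletUtils(DEBUG)


def check_flavors(keystone_sentry):
    # keystone_sentry: an amulet sentry unit for the keystone service,
    # obtained from the deployment in a real test (assumed here).
    keystone = u.authenticate_keystone_admin(keystone_sentry, user='admin',
                                             password='openstack',
                                             tenant='admin')
    nova = u.authenticate_nova_user(keystone, user='demo',
                                    password='password', tenant='demo')
    # Returns an error string if an expected flavour is missing.
    return u.validate_flavor_data(['m1.tiny'], nova.flavors.list())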
@@ -243,23 +243,31 @@ class IdentityServiceContext(OSContextGenerator):


 class AMQPContext(OSContextGenerator):
-    interfaces = ['amqp']

-    def __init__(self, ssl_dir=None):
+    def __init__(self, ssl_dir=None, rel_name='amqp', relation_prefix=None):
         self.ssl_dir = ssl_dir
+        self.rel_name = rel_name
+        self.relation_prefix = relation_prefix
+        self.interfaces = [rel_name]

     def __call__(self):
         log('Generating template context for amqp')
         conf = config()
+        user_setting = 'rabbit-user'
+        vhost_setting = 'rabbit-vhost'
+        if self.relation_prefix:
+            user_setting = self.relation_prefix + '-rabbit-user'
+            vhost_setting = self.relation_prefix + '-rabbit-vhost'
+
         try:
-            username = conf['rabbit-user']
-            vhost = conf['rabbit-vhost']
+            username = conf[user_setting]
+            vhost = conf[vhost_setting]
         except KeyError as e:
             log('Could not generate shared_db context. '
                 'Missing required charm config options: %s.' % e)
             raise OSContextError
         ctxt = {}
-        for rid in relation_ids('amqp'):
+        for rid in relation_ids(self.rel_name):
             ha_vip_only = False
             for unit in related_units(rid):
                 if relation_get('clustered', rid=rid, unit=unit):
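Not part of the diff: how a charm can now build two independent AMQP contexts from the same config. This mirrors the register_configs() change further down; the ssl_dir paths here are illustrative.

from charmhelpers.contrib.openstack import context

# Default broker: reads rabbit-user/rabbit-vhost and walks the 'amqp' relation.
amqp_ctxt = context.AMQPContext(ssl_dir='/etc/neutron')

# Nova-specific broker: reads nova-rabbit-user/nova-rabbit-vhost from charm
# config and walks the 'amqp-nova' relation instead.
amqp_nova_ctxt = context.AMQPContext(ssl_dir='/etc/nova',
                                     rel_name='amqp-nova',
                                     relation_prefix='nova')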
@@ -541,6 +549,26 @@ class NeutronContext(OSContextGenerator):

         return nvp_ctxt

+    def n1kv_ctxt(self):
+        driver = neutron_plugin_attribute(self.plugin, 'driver',
+                                          self.network_manager)
+        n1kv_config = neutron_plugin_attribute(self.plugin, 'config',
+                                               self.network_manager)
+        n1kv_ctxt = {
+            'core_plugin': driver,
+            'neutron_plugin': 'n1kv',
+            'neutron_security_groups': self.neutron_security_groups,
+            'local_ip': unit_private_ip(),
+            'config': n1kv_config,
+            'vsm_ip': config('n1kv-vsm-ip'),
+            'vsm_username': config('n1kv-vsm-username'),
+            'vsm_password': config('n1kv-vsm-password'),
+            'restrict_policy_profiles': config(
+                'n1kv_restrict_policy_profiles'),
+        }
+
+        return n1kv_ctxt
+
     def neutron_ctxt(self):
         if https():
             proto = 'https'
@@ -572,6 +600,8 @@ class NeutronContext(OSContextGenerator):
             ctxt.update(self.ovs_ctxt())
         elif self.plugin in ['nvp', 'nsx']:
             ctxt.update(self.nvp_ctxt())
+        elif self.plugin == 'n1kv':
+            ctxt.update(self.n1kv_ctxt())

         alchemy_flags = config('neutron-alchemy-flags')
         if alchemy_flags:
@@ -128,6 +128,20 @@ def neutron_plugins():
             'server_packages': ['neutron-server',
                                 'neutron-plugin-vmware'],
             'server_services': ['neutron-server']
+        },
+        'n1kv': {
+            'config': '/etc/neutron/plugins/cisco/cisco_plugins.ini',
+            'driver': 'neutron.plugins.cisco.network_plugin.PluginV2',
+            'contexts': [
+                context.SharedDBContext(user=config('neutron-database-user'),
+                                        database=config('neutron-database'),
+                                        relation_prefix='neutron',
+                                        ssl_dir=NEUTRON_CONF_DIR)],
+            'services': [],
+            'packages': [['neutron-plugin-cisco']],
+            'server_packages': ['neutron-server',
+                                'neutron-plugin-cisco'],
+            'server_services': ['neutron-server']
         }
     }
     if release >= 'icehouse':
@@ -3,7 +3,6 @@
 # Common python helper functions used for OpenStack charms.
 from collections import OrderedDict

-import apt_pkg as apt
 import subprocess
 import os
 import socket
@@ -85,6 +84,8 @@ def get_os_codename_install_source(src):
     '''Derive OpenStack release codename from a given installation source.'''
     ubuntu_rel = lsb_release()['DISTRIB_CODENAME']
     rel = ''
+    if src is None:
+        return rel
     if src in ['distro', 'distro-proposed']:
         try:
             rel = UBUNTU_OPENSTACK_RELEASE[ubuntu_rel]
@@ -132,6 +133,7 @@ def get_os_version_codename(codename):

 def get_os_codename_package(package, fatal=True):
     '''Derive OpenStack release codename from an installed package.'''
+    import apt_pkg as apt
     apt.init()

     # Tell apt to build an in-memory cache to prevent race conditions (if
@@ -189,7 +191,7 @@ def get_os_version_package(pkg, fatal=True):
     for version, cname in vers_map.iteritems():
         if cname == codename:
             return version
-    #e = "Could not determine OpenStack version for package: %s" % pkg
+    # e = "Could not determine OpenStack version for package: %s" % pkg
     # error_out(e)

@@ -325,6 +327,7 @@ def openstack_upgrade_available(package):

     """

+    import apt_pkg as apt
     src = config('openstack-origin')
     cur_vers = get_os_version_package(package)
     available_vers = get_os_version_install_source(src)
@@ -37,6 +37,7 @@ def zap_disk(block_device):
     check_call(['dd', 'if=/dev/zero', 'of=%s' % (block_device),
                 'bs=512', 'count=100', 'seek=%s' % (gpt_end)])

+
 def is_device_mounted(device):
     '''Given a device path, return True if that device is mounted, and False
     if it isn't.

hooks/charmhelpers/core/fstab.py (new file, 116 lines)
@@ -0,0 +1,116 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+__author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
+
+import os
+
+
+class Fstab(file):
+    """This class extends file in order to implement a file reader/writer
+    for file `/etc/fstab`
+    """
+
+    class Entry(object):
+        """Entry class represents a non-comment line on the `/etc/fstab` file
+        """
+        def __init__(self, device, mountpoint, filesystem,
+                     options, d=0, p=0):
+            self.device = device
+            self.mountpoint = mountpoint
+            self.filesystem = filesystem
+
+            if not options:
+                options = "defaults"
+
+            self.options = options
+            self.d = d
+            self.p = p
+
+        def __eq__(self, o):
+            return str(self) == str(o)
+
+        def __str__(self):
+            return "{} {} {} {} {} {}".format(self.device,
+                                              self.mountpoint,
+                                              self.filesystem,
+                                              self.options,
+                                              self.d,
+                                              self.p)
+
+    DEFAULT_PATH = os.path.join(os.path.sep, 'etc', 'fstab')
+
+    def __init__(self, path=None):
+        if path:
+            self._path = path
+        else:
+            self._path = self.DEFAULT_PATH
+        file.__init__(self, self._path, 'r+')
+
+    def _hydrate_entry(self, line):
+        # NOTE: use split with no arguments to split on any
+        #       whitespace including tabs
+        return Fstab.Entry(*filter(
+            lambda x: x not in ('', None),
+            line.strip("\n").split()))
+
+    @property
+    def entries(self):
+        self.seek(0)
+        for line in self.readlines():
+            try:
+                if not line.startswith("#"):
+                    yield self._hydrate_entry(line)
+            except ValueError:
+                pass
+
+    def get_entry_by_attr(self, attr, value):
+        for entry in self.entries:
+            e_attr = getattr(entry, attr)
+            if e_attr == value:
+                return entry
+        return None
+
+    def add_entry(self, entry):
+        if self.get_entry_by_attr('device', entry.device):
+            return False
+
+        self.write(str(entry) + '\n')
+        self.truncate()
+        return entry
+
+    def remove_entry(self, entry):
+        self.seek(0)
+
+        lines = self.readlines()
+
+        found = False
+        for index, line in enumerate(lines):
+            if not line.startswith("#"):
+                if self._hydrate_entry(line) == entry:
+                    found = True
+                    break
+
+        if not found:
+            return False
+
+        lines.remove(line)
+
+        self.seek(0)
+        self.write(''.join(lines))
+        self.truncate()
+        return True
+
+    @classmethod
+    def remove_by_mountpoint(cls, mountpoint, path=None):
+        fstab = cls(path=path)
+        entry = fstab.get_entry_by_attr('mountpoint', mountpoint)
+        if entry:
+            return fstab.remove_entry(entry)
+        return False
+
+    @classmethod
+    def add(cls, device, mountpoint, filesystem, options=None, path=None):
+        return cls(path=path).add_entry(Fstab.Entry(device,
+                                                    mountpoint, filesystem,
+                                                    options=options))
@@ -12,11 +12,11 @@ import random
 import string
 import subprocess
 import hashlib
-import apt_pkg

 from collections import OrderedDict

 from hookenv import log
+from fstab import Fstab


 def service_start(service_name):
@@ -35,7 +35,8 @@ def service_restart(service_name):


 def service_reload(service_name, restart_on_failure=False):
-    """Reload a system service, optionally falling back to restart if reload fails"""
+    """Reload a system service, optionally falling back to restart if
+       reload fails"""
     service_result = service('reload', service_name)
     if not service_result and restart_on_failure:
         service_result = service('restart', service_name)
@@ -144,7 +145,19 @@ def write_file(path, content, owner='root', group='root', perms=0444):
         target.write(content)


-def mount(device, mountpoint, options=None, persist=False):
+def fstab_remove(mp):
+    """Remove the given mountpoint entry from /etc/fstab
+    """
+    return Fstab.remove_by_mountpoint(mp)
+
+
+def fstab_add(dev, mp, fs, options=None):
+    """Adds the given device entry to the /etc/fstab file
+    """
+    return Fstab.add(dev, mp, fs, options=options)
+
+
+def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"):
     """Mount a filesystem at a particular mountpoint"""
     cmd_args = ['mount']
     if options is not None:
@@ -155,9 +168,9 @@ def mount(device, mountpoint, options=None, persist=False):
     except subprocess.CalledProcessError, e:
         log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))
         return False
+
     if persist:
-        # TODO: update fstab
-        pass
+        return fstab_add(device, mountpoint, filesystem, options=options)
     return True

@@ -169,9 +182,9 @@ def umount(mountpoint, persist=False):
     except subprocess.CalledProcessError, e:
         log('Error unmounting {}\n{}'.format(mountpoint, e.output))
         return False
+
     if persist:
-        # TODO: update fstab
-        pass
+        return fstab_remove(mountpoint)
     return True

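Not part of the diff: with the Fstab helper wired in, persist=True now updates /etc/fstab instead of hitting the old no-op TODO. The device and mountpoint below are illustrative.

from charmhelpers.core.host import mount, umount

# Mounts the device and records it in /etc/fstab with the given filesystem.
mount('/dev/vdb', '/mnt/data', options='noatime',
      persist=True, filesystem='ext4')

# Unmounts and drops the corresponding /etc/fstab entry.
umount('/mnt/data', persist=True)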
@@ -304,6 +317,7 @@ def cmp_pkgrevno(package, revno, pkgcache=None):
     0 => Installed revno is the same as supplied arg
     -1 => Installed revno is less than supplied arg
     '''
+    import apt_pkg
     if not pkgcache:
         apt_pkg.init()
         pkgcache = apt_pkg.Cache()
@@ -13,7 +13,6 @@ from charmhelpers.core.hookenv import (
     config,
     log,
 )
-import apt_pkg
 import os

@@ -117,6 +116,7 @@ class BaseFetchHandler(object):

 def filter_installed_packages(packages):
     """Returns a list of packages that require installation"""
+    import apt_pkg
     apt_pkg.init()

     # Tell apt to build an in-memory cache to prevent race conditions (if
@@ -39,7 +39,8 @@ class BzrUrlFetchHandler(BaseFetchHandler):
     def install(self, source):
         url_parts = self.parse_url(source)
         branch_name = url_parts.path.strip("/").split("/")[-1]
-        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched", branch_name)
+        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
+                                branch_name)
         if not os.path.exists(dest_dir):
             mkdir(dest_dir, perms=0755)
         try:
@@ -83,6 +83,8 @@ def config_changed():
             pgsql_db_joined(relation_id=r_id)
         for r_id in relation_ids('amqp'):
             amqp_joined(relation_id=r_id)
+        for r_id in relation_ids('amqp-nova'):
+            amqp_nova_joined(relation_id=r_id)
     if valid_plugin():
         CONFIGS.write_all()
         configure_ovs()
@@ -123,6 +125,13 @@ def pgsql_db_joined(relation_id=None):
                  relation_id=relation_id)


+@hooks.hook('amqp-nova-relation-joined')
+def amqp_nova_joined(relation_id=None):
+    relation_set(relation_id=relation_id,
+                 username=config('nova-rabbit-user'),
+                 vhost=config('nova-rabbit-vhost'))
+
+
 @hooks.hook('amqp-relation-joined')
 def amqp_joined(relation_id=None):
     relation_set(relation_id=relation_id,
@@ -130,6 +139,16 @@ def amqp_joined(relation_id=None):
                  vhost=config('rabbit-vhost'))


+@hooks.hook('amqp-nova-relation-departed')
+@hooks.hook('amqp-nova-relation-changed')
+@restart_on_change(restart_map())
+def amqp_nova_changed():
+    if 'amqp-nova' not in CONFIGS.complete_contexts():
+        log('amqp relation incomplete. Peer not ready?')
+        return
+    CONFIGS.write_all()
+
+
 @hooks.hook('amqp-relation-departed')
 @restart_on_change(restart_map())
 def amqp_departed():
@@ -7,7 +7,8 @@ from charmhelpers.core.hookenv import (
     log,
     config,
     relations_of_type,
-    unit_private_ip
+    unit_private_ip,
+    is_relation_made,
 )
 from charmhelpers.fetch import (
     apt_upgrade,
@@ -183,8 +184,7 @@ NOVA_CONF = "/etc/nova/nova.conf"

 NOVA_CONFIG_FILES = {
     NOVA_CONF: {
-        'hook_contexts': [context.AMQPContext(ssl_dir=NOVA_CONF_DIR),
-                          context.SharedDBContext(ssl_dir=NOVA_CONF_DIR),
+        'hook_contexts': [context.SharedDBContext(ssl_dir=NOVA_CONF_DIR),
                           context.PostgresqlDBContext(),
                           NetworkServiceContext(),
                           QuantumGatewayContext(),
@@ -352,6 +352,17 @@ def register_configs():
             if drop_config in CONFIG_FILES[name][plugin]:
                 CONFIG_FILES[name][plugin].pop(drop_config)

+    if is_relation_made('amqp-nova'):
+        amqp_nova_ctxt = context.AMQPContext(
+            ssl_dir=NOVA_CONF_DIR,
+            rel_name='amqp-nova',
+            relation_prefix='nova')
+    else:
+        amqp_nova_ctxt = context.AMQPContext(
+            ssl_dir=NOVA_CONF_DIR,
+            rel_name='amqp')
+    CONFIG_FILES[name][plugin][NOVA_CONF][
+        'hook_contexts'].append(amqp_nova_ctxt)
     for conf in CONFIG_FILES[name][plugin]:
         configs.register(conf,
                          CONFIG_FILES[name][plugin][conf]['hook_contexts'])
@@ -25,6 +25,8 @@ requires:
     interface: pgsql
   amqp:
     interface: rabbitmq
+  amqp-nova:
+    interface: rabbitmq
 peers:
   cluster:
     interface: quantum-gateway-ha
@@ -93,6 +93,24 @@ class TestQuantumHooks(CharmTestCase):
         self.assertTrue(self.log.called)
         _exit.assert_called_with(1)

+    def test_config_changed(self):
+        def mock_relids(rel):
+            return ['relid']
+        self.openstack_upgrade_available.return_value = True
+        self.valid_plugin.return_value = True
+        self.relation_ids.side_effect = mock_relids
+        _db_joined = self.patch('db_joined')
+        _pgsql_db_joined = self.patch('pgsql_db_joined')
+        _amqp_joined = self.patch('amqp_joined')
+        _amqp_nova_joined = self.patch('amqp_nova_joined')
+        self._call_hook('config-changed')
+        self.assertTrue(self.do_openstack_upgrade.called)
+        self.assertTrue(self.configure_ovs.called)
+        self.assertTrue(_db_joined.called)
+        self.assertTrue(_pgsql_db_joined.called)
+        self.assertTrue(_amqp_joined.called)
+        self.assertTrue(_amqp_nova_joined.called)
+
     def test_config_changed_upgrade(self):
         self.openstack_upgrade_available.return_value = True
         self.valid_plugin.return_value = True
@@ -164,6 +182,34 @@ class TestQuantumHooks(CharmTestCase):
         self._call_hook('amqp-relation-changed')
         self.assertTrue(self.CONFIGS.write_all.called)

+    def test_amqp_departed_no_rel(self):
+        self.CONFIGS.complete_contexts.return_value = []
+        self._call_hook('amqp-relation-departed')
+        self.assertFalse(self.CONFIGS.write_all.called)
+
+    def test_amqp_departed(self):
+        self.CONFIGS.complete_contexts.return_value = ['amqp']
+        self._call_hook('amqp-relation-departed')
+        self.assertTrue(self.CONFIGS.write_all.called)
+
+    def test_amqp_nova_joined(self):
+        self._call_hook('amqp-nova-relation-joined')
+        self.relation_set.assert_called_with(
+            username='nova',
+            vhost='openstack',
+            relation_id=None
+        )
+
+    def test_amqp_nova_changed_no_rel(self):
+        self.CONFIGS.complete_contexts.return_value = []
+        self._call_hook('amqp-nova-relation-changed')
+        self.assertFalse(self.CONFIGS.write_all.called)
+
+    def test_amqp_nova_changed(self):
+        self.CONFIGS.complete_contexts.return_value = ['amqp-nova']
+        self._call_hook('amqp-nova-relation-changed')
+        self.assertTrue(self.CONFIGS.write_all.called)
+
     def test_shared_db_changed(self):
         self._call_hook('shared-db-relation-changed')
         self.assertTrue(self.CONFIGS.write_all.called)
@@ -1,6 +1,5 @@
 from mock import MagicMock, call, patch
 import collections
-
 import charmhelpers.contrib.openstack.templating as templating

 templating.OSConfigRenderer = MagicMock()
@@ -42,7 +41,8 @@ TO_PATCH = [
     'service_stop',
     'determine_dkms_package',
     'service_restart',
-    'remap_plugin'
+    'remap_plugin',
+    'is_relation_made',
 ]

@@ -62,6 +62,7 @@ class TestQuantumUtils(CharmTestCase):
         super(TestQuantumUtils, self).setUp(quantum_utils, TO_PATCH)
         self.networking_name.return_value = 'neutron'
         self.headers_package.return_value = 'linux-headers-2.6.18'
+
         def noop(value):
             return value
         self.remap_plugin.side_effect = noop
@@ -138,6 +139,7 @@ class TestQuantumUtils(CharmTestCase):

     def test_do_openstack_upgrade(self):
         self.config.side_effect = self.test_config.get
+        self.is_relation_made.return_value = False
         self.test_config.set('openstack-origin', 'cloud:precise-havana')
         self.test_config.set('plugin', 'ovs')
         self.get_os_codename_install_source.return_value = 'havana'
@@ -157,6 +159,25 @@ class TestQuantumUtils(CharmTestCase):

     def test_register_configs_ovs(self):
         self.config.return_value = 'ovs'
+        self.is_relation_made.return_value = False
+        configs = quantum_utils.register_configs()
+        confs = [quantum_utils.NEUTRON_DHCP_AGENT_CONF,
+                 quantum_utils.NEUTRON_METADATA_AGENT_CONF,
+                 quantum_utils.NOVA_CONF,
+                 quantum_utils.NEUTRON_CONF,
+                 quantum_utils.NEUTRON_L3_AGENT_CONF,
+                 quantum_utils.NEUTRON_OVS_PLUGIN_CONF,
+                 quantum_utils.EXT_PORT_CONF]
+        for conf in confs:
+            configs.register.assert_any_call(
+                conf,
+                quantum_utils.CONFIG_FILES['neutron'][quantum_utils.OVS][conf]
+                ['hook_contexts']
+            )
+
+    def test_register_configs_amqp_nova(self):
+        self.config.return_value = 'ovs'
+        self.is_relation_made.return_value = True
         configs = quantum_utils.register_configs()
         confs = [quantum_utils.NEUTRON_DHCP_AGENT_CONF,
                  quantum_utils.NEUTRON_METADATA_AGENT_CONF,
@@ -206,6 +227,7 @@ class TestQuantumUtils(CharmTestCase):

     def test_register_configs_nvp(self):
         self.config.return_value = 'nvp'
+        self.is_relation_made.return_value = False
         configs = quantum_utils.register_configs()
         confs = [quantum_utils.NEUTRON_DHCP_AGENT_CONF,
                  quantum_utils.NEUTRON_METADATA_AGENT_CONF,
@@ -273,6 +295,7 @@ class TestQuantumUtils(CharmTestCase):

     def test_register_configs_pre_install(self):
         self.config.return_value = 'ovs'
+        self.is_relation_made.return_value = False
         self.networking_name.return_value = 'quantum'
         configs = quantum_utils.register_configs()
         confs = [quantum_utils.QUANTUM_DHCP_AGENT_CONF,
@@ -79,9 +79,9 @@ class TestConfig(object):
         return self.config

     def set(self, attr, value):
         if attr not in self.config:
             raise KeyError
         self.config[attr] = value


 class TestRelation(object):