Resync helpers, refactor code to use cpu calcs from charmhelpers

commit 9d042778d2
parent 40e545000f

@@ -121,11 +121,12 @@ class OpenStackAmuletDeployment(AmuletDeployment):

         # Charms which should use the source config option
         use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph',
-                      'ceph-osd', 'ceph-radosgw']
+                      'ceph-osd', 'ceph-radosgw', 'ceph-mon']

         # Charms which can not use openstack-origin, ie. many subordinates
         no_origin = ['cinder-ceph', 'hacluster', 'neutron-openvswitch', 'nrpe',
-                     'openvswitch-odl', 'neutron-api-odl', 'odl-controller']
+                     'openvswitch-odl', 'neutron-api-odl', 'odl-controller',
+                     'cinder-backup']

         if self.openstack:
             for svc in services:
@@ -90,6 +90,12 @@ from charmhelpers.contrib.network.ip import (
 from charmhelpers.contrib.openstack.utils import get_host_ip
 from charmhelpers.core.unitdata import kv

+try:
+    import psutil
+except ImportError:
+    apt_install('python-psutil', fatal=True)
+    import psutil
+
 CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt'
 ADDRESS_TYPES = ['admin', 'internal', 'public']

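
Aside (not part of the diff): the block added above is the usual charm-helpers idiom of installing a missing Python dependency at import time and then retrying the import. A minimal standalone sketch, with apt_install stubbed in place of charmhelpers.fetch.apt_install:

    import subprocess

    def apt_install(packages, fatal=False):
        # stand-in for charmhelpers.fetch.apt_install; effectively always
        # fatal here because check_call raises on a non-zero exit
        if not isinstance(packages, list):
            packages = [packages]
        subprocess.check_call(['apt-get', '--yes', 'install'] + packages)

    try:
        import psutil
    except ImportError:
        # fresh units may not have the package yet; install and import again
        apt_install('python-psutil', fatal=True)
        import psutil
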
@@ -1258,13 +1264,11 @@ class WorkerConfigContext(OSContextGenerator):

     @property
     def num_cpus(self):
-        try:
-            from psutil import NUM_CPUS
-        except ImportError:
-            apt_install('python-psutil', fatal=True)
-            from psutil import NUM_CPUS
-
-        return NUM_CPUS
+        # NOTE: use cpu_count if present (16.04 support)
+        if hasattr(psutil, 'cpu_count'):
+            return psutil.cpu_count()
+        else:
+            return psutil.NUM_CPUS

     def __call__(self):
         multiplier = config('worker-multiplier') or 0
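
Aside (not part of the diff): psutil 2.x, the version shipped on Ubuntu 16.04, replaced the module-level NUM_CPUS constant with a cpu_count() function, hence the hasattr() probe above. A rough, self-contained sketch of the fallback plus a worker count derived from a worker-multiplier option (illustrative only; the exact arithmetic lives in WorkerConfigContext in charm-helpers):

    import psutil

    def num_cpus():
        # psutil >= 2.0 exposes cpu_count(); older releases only provide
        # the NUM_CPUS constant
        if hasattr(psutil, 'cpu_count'):
            return psutil.cpu_count()
        return psutil.NUM_CPUS

    def worker_count(multiplier=0):
        # scale the CPU count by the charm's worker-multiplier option
        return num_cpus() * multiplier

    print(worker_count(multiplier=2))
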
@@ -50,7 +50,7 @@ def determine_dkms_package():
     if kernel_version() >= (3, 13):
         return []
     else:
-        return ['openvswitch-datapath-dkms']
+        return [headers_package(), 'openvswitch-datapath-dkms']


 # legacy
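
Aside (not part of the diff): headers_package() is now folded into determine_dkms_package(), which is why the plugin dictionaries below can pass its return value straight through instead of prepending [headers_package()] themselves. A self-contained sketch with both helpers stubbed:

    def headers_package():
        # stand-in for the charm-helpers helper that picks the
        # linux-headers package matching the running kernel
        return 'linux-headers-generic'

    def determine_dkms_package(kernel=(3, 12)):
        # stand-in: kernels >= 3.13 ship an in-tree openvswitch module,
        # so neither the DKMS package nor the headers are needed
        if kernel >= (3, 13):
            return []
        return [headers_package(), 'openvswitch-datapath-dkms']

    packages = [determine_dkms_package(),
                ['neutron-plugin-openvswitch-agent']]
    print(packages)
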
@@ -70,7 +70,7 @@ def quantum_plugins():
                                         relation_prefix='neutron',
                                         ssl_dir=QUANTUM_CONF_DIR)],
             'services': ['quantum-plugin-openvswitch-agent'],
-            'packages': [[headers_package()] + determine_dkms_package(),
+            'packages': [determine_dkms_package(),
                          ['quantum-plugin-openvswitch-agent']],
             'server_packages': ['quantum-server',
                                 'quantum-plugin-openvswitch'],
@@ -111,7 +111,7 @@ def neutron_plugins():
                                         relation_prefix='neutron',
                                         ssl_dir=NEUTRON_CONF_DIR)],
             'services': ['neutron-plugin-openvswitch-agent'],
-            'packages': [[headers_package()] + determine_dkms_package(),
+            'packages': [determine_dkms_package(),
                          ['neutron-plugin-openvswitch-agent']],
             'server_packages': ['neutron-server',
                                 'neutron-plugin-openvswitch'],
@@ -155,7 +155,7 @@ def neutron_plugins():
                                         relation_prefix='neutron',
                                         ssl_dir=NEUTRON_CONF_DIR)],
             'services': [],
-            'packages': [[headers_package()] + determine_dkms_package(),
+            'packages': [determine_dkms_package(),
                          ['neutron-plugin-cisco']],
             'server_packages': ['neutron-server',
                                 'neutron-plugin-cisco'],
@@ -174,7 +174,7 @@ def neutron_plugins():
                          'neutron-dhcp-agent',
                          'nova-api-metadata',
                          'etcd'],
-            'packages': [[headers_package()] + determine_dkms_package(),
+            'packages': [determine_dkms_package(),
                          ['calico-compute',
                           'bird',
                           'neutron-dhcp-agent',
@@ -219,7 +219,7 @@ def neutron_plugins():
                                         relation_prefix='neutron',
                                         ssl_dir=NEUTRON_CONF_DIR)],
             'services': [],
-            'packages': [[headers_package()] + determine_dkms_package()],
+            'packages': [determine_dkms_package()],
             'server_packages': ['neutron-server',
                                 'python-neutron-plugin-midonet'],
             'server_services': ['neutron-server']
@@ -233,6 +233,14 @@ def neutron_plugins():
                                              'neutron-plugin-ml2']
     # NOTE: patch in vmware renames nvp->nsx for icehouse onwards
     plugins['nvp'] = plugins['nsx']
+    if release >= 'kilo':
+        plugins['midonet']['driver'] = (
+            'neutron.plugins.midonet.plugin.MidonetPluginV2')
+    if release >= 'liberty':
+        midonet_origin = config('midonet-origin')
+        if midonet_origin is not None and midonet_origin[4:5] == '1':
+            plugins['midonet']['driver'] = (
+                'midonet.neutron.plugin_v1.MidonetPluginV2')
     return plugins


@@ -105,16 +105,26 @@ OPENSTACK_CODENAMES = OrderedDict([

 # The ugly duckling - must list releases oldest to newest
 SWIFT_CODENAMES = OrderedDict([
-    ('diablo', ['1.4.3']),
-    ('essex', ['1.4.8']),
-    ('folsom', ['1.7.4']),
-    ('grizzly', ['1.7.6', '1.7.7', '1.8.0']),
-    ('havana', ['1.9.0', '1.9.1', '1.10.0']),
-    ('icehouse', ['1.11.0', '1.12.0', '1.13.0', '1.13.1']),
-    ('juno', ['2.0.0', '2.1.0', '2.2.0']),
-    ('kilo', ['2.2.1', '2.2.2']),
-    ('liberty', ['2.3.0', '2.4.0', '2.5.0']),
-    ('mitaka', ['2.5.0']),
+    ('diablo',
+        ['1.4.3']),
+    ('essex',
+        ['1.4.8']),
+    ('folsom',
+        ['1.7.4']),
+    ('grizzly',
+        ['1.7.6', '1.7.7', '1.8.0']),
+    ('havana',
+        ['1.9.0', '1.9.1', '1.10.0']),
+    ('icehouse',
+        ['1.11.0', '1.12.0', '1.13.0', '1.13.1']),
+    ('juno',
+        ['2.0.0', '2.1.0', '2.2.0']),
+    ('kilo',
+        ['2.2.1', '2.2.2']),
+    ('liberty',
+        ['2.3.0', '2.4.0', '2.5.0']),
+    ('mitaka',
+        ['2.5.0']),
 ])

 # >= Liberty version->codename mapping
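
Aside (not part of the diff): the reformatted table keeps one list of point releases per codename, and '2.5.0' now appears under both liberty and mitaka. A hedged sketch of how a reverse lookup over such a mapping behaves (the real lookup is elsewhere in charm-helpers and may differ):

    from collections import OrderedDict

    SWIFT_CODENAMES = OrderedDict([
        ('kilo', ['2.2.1', '2.2.2']),
        ('liberty', ['2.3.0', '2.4.0', '2.5.0']),
        ('mitaka', ['2.5.0']),
    ])

    def codename_for(version):
        # scans oldest to newest, so an ambiguous version such as '2.5.0'
        # resolves to the oldest codename that lists it
        for codename, versions in SWIFT_CODENAMES.items():
            if version in versions:
                return codename
        return None

    print(codename_for('2.2.2'))   # kilo
    print(codename_for('2.5.0'))   # liberty (also listed under mitaka)
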
@@ -138,7 +138,8 @@ def service_running(service_name):
         except subprocess.CalledProcessError:
             return False
         else:
-            if ("start/running" in output or "is running" in output):
+            if ("start/running" in output or "is running" in output or
+                    "up and running" in output):
                 return True
             else:
                 return False
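
Aside (not part of the diff): the extra "up and running" test simply widens the set of `service <name> status` outputs that count as running, since different init scripts phrase their status lines differently. The string check in isolation:

    def looks_running(output):
        # mirrors the condition above: upstart prints "start/running",
        # many SysV init scripts print "is running", and some print
        # "... up and running"
        return ("start/running" in output or "is running" in output or
                "up and running" in output)

    print(looks_running("swift-object start/running, process 1234"))  # True
    print(looks_running("rsync daemon is up and running"))            # True
    print(looks_running("nova-compute stop/waiting"))                 # False
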
@@ -160,13 +161,13 @@ SYSTEMD_SYSTEM = '/run/systemd/system'


 def init_is_systemd():
+    """Return True if the host system uses systemd, False otherwise."""
     return os.path.isdir(SYSTEMD_SYSTEM)


 def adduser(username, password=None, shell='/bin/bash', system_user=False,
             primary_group=None, secondary_groups=None):
-    """
-    Add a user to the system.
+    """Add a user to the system.

     Will log but otherwise succeed if the user already exists.

@@ -174,7 +175,7 @@ def adduser(username, password=None, shell='/bin/bash', system_user=False,
     :param str password: Password for user; if ``None``, create a system user
     :param str shell: The default shell for the user
     :param bool system_user: Whether to create a login or system user
-    :param str primary_group: Primary group for user; defaults to their username
+    :param str primary_group: Primary group for user; defaults to username
     :param list secondary_groups: Optional list of additional groups

     :returns: The password database entry struct, as returned by `pwd.getpwnam`
@@ -300,14 +301,12 @@ def write_file(path, content, owner='root', group='root', perms=0o444):


 def fstab_remove(mp):
-    """Remove the given mountpoint entry from /etc/fstab
-    """
+    """Remove the given mountpoint entry from /etc/fstab"""
     return Fstab.remove_by_mountpoint(mp)


 def fstab_add(dev, mp, fs, options=None):
-    """Adds the given device entry to the /etc/fstab file
-    """
+    """Adds the given device entry to the /etc/fstab file"""
     return Fstab.add(dev, mp, fs, options=options)


@@ -363,8 +362,7 @@ def fstab_mount(mountpoint):


 def file_hash(path, hash_type='md5'):
-    """
-    Generate a hash checksum of the contents of 'path' or None if not found.
+    """Generate a hash checksum of the contents of 'path' or None if not found.

     :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`,
                           such as md5, sha1, sha256, sha512, etc.
@@ -379,10 +377,9 @@ def file_hash(path, hash_type='md5'):


 def path_hash(path):
-    """
-    Generate a hash checksum of all files matching 'path'. Standard wildcards
-    like '*' and '?' are supported, see documentation for the 'glob' module for
-    more information.
+    """Generate a hash checksum of all files matching 'path'. Standard
+    wildcards like '*' and '?' are supported, see documentation for the 'glob'
+    module for more information.

     :return: dict: A { filename: hash } dictionary for all matched files.
         Empty if none found.
@@ -394,8 +391,7 @@ def path_hash(path):


 def check_hash(path, checksum, hash_type='md5'):
-    """
-    Validate a file using a cryptographic checksum.
+    """Validate a file using a cryptographic checksum.

     :param str checksum: Value of the checksum used to validate the file.
     :param str hash_type: Hash algorithm used to generate `checksum`.
@@ -410,6 +406,7 @@ def check_hash(path, checksum, hash_type='md5'):


 class ChecksumError(ValueError):
+    """A class derived from Value error to indicate the checksum failed."""
     pass


@@ -515,7 +512,7 @@ def get_bond_master(interface):


 def list_nics(nic_type=None):
-    '''Return a list of nics of given type(s)'''
+    """Return a list of nics of given type(s)"""
     if isinstance(nic_type, six.string_types):
         int_types = [nic_type]
     else:
@@ -557,12 +554,13 @@ def list_nics(nic_type=None):


 def set_nic_mtu(nic, mtu):
-    '''Set MTU on a network interface'''
+    """Set the Maximum Transmission Unit (MTU) on a network interface."""
     cmd = ['ip', 'link', 'set', nic, 'mtu', mtu]
     subprocess.check_call(cmd)


 def get_nic_mtu(nic):
+    """Return the Maximum Transmission Unit (MTU) for a network interface."""
     cmd = ['ip', 'addr', 'show', nic]
     ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
     mtu = ""
@@ -574,6 +572,7 @@ def get_nic_mtu(nic):


 def get_nic_hwaddr(nic):
+    """Return the Media Access Control (MAC) for a network interface."""
     cmd = ['ip', '-o', '-0', 'addr', 'show', nic]
     ip_output = subprocess.check_output(cmd).decode('UTF-8')
     hwaddr = ""
@@ -584,7 +583,7 @@ def get_nic_hwaddr(nic):


 def cmp_pkgrevno(package, revno, pkgcache=None):
-    '''Compare supplied revno with the revno of the installed package
+    """Compare supplied revno with the revno of the installed package

     * 1 => Installed revno is greater than supplied arg
     * 0 => Installed revno is the same as supplied arg
@@ -593,7 +592,7 @@ def cmp_pkgrevno(package, revno, pkgcache=None):
     This function imports apt_cache function from charmhelpers.fetch if
     the pkgcache argument is None. Be sure to add charmhelpers.fetch if
     you call this function, or pass an apt_pkg.Cache() instance.
-    '''
+    """
     import apt_pkg
     if not pkgcache:
         from charmhelpers.fetch import apt_cache
@@ -603,19 +602,27 @@ def cmp_pkgrevno(package, revno, pkgcache=None):


 @contextmanager
-def chdir(d):
+def chdir(directory):
+    """Change the current working directory to a different directory for a code
+    block and return the previous directory after the block exits. Useful to
+    run commands from a specificed directory.
+
+    :param str directory: The directory path to change to for this context.
+    """
     cur = os.getcwd()
     try:
-        yield os.chdir(d)
+        yield os.chdir(directory)
     finally:
         os.chdir(cur)


 def chownr(path, owner, group, follow_links=True, chowntopdir=False):
-    """
-    Recursively change user and group ownership of files and directories
+    """Recursively change user and group ownership of files and directories
     in given path. Doesn't chown path itself by default, only its children.

+    :param str path: The string path to start changing ownership.
+    :param str owner: The owner string to use when looking up the uid.
+    :param str group: The group string to use when looking up the gid.
     :param bool follow_links: Also Chown links if True
     :param bool chowntopdir: Also chown path itself if True
     """
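
Aside (not part of the diff): the renamed chdir() helper is a standard contextlib context manager; usage looks like this (sketch with the same shape as the helper above, rather than importing charm-helpers):

    import os
    from contextlib import contextmanager

    @contextmanager
    def chdir(directory):
        # switch the working directory for the block and restore it
        # afterwards, even if the block raises
        cur = os.getcwd()
        try:
            yield os.chdir(directory)
        finally:
            os.chdir(cur)

    with chdir('/tmp'):
        print(os.getcwd())   # /tmp (or its resolved path)
    print(os.getcwd())       # back to the original directory
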
@@ -639,15 +646,23 @@ def chownr(path, owner, group, follow_links=True, chowntopdir=False):


 def lchownr(path, owner, group):
+    """Recursively change user and group ownership of files and directories
+    in a given path, not following symbolic links. See the documentation for
+    'os.lchown' for more information.
+
+    :param str path: The string path to start changing ownership.
+    :param str owner: The owner string to use when looking up the uid.
+    :param str group: The group string to use when looking up the gid.
+    """
     chownr(path, owner, group, follow_links=False)


 def get_total_ram():
-    '''The total amount of system RAM in bytes.
+    """The total amount of system RAM in bytes.

     This is what is reported by the OS, and may be overcommitted when
     there are multiple containers hosted on the same machine.
-    '''
+    """
     with open('/proc/meminfo', 'r') as f:
         for line in f.readlines():
             if line:
@@ -15,7 +15,7 @@
 # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>.

 import os
-from subprocess import check_call
+from subprocess import check_call, CalledProcessError
 from charmhelpers.fetch import (
     BaseFetchHandler,
     UnhandledSource,
@@ -63,6 +63,8 @@ class GitUrlFetchHandler(BaseFetchHandler):
                                branch_name)
         try:
             self.clone(source, dest_dir, branch, depth)
+        except CalledProcessError as e:
+            raise UnhandledSource(e)
         except OSError as e:
             raise UnhandledSource(e.strerror)
         return dest_dir
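
Aside (not part of the diff): a failing `git clone` exits non-zero, which check_call surfaces as subprocess.CalledProcessError; before this change that exception escaped the fetch handler. The new clause folds it into the same UnhandledSource error already raised for OSError. A self-contained sketch with UnhandledSource stubbed:

    from subprocess import check_call, CalledProcessError

    class UnhandledSource(Exception):
        """Stand-in for charmhelpers.fetch.UnhandledSource."""

    def clone(source, dest_dir):
        try:
            # non-zero exit on bad URLs, auth failures, network errors, ...
            check_call(['git', 'clone', source, dest_dir])
        except CalledProcessError as e:
            raise UnhandledSource(e)
        except OSError as e:
            # e.g. the git binary is missing
            raise UnhandledSource(e.strerror)
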
@@ -80,14 +80,11 @@ class SwiftStorageServerContext(OSContextGenerator):
     interfaces = []

     def __call__(self):
-        import psutil
-        multiplier = int(config('worker-multiplier')) or 1
         ctxt = {
             'local_ip': unit_private_ip(),
             'account_server_port': config('account-server-port'),
             'container_server_port': config('container-server-port'),
             'object_server_port': config('object-server-port'),
-            'workers': str(psutil.NUM_CPUS * multiplier),
             'object_server_threads_per_disk': config(
                 'object-server-threads-per-disk'),
             'account_max_connections': config('account-max-connections'),
@@ -132,7 +132,8 @@ def register_configs():
     for server in ['account', 'object', 'container']:
         configs.register('/etc/swift/%s-server.conf' % server,
                          [SwiftStorageServerContext(),
-                          context.BindHostContext()]),
+                          context.BindHostContext(),
+                          context.WorkerConfigContext()]),
     return configs


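
Aside (not part of the diff): with the 'workers' key dropped from SwiftStorageServerContext, each *-server.conf template now gets its worker count from the shared WorkerConfigContext registered above. A rough illustration of how a renderer merges several context dicts for one config file (simplified stand-ins, not the real OSConfigRenderer):

    class FakeSwiftStorageServerContext(object):
        def __call__(self):
            return {'local_ip': '10.0.0.5', 'object_server_port': '6000'}

    class FakeWorkerConfigContext(object):
        # the real WorkerConfigContext derives this from the CPU count
        # and the worker-multiplier charm option
        def __call__(self):
            return {'workers': '8'}

    def render_vars(contexts):
        # template variables are the union of all registered contexts,
        # later contexts winning on key clashes
        merged = {}
        for ctxt in contexts:
            merged.update(ctxt())
        return merged

    print(render_vars([FakeSwiftStorageServerContext(),
                       FakeWorkerConfigContext()]))
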
@@ -121,11 +121,12 @@ class OpenStackAmuletDeployment(AmuletDeployment):

         # Charms which should use the source config option
         use_source = ['mysql', 'mongodb', 'rabbitmq-server', 'ceph',
-                      'ceph-osd', 'ceph-radosgw']
+                      'ceph-osd', 'ceph-radosgw', 'ceph-mon']

         # Charms which can not use openstack-origin, ie. many subordinates
         no_origin = ['cinder-ceph', 'hacluster', 'neutron-openvswitch', 'nrpe',
-                     'openvswitch-odl', 'neutron-api-odl', 'odl-controller']
+                     'openvswitch-odl', 'neutron-api-odl', 'odl-controller',
+                     'cinder-backup']

         if self.openstack:
             for svc in services:
@@ -1,4 +1,4 @@
-from mock import MagicMock
+from mock import MagicMock, patch
 from test_utils import CharmTestCase, patch_open

 import lib.swift_storage_context as swift_context
@@ -67,18 +67,15 @@ class SwiftStorageContextTests(CharmTestCase):
         _file.write.assert_called_with('RSYNC_ENABLE=true\n')

     def test_swift_storage_server_context(self):
-        import psutil
         self.unit_private_ip.return_value = '10.0.0.5'
         self.test_config.set('account-server-port', '500')
         self.test_config.set('object-server-port', '501')
         self.test_config.set('container-server-port', '502')
         self.test_config.set('object-server-threads-per-disk', '3')
-        self.test_config.set('worker-multiplier', '3')
         self.test_config.set('object-replicator-concurrency', '3')
         self.test_config.set('account-max-connections', '10')
         self.test_config.set('container-max-connections', '10')
         self.test_config.set('object-max-connections', '10')
-        num_workers = psutil.NUM_CPUS * 3
         ctxt = swift_context.SwiftStorageServerContext()
         result = ctxt()
         ex = {
@@ -87,7 +84,6 @@ class SwiftStorageContextTests(CharmTestCase):
             'account_server_port': '500',
             'local_ip': '10.0.0.5',
             'object_server_threads_per_disk': '3',
-            'workers': str(num_workers),
             'object_replicator_concurrency': '3',
             'account_max_connections': '10',
             'container_max_connections': '10',
@@ -292,6 +292,7 @@ class SwiftStorageUtilsTests(CharmTestCase):
         renderer.assert_called_with(templates_dir=swift_utils.TEMPLATES,
                                     openstack_release='essex')

+    @patch('charmhelpers.contrib.openstack.context.WorkerConfigContext')
     @patch('charmhelpers.contrib.openstack.context.BindHostContext')
     @patch.object(swift_utils, 'SwiftStorageContext')
     @patch.object(swift_utils, 'RsyncContext')
@@ -299,11 +300,12 @@ class SwiftStorageUtilsTests(CharmTestCase):
     @patch('charmhelpers.contrib.openstack.templating.OSConfigRenderer')
     def test_register_configs_post_install(self, renderer,
                                            swift, rsync, server,
-                                           bind_context):
+                                           bind_context, worker_context):
         swift.return_value = 'swift_context'
         rsync.return_value = 'rsync_context'
         server.return_value = 'swift_server_context'
         bind_context.return_value = 'bind_host_context'
+        worker_context.return_value = 'worker_context'
         self.get_os_codename_package.return_value = 'grizzly'
         configs = MagicMock()
         configs.register = MagicMock()
@@ -316,11 +318,14 @@ class SwiftStorageUtilsTests(CharmTestCase):
             call('/etc/rsync-juju.d/050-swift-storage.conf',
                  ['rsync_context', 'swift_context']),
             call('/etc/swift/account-server.conf', ['swift_context',
                                                     'bind_host_context',
+                                                    'worker_context']),
             call('/etc/swift/object-server.conf', ['swift_context',
-                                                   'bind_host_context']),
+                                                   'bind_host_context',
+                                                   'worker_context']),
             call('/etc/swift/container-server.conf', ['swift_context',
-                                                      'bind_host_context'])
+                                                      'bind_host_context',
+                                                      'worker_context'])
         ]
         self.assertEquals(ex, configs.register.call_args_list)
