Resync helpers
commit 8c258d38cb
@@ -57,6 +57,7 @@ from charmhelpers.core.host import (
     get_nic_hwaddr,
     mkdir,
     write_file,
+    pwgen,
 )
 from charmhelpers.contrib.hahelpers.cluster import (
     determine_apache_port,
@@ -87,6 +88,8 @@ from charmhelpers.contrib.network.ip import (
     is_bridge_member,
 )
 from charmhelpers.contrib.openstack.utils import get_host_ip
+from charmhelpers.core.unitdata import kv
+
 CA_CERT_PATH = '/usr/local/share/ca-certificates/keystone_juju_ca_cert.crt'
 ADDRESS_TYPES = ['admin', 'internal', 'public']
 
@@ -636,11 +639,18 @@ class HAProxyContext(OSContextGenerator):
             ctxt['ipv6'] = True
             ctxt['local_host'] = 'ip6-localhost'
             ctxt['haproxy_host'] = '::'
-            ctxt['stat_port'] = ':::8888'
         else:
             ctxt['local_host'] = '127.0.0.1'
             ctxt['haproxy_host'] = '0.0.0.0'
-            ctxt['stat_port'] = ':8888'
+
+        ctxt['stat_port'] = '8888'
+
+        db = kv()
+        ctxt['stat_password'] = db.get('stat-password')
+        if not ctxt['stat_password']:
+            ctxt['stat_password'] = db.set('stat-password',
+                                           pwgen(32))
+            db.flush()
 
         for frontend in cluster_hosts:
             if (len(cluster_hosts[frontend]['backends']) > 1 or
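Note (not part of the commit): a minimal sketch of the stats-password pattern introduced above — generate the password once with pwgen(32), persist it in the unit's key/value store, and reuse it on later renders. It assumes charmhelpers' kv() and pwgen() helpers are importable:

    from charmhelpers.core.host import pwgen
    from charmhelpers.core.unitdata import kv

    db = kv()
    stat_password = db.get('stat-password')
    if not stat_password:
        # kv().set() returns the value it stores, which is why the hunk
        # assigns the result of db.set() directly
        stat_password = db.set('stat-password', pwgen(32))
        db.flush()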
@@ -103,68 +103,67 @@ OPENSTACK_CODENAMES = OrderedDict([
     ('2016.1', 'mitaka'),
 ])
 
-# The ugly duckling
+# The ugly duckling - must list releases oldest to newest
 SWIFT_CODENAMES = OrderedDict([
-    ('1.4.3', 'diablo'),
-    ('1.4.8', 'essex'),
-    ('1.7.4', 'folsom'),
-    ('1.8.0', 'grizzly'),
-    ('1.7.7', 'grizzly'),
-    ('1.7.6', 'grizzly'),
-    ('1.10.0', 'havana'),
-    ('1.9.1', 'havana'),
-    ('1.9.0', 'havana'),
-    ('1.13.1', 'icehouse'),
-    ('1.13.0', 'icehouse'),
-    ('1.12.0', 'icehouse'),
-    ('1.11.0', 'icehouse'),
-    ('2.0.0', 'juno'),
-    ('2.1.0', 'juno'),
-    ('2.2.0', 'juno'),
-    ('2.2.1', 'kilo'),
-    ('2.2.2', 'kilo'),
-    ('2.3.0', 'liberty'),
-    ('2.4.0', 'liberty'),
-    ('2.5.0', 'liberty'),
+    ('diablo',
+        ['1.4.3']),
+    ('essex',
+        ['1.4.8']),
+    ('folsom',
+        ['1.7.4']),
+    ('grizzly',
+        ['1.7.6', '1.7.7', '1.8.0']),
+    ('havana',
+        ['1.9.0', '1.9.1', '1.10.0']),
+    ('icehouse',
+        ['1.11.0', '1.12.0', '1.13.0', '1.13.1']),
+    ('juno',
+        ['2.0.0', '2.1.0', '2.2.0']),
+    ('kilo',
+        ['2.2.1', '2.2.2']),
+    ('liberty',
+        ['2.3.0', '2.4.0', '2.5.0']),
+    ('mitaka',
+        ['2.5.0']),
 ])
 
 # >= Liberty version->codename mapping
 PACKAGE_CODENAMES = {
     'nova-common': OrderedDict([
-        ('12.0.0', 'liberty'),
-        ('13.0.0', 'mitaka'),
+        ('12.0', 'liberty'),
+        ('13.0', 'mitaka'),
     ]),
     'neutron-common': OrderedDict([
-        ('7.0.0', 'liberty'),
-        ('8.0.0', 'mitaka'),
+        ('7.0', 'liberty'),
+        ('8.0', 'mitaka'),
     ]),
     'cinder-common': OrderedDict([
-        ('7.0.0', 'liberty'),
-        ('8.0.0', 'mitaka'),
+        ('7.0', 'liberty'),
+        ('8.0', 'mitaka'),
     ]),
     'keystone': OrderedDict([
-        ('8.0.0', 'liberty'),
-        ('9.0.0', 'mitaka'),
+        ('8.0', 'liberty'),
+        ('9.0', 'mitaka'),
     ]),
     'horizon-common': OrderedDict([
-        ('8.0.0', 'liberty'),
-        ('9.0.0', 'mitaka'),
+        ('8.0', 'liberty'),
+        ('9.0', 'mitaka'),
     ]),
     'ceilometer-common': OrderedDict([
-        ('5.0.0', 'liberty'),
-        ('6.0.0', 'mitaka'),
+        ('5.0', 'liberty'),
+        ('6.0', 'mitaka'),
     ]),
     'heat-common': OrderedDict([
-        ('5.0.0', 'liberty'),
-        ('6.0.0', 'mitaka'),
+        ('5.0', 'liberty'),
+        ('6.0', 'mitaka'),
     ]),
     'glance-common': OrderedDict([
-        ('11.0.0', 'liberty'),
-        ('12.0.0', 'mitaka'),
+        ('11.0', 'liberty'),
+        ('12.0', 'mitaka'),
     ]),
     'openstack-dashboard': OrderedDict([
-        ('8.0.0', 'liberty'),
-        ('9.0.0', 'mitaka'),
+        ('8.0', 'liberty'),
+        ('9.0', 'mitaka'),
     ]),
 }
 
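Note (illustrative, reduced to two entries from the table above): SWIFT_CODENAMES is now keyed by codename and maps to a list of swift versions ordered oldest to newest, so the newest version for a release is the last list element and a version can be reverse-mapped to its codename(s):

    from collections import OrderedDict

    SWIFT_CODENAMES = OrderedDict([
        ('kilo', ['2.2.1', '2.2.2']),
        ('liberty', ['2.3.0', '2.4.0', '2.5.0']),
    ])

    assert SWIFT_CODENAMES['liberty'][-1] == '2.5.0'   # newest liberty swift
    assert [c for c, v in SWIFT_CODENAMES.items() if '2.2.2' in v] == ['kilo']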
@@ -227,6 +226,33 @@ def get_os_version_codename(codename, version_map=OPENSTACK_CODENAMES):
     error_out(e)
 
 
+def get_os_version_codename_swift(codename):
+    '''Determine OpenStack version number of swift from codename.'''
+    for k, v in six.iteritems(SWIFT_CODENAMES):
+        if k == codename:
+            return v[-1]
+    e = 'Could not derive swift version for '\
+        'codename: %s' % codename
+    error_out(e)
+
+
+def get_swift_codename(version):
+    '''Determine OpenStack codename that corresponds to swift version.'''
+    codenames = [k for k, v in six.iteritems(SWIFT_CODENAMES) if version in v]
+    if len(codenames) > 1:
+        # If more than one release codename contains this version we determine
+        # the actual codename based on the highest available install source.
+        for codename in reversed(codenames):
+            releases = UBUNTU_OPENSTACK_RELEASE
+            release = [k for k, v in six.iteritems(releases) if codename in v]
+            ret = subprocess.check_output(['apt-cache', 'policy', 'swift'])
+            if codename in ret or release[0] in ret:
+                return codename
+    elif len(codenames) == 1:
+        return codenames[0]
+    return None
+
+
 def get_os_codename_package(package, fatal=True):
     '''Derive OpenStack release codename from an installed package.'''
     import apt_pkg as apt
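Note: expected behaviour of the two new helpers, inferred from the mapping above (illustrative, not part of the commit):

    # get_os_version_codename_swift('kilo')  -> '2.2.2'  (newest swift for kilo)
    # get_swift_codename('2.2.1')            -> 'kilo'   (version is unambiguous)
    # get_swift_codename('2.5.0')            -> 'liberty' or 'mitaka'; the tie is
    #     broken by checking `apt-cache policy swift` against the candidate
    #     codenames, newest first, as the loop above shows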
@@ -251,7 +277,14 @@ def get_os_codename_package(package, fatal=True):
         error_out(e)
 
     vers = apt.upstream_version(pkg.current_ver.ver_str)
-    match = re.match('^(\d+)\.(\d+)\.(\d+)', vers)
+    if 'swift' in pkg.name:
+        # Fully x.y.z match for swift versions
+        match = re.match('^(\d+)\.(\d+)\.(\d+)', vers)
+    else:
+        # x.y match only for 20XX.X
+        # and ignore patch level for other packages
+        match = re.match('^(\d+)\.(\d+)', vers)
+
     if match:
         vers = match.group(0)
 
@@ -263,13 +296,8 @@ def get_os_codename_package(package, fatal=True):
         # < Liberty co-ordinated project versions
         try:
             if 'swift' in pkg.name:
-                swift_vers = vers[:5]
-                if swift_vers not in SWIFT_CODENAMES:
-                    # Deal with 1.10.0 upward
-                    swift_vers = vers[:6]
-                return SWIFT_CODENAMES[swift_vers]
+                return get_swift_codename(vers)
             else:
-                vers = vers[:6]
                 return OPENSTACK_CODENAMES[vers]
         except KeyError:
             if not fatal:
@@ -287,12 +315,14 @@ def get_os_version_package(pkg, fatal=True):
 
     if 'swift' in pkg:
         vers_map = SWIFT_CODENAMES
+        for cname, version in six.iteritems(vers_map):
+            if cname == codename:
+                return version[-1]
     else:
         vers_map = OPENSTACK_CODENAMES
-
-    for version, cname in six.iteritems(vers_map):
-        if cname == codename:
-            return version
+        for version, cname in six.iteritems(vers_map):
+            if cname == codename:
+                return version
     # e = "Could not determine OpenStack version for package: %s" % pkg
     # error_out(e)
 
@@ -458,11 +488,16 @@ def openstack_upgrade_available(package):
     cur_vers = get_os_version_package(package)
     if "swift" in package:
         codename = get_os_codename_install_source(src)
-        available_vers = get_os_version_codename(codename, SWIFT_CODENAMES)
+        avail_vers = get_os_version_codename_swift(codename)
     else:
-        available_vers = get_os_version_install_source(src)
+        avail_vers = get_os_version_install_source(src)
     apt.init()
-    return apt.version_compare(available_vers, cur_vers) == 1
+    if "swift" in package:
+        major_cur_vers = cur_vers.split('.', 1)[0]
+        major_avail_vers = avail_vers.split('.', 1)[0]
+        major_diff = apt.version_compare(major_avail_vers, major_cur_vers)
+        return avail_vers > cur_vers and (major_diff == 1 or major_diff == 0)
+    return apt.version_compare(avail_vers, cur_vers) == 1
 
 
 def ensure_block_device(block_device):
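Note (sketch only, example values, assumes python-apt's apt_pkg module is available): the swift branch above compares the major version separately because swift's own version numbers, not the coordinated OpenStack x.y numbers, drive its upgrades:

    import apt_pkg
    apt_pkg.init()

    cur_vers, avail_vers = '2.2.2', '2.5.0'
    major_diff = apt_pkg.version_compare(avail_vers.split('.', 1)[0],
                                         cur_vers.split('.', 1)[0])
    # mirrors the hunk: upgrade if a newer version exists within the same
    # or the next major version
    upgrade_available = avail_vers > cur_vers and major_diff in (0, 1)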
@@ -591,7 +626,7 @@ def _git_yaml_load(projects_yaml):
     return yaml.load(projects_yaml)
 
 
-def git_clone_and_install(projects_yaml, core_project, depth=1):
+def git_clone_and_install(projects_yaml, core_project):
     """
     Clone/install all specified OpenStack repositories.
 
@@ -641,6 +676,9 @@ def git_clone_and_install(projects_yaml, core_project, depth=1):
     for p in projects['repositories']:
         repo = p['repository']
         branch = p['branch']
+        depth = '1'
+        if 'depth' in p.keys():
+            depth = p['depth']
         if p['name'] == 'requirements':
             repo_dir = _git_clone_and_install_single(repo, branch, depth,
                                                      parent_dir, http_proxy,
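Note (hypothetical example with placeholder URLs): after this change each repository entry in the parsed projects_yaml may carry its own 'depth', falling back to '1':

    projects = {
        'repositories': [
            {'name': 'requirements',
             'repository': 'git://example.com/requirements.git',
             'branch': 'stable/mitaka'},
            {'name': 'keystone',
             'repository': 'git://example.com/keystone.git',
             'branch': 'stable/mitaka',
             'depth': '10'},
        ],
    }

    for p in projects['repositories']:
        depth = p.get('depth', '1')   # same effect as the "if 'depth' in p.keys()" check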
@@ -685,19 +723,13 @@ def _git_clone_and_install_single(repo, branch, depth, parent_dir, http_proxy,
     """
     Clone and install a single git repository.
     """
-    dest_dir = os.path.join(parent_dir, os.path.basename(repo))
-
     if not os.path.exists(parent_dir):
         juju_log('Directory already exists at {}. '
                  'No need to create directory.'.format(parent_dir))
         os.mkdir(parent_dir)
 
-    if not os.path.exists(dest_dir):
-        juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch))
-        repo_dir = install_remote(repo, dest=parent_dir, branch=branch,
-                                  depth=depth)
-    else:
-        repo_dir = dest_dir
+    juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch))
+    repo_dir = install_remote(repo, dest=parent_dir, branch=branch, depth=depth)
 
     venv = os.path.join(parent_dir, 'venv')
 
@@ -72,7 +72,9 @@ def service_pause(service_name, init_dir="/etc/init", initd_dir="/etc/init.d"):
     stopped = service_stop(service_name)
     upstart_file = os.path.join(init_dir, "{}.conf".format(service_name))
     sysv_file = os.path.join(initd_dir, service_name)
-    if os.path.exists(upstart_file):
+    if init_is_systemd():
+        service('disable', service_name)
+    elif os.path.exists(upstart_file):
         override_path = os.path.join(
             init_dir, '{}.override'.format(service_name))
         with open(override_path, 'w') as fh:
@@ -80,9 +82,9 @@ def service_pause(service_name, init_dir="/etc/init", initd_dir="/etc/init.d"):
     elif os.path.exists(sysv_file):
         subprocess.check_call(["update-rc.d", service_name, "disable"])
     else:
-        # XXX: Support SystemD too
         raise ValueError(
-            "Unable to detect {0} as either Upstart {1} or SysV {2}".format(
+            "Unable to detect {0} as SystemD, Upstart {1} or"
+            " SysV {2}".format(
                 service_name, upstart_file, sysv_file))
     return stopped
 
@@ -94,7 +96,9 @@ def service_resume(service_name, init_dir="/etc/init",
     Reenable starting again at boot. Start the service"""
     upstart_file = os.path.join(init_dir, "{}.conf".format(service_name))
     sysv_file = os.path.join(initd_dir, service_name)
-    if os.path.exists(upstart_file):
+    if init_is_systemd():
+        service('enable', service_name)
+    elif os.path.exists(upstart_file):
         override_path = os.path.join(
             init_dir, '{}.override'.format(service_name))
         if os.path.exists(override_path):
@@ -102,9 +106,9 @@ def service_resume(service_name, init_dir="/etc/init",
     elif os.path.exists(sysv_file):
         subprocess.check_call(["update-rc.d", service_name, "enable"])
     else:
-        # XXX: Support SystemD too
         raise ValueError(
-            "Unable to detect {0} as either Upstart {1} or SysV {2}".format(
+            "Unable to detect {0} as SystemD, Upstart {1} or"
+            " SysV {2}".format(
                 service_name, upstart_file, sysv_file))
 
     started = service_running(service_name)
@@ -115,23 +119,29 @@
 
 def service(action, service_name):
     """Control a system service"""
-    cmd = ['service', service_name, action]
+    if init_is_systemd():
+        cmd = ['systemctl', action, service_name]
+    else:
+        cmd = ['service', service_name, action]
     return subprocess.call(cmd) == 0
 
 
-def service_running(service):
+def service_running(service_name):
     """Determine whether a system service is running"""
-    try:
-        output = subprocess.check_output(
-            ['service', service, 'status'],
-            stderr=subprocess.STDOUT).decode('UTF-8')
-    except subprocess.CalledProcessError:
-        return False
+    if init_is_systemd():
+        return service('is-active', service_name)
     else:
-        if ("start/running" in output or "is running" in output):
-            return True
-        else:
+        try:
+            output = subprocess.check_output(
+                ['service', service_name, 'status'],
+                stderr=subprocess.STDOUT).decode('UTF-8')
+        except subprocess.CalledProcessError:
             return False
+        else:
+            if ("start/running" in output or "is running" in output):
+                return True
+            else:
+                return False
 
 
 def service_available(service_name):
@@ -146,10 +156,17 @@ def service_available(service_name):
         return True
 
 
+SYSTEMD_SYSTEM = '/run/systemd/system'
+
+
+def init_is_systemd():
+    """Return True if the host system uses systemd, False otherwise."""
+    return os.path.isdir(SYSTEMD_SYSTEM)
+
+
 def adduser(username, password=None, shell='/bin/bash', system_user=False,
             primary_group=None, secondary_groups=None):
-    """
-    Add a user to the system.
+    """Add a user to the system.
 
     Will log but otherwise succeed if the user already exists.
 
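Note (condensed from the hunks above, for reference only): systemd detection is just a directory check, and service() dispatches on it, so service_pause()/service_resume() now run `systemctl disable/enable <name>` on a systemd host instead of writing an Upstart override:

    import os
    import subprocess

    SYSTEMD_SYSTEM = '/run/systemd/system'

    def init_is_systemd():
        # systemd creates this directory at boot on systemd-managed hosts
        return os.path.isdir(SYSTEMD_SYSTEM)

    def service(action, service_name):
        cmd = (['systemctl', action, service_name] if init_is_systemd()
               else ['service', service_name, action])
        return subprocess.call(cmd) == 0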
@@ -157,7 +174,7 @@ def adduser(username, password=None, shell='/bin/bash', system_user=False,
     :param str password: Password for user; if ``None``, create a system user
     :param str shell: The default shell for the user
     :param bool system_user: Whether to create a login or system user
-    :param str primary_group: Primary group for user; defaults to their username
+    :param str primary_group: Primary group for user; defaults to username
     :param list secondary_groups: Optional list of additional groups
 
     :returns: The password database entry struct, as returned by `pwd.getpwnam`
@@ -283,14 +300,12 @@ def write_file(path, content, owner='root', group='root', perms=0o444):
 
 
 def fstab_remove(mp):
-    """Remove the given mountpoint entry from /etc/fstab
-    """
+    """Remove the given mountpoint entry from /etc/fstab"""
     return Fstab.remove_by_mountpoint(mp)
 
 
 def fstab_add(dev, mp, fs, options=None):
-    """Adds the given device entry to the /etc/fstab file
-    """
+    """Adds the given device entry to the /etc/fstab file"""
     return Fstab.add(dev, mp, fs, options=options)
 
 
@@ -346,8 +361,7 @@ def fstab_mount(mountpoint):
 
 
 def file_hash(path, hash_type='md5'):
-    """
-    Generate a hash checksum of the contents of 'path' or None if not found.
+    """Generate a hash checksum of the contents of 'path' or None if not found.
 
     :param str hash_type: Any hash alrgorithm supported by :mod:`hashlib`,
                           such as md5, sha1, sha256, sha512, etc.
@@ -362,10 +376,9 @@ def file_hash(path, hash_type='md5'):
 
 
 def path_hash(path):
-    """
-    Generate a hash checksum of all files matching 'path'. Standard wildcards
-    like '*' and '?' are supported, see documentation for the 'glob' module for
-    more information.
+    """Generate a hash checksum of all files matching 'path'. Standard
+    wildcards like '*' and '?' are supported, see documentation for the 'glob'
+    module for more information.
 
     :return: dict: A { filename: hash } dictionary for all matched files.
         Empty if none found.
@@ -377,8 +390,7 @@ def path_hash(path):
 
 
 def check_hash(path, checksum, hash_type='md5'):
-    """
-    Validate a file using a cryptographic checksum.
+    """Validate a file using a cryptographic checksum.
 
     :param str checksum: Value of the checksum used to validate the file.
     :param str hash_type: Hash algorithm used to generate `checksum`.
@@ -393,6 +405,7 @@ def check_hash(path, checksum, hash_type='md5'):
 
 
 class ChecksumError(ValueError):
+    """A class derived from Value error to indicate the checksum failed."""
     pass
 
 
@@ -498,7 +511,7 @@ def get_bond_master(interface):
 
 
 def list_nics(nic_type=None):
-    '''Return a list of nics of given type(s)'''
+    """Return a list of nics of given type(s)"""
     if isinstance(nic_type, six.string_types):
         int_types = [nic_type]
     else:
@@ -540,12 +553,13 @@ def list_nics(nic_type=None):
 
 
 def set_nic_mtu(nic, mtu):
-    '''Set MTU on a network interface'''
+    """Set the Maximum Transmission Unit (MTU) on a network interface."""
    cmd = ['ip', 'link', 'set', nic, 'mtu', mtu]
     subprocess.check_call(cmd)
 
 
 def get_nic_mtu(nic):
+    """Return the Maximum Transmission Unit (MTU) for a network interface."""
     cmd = ['ip', 'addr', 'show', nic]
     ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
     mtu = ""
@@ -557,6 +571,7 @@ def get_nic_mtu(nic):
 
 
 def get_nic_hwaddr(nic):
+    """Return the Media Access Control (MAC) for a network interface."""
     cmd = ['ip', '-o', '-0', 'addr', 'show', nic]
     ip_output = subprocess.check_output(cmd).decode('UTF-8')
     hwaddr = ""
@@ -567,7 +582,7 @@ def get_nic_hwaddr(nic):
 
 
 def cmp_pkgrevno(package, revno, pkgcache=None):
-    '''Compare supplied revno with the revno of the installed package
+    """Compare supplied revno with the revno of the installed package
 
     * 1 => Installed revno is greater than supplied arg
     * 0 => Installed revno is the same as supplied arg
@@ -576,7 +591,7 @@ def cmp_pkgrevno(package, revno, pkgcache=None):
     This function imports apt_cache function from charmhelpers.fetch if
     the pkgcache argument is None. Be sure to add charmhelpers.fetch if
     you call this function, or pass an apt_pkg.Cache() instance.
-    '''
+    """
     import apt_pkg
     if not pkgcache:
         from charmhelpers.fetch import apt_cache
@@ -586,19 +601,27 @@ def cmp_pkgrevno(package, revno, pkgcache=None):
 
 
 @contextmanager
-def chdir(d):
+def chdir(directory):
+    """Change the current working directory to a different directory for a code
+    block and return the previous directory after the block exits. Useful to
+    run commands from a specificed directory.
+
+    :param str directory: The directory path to change to for this context.
+    """
     cur = os.getcwd()
     try:
-        yield os.chdir(d)
+        yield os.chdir(directory)
     finally:
         os.chdir(cur)
 
 
 def chownr(path, owner, group, follow_links=True, chowntopdir=False):
-    """
-    Recursively change user and group ownership of files and directories
+    """Recursively change user and group ownership of files and directories
     in given path. Doesn't chown path itself by default, only its children.
 
+    :param str path: The string path to start changing ownership.
+    :param str owner: The owner string to use when looking up the uid.
+    :param str group: The group string to use when looking up the gid.
     :param bool follow_links: Also Chown links if True
     :param bool chowntopdir: Also chown path itself if True
     """
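Note: usage sketch for the chdir() context manager documented above (assumes charmhelpers is importable):

    import subprocess
    from charmhelpers.core.host import chdir

    with chdir('/tmp'):
        subprocess.check_call(['ls'])   # runs with /tmp as the working directory
    # the previous working directory is restored when the block exits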
@@ -622,15 +645,23 @@ def chownr(path, owner, group, follow_links=True, chowntopdir=False):
 
 
 def lchownr(path, owner, group):
+    """Recursively change user and group ownership of files and directories
+    in a given path, not following symbolic links. See the documentation for
+    'os.lchown' for more information.
+
+    :param str path: The string path to start changing ownership.
+    :param str owner: The owner string to use when looking up the uid.
+    :param str group: The group string to use when looking up the gid.
+    """
     chownr(path, owner, group, follow_links=False)
 
 
 def get_total_ram():
-    '''The total amount of system RAM in bytes.
+    """The total amount of system RAM in bytes.
 
     This is what is reported by the OS, and may be overcommitted when
     there are multiple containers hosted on the same machine.
-    '''
+    """
     with open('/proc/meminfo', 'r') as f:
         for line in f.readlines():
             if line:
@@ -22,7 +22,6 @@ from charmhelpers.fetch import (
     filter_installed_packages,
     apt_install,
 )
-from charmhelpers.core.host import mkdir
 
 if filter_installed_packages(['git']) != []:
     apt_install(['git'])
@@ -50,8 +49,8 @@ class GitUrlFetchHandler(BaseFetchHandler):
             cmd = ['git', '-C', dest, 'pull', source, branch]
         else:
             cmd = ['git', 'clone', source, dest, '--branch', branch]
-        if depth:
-            cmd.extend(['--depth', depth])
+            if depth:
+                cmd.extend(['--depth', depth])
         check_call(cmd)
 
     def install(self, source, branch="master", dest=None, depth=None):
@@ -62,8 +61,6 @@ class GitUrlFetchHandler(BaseFetchHandler):
         else:
             dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
                                     branch_name)
-        if not os.path.exists(dest_dir):
-            mkdir(dest_dir, perms=0o755)
         try:
             self.clone(source, dest_dir, branch, depth)
         except OSError as e: