Sync charm-helpers

parent 3b758c9032
commit 6ab462e881
@@ -15,6 +15,7 @@
 # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>.

 import six
+from collections import OrderedDict
 from charmhelpers.contrib.amulet.deployment import (
     AmuletDeployment
 )
@@ -100,12 +101,34 @@ class OpenStackAmuletDeployment(AmuletDeployment):
         """
         (self.precise_essex, self.precise_folsom, self.precise_grizzly,
          self.precise_havana, self.precise_icehouse,
-         self.trusty_icehouse) = range(6)
+         self.trusty_icehouse, self.trusty_juno, self.trusty_kilo) = range(8)
         releases = {
             ('precise', None): self.precise_essex,
             ('precise', 'cloud:precise-folsom'): self.precise_folsom,
             ('precise', 'cloud:precise-grizzly'): self.precise_grizzly,
             ('precise', 'cloud:precise-havana'): self.precise_havana,
             ('precise', 'cloud:precise-icehouse'): self.precise_icehouse,
-            ('trusty', None): self.trusty_icehouse}
+            ('trusty', None): self.trusty_icehouse,
+            ('trusty', 'cloud:trusty-juno'): self.trusty_juno,
+            ('trusty', 'cloud:trusty-kilo'): self.trusty_kilo}
         return releases[(self.series, self.openstack)]
+
+    def _get_openstack_release_string(self):
+        """Get openstack release string.
+
+        Return a string representing the openstack release.
+        """
+        releases = OrderedDict([
+            ('precise', 'essex'),
+            ('quantal', 'folsom'),
+            ('raring', 'grizzly'),
+            ('saucy', 'havana'),
+            ('trusty', 'icehouse'),
+            ('utopic', 'juno'),
+            ('vivid', 'kilo'),
+        ])
+        if self.openstack:
+            os_origin = self.openstack.split(':')[1]
+            return os_origin.split('%s-' % self.series)[1].split('/')[0]
+        else:
+            return releases[self.series]
@@ -16,6 +16,7 @@

 import json
 import os
+import re
 import time
 from base64 import b64decode
 from subprocess import check_call
@@ -48,6 +49,8 @@ from charmhelpers.core.hookenv import (
 from charmhelpers.core.sysctl import create as sysctl_create

 from charmhelpers.core.host import (
+    list_nics,
+    get_nic_hwaddr,
     mkdir,
     write_file,
 )
@@ -65,12 +68,18 @@ from charmhelpers.contrib.hahelpers.apache import (
 from charmhelpers.contrib.openstack.neutron import (
     neutron_plugin_attribute,
 )
+from charmhelpers.contrib.openstack.ip import (
+    resolve_address,
+    INTERNAL,
+)
 from charmhelpers.contrib.network.ip import (
     get_address_in_network,
+    get_ipv4_addr,
     get_ipv6_addr,
     get_netmask_for_address,
     format_ipv6_addr,
     is_address_in_network,
+    is_bridge_member,
 )
 from charmhelpers.contrib.openstack.utils import get_host_ip

@@ -727,7 +736,14 @@ class ApacheSSLContext(OSContextGenerator):
                 'endpoints': [],
                 'ext_ports': []}

-        for cn in self.canonical_names():
+        cns = self.canonical_names()
+        if cns:
+            for cn in cns:
+                self.configure_cert(cn)
+        else:
+            # Expect cert/key provided in config (currently assumed that ca
+            # uses ip for cn)
+            cn = resolve_address(endpoint_type=INTERNAL)
             self.configure_cert(cn)

         addresses = self.get_network_addresses()
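For readers following the new fallback path above: when no canonical names are configured, the context now derives a single CN from the unit's internal address. A minimal standalone sketch of that control flow, where pick_cert_names() and the lambda are hypothetical stand-ins for the class method and for charmhelpers.contrib.openstack.ip.resolve_address:

def pick_cert_names(cns, resolve_address):
    if cns:
        return list(cns)  # one cert per canonical name
    # No canonical names: assume the cert/key in config use the
    # unit's internal IP as the CN.
    return [resolve_address()]

print(pick_cert_names([], lambda: '10.0.0.5'))                  # ['10.0.0.5']
print(pick_cert_names(['ks.example.com'], lambda: '10.0.0.5'))  # ['ks.example.com']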
@@ -883,6 +899,48 @@ class NeutronContext(OSContextGenerator):
         return ctxt


+class NeutronPortContext(OSContextGenerator):
+    NIC_PREFIXES = ['eth', 'bond']
+
+    def resolve_ports(self, ports):
+        """Resolve NICs not yet bound to bridge(s)
+
+        If hwaddress provided then returns resolved hwaddress otherwise NIC.
+        """
+        if not ports:
+            return None
+
+        hwaddr_to_nic = {}
+        hwaddr_to_ip = {}
+        for nic in list_nics(self.NIC_PREFIXES):
+            hwaddr = get_nic_hwaddr(nic)
+            hwaddr_to_nic[hwaddr] = nic
+            addresses = get_ipv4_addr(nic, fatal=False)
+            addresses += get_ipv6_addr(iface=nic, fatal=False)
+            hwaddr_to_ip[hwaddr] = addresses
+
+        resolved = []
+        mac_regex = re.compile(r'([0-9A-F]{2}[:-]){5}([0-9A-F]{2})', re.I)
+        for entry in ports:
+            if re.match(mac_regex, entry):
+                # NIC is in known NICs and does NOT have an IP address
+                if entry in hwaddr_to_nic and not hwaddr_to_ip[entry]:
+                    # If the nic is part of a bridge then don't use it
+                    if is_bridge_member(hwaddr_to_nic[entry]):
+                        continue
+
+                    # Entry is a MAC address for a valid interface that doesn't
+                    # have an IP address assigned yet.
+                    resolved.append(hwaddr_to_nic[entry])
+            else:
+                # If the passed entry is not a MAC address, assume it's a valid
+                # interface, and that the user put it there on purpose (we can
+                # trust it to be the real external network).
+                resolved.append(entry)
+
+        return resolved
+
+
 class OSConfigFlagContext(OSContextGenerator):
     """Provides support for user-defined config flags.

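As a rough illustration of what resolve_ports() does with a mixed list of MAC addresses and interface names, here is a self-contained sketch of the same selection logic. The MACs, NIC names and addresses are invented; in the real context they come from list_nics()/get_nic_hwaddr(), and the is_bridge_member() check is omitted for brevity:

import re

# Invented data standing in for list_nics()/get_nic_hwaddr() results.
hwaddr_to_nic = {'52:54:00:aa:bb:cc': 'eth1', '52:54:00:dd:ee:ff': 'eth2'}
hwaddr_to_ip = {'52:54:00:aa:bb:cc': [], '52:54:00:dd:ee:ff': ['10.0.0.2']}

mac_regex = re.compile(r'([0-9A-F]{2}[:-]){5}([0-9A-F]{2})', re.I)
resolved = []
for entry in ['52:54:00:aa:bb:cc', '52:54:00:dd:ee:ff', 'eth3']:
    if re.match(mac_regex, entry):
        # MAC of a known NIC that has no IP yet -> use the NIC name.
        if entry in hwaddr_to_nic and not hwaddr_to_ip[entry]:
            resolved.append(hwaddr_to_nic[entry])
    else:
        # A plain interface name is trusted as-is.
        resolved.append(entry)

print(resolved)  # ['eth1', 'eth3'] - eth2 is skipped because it already has an IP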
@@ -16,6 +16,7 @@

 # Various utilities for dealing with Neutron and the renaming from Quantum.

+import six
 from subprocess import check_output

 from charmhelpers.core.hookenv import (
@@ -237,3 +238,72 @@ def network_manager():
     else:
         # ensure accurate naming for all releases post-H
         return 'neutron'
+
+
+def parse_mappings(mappings):
+    parsed = {}
+    if mappings:
+        mappings = mappings.split(' ')
+        for m in mappings:
+            p = m.partition(':')
+            if p[1] == ':':
+                parsed[p[0].strip()] = p[2].strip()
+
+    return parsed
+
+
+def parse_bridge_mappings(mappings):
+    """Parse bridge mappings.
+
+    Mappings must be a space-delimited list of provider:bridge mappings.
+
+    Returns dict of the form {provider:bridge}.
+    """
+    return parse_mappings(mappings)
+
+
+def parse_data_port_mappings(mappings, default_bridge='br-data'):
+    """Parse data port mappings.
+
+    Mappings must be a space-delimited list of bridge:port mappings.
+
+    Returns dict of the form {bridge:port}.
+    """
+    _mappings = parse_mappings(mappings)
+    if not _mappings:
+        if not mappings:
+            return {}
+
+        # For backwards-compatibility we need to support port-only provided in
+        # config.
+        _mappings = {default_bridge: mappings.split(' ')[0]}
+
+    bridges = _mappings.keys()
+    ports = _mappings.values()
+    if len(set(bridges)) != len(bridges):
+        raise Exception("It is not allowed to have more than one port "
+                        "configured on the same bridge")
+
+    if len(set(ports)) != len(ports):
+        raise Exception("It is not allowed to have the same port configured "
+                        "on more than one bridge")
+
+    return _mappings
+
+
+def parse_vlan_range_mappings(mappings):
+    """Parse vlan range mappings.
+
+    Mappings must be a space-delimited list of provider:start:end mappings.
+
+    Returns dict of the form {provider: (start, end)}.
+    """
+    _mappings = parse_mappings(mappings)
+    if not _mappings:
+        return {}
+
+    mappings = {}
+    for p, r in six.iteritems(_mappings):
+        mappings[p] = tuple(r.split(':'))
+
+    return mappings
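The parse_* helpers added above all build on the same space-delimited key:value convention. A quick sketch of their behaviour, using made-up mapping values and assuming charm-helpers is importable:

from charmhelpers.contrib.openstack.neutron import (
    parse_bridge_mappings,
    parse_data_port_mappings,
    parse_vlan_range_mappings,
)

# 'physnet1:br-ex physnet2:br-data' -> {'physnet1': 'br-ex', 'physnet2': 'br-data'}
print(parse_bridge_mappings('physnet1:br-ex physnet2:br-data'))

# A bare port name keeps working for backwards compatibility:
# 'eth1' -> {'br-data': 'eth1'}
print(parse_data_port_mappings('eth1'))

# 'physnet1:1000:2000' -> {'physnet1': ('1000', '2000')}
print(parse_vlan_range_mappings('physnet1:1000:2000'))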
@@ -20,18 +20,21 @@
 from collections import OrderedDict
 from functools import wraps

-import errno
 import subprocess
 import json
 import os
 import sys
-import time

 import six
 import yaml

 from charmhelpers.contrib.network import ip

+from charmhelpers.core import (
+    hookenv,
+    unitdata,
+)
+
 from charmhelpers.core.hookenv import (
     config,
     log as juju_log,
@@ -332,6 +335,21 @@ def configure_installation_source(rel):
         error_out("Invalid openstack-release specified: %s" % rel)


+def config_value_changed(option):
+    """
+    Determine if config value changed since last call to this function.
+    """
+    hook_data = unitdata.HookData()
+    with hook_data():
+        db = unitdata.kv()
+        current = hookenv.execution_environment()['conf'][option]
+        saved = db.get(option)
+        db.set(option, current)
+        if saved is None:
+            return False
+        return current != saved
+
+
 def save_script_rc(script_path="scripts/scriptrc", **env_vars):
     """
     Write an rc file in the charm-delivered directory containing
@@ -471,116 +489,103 @@ def os_requires_version(ostack_release, pkg):


 def git_install_requested():
-    """Returns true if openstack-origin-git is specified."""
-    return config('openstack-origin-git') != None
+    """
+    Returns true if openstack-origin-git is specified.
+    """
+    return config('openstack-origin-git').lower() != "none"


 requirements_dir = None


-def git_clone_and_install(projects, core_project,
-                          parent_dir='/mnt/openstack-git'):
-    """Clone/install all OpenStack repos specified in projects dictionary."""
-    global requirements_dir
-    update_reqs = True
-
-    if not projects:
+def git_clone_and_install(projects_yaml, core_project):
+    """
+    Clone/install all specified OpenStack repositories.
+
+    The expected format of projects_yaml is:
+        repositories:
+           - {name: keystone,
+              repository: 'git://git.openstack.org/openstack/keystone.git',
+              branch: 'stable/icehouse'}
+           - {name: requirements,
+              repository: 'git://git.openstack.org/openstack/requirements.git',
+              branch: 'stable/icehouse'}
+        directory: /mnt/openstack-git
+
+    The directory key is optional.
+    """
+    global requirements_dir
+    parent_dir = '/mnt/openstack-git'
+
+    if not projects_yaml:
         return

-    # clone/install the requirements project first
-    installed = _git_clone_and_install_subset(projects, parent_dir,
-                                              whitelist=['requirements'])
-    if 'requirements' not in installed:
-        update_reqs = False
-
-    # clone/install all other projects except requirements and the core project
-    blacklist = ['requirements', core_project]
-    _git_clone_and_install_subset(projects, parent_dir, blacklist=blacklist,
-                                  update_requirements=update_reqs)
-
-    # clone/install the core project
-    whitelist = [core_project]
-    installed = _git_clone_and_install_subset(projects, parent_dir,
-                                              whitelist=whitelist,
-                                              update_requirements=update_reqs)
-    if core_project not in installed:
-        error_out('{} git repository must be specified'.format(core_project))
-
-
-def _git_clone_and_install_subset(projects, parent_dir, whitelist=[],
-                                  blacklist=[], update_requirements=False):
-    """Clone/install subset of OpenStack repos specified in projects dict."""
-    global requirements_dir
-    installed = []
-
-    for proj, val in projects.items():
-        # The project subset is chosen based on the following 3 rules:
-        # 1) If project is in blacklist, we don't clone/install it, period.
-        # 2) If whitelist is empty, we clone/install everything else.
-        # 3) If whitelist is not empty, we clone/install everything in the
-        #    whitelist.
-        if proj in blacklist:
-            continue
-        if whitelist and proj not in whitelist:
-            continue
-        repo = val['repository']
-        branch = val['branch']
-        repo_dir = _git_clone_and_install_single(repo, branch, parent_dir,
-                                                 update_requirements)
-        if proj == 'requirements':
+    projects = yaml.load(projects_yaml)
+    _git_validate_projects_yaml(projects, core_project)
+
+    if 'directory' in projects.keys():
+        parent_dir = projects['directory']
+
+    for p in projects['repositories']:
+        repo = p['repository']
+        branch = p['branch']
+        if p['name'] == 'requirements':
+            repo_dir = _git_clone_and_install_single(repo, branch, parent_dir,
+                                                     update_requirements=False)
             requirements_dir = repo_dir
-        installed.append(proj)
-    return installed
-
-
-def _git_clone_and_install_single(repo, branch, parent_dir,
-                                  update_requirements=False):
-    """Clone and install a single git repository."""
-    dest_dir = os.path.join(parent_dir, os.path.basename(repo))
-    lock_dir = os.path.join(parent_dir, os.path.basename(repo) + '.lock')
-
-    # Note(coreycb): The parent directory for storing git repositories can be
-    # shared by multiple charms via bind mount, etc, so we use exception
-    # handling to ensure the test for existence and mkdir are atomic.
-    try:
-        os.mkdir(parent_dir)
-    except OSError as e:
-        if e.errno == errno.EEXIST:
-            juju_log('Directory already exists at {}. '
-                     'No need to create directory.'.format(parent_dir))
-            pass
-    else:
-        juju_log('Host directory not mounted at {}. '
-                 'Directory created.'.format(parent_dir))
-
-    # Note(coreycb): Similar to above, the cloned git repositories can be shared
-    # by multiple charms via bind mount, etc, so we use exception handling and
-    # special lock directories to ensure that a repository clone is only
-    # attempted once.
-    try:
-        os.mkdir(lock_dir)
-    except OSError as e:
-        if e.errno == errno.EEXIST:
-            juju_log('Lock directory exists at {}. Skip git clone and wait '
-                     'for lock removal before installing.'.format(lock_dir))
-            while os.path.exists(lock_dir):
-                juju_log('Waiting for git clone to complete before installing.')
-                time.sleep(1)
-            pass
-    else:
-        if not os.path.exists(dest_dir):
-            juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch))
-            repo_dir = install_remote(repo, dest=parent_dir, branch=branch)
         else:
-            repo_dir = dest_dir
-
-        if update_requirements:
-            if not requirements_dir:
-                error_out('requirements repo must be cloned before '
-                          'updating from global requirements.')
-            _git_update_requirements(repo_dir, requirements_dir)
-
-        os.rmdir(lock_dir)
+            repo_dir = _git_clone_and_install_single(repo, branch, parent_dir,
+                                                     update_requirements=True)
+
+
+def _git_validate_projects_yaml(projects, core_project):
+    """
+    Validate the projects yaml.
+    """
+    _git_ensure_key_exists('repositories', projects)
+
+    for project in projects['repositories']:
+        _git_ensure_key_exists('name', project.keys())
+        _git_ensure_key_exists('repository', project.keys())
+        _git_ensure_key_exists('branch', project.keys())
+
+    if projects['repositories'][0]['name'] != 'requirements':
+        error_out('{} git repo must be specified first'.format('requirements'))
+
+    if projects['repositories'][-1]['name'] != core_project:
+        error_out('{} git repo must be specified last'.format(core_project))
+
+
+def _git_ensure_key_exists(key, keys):
+    """
+    Ensure that key exists in keys.
+    """
+    if key not in keys:
+        error_out('openstack-origin-git key \'{}\' is missing'.format(key))
+
+
+def _git_clone_and_install_single(repo, branch, parent_dir, update_requirements):
+    """
+    Clone and install a single git repository.
+    """
+    dest_dir = os.path.join(parent_dir, os.path.basename(repo))
+
+    if not os.path.exists(parent_dir):
+        juju_log('Directory already exists at {}. '
+                 'No need to create directory.'.format(parent_dir))
+        os.mkdir(parent_dir)
+
+    if not os.path.exists(dest_dir):
+        juju_log('Cloning git repo: {}, branch: {}'.format(repo, branch))
+        repo_dir = install_remote(repo, dest=parent_dir, branch=branch)
+    else:
+        repo_dir = dest_dir
+
+    if update_requirements:
+        if not requirements_dir:
+            error_out('requirements repo must be cloned before '
+                      'updating from global requirements.')
+        _git_update_requirements(repo_dir, requirements_dir)

     juju_log('Installing git repo from dir: {}'.format(repo_dir))
     pip_install(repo_dir)
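The new docstring above defines the projects_yaml layout. A hedged end-to-end sketch of what a charm might pass in; the repository URLs, branches and directory are illustrative only, and note that the requirements repo must be listed first and the core project last:

import yaml
from charmhelpers.contrib.openstack.utils import git_clone_and_install

openstack_origin_git = yaml.dump({
    'repositories': [
        {'name': 'requirements',
         'repository': 'git://git.openstack.org/openstack/requirements.git',
         'branch': 'stable/icehouse'},
        {'name': 'keystone',
         'repository': 'git://git.openstack.org/openstack/keystone.git',
         'branch': 'stable/icehouse'},
    ],
    'directory': '/mnt/openstack-git',  # optional
})

# Clones each repository under the directory, pins requirements first,
# then pip-installs every project with its requirements synced.
git_clone_and_install(openstack_origin_git, core_project='keystone')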
@@ -589,16 +594,39 @@ def _git_clone_and_install_single(repo, branch, parent_dir,


 def _git_update_requirements(package_dir, reqs_dir):
-    """Update from global requirements.
+    """
+    Update from global requirements.

     Update an OpenStack git directory's requirements.txt and
-    test-requirements.txt from global-requirements.txt."""
+    test-requirements.txt from global-requirements.txt.
+    """
     orig_dir = os.getcwd()
     os.chdir(reqs_dir)
-    cmd = "python update.py {}".format(package_dir)
+    cmd = ['python', 'update.py', package_dir]
     try:
-        subprocess.check_call(cmd.split(' '))
+        subprocess.check_call(cmd)
     except subprocess.CalledProcessError:
         package = os.path.basename(package_dir)
         error_out("Error updating {} from global-requirements.txt".format(package))
     os.chdir(orig_dir)
+
+
+def git_src_dir(projects_yaml, project):
+    """
+    Return the directory where the specified project's source is located.
+    """
+    parent_dir = '/mnt/openstack-git'
+
+    if not projects_yaml:
+        return
+
+    projects = yaml.load(projects_yaml)
+
+    if 'directory' in projects.keys():
+        parent_dir = projects['directory']
+
+    for p in projects['repositories']:
+        if p['name'] == project:
+            return os.path.join(parent_dir, os.path.basename(p['repository']))
+
+    return None
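git_src_dir() resolves where a given project landed after the clone. A small self-contained sketch with an illustrative YAML snippet; no 'directory' key is given, so the default parent directory applies:

from charmhelpers.contrib.openstack.utils import git_src_dir

projects_yaml = """
repositories:
  - {name: requirements,
     repository: 'git://git.openstack.org/openstack/requirements.git',
     branch: 'stable/icehouse'}
  - {name: keystone,
     repository: 'git://git.openstack.org/openstack/keystone.git',
     branch: 'stable/icehouse'}
"""

# Returns the basename of the repository URL under the parent directory,
# i.e. '/mnt/openstack-git/keystone.git'.
print(git_src_dir(projects_yaml, 'keystone'))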
@@ -566,3 +566,29 @@ class Hooks(object):
 def charm_dir():
     """Return the root directory of the current charm"""
     return os.environ.get('CHARM_DIR')
+
+
+@cached
+def action_get(key=None):
+    """Gets the value of an action parameter, or all key/value param pairs"""
+    cmd = ['action-get']
+    if key is not None:
+        cmd.append(key)
+    cmd.append('--format=json')
+    action_data = json.loads(subprocess.check_output(cmd).decode('UTF-8'))
+    return action_data
+
+
+def action_set(values):
+    """Sets the values to be returned after the action finishes"""
+    cmd = ['action-set']
+    for k, v in list(values.items()):
+        cmd.append('{}={}'.format(k, v))
+    subprocess.check_call(cmd)
+
+
+def action_fail(message):
+    """Sets the action status to failed and sets the error message.
+
+    The results set by action_set are preserved."""
+    subprocess.check_call(['action-fail', message])
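The three new helpers wrap the action-get, action-set and action-fail hook tools, so they only work while Juju is executing an action. A hedged sketch of an action script using them; the parameter and result names are invented:

import sys
from charmhelpers.core.hookenv import action_get, action_set, action_fail

def pause_action():
    # Read a single parameter (or call action_get() with no key for all of them).
    force = action_get('force')
    try:
        # ... do the actual work here ...
        action_set({'outcome': 'success', 'forced': force})
    except Exception as e:
        # Results already recorded with action_set are preserved.
        action_fail('pause failed: {}'.format(e))
        sys.exit(0)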
@@ -339,12 +339,16 @@ def lsb_release():
 def pwgen(length=None):
     """Generate a random password."""
     if length is None:
+        # A random length is ok to use a weak PRNG
         length = random.choice(range(35, 45))
     alphanumeric_chars = [
         l for l in (string.ascii_letters + string.digits)
         if l not in 'l0QD1vAEIOUaeiou']
+    # Use a crypto-friendly PRNG (e.g. /dev/urandom) for making the
+    # actual password
+    random_generator = random.SystemRandom()
     random_chars = [
-        random.choice(alphanumeric_chars) for _ in range(length)]
+        random_generator.choice(alphanumeric_chars) for _ in range(length)]
     return(''.join(random_chars))


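The pwgen() change swaps the default Mersenne Twister PRNG for random.SystemRandom(), which draws from the OS entropy source (/dev/urandom on Linux); only the password length is still chosen with the ordinary PRNG. A standalone sketch of the same idea:

import random
import string

def pwgen_sketch(length=None):
    if length is None:
        # A weak PRNG is fine for choosing only the length.
        length = random.choice(range(35, 45))
    chars = [c for c in (string.ascii_letters + string.digits)
             if c not in 'l0QD1vAEIOUaeiou']
    # Crypto-friendly, OS-backed PRNG for the password characters themselves.
    rng = random.SystemRandom()
    return ''.join(rng.choice(chars) for _ in range(length))

print(pwgen_sketch(12))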
@@ -139,7 +139,7 @@ class MysqlRelation(RelationContext):

     def __init__(self, *args, **kwargs):
         self.required_keys = ['host', 'user', 'password', 'database']
-        super(HttpRelation).__init__(self, *args, **kwargs)
+        RelationContext.__init__(self, *args, **kwargs)


 class HttpRelation(RelationContext):
@@ -154,7 +154,7 @@ class HttpRelation(RelationContext):

     def __init__(self, *args, **kwargs):
         self.required_keys = ['host', 'port']
-        super(HttpRelation).__init__(self, *args, **kwargs)
+        RelationContext.__init__(self, *args, **kwargs)

     def provide_data(self):
         return {
@@ -443,7 +443,7 @@ class HookData(object):
             data = hookenv.execution_environment()
             self.conf = conf_delta = self.kv.delta(data['conf'], 'config')
             self.rels = rels_delta = self.kv.delta(data['rels'], 'rels')
-            self.kv.set('env', data['env'])
+            self.kv.set('env', dict(data['env']))
             self.kv.set('unit', data['unit'])
             self.kv.set('relid', data.get('relid'))
             return conf_delta, rels_delta
@@ -15,6 +15,7 @@
 # along with charm-helpers. If not, see <http://www.gnu.org/licenses/>.

 import six
+from collections import OrderedDict
 from charmhelpers.contrib.amulet.deployment import (
     AmuletDeployment
 )
@@ -100,12 +101,34 @@ class OpenStackAmuletDeployment(AmuletDeployment):
         """
         (self.precise_essex, self.precise_folsom, self.precise_grizzly,
          self.precise_havana, self.precise_icehouse,
-         self.trusty_icehouse) = range(6)
+         self.trusty_icehouse, self.trusty_juno, self.trusty_kilo) = range(8)
         releases = {
             ('precise', None): self.precise_essex,
             ('precise', 'cloud:precise-folsom'): self.precise_folsom,
             ('precise', 'cloud:precise-grizzly'): self.precise_grizzly,
             ('precise', 'cloud:precise-havana'): self.precise_havana,
             ('precise', 'cloud:precise-icehouse'): self.precise_icehouse,
-            ('trusty', None): self.trusty_icehouse}
+            ('trusty', None): self.trusty_icehouse,
+            ('trusty', 'cloud:trusty-juno'): self.trusty_juno,
+            ('trusty', 'cloud:trusty-kilo'): self.trusty_kilo}
         return releases[(self.series, self.openstack)]
+
+    def _get_openstack_release_string(self):
+        """Get openstack release string.
+
+        Return a string representing the openstack release.
+        """
+        releases = OrderedDict([
+            ('precise', 'essex'),
+            ('quantal', 'folsom'),
+            ('raring', 'grizzly'),
+            ('saucy', 'havana'),
+            ('trusty', 'icehouse'),
+            ('utopic', 'juno'),
+            ('vivid', 'kilo'),
+        ])
+        if self.openstack:
+            os_origin = self.openstack.split(':')[1]
+            return os_origin.split('%s-' % self.series)[1].split('/')[0]
+        else:
+            return releases[self.series]