Sync charm-helpers.

Corey Bryant
2014-12-17 15:51:50 +00:00
parent 2ad775359f
commit e600e8f20a
13 changed files with 234 additions and 97 deletions

View File

@@ -0,0 +1,22 @@
+# Bootstrap charm-helpers, installing its dependencies if necessary using
+# only standard libraries.
+import subprocess
+import sys
+
+try:
+    import six  # flake8: noqa
+except ImportError:
+    if sys.version_info.major == 2:
+        subprocess.check_call(['apt-get', 'install', '-y', 'python-six'])
+    else:
+        subprocess.check_call(['apt-get', 'install', '-y', 'python3-six'])
+    import six  # flake8: noqa
+
+try:
+    import yaml  # flake8: noqa
+except ImportError:
+    if sys.version_info.major == 2:
+        subprocess.check_call(['apt-get', 'install', '-y', 'python-yaml'])
+    else:
+        subprocess.check_call(['apt-get', 'install', '-y', 'python3-yaml'])
+    import yaml  # flake8: noqa
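Note: the bootstrap above keys off sys.version_info.major to pick between the python-* and python3-* archive packages. A minimal sketch of the same install-on-ImportError pattern, generalised to an arbitrary module (the helper name and package names are illustrative, not part of charm-helpers; assumes root and Debian/Ubuntu apt, as charm hooks have):

import subprocess
import sys


def ensure_module(module_name, apt_suffix):
    """Import module_name, apt-get installing its distro package on demand.

    apt_suffix is the package name minus the python-/python3- prefix,
    e.g. 'six' or 'yaml'.
    """
    try:
        return __import__(module_name)
    except ImportError:
        prefix = 'python-' if sys.version_info.major == 2 else 'python3-'
        subprocess.check_call(['apt-get', 'install', '-y', prefix + apt_suffix])
        return __import__(module_name)


# yaml = ensure_module('yaml', 'yaml')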

View File

@@ -13,9 +13,10 @@ clustering-related helpers.
 import subprocess
 import os
 from socket import gethostname as get_unit_hostname
+import six
 from charmhelpers.core.hookenv import (
     log,
     relation_ids,
@@ -77,7 +78,7 @@ def is_crm_leader(resource):
         "show", resource
     ]
     try:
-        status = subprocess.check_output(cmd)
+        status = subprocess.check_output(cmd).decode('UTF-8')
     except subprocess.CalledProcessError:
         return False
     else:
@@ -150,34 +151,42 @@ def https():
     return False


-def determine_api_port(public_port):
+def determine_api_port(public_port, singlenode_mode=False):
     '''
     Determine correct API server listening port based on
     existence of HTTPS reverse proxy and/or haproxy.

     public_port: int: standard public port for given service

+    singlenode_mode: boolean: Shuffle ports when only a single unit is present
+
     returns: int: the correct listening port for the API service
     '''
     i = 0
-    if len(peer_units()) > 0 or is_clustered():
+    if singlenode_mode:
+        i += 1
+    elif len(peer_units()) > 0 or is_clustered():
         i += 1
     if https():
         i += 1
     return public_port - (i * 10)


-def determine_apache_port(public_port):
+def determine_apache_port(public_port, singlenode_mode=False):
     '''
     Description: Determine correct apache listening port based on public IP +
     state of the cluster.

     public_port: int: standard public port for given service

+    singlenode_mode: boolean: Shuffle ports when only a single unit is present
+
     returns: int: the correct listening port for the HAProxy service
     '''
     i = 0
-    if len(peer_units()) > 0 or is_clustered():
+    if singlenode_mode:
+        i += 1
+    elif len(peer_units()) > 0 or is_clustered():
         i += 1
     return public_port - (i * 10)
@@ -197,7 +206,7 @@ def get_hacluster_config():
     for setting in settings:
         conf[setting] = config_get(setting)
     missing = []
-    [missing.append(s) for s, v in conf.iteritems() if v is None]
+    [missing.append(s) for s, v in six.iteritems(conf) if v is None]
     if missing:
         log('Insufficient config data to configure hacluster.', level=ERROR)
         raise HAIncompleteConfig
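Note: the determine_api_port()/determine_apache_port() changes above keep the existing convention of stepping the listening port down by 10 for each layer in front of the service (haproxy when clustered or in singlenode_mode, apache when https() is true). A rough worked example of that arithmetic, with an illustrative public port:

# Illustrative only: 9696 stands in for a service's standard public port.
public_port = 9696

# determine_api_port with a cluster (or singlenode_mode) and https enabled:
# both checks add 1, so the API itself binds two slots below the public port.
i = 1 + 1
assert public_port - (i * 10) == 9676

# determine_apache_port only applies the cluster/singlenode check.
i = 1
assert public_port - (i * 10) == 9686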

View File

@@ -228,7 +228,7 @@ def get_iface_addr(iface='eth0', inet_type='AF_INET', inc_aliases=False,
raise Exception("Interface '%s' doesn't have any %s addresses." % raise Exception("Interface '%s' doesn't have any %s addresses." %
(iface, inet_type)) (iface, inet_type))
return addresses return sorted(addresses)
get_ipv4_addr = partial(get_iface_addr, inet_type='AF_INET') get_ipv4_addr = partial(get_iface_addr, inet_type='AF_INET')
@@ -302,7 +302,7 @@ def get_ipv6_addr(iface=None, inc_aliases=False, fatal=True, exc_list=None,
if global_addrs: if global_addrs:
# Make sure any found global addresses are not temporary # Make sure any found global addresses are not temporary
cmd = ['ip', 'addr', 'show', iface] cmd = ['ip', 'addr', 'show', iface]
out = subprocess.check_output(cmd) out = subprocess.check_output(cmd).decode('UTF-8')
if dynamic_only: if dynamic_only:
key = re.compile("inet6 (.+)/[0-9]+ scope global dynamic.*") key = re.compile("inet6 (.+)/[0-9]+ scope global dynamic.*")
else: else:
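Note: the .decode('UTF-8') added above matters because subprocess.check_output() returns bytes on Python 3, while the regex matching that follows expects text. A small standalone illustration of the pattern (the command is just an example):

import re
import subprocess

# Decode the bytes from check_output() before doing any text processing so
# the same code behaves identically on Python 2 and Python 3.
out = subprocess.check_output(
    ['echo', 'inet6 2001:db8::1/64 scope global']).decode('UTF-8')
match = re.search(r'inet6 (\S+)/\d+ scope global', out)
print(match.group(1) if match else None)  # 2001:db8::1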

View File

@@ -1,3 +1,4 @@
+import six
 from charmhelpers.core.hookenv import relation_id as current_relation_id
 from charmhelpers.core.hookenv import (
     is_relation_made,
@@ -93,7 +94,7 @@ def peer_echo(includes=None):
             if ex in echo_data:
                 echo_data.pop(ex)
     else:
-        for attribute, value in rdata.iteritems():
+        for attribute, value in six.iteritems(rdata):
             for include in includes:
                 if include in attribute:
                     echo_data[attribute] = value
@@ -119,8 +120,8 @@ def peer_store_and_set(relation_id=None, peer_relation_name='cluster',
                  relation_settings=relation_settings,
                  **kwargs)
     if is_relation_made(peer_relation_name):
-        for key, value in dict(kwargs.items() +
-                               relation_settings.items()).iteritems():
+        for key, value in six.iteritems(dict(list(kwargs.items()) +
+                                             list(relation_settings.items()))):
             key_prefix = relation_id or current_relation_id()
             peer_store(key_prefix + delimiter + key,
                        value,

View File

@@ -3,10 +3,11 @@
 __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'

+import io
 import os


-class Fstab(file):
+class Fstab(io.FileIO):
     """This class extends file in order to implement a file reader/writer
     for file `/etc/fstab`
     """
@@ -24,8 +25,8 @@ class Fstab(file):
                 options = "defaults"

             self.options = options
-            self.d = d
-            self.p = p
+            self.d = int(d)
+            self.p = int(p)

         def __eq__(self, o):
             return str(self) == str(o)
@@ -45,7 +46,7 @@ class Fstab(file):
             self._path = path
         else:
             self._path = self.DEFAULT_PATH
-        file.__init__(self, self._path, 'r+')
+        super(Fstab, self).__init__(self._path, 'rb+')

     def _hydrate_entry(self, line):
         # NOTE: use split with no arguments to split on any
@@ -58,8 +59,9 @@ class Fstab(file):
     def entries(self):
         self.seek(0)
         for line in self.readlines():
+            line = line.decode('us-ascii')
             try:
-                if not line.startswith("#"):
+                if line.strip() and not line.startswith("#"):
                     yield self._hydrate_entry(line)
             except ValueError:
                 pass
@@ -75,14 +77,14 @@ class Fstab(file):
         if self.get_entry_by_attr('device', entry.device):
             return False

-        self.write(str(entry) + '\n')
+        self.write((str(entry) + '\n').encode('us-ascii'))
         self.truncate()
         return entry

     def remove_entry(self, entry):
         self.seek(0)

-        lines = self.readlines()
+        lines = [l.decode('us-ascii') for l in self.readlines()]

         found = False
         for index, line in enumerate(lines):
@@ -97,7 +99,7 @@ class Fstab(file):
         lines.remove(line)

         self.seek(0)
-        self.write(''.join(lines))
+        self.write(''.join(lines).encode('us-ascii'))
         self.truncate()

         return True
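Note: Fstab now derives from io.FileIO rather than the Python-2-only file builtin. io.FileIO does unbuffered binary I/O on both interpreters, which is why the hunks above decode each line on read and encode on write. A tiny sketch of that behaviour against a temporary file (not /etc/fstab):

import io
import tempfile

tmp = tempfile.NamedTemporaryFile(delete=False)  # stand-in for /etc/fstab
tmp.close()

with io.FileIO(tmp.name, 'rb+') as f:            # same mode the new __init__ uses
    f.write('/dev/sda1 / ext4 defaults 0 1\n'.encode('us-ascii'))
    f.seek(0)
    for line in f.readlines():                   # lines come back as bytes
        print(line.decode('us-ascii').split())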

View File

@@ -9,9 +9,14 @@ import json
 import yaml
 import subprocess
 import sys
-import UserDict
 from subprocess import CalledProcessError

+import six
+if not six.PY3:
+    from UserDict import UserDict
+else:
+    from collections import UserDict
+
 CRITICAL = "CRITICAL"
 ERROR = "ERROR"
 WARNING = "WARNING"
@@ -63,16 +68,18 @@ def log(message, level=None):
     command = ['juju-log']
     if level:
         command += ['-l', level]
+    if not isinstance(message, six.string_types):
+        message = repr(message)
     command += [message]
     subprocess.call(command)


-class Serializable(UserDict.IterableUserDict):
+class Serializable(UserDict):
     """Wrapper, an object that can be serialized to yaml or json"""

     def __init__(self, obj):
         # wrap the object
-        UserDict.IterableUserDict.__init__(self)
+        UserDict.__init__(self)
         self.data = obj

     def __getattr__(self, attr):
@@ -218,7 +225,7 @@ class Config(dict):
         prev_keys = []
         if self._prev_dict is not None:
             prev_keys = self._prev_dict.keys()
-        return list(set(prev_keys + dict.keys(self)))
+        return list(set(prev_keys + list(dict.keys(self))))

     def load_previous(self, path=None):
         """Load previous copy of config from disk.
@@ -269,7 +276,7 @@ class Config(dict):
         """
         if self._prev_dict:
-            for k, v in self._prev_dict.iteritems():
+            for k, v in six.iteritems(self._prev_dict):
                 if k not in self:
                     self[k] = v
         with open(self.path, 'w') as f:
@@ -284,7 +291,8 @@ def config(scope=None):
         config_cmd_line.append(scope)
     config_cmd_line.append('--format=json')
     try:
-        config_data = json.loads(subprocess.check_output(config_cmd_line))
+        config_data = json.loads(
+            subprocess.check_output(config_cmd_line).decode('UTF-8'))
         if scope is not None:
             return config_data
         return Config(config_data)
@@ -303,10 +311,10 @@ def relation_get(attribute=None, unit=None, rid=None):
     if unit:
         _args.append(unit)
     try:
-        return json.loads(subprocess.check_output(_args))
+        return json.loads(subprocess.check_output(_args).decode('UTF-8'))
     except ValueError:
         return None
-    except CalledProcessError, e:
+    except CalledProcessError as e:
         if e.returncode == 2:
             return None
         raise
@@ -318,7 +326,7 @@ def relation_set(relation_id=None, relation_settings=None, **kwargs):
     relation_cmd_line = ['relation-set']
     if relation_id is not None:
         relation_cmd_line.extend(('-r', relation_id))
-    for k, v in (relation_settings.items() + kwargs.items()):
+    for k, v in (list(relation_settings.items()) + list(kwargs.items())):
         if v is None:
             relation_cmd_line.append('{}='.format(k))
         else:
@@ -335,7 +343,8 @@ def relation_ids(reltype=None):
     relid_cmd_line = ['relation-ids', '--format=json']
     if reltype is not None:
         relid_cmd_line.append(reltype)
-        return json.loads(subprocess.check_output(relid_cmd_line)) or []
+        return json.loads(
+            subprocess.check_output(relid_cmd_line).decode('UTF-8')) or []
     return []
@@ -346,7 +355,8 @@ def related_units(relid=None):
     units_cmd_line = ['relation-list', '--format=json']
     if relid is not None:
         units_cmd_line.extend(('-r', relid))
-    return json.loads(subprocess.check_output(units_cmd_line)) or []
+    return json.loads(
+        subprocess.check_output(units_cmd_line).decode('UTF-8')) or []


 @cached
@@ -385,21 +395,31 @@ def relations_of_type(reltype=None):
     return relation_data


+@cached
+def metadata():
+    """Get the current charm metadata.yaml contents as a python object"""
+    with open(os.path.join(charm_dir(), 'metadata.yaml')) as md:
+        return yaml.safe_load(md)
+
+
 @cached
 def relation_types():
     """Get a list of relation types supported by this charm"""
-    charmdir = os.environ.get('CHARM_DIR', '')
-    mdf = open(os.path.join(charmdir, 'metadata.yaml'))
-    md = yaml.safe_load(mdf)
     rel_types = []
+    md = metadata()
     for key in ('provides', 'requires', 'peers'):
         section = md.get(key)
         if section:
             rel_types.extend(section.keys())
-    mdf.close()
     return rel_types


+@cached
+def charm_name():
+    """Get the name of the current charm as is specified on metadata.yaml"""
+    return metadata().get('name')
+
+
 @cached
 def relations():
     """Get a nested dictionary of relation data for all related units"""
@@ -455,7 +475,7 @@ def unit_get(attribute):
     """Get the unit ID for the remote unit"""
     _args = ['unit-get', '--format=json', attribute]
     try:
-        return json.loads(subprocess.check_output(_args))
+        return json.loads(subprocess.check_output(_args).decode('UTF-8'))
     except ValueError:
         return None
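Note: two Python 3 issues drive most of the hookenv changes: UserDict moved from its own module into collections, and the JSON emitted by the hook tools arrives as bytes from check_output(). A minimal sketch of the UserDict half of that pattern (WrappedConfig is a made-up name for illustration):

import six

if not six.PY3:
    from UserDict import UserDict       # Python 2: standalone module
else:
    from collections import UserDict    # Python 3: lives in collections


class WrappedConfig(UserDict):
    """Wrap an existing dict the way Serializable does above."""
    def __init__(self, obj):
        UserDict.__init__(self)
        self.data = obj


cfg = WrappedConfig({'debug': True})
print(cfg['debug'])  # True on either interpreter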

View File

@@ -14,11 +14,12 @@ import string
 import subprocess
 import hashlib
 from contextlib import contextmanager
 from collections import OrderedDict

-from hookenv import log
-from fstab import Fstab
+import six
+
+from .hookenv import log
+from .fstab import Fstab


 def service_start(service_name):
@@ -54,7 +55,9 @@ def service(action, service_name):
 def service_running(service):
     """Determine whether a system service is running"""
     try:
-        output = subprocess.check_output(['service', service, 'status'], stderr=subprocess.STDOUT)
+        output = subprocess.check_output(
+            ['service', service, 'status'],
+            stderr=subprocess.STDOUT).decode('UTF-8')
     except subprocess.CalledProcessError:
         return False
     else:
@@ -67,7 +70,9 @@ def service_running(service):
 def service_available(service_name):
     """Determine whether a system service is available"""
     try:
-        subprocess.check_output(['service', service_name, 'status'], stderr=subprocess.STDOUT)
+        subprocess.check_output(
+            ['service', service_name, 'status'],
+            stderr=subprocess.STDOUT).decode('UTF-8')
     except subprocess.CalledProcessError as e:
         return 'unrecognized service' not in e.output
     else:
@@ -96,6 +101,26 @@ def adduser(username, password=None, shell='/bin/bash', system_user=False):
     return user_info


+def add_group(group_name, system_group=False):
+    """Add a group to the system"""
+    try:
+        group_info = grp.getgrnam(group_name)
+        log('group {0} already exists!'.format(group_name))
+    except KeyError:
+        log('creating group {0}'.format(group_name))
+        cmd = ['addgroup']
+        if system_group:
+            cmd.append('--system')
+        else:
+            cmd.extend([
+                '--group',
+            ])
+        cmd.append(group_name)
+        subprocess.check_call(cmd)
+        group_info = grp.getgrnam(group_name)
+    return group_info
+
+
 def add_user_to_group(username, group):
     """Add a user to a group"""
     cmd = [
@@ -115,7 +140,7 @@ def rsync(from_path, to_path, flags='-r', options=None):
     cmd.append(from_path)
     cmd.append(to_path)
     log(" ".join(cmd))
-    return subprocess.check_output(cmd).strip()
+    return subprocess.check_output(cmd).decode('UTF-8').strip()


 def symlink(source, destination):
@@ -130,23 +155,26 @@ def symlink(source, destination):
     subprocess.check_call(cmd)


-def mkdir(path, owner='root', group='root', perms=0555, force=False):
+def mkdir(path, owner='root', group='root', perms=0o555, force=False):
     """Create a directory"""
     log("Making dir {} {}:{} {:o}".format(path, owner, group,
                                           perms))
     uid = pwd.getpwnam(owner).pw_uid
     gid = grp.getgrnam(group).gr_gid
     realpath = os.path.abspath(path)
-    if os.path.exists(realpath):
-        if force and not os.path.isdir(realpath):
+    path_exists = os.path.exists(realpath)
+    if path_exists and force:
+        if not os.path.isdir(realpath):
             log("Removing non-directory file {} prior to mkdir()".format(path))
             os.unlink(realpath)
-    else:
-        os.makedirs(realpath, perms)
-    os.chown(realpath, uid, gid)
+            os.makedirs(realpath, perms)
+            os.chown(realpath, uid, gid)
+    elif not path_exists:
+        os.makedirs(realpath, perms)
+        os.chown(realpath, uid, gid)


-def write_file(path, content, owner='root', group='root', perms=0444):
+def write_file(path, content, owner='root', group='root', perms=0o444):
     """Create or overwrite a file with the contents of a string"""
     log("Writing file {} {}:{} {:o}".format(path, owner, group, perms))
     uid = pwd.getpwnam(owner).pw_uid
@@ -177,7 +205,7 @@ def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"):
     cmd_args.extend([device, mountpoint])
     try:
         subprocess.check_output(cmd_args)
-    except subprocess.CalledProcessError, e:
+    except subprocess.CalledProcessError as e:
         log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))
         return False
@@ -191,7 +219,7 @@ def umount(mountpoint, persist=False):
     cmd_args = ['umount', mountpoint]
     try:
         subprocess.check_output(cmd_args)
-    except subprocess.CalledProcessError, e:
+    except subprocess.CalledProcessError as e:
         log('Error unmounting {}\n{}'.format(mountpoint, e.output))
         return False
@@ -218,8 +246,8 @@ def file_hash(path, hash_type='md5'):
     """
     if os.path.exists(path):
         h = getattr(hashlib, hash_type)()
-        with open(path, 'r') as source:
-            h.update(source.read())  # IGNORE:E1101 - it does have update
+        with open(path, 'rb') as source:
+            h.update(source.read())
         return h.hexdigest()
     else:
         return None
@@ -297,7 +325,7 @@ def pwgen(length=None):
     if length is None:
         length = random.choice(range(35, 45))
     alphanumeric_chars = [
-        l for l in (string.letters + string.digits)
+        l for l in (string.ascii_letters + string.digits)
         if l not in 'l0QD1vAEIOUaeiou']
     random_chars = [
         random.choice(alphanumeric_chars) for _ in range(length)]
@@ -306,14 +334,14 @@

 def list_nics(nic_type):
     '''Return a list of nics of given type(s)'''
-    if isinstance(nic_type, basestring):
+    if isinstance(nic_type, six.string_types):
         int_types = [nic_type]
     else:
         int_types = nic_type
     interfaces = []
     for int_type in int_types:
         cmd = ['ip', 'addr', 'show', 'label', int_type + '*']
-        ip_output = subprocess.check_output(cmd).split('\n')
+        ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
         ip_output = (line for line in ip_output if line)
         for line in ip_output:
             if line.split()[1].startswith(int_type):
@@ -335,7 +363,7 @@ def set_nic_mtu(nic, mtu):

 def get_nic_mtu(nic):
     cmd = ['ip', 'addr', 'show', nic]
-    ip_output = subprocess.check_output(cmd).split('\n')
+    ip_output = subprocess.check_output(cmd).decode('UTF-8').split('\n')
     mtu = ""
     for line in ip_output:
         words = line.split()
@@ -346,7 +374,7 @@ def get_nic_mtu(nic):

 def get_nic_hwaddr(nic):
     cmd = ['ip', '-o', '-0', 'addr', 'show', nic]
-    ip_output = subprocess.check_output(cmd)
+    ip_output = subprocess.check_output(cmd).decode('UTF-8')
     hwaddr = ""
     words = ip_output.split()
     if 'link/ether' in words:
@@ -363,8 +391,8 @@ def cmp_pkgrevno(package, revno, pkgcache=None):
     '''
     import apt_pkg
-    from charmhelpers.fetch import apt_cache
     if not pkgcache:
+        from charmhelpers.fetch import apt_cache
         pkgcache = apt_cache()
     pkg = pkgcache[package]
     return apt_pkg.version_compare(pkg.current_ver.ver_str, revno)
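Note: several hunks in this file swap pre-Python-3 octal literals (0555, 0444) for the 0o form; Python 3 rejects the old spelling outright as a SyntaxError, while 0o works on Python 2.6+ as well, and the value is unchanged. A quick check of that equivalence:

import stat

# 0o555 (r-xr-xr-x) has the same value as the old 0555 spelling, but the
# 0o prefix is the only octal literal syntax Python 3 accepts.
perms = 0o555
assert perms == (stat.S_IRUSR | stat.S_IXUSR |
                 stat.S_IRGRP | stat.S_IXGRP |
                 stat.S_IROTH | stat.S_IXOTH)
print(oct(perms))  # 0o555 on Python 3 (0555 on Python 2)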

View File

@@ -196,7 +196,7 @@ class StoredContext(dict):
         if not os.path.isabs(file_name):
             file_name = os.path.join(hookenv.charm_dir(), file_name)
         with open(file_name, 'w') as file_stream:
-            os.fchmod(file_stream.fileno(), 0600)
+            os.fchmod(file_stream.fileno(), 0o600)
             yaml.dump(config_data, file_stream)

     def read_context(self, file_name):
@@ -211,15 +211,19 @@ class StoredContext(dict):
 class TemplateCallback(ManagerCallback):
     """
-    Callback class that will render a Jinja2 template, for use as a ready action.
+    Callback class that will render a Jinja2 template, for use as a ready
+    action.
+
+    :param str source: The template source file, relative to
+        `$CHARM_DIR/templates`

-    :param str source: The template source file, relative to `$CHARM_DIR/templates`
     :param str target: The target to write the rendered template to
     :param str owner: The owner of the rendered file
     :param str group: The group of the rendered file
     :param int perms: The permissions of the rendered file
     """
-    def __init__(self, source, target, owner='root', group='root', perms=0444):
+    def __init__(self, source, target,
+                 owner='root', group='root', perms=0o444):
         self.source = source
         self.target = target
         self.owner = owner

View File

@@ -4,7 +4,8 @@ from charmhelpers.core import host
 from charmhelpers.core import hookenv


-def render(source, target, context, owner='root', group='root', perms=0444, templates_dir=None):
+def render(source, target, context, owner='root', group='root',
+           perms=0o444, templates_dir=None):
     """
     Render a template.
@@ -47,5 +48,5 @@ def render(source, target, context, owner='root', group='root', perms=0444, temp
                     level=hookenv.ERROR)
         raise e
     content = template.render(context)
-    host.mkdir(os.path.dirname(target))
+    host.mkdir(os.path.dirname(target), owner, group)
     host.write_file(target, content, owner, group, perms)

View File

@@ -5,10 +5,6 @@ from yaml import safe_load
 from charmhelpers.core.host import (
     lsb_release
 )
-from urlparse import (
-    urlparse,
-    urlunparse,
-)
 import subprocess
 from charmhelpers.core.hookenv import (
     config,
@@ -16,6 +12,12 @@ from charmhelpers.core.hookenv import (
 )
 import os

+import six
+if six.PY3:
+    from urllib.parse import urlparse, urlunparse
+else:
+    from urlparse import urlparse, urlunparse
+

 CLOUD_ARCHIVE = """# Ubuntu Cloud Archive
 deb http://ubuntu-cloud.archive.canonical.com/ubuntu {} main
@@ -149,7 +151,7 @@ def apt_install(packages, options=None, fatal=False):
     cmd = ['apt-get', '--assume-yes']
     cmd.extend(options)
     cmd.append('install')
-    if isinstance(packages, basestring):
+    if isinstance(packages, six.string_types):
         cmd.append(packages)
     else:
         cmd.extend(packages)
@@ -182,7 +184,7 @@ def apt_update(fatal=False):
 def apt_purge(packages, fatal=False):
     """Purge one or more packages"""
     cmd = ['apt-get', '--assume-yes', 'purge']
-    if isinstance(packages, basestring):
+    if isinstance(packages, six.string_types):
         cmd.append(packages)
     else:
         cmd.extend(packages)
@@ -193,7 +195,7 @@ def apt_purge(packages, fatal=False):
 def apt_hold(packages, fatal=False):
     """Hold one or more packages"""
     cmd = ['apt-mark', 'hold']
-    if isinstance(packages, basestring):
+    if isinstance(packages, six.string_types):
         cmd.append(packages)
     else:
         cmd.extend(packages)
@@ -260,7 +262,7 @@ def add_source(source, key=None):
     if key:
         if '-----BEGIN PGP PUBLIC KEY BLOCK-----' in key:
-            with NamedTemporaryFile() as key_file:
+            with NamedTemporaryFile('w+') as key_file:
                 key_file.write(key)
                 key_file.flush()
                 key_file.seek(0)
@@ -297,14 +299,14 @@ def configure_sources(update=False,
     sources = safe_load((config(sources_var) or '').strip()) or []
     keys = safe_load((config(keys_var) or '').strip()) or None

-    if isinstance(sources, basestring):
+    if isinstance(sources, six.string_types):
         sources = [sources]

     if keys is None:
         for source in sources:
             add_source(source, None)
     else:
-        if isinstance(keys, basestring):
+        if isinstance(keys, six.string_types):
             keys = [keys]

         if len(sources) != len(keys):
@@ -401,7 +403,7 @@ def _run_apt_command(cmd, fatal=False):
         while result is None or result == APT_NO_LOCK:
             try:
                 result = subprocess.check_call(cmd, env=env)
-            except subprocess.CalledProcessError, e:
+            except subprocess.CalledProcessError as e:
                 retry_count = retry_count + 1
                 if retry_count > APT_NO_LOCK_RETRY_COUNT:
                     raise
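Note: basestring does not exist on Python 3, so the isinstance checks above now go through six.string_types, which is (basestring,) on Python 2 and (str,) on Python 3. A short sketch of the normalisation these functions perform on their packages argument (the helper name is illustrative):

import six


def normalize_packages(packages):
    """Accept a single name or any iterable of names, as apt_install() does."""
    if isinstance(packages, six.string_types):
        return [packages]
    return list(packages)


print(normalize_packages('haproxy'))                # ['haproxy']
print(normalize_packages(['haproxy', 'corosync']))  # ['haproxy', 'corosync']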

View File

@@ -1,8 +1,23 @@
 import os
-import urllib2
-from urllib import urlretrieve
-import urlparse
 import hashlib
+import re
+
+import six
+if six.PY3:
+    from urllib.request import (
+        build_opener, install_opener, urlopen, urlretrieve,
+        HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
+    )
+    from urllib.parse import urlparse, urlunparse, parse_qs
+    from urllib.error import URLError
+else:
+    from urllib import urlretrieve
+    from urllib2 import (
+        build_opener, install_opener, urlopen,
+        HTTPPasswordMgrWithDefaultRealm, HTTPBasicAuthHandler,
+        URLError
+    )
+    from urlparse import urlparse, urlunparse, parse_qs

 from charmhelpers.fetch import (
     BaseFetchHandler,
@@ -15,6 +30,24 @@ from charmhelpers.payload.archive import (
 from charmhelpers.core.host import mkdir, check_hash


+def splituser(host):
+    '''urllib.splituser(), but six's support of this seems broken'''
+    _userprog = re.compile('^(.*)@(.*)$')
+    match = _userprog.match(host)
+    if match:
+        return match.group(1, 2)
+    return None, host
+
+
+def splitpasswd(user):
+    '''urllib.splitpasswd(), but six's support of this is missing'''
+    _passwdprog = re.compile('^([^:]*):(.*)$', re.S)
+    match = _passwdprog.match(user)
+    if match:
+        return match.group(1, 2)
+    return user, None
+
+
 class ArchiveUrlFetchHandler(BaseFetchHandler):
     """
     Handler to download archive files from arbitrary URLs.
@@ -42,20 +75,20 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
         """
         # propogate all exceptions
         # URLError, OSError, etc
-        proto, netloc, path, params, query, fragment = urlparse.urlparse(source)
+        proto, netloc, path, params, query, fragment = urlparse(source)
         if proto in ('http', 'https'):
-            auth, barehost = urllib2.splituser(netloc)
+            auth, barehost = splituser(netloc)
             if auth is not None:
-                source = urlparse.urlunparse((proto, barehost, path, params, query, fragment))
-                username, password = urllib2.splitpasswd(auth)
-                passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
+                source = urlunparse((proto, barehost, path, params, query, fragment))
+                username, password = splitpasswd(auth)
+                passman = HTTPPasswordMgrWithDefaultRealm()
                 # Realm is set to None in add_password to force the username and password
                 # to be used whatever the realm
                 passman.add_password(None, source, username, password)
-                authhandler = urllib2.HTTPBasicAuthHandler(passman)
-                opener = urllib2.build_opener(authhandler)
-                urllib2.install_opener(opener)
-        response = urllib2.urlopen(source)
+                authhandler = HTTPBasicAuthHandler(passman)
+                opener = build_opener(authhandler)
+                install_opener(opener)
+        response = urlopen(source)
         try:
             with open(dest, 'w') as dest_file:
                 dest_file.write(response.read())
@@ -91,17 +124,21 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
         url_parts = self.parse_url(source)
         dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
         if not os.path.exists(dest_dir):
-            mkdir(dest_dir, perms=0755)
+            mkdir(dest_dir, perms=0o755)
         dld_file = os.path.join(dest_dir, os.path.basename(url_parts.path))
         try:
             self.download(source, dld_file)
-        except urllib2.URLError as e:
+        except URLError as e:
             raise UnhandledSource(e.reason)
         except OSError as e:
             raise UnhandledSource(e.strerror)
-        options = urlparse.parse_qs(url_parts.fragment)
+        options = parse_qs(url_parts.fragment)
         for key, value in options.items():
-            if key in hashlib.algorithms:
+            if not six.PY3:
+                algorithms = hashlib.algorithms
+            else:
+                algorithms = hashlib.algorithms_available
+            if key in algorithms:
                 check_hash(dld_file, value, key)
         if checksum:
             check_hash(dld_file, checksum, hash_type)
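Note: the new splituser()/splitpasswd() helpers above reproduce the urllib2.splituser()/urllib.splitpasswd() behaviour the old code relied on, so credentials embedded in a URL netloc still get peeled off. A condensed standalone copy, just to illustrate what they return (hostname and credentials are made up):

import re


def splituser(host):
    match = re.compile('^(.*)@(.*)$').match(host)
    return match.group(1, 2) if match else (None, host)


def splitpasswd(user):
    match = re.compile('^([^:]*):(.*)$', re.S).match(user)
    return match.group(1, 2) if match else (user, None)


print(splituser('admin:s3cret@files.example.com'))  # ('admin:s3cret', 'files.example.com')
print(splitpasswd('admin:s3cret'))                  # ('admin', 's3cret')
print(splituser('files.example.com'))               # (None, 'files.example.com')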

View File

@@ -5,6 +5,10 @@ from charmhelpers.fetch import (
 )
 from charmhelpers.core.host import mkdir

+import six
+if six.PY3:
+    raise ImportError('bzrlib does not support Python3')
+
 try:
     from bzrlib.branch import Branch
 except ImportError:
@@ -42,7 +46,7 @@ class BzrUrlFetchHandler(BaseFetchHandler):
         dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
                                 branch_name)
         if not os.path.exists(dest_dir):
-            mkdir(dest_dir, perms=0755)
+            mkdir(dest_dir, perms=0o755)
         try:
             self.branch(source, dest_dir)
         except OSError as e:

View File

@@ -5,6 +5,10 @@ from charmhelpers.fetch import (
 )
 from charmhelpers.core.host import mkdir

+import six
+if six.PY3:
+    raise ImportError('GitPython does not support Python 3')
+
 try:
     from git import Repo
 except ImportError:
@@ -17,7 +21,7 @@ class GitUrlFetchHandler(BaseFetchHandler):
     """Handler for git branches via generic and github URLs"""
     def can_handle(self, source):
         url_parts = self.parse_url(source)
-        #TODO (mattyw) no support for ssh git@ yet
+        # TODO (mattyw) no support for ssh git@ yet
         if url_parts.scheme not in ('http', 'https', 'git'):
             return False
         else:
@@ -30,13 +34,16 @@ class GitUrlFetchHandler(BaseFetchHandler):
         repo = Repo.clone_from(source, dest)
         repo.git.checkout(branch)

-    def install(self, source, branch="master"):
+    def install(self, source, branch="master", dest=None):
         url_parts = self.parse_url(source)
         branch_name = url_parts.path.strip("/").split("/")[-1]
-        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
-                                branch_name)
+        if dest:
+            dest_dir = os.path.join(dest, branch_name)
+        else:
+            dest_dir = os.path.join(os.environ.get('CHARM_DIR'), "fetched",
+                                    branch_name)
         if not os.path.exists(dest_dir):
-            mkdir(dest_dir, perms=0755)
+            mkdir(dest_dir, perms=0o755)
         try:
             self.clone(source, dest_dir, branch)
         except OSError as e: