synced lp:~cts-engineering/charms/trusty/charm-helpers/ipv6

Edward Hope-Morley 2014-09-22 21:02:27 +01:00
parent 942f4159fb
commit 6d53fe8e19
9 changed files with 445 additions and 59 deletions

bin/charm_helpers_sync.py (new file, 225 lines added)
View File

@@ -0,0 +1,225 @@
#!/usr/bin/python
#
# Copyright 2013 Canonical Ltd.

# Authors:
#   Adam Gandelman <adamg@ubuntu.com>
#

import logging
import optparse
import os
import subprocess
import shutil
import sys
import tempfile
import yaml

from fnmatch import fnmatch

CHARM_HELPERS_BRANCH = 'lp:charm-helpers'


def parse_config(conf_file):
    if not os.path.isfile(conf_file):
        logging.error('Invalid config file: %s.' % conf_file)
        return False
    return yaml.load(open(conf_file).read())


def clone_helpers(work_dir, branch):
    dest = os.path.join(work_dir, 'charm-helpers')
    logging.info('Checking out %s to %s.' % (branch, dest))
    cmd = ['bzr', 'checkout', '--lightweight', branch, dest]
    subprocess.check_call(cmd)
    return dest


def _module_path(module):
    return os.path.join(*module.split('.'))


def _src_path(src, module):
    return os.path.join(src, 'charmhelpers', _module_path(module))


def _dest_path(dest, module):
    return os.path.join(dest, _module_path(module))


def _is_pyfile(path):
    return os.path.isfile(path + '.py')


def ensure_init(path):
    '''
    ensure directories leading up to path are importable, omitting
    parent directory, eg path='/hooks/helpers/foo'/:
        hooks/
        hooks/helpers/__init__.py
        hooks/helpers/foo/__init__.py
    '''
    for d, dirs, files in os.walk(os.path.join(*path.split('/')[:2])):
        _i = os.path.join(d, '__init__.py')
        if not os.path.exists(_i):
            logging.info('Adding missing __init__.py: %s' % _i)
            open(_i, 'wb').close()


def sync_pyfile(src, dest):
    src = src + '.py'
    src_dir = os.path.dirname(src)
    logging.info('Syncing pyfile: %s -> %s.' % (src, dest))
    if not os.path.exists(dest):
        os.makedirs(dest)
    shutil.copy(src, dest)
    if os.path.isfile(os.path.join(src_dir, '__init__.py')):
        shutil.copy(os.path.join(src_dir, '__init__.py'),
                    dest)
    ensure_init(dest)


def get_filter(opts=None):
    opts = opts or []
    if 'inc=*' in opts:
        # do not filter any files, include everything
        return None

    def _filter(dir, ls):
        incs = [opt.split('=').pop() for opt in opts if 'inc=' in opt]
        _filter = []
        for f in ls:
            _f = os.path.join(dir, f)
            if not os.path.isdir(_f) and not _f.endswith('.py') and incs:
                if True not in [fnmatch(_f, inc) for inc in incs]:
                    logging.debug('Not syncing %s, does not match include '
                                  'filters (%s)' % (_f, incs))
                    _filter.append(f)
                else:
                    logging.debug('Including file, which matches include '
                                  'filters (%s): %s' % (incs, _f))
            elif (os.path.isfile(_f) and not _f.endswith('.py')):
                logging.debug('Not syncing file: %s' % f)
                _filter.append(f)
            elif (os.path.isdir(_f) and not
                  os.path.isfile(os.path.join(_f, '__init__.py'))):
                logging.debug('Not syncing directory: %s' % f)
                _filter.append(f)
        return _filter
    return _filter


def sync_directory(src, dest, opts=None):
    if os.path.exists(dest):
        logging.debug('Removing existing directory: %s' % dest)
        shutil.rmtree(dest)
    logging.info('Syncing directory: %s -> %s.' % (src, dest))
    shutil.copytree(src, dest, ignore=get_filter(opts))
    ensure_init(dest)


def sync(src, dest, module, opts=None):
    if os.path.isdir(_src_path(src, module)):
        sync_directory(_src_path(src, module), _dest_path(dest, module), opts)
    elif _is_pyfile(_src_path(src, module)):
        sync_pyfile(_src_path(src, module),
                    os.path.dirname(_dest_path(dest, module)))
    else:
        logging.warn('Could not sync: %s. Neither a pyfile or directory, '
                     'does it even exist?' % module)


def parse_sync_options(options):
    if not options:
        return []
    return options.split(',')


def extract_options(inc, global_options=None):
    global_options = global_options or []
    if global_options and isinstance(global_options, basestring):
        global_options = [global_options]
    if '|' not in inc:
        return (inc, global_options)
    inc, opts = inc.split('|')
    return (inc, parse_sync_options(opts) + global_options)


def sync_helpers(include, src, dest, options=None):
    if not os.path.isdir(dest):
        os.makedirs(dest)

    global_options = parse_sync_options(options)

    for inc in include:
        if isinstance(inc, str):
            inc, opts = extract_options(inc, global_options)
            sync(src, dest, inc, opts)
        elif isinstance(inc, dict):
            # could also do nested dicts here.
            for k, v in inc.iteritems():
                if isinstance(v, list):
                    for m in v:
                        inc, opts = extract_options(m, global_options)
                        sync(src, dest, '%s.%s' % (k, inc), opts)


if __name__ == '__main__':
    parser = optparse.OptionParser()
    parser.add_option('-c', '--config', action='store', dest='config',
                      default=None, help='helper config file')
    parser.add_option('-D', '--debug', action='store_true', dest='debug',
                      default=False, help='debug')
    parser.add_option('-b', '--branch', action='store', dest='branch',
                      help='charm-helpers bzr branch (overrides config)')
    parser.add_option('-d', '--destination', action='store', dest='dest_dir',
                      help='sync destination dir (overrides config)')
    (opts, args) = parser.parse_args()

    if opts.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    if opts.config:
        logging.info('Loading charm helper config from %s.' % opts.config)
        config = parse_config(opts.config)
        if not config:
            logging.error('Could not parse config from %s.' % opts.config)
            sys.exit(1)
    else:
        config = {}

    if 'branch' not in config:
        config['branch'] = CHARM_HELPERS_BRANCH
    if opts.branch:
        config['branch'] = opts.branch
    if opts.dest_dir:
        config['destination'] = opts.dest_dir

    if 'destination' not in config:
        logging.error('No destination dir. specified as option or config.')
        sys.exit(1)

    if 'include' not in config:
        if not args:
            logging.error('No modules to sync specified as option or config.')
            sys.exit(1)
        config['include'] = []
        [config['include'].append(a) for a in args]

    sync_options = None
    if 'options' in config:
        sync_options = config['options']

    tmpd = tempfile.mkdtemp()
    try:
        checkout = clone_helpers(tmpd, config['branch'])
        sync_helpers(config['include'], checkout, config['destination'],
                     options=sync_options)
    except Exception, e:
        logging.error("Could not sync: %s" % e)
        raise e
    finally:
        logging.debug('Cleaning up %s' % tmpd)
        shutil.rmtree(tmpd)
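The script above is normally driven by one of the charm-helpers-*.yaml files shown below and invoked as `python bin/charm_helpers_sync.py -c <config>`. The following is a minimal programmatic sketch of the same flow; it assumes the script is importable as a module named `charm_helpers_sync`, which is an assumption for illustration and not something this commit provides.

# Minimal sketch of what `python bin/charm_helpers_sync.py -c charm-helpers-hooks.yaml`
# does, assuming the script is importable as `charm_helpers_sync` (hypothetical name).
import logging
import shutil
import tempfile

import charm_helpers_sync as sync  # assumed import path

logging.basicConfig(level=logging.INFO)

config = sync.parse_config('charm-helpers-hooks.yaml')
tmpd = tempfile.mkdtemp()
try:
    # Check out the configured bzr branch and copy the listed modules
    # (e.g. 'core', 'contrib.amulet') into the configured destination.
    checkout = sync.clone_helpers(tmpd, config['branch'])
    sync.sync_helpers(config['include'], checkout, config['destination'],
                      options=config.get('options'))
finally:
    shutil.rmtree(tmpd)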

View File

@@ -1,4 +1,4 @@
branch: lp:charm-helpers
branch: lp:~cts-engineering/charms/trusty/charm-helpers/ipv6
destination: hooks/charmhelpers
include:
    - core

View File

@@ -1,4 +1,4 @@
branch: lp:charm-helpers
branch: lp:~cts-engineering/charms/trusty/charm-helpers/ipv6
destination: tests/charmhelpers
include:
    - contrib.amulet

View File

@@ -5,7 +5,9 @@ from functools import partial

from charmhelpers.fetch import apt_install
from charmhelpers.core.hookenv import (
    ERROR, log,
    WARNING,
    ERROR,
    log
)

try:
@@ -164,9 +166,9 @@ def format_ipv6_addr(address):
    if is_ipv6(address):
        address = "[%s]" % address
    else:
        log("Not an valid ipv6 address: %s" % address,
            level=ERROR)
        log("Not a valid ipv6 address: %s" % address, level=WARNING)
        address = None

    return address
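Illustrative behaviour of `format_ipv6_addr` after this change, assuming the helper lives at `charmhelpers.contrib.network.ip` as in upstream charm-helpers (the module path is not shown in this hunk):

# Illustrative only; module path is assumed from upstream charm-helpers.
from charmhelpers.contrib.network.ip import format_ipv6_addr

# IPv6 addresses are wrapped in brackets, e.g. for embedding in URLs or config files.
assert format_ipv6_addr('2001:db8::1') == '[2001:db8::1]'
# Anything that is not a valid IPv6 address now logs at WARNING (previously ERROR)
# and the function returns None.
assert format_ipv6_addr('10.0.0.1') is None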

View File

@@ -209,10 +209,15 @@ def mounts():
    return system_mounts


def file_hash(path):
    """Generate a md5 hash of the contents of 'path' or None if not found """
def file_hash(path, hash_type='md5'):
    """
    Generate a hash checksum of the contents of 'path' or None if not found.

    :param str hash_type: Any hash algorithm supported by :mod:`hashlib`,
                          such as md5, sha1, sha256, sha512, etc.
    """
    if os.path.exists(path):
        h = hashlib.md5()
        h = getattr(hashlib, hash_type)()
        with open(path, 'r') as source:
            h.update(source.read())  # IGNORE:E1101 - it does have update
        return h.hexdigest()
@@ -220,6 +225,26 @@ def file_hash(path):
    return None


def check_hash(path, checksum, hash_type='md5'):
    """
    Validate a file using a cryptographic checksum.

    :param str checksum: Value of the checksum used to validate the file.
    :param str hash_type: Hash algorithm used to generate :param:`checksum`.
        Can be any hash algorithm supported by :mod:`hashlib`,
        such as md5, sha1, sha256, sha512, etc.
    :raises ChecksumError: If the file fails the checksum
    """
    actual_checksum = file_hash(path, hash_type)
    if checksum != actual_checksum:
        raise ChecksumError("'%s' != '%s'" % (checksum, actual_checksum))


class ChecksumError(ValueError):
    pass


def restart_on_change(restart_map, stopstart=False):
    """Restart services based on configuration files changing

View File

@@ -118,6 +118,9 @@ class ServiceManager(object):
        else:
            self.provide_data()
            self.reconfigure_services()
        cfg = hookenv.config()
        if cfg.implicit_save:
            cfg.save()

    def provide_data(self):
        """

View File

@@ -1,3 +1,5 @@
import os
import yaml

from charmhelpers.core import hookenv
from charmhelpers.core import templating

@@ -19,15 +21,21 @@ class RelationContext(dict):
    the `name` attribute that are complete will be used to populate the dictionary
    values (see `get_data`, below).

    The generated context will be namespaced under the interface type, to prevent
    potential naming conflicts.
    The generated context will be namespaced under the relation :attr:`name`,
    to prevent potential naming conflicts.

    :param str name: Override the relation :attr:`name`, since it can vary from charm to charm
    :param list additional_required_keys: Extend the list of :attr:`required_keys`
    """
    name = None
    interface = None
    required_keys = []

    def __init__(self, *args, **kwargs):
        super(RelationContext, self).__init__(*args, **kwargs)
    def __init__(self, name=None, additional_required_keys=None):
        if name is not None:
            self.name = name
        if additional_required_keys is not None:
            self.required_keys.extend(additional_required_keys)
        self.get_data()

    def __bool__(self):
@@ -101,9 +109,115 @@ class RelationContext(dict):
        return {}


class MysqlRelation(RelationContext):
    """
    Relation context for the `mysql` interface.

    :param str name: Override the relation :attr:`name`, since it can vary from charm to charm
    :param list additional_required_keys: Extend the list of :attr:`required_keys`
    """
    name = 'db'
    interface = 'mysql'
    required_keys = ['host', 'user', 'password', 'database']


class HttpRelation(RelationContext):
    """
    Relation context for the `http` interface.

    :param str name: Override the relation :attr:`name`, since it can vary from charm to charm
    :param list additional_required_keys: Extend the list of :attr:`required_keys`
    """
    name = 'website'
    interface = 'http'
    required_keys = ['host', 'port']

    def provide_data(self):
        return {
            'host': hookenv.unit_get('private-address'),
            'port': 80,
        }


class RequiredConfig(dict):
    """
    Data context that loads config options with one or more mandatory options.

    Once the required options have been changed from their default values, all
    config options will be available, namespaced under `config` to prevent
    potential naming conflicts (for example, between a config option and a
    relation property).

    :param list *args: List of options that must be changed from their default values.
    """

    def __init__(self, *args):
        self.required_options = args
        self['config'] = hookenv.config()
        with open(os.path.join(hookenv.charm_dir(), 'config.yaml')) as fp:
            self.config = yaml.load(fp).get('options', {})

    def __bool__(self):
        for option in self.required_options:
            if option not in self['config']:
                return False
            current_value = self['config'][option]
            default_value = self.config[option].get('default')
            if current_value == default_value:
                return False
            if current_value in (None, '') and default_value in (None, ''):
                return False
        return True

    def __nonzero__(self):
        return self.__bool__()


class StoredContext(dict):
    """
    A data context that always returns the data that it was first created with.

    This is useful to do a one-time generation of things like passwords, that
    will thereafter use the same value that was originally generated, instead
    of generating a new value each time it is run.
    """

    def __init__(self, file_name, config_data):
        """
        If the file exists, populate `self` with the data from the file.
        Otherwise, populate with the given data and persist it to the file.
        """
        if os.path.exists(file_name):
            self.update(self.read_context(file_name))
        else:
            self.store_context(file_name, config_data)
            self.update(config_data)

    def store_context(self, file_name, config_data):
        if not os.path.isabs(file_name):
            file_name = os.path.join(hookenv.charm_dir(), file_name)
        with open(file_name, 'w') as file_stream:
            os.fchmod(file_stream.fileno(), 0600)
            yaml.dump(config_data, file_stream)

    def read_context(self, file_name):
        if not os.path.isabs(file_name):
            file_name = os.path.join(hookenv.charm_dir(), file_name)
        with open(file_name, 'r') as file_stream:
            data = yaml.load(file_stream)
            if not data:
                raise OSError("%s is empty" % file_name)
            return data


class TemplateCallback(ManagerCallback):
    """
    Callback class that will render a template, for use as a ready action.
    Callback class that will render a Jinja2 template, for use as a ready action.

    :param str source: The template source file, relative to `$CHARM_DIR/templates`
    :param str target: The target to write the rendered template to
    :param str owner: The owner of the rendered file
    :param str group: The group of the rendered file
    :param int perms: The permissions of the rendered file
    """
    def __init__(self, source, target, owner='root', group='root', perms=0444):
        self.source = source
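Sketch of how a charm hook might use the reworked `RelationContext` constructor; the `AmqpRelation` subclass, its interface, and its required keys are illustrative only and not part of this commit:

# Illustrative subclass; relation name, interface, and keys are made up.
from charmhelpers.core.services.helpers import RelationContext


class AmqpRelation(RelationContext):
    name = 'amqp'
    interface = 'rabbitmq'
    required_keys = ['hostname', 'password']


# The relation name and required keys can now be adjusted per charm
# via the new constructor arguments:
ctx = AmqpRelation(name='amqp-alt', additional_required_keys=['vhost'])
if ctx:
    # Truthy only once every required key is present on the relation;
    # the collected unit data is namespaced under the relation name.
    units = ctx['amqp-alt']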

View File

@@ -311,22 +311,35 @@ def configure_sources(update=False,
        apt_update(fatal=True)


def install_remote(source):
def install_remote(source, *args, **kwargs):
    """
    Install a file tree from a remote source

    The specified source should be a url of the form:
        scheme://[host]/path[#[option=value][&...]]

    Schemes supported are based on this modules submodules
    Options supported are submodule-specific"""
    Schemes supported are based on this module's submodules.
    Options supported are submodule-specific.
    Additional arguments are passed through to the submodule.

    For example::

        dest = install_remote('http://example.com/archive.tgz',
                              checksum='deadbeef',
                              hash_type='sha1')

    This will download `archive.tgz`, validate it using SHA1 and, if
    the file is ok, extract it and return the directory in which it
    was extracted. If the checksum fails, it will raise
    :class:`charmhelpers.core.host.ChecksumError`.
    """
    # We ONLY check for True here because can_handle may return a string
    # explaining why it can't handle a given source.
    handlers = [h for h in plugins() if h.can_handle(source) is True]
    installed_to = None
    for handler in handlers:
        try:
            installed_to = handler.install(source)
            installed_to = handler.install(source, *args, **kwargs)
        except UnhandledSource:
            pass
    if not installed_to:

View File

@@ -12,21 +12,19 @@ from charmhelpers.payload.archive import (
    get_archive_handler,
    extract,
)
from charmhelpers.core.host import mkdir
from charmhelpers.core.host import mkdir, check_hash

"""
This class is a plugin for charmhelpers.fetch.install_remote.
It grabs, validates and installs remote archives fetched over "http", "https", "ftp" or "file" protocols. The contents of the archive are installed in $CHARM_DIR/fetched/.
Example usage:
install_remote("https://example.com/some/archive.tar.gz")
# Installs the contents of archive.tar.gz in $CHARM_DIR/fetched/.
See charmhelpers.fetch.archiveurl.get_archivehandler for supported archive types.
"""


class ArchiveUrlFetchHandler(BaseFetchHandler):
    """Handler for archives via generic URLs"""
    """
    Handler to download archive files from arbitrary URLs.

    Can fetch from http, https, ftp, and file URLs.
    Can install either tarballs (.tar, .tgz, .tbz2, etc) or zip files.
    Installs the contents of the archive in $CHARM_DIR/fetched/.
    """
    def can_handle(self, source):
        url_parts = self.parse_url(source)
        if url_parts.scheme not in ('http', 'https', 'ftp', 'file'):
@@ -36,6 +34,12 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
        return False

    def download(self, source, dest):
        """
        Download an archive file.

        :param str source: URL pointing to an archive file.
        :param str dest: Local path location to download archive file to.
        """
        # propogate all exceptions
        # URLError, OSError, etc
        proto, netloc, path, params, query, fragment = urlparse.urlparse(source)
@@ -60,7 +64,29 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
            os.unlink(dest)
            raise e

    def install(self, source):
    # Mandatory file validation via Sha1 or MD5 hashing.
    def download_and_validate(self, url, hashsum, validate="sha1"):
        tempfile, headers = urlretrieve(url)
        check_hash(tempfile, hashsum, validate)
        return tempfile

    def install(self, source, dest=None, checksum=None, hash_type='sha1'):
        """
        Download and install an archive file, with optional checksum validation.

        The checksum can also be given on the :param:`source` URL's fragment.
        For example::

            handler.install('http://example.com/file.tgz#sha1=deadbeef')

        :param str source: URL pointing to an archive file.
        :param str dest: Local destination path to install to. If not given,
            installs to `$CHARM_DIR/archives/archive_file_name`.
        :param str checksum: If given, validate the archive file after download.
        :param str hash_type: Algorithm used to generate :param:`checksum`.
            Can be any hash algorithm supported by :mod:`hashlib`,
            such as md5, sha1, sha256, sha512, etc.
        """
        url_parts = self.parse_url(source)
        dest_dir = os.path.join(os.environ.get('CHARM_DIR'), 'fetched')
        if not os.path.exists(dest_dir):
@@ -72,32 +98,10 @@ class ArchiveUrlFetchHandler(BaseFetchHandler):
            raise UnhandledSource(e.reason)
        except OSError as e:
            raise UnhandledSource(e.strerror)
        return extract(dld_file)
    # Mandatory file validation via Sha1 or MD5 hashing.
    def download_and_validate(self, url, hashsum, validate="sha1"):
        if validate == 'sha1' and len(hashsum) != 40:
            raise ValueError("HashSum must be = 40 characters when using sha1"
                             " validation")
        if validate == 'md5' and len(hashsum) != 32:
            raise ValueError("HashSum must be = 32 characters when using md5"
                             " validation")
        tempfile, headers = urlretrieve(url)
        self.validate_file(tempfile, hashsum, validate)
        return tempfile

    # Predicate method that returns status of hash matching expected hash.
    def validate_file(self, source, hashsum, vmethod='sha1'):
        if vmethod != 'sha1' and vmethod != 'md5':
            raise ValueError("Validation Method not supported")
        if vmethod == 'md5':
            m = hashlib.md5()
        if vmethod == 'sha1':
            m = hashlib.sha1()
        with open(source) as f:
            for line in f:
                m.update(line)
        if hashsum != m.hexdigest():
            msg = "Hash Mismatch on {} expected {} got {}"
            raise ValueError(msg.format(source, hashsum, m.hexdigest()))
        options = urlparse.parse_qs(url_parts.fragment)
        for key, value in options.items():
            if key in hashlib.algorithms:
                check_hash(dld_file, value, key)
        if checksum:
            check_hash(dld_file, checksum, hash_type)
        return extract(dld_file, dest)
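Finally, a hedged sketch of the new checksum handling from a charm's point of view; the URL and digest are placeholders:

# Placeholder URL and digest; with this change, a checksum embedded in the URL
# fragment is verified with charmhelpers.core.host.check_hash before extraction.
from charmhelpers.fetch import install_remote
from charmhelpers.core.host import ChecksumError

try:
    dest = install_remote('https://example.com/payload.tar.gz#sha1=deadbeef')
    # Equivalent keyword form: install_remote(url, checksum='deadbeef', hash_type='sha1')
except ChecksumError:
    # The downloaded archive did not match the expected digest.
    raise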