Enable Bionic as a gate test

Change bionic test from dev to gate for 18.05.

Change-Id: I21fdefe0aa0b019d5b211bb54d0a2fa9e38c2864

parent 1d9b5a0f51
commit b45cd2ae74

.stestr.conf (new file, 3 lines)
@@ -0,0 +1,3 @@
[DEFAULT]
test_path=./unit_tests
top_dir=./
@@ -1,13 +0,0 @@
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
@@ -17,9 +17,23 @@
import sys
import os

_path = os.path.dirname(os.path.realpath(__file__))
_parent = os.path.abspath(os.path.join(_path, ".."))
_hooks = os.path.abspath(os.path.join(_parent, "hooks"))


def _add_path(path):
    if path not in sys.path:
        sys.path.insert(1, path)


_add_path(_parent)
_add_path(_hooks)


from charmhelpers.core.hookenv import action_fail

from hooks.glance_utils import (
from glance_utils import (
    pause_unit_helper,
    resume_unit_helper,
    register_configs,
@@ -1 +0,0 @@
../charmhelpers
@@ -1 +0,0 @@
../hooks
@@ -14,16 +14,33 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys

_path = os.path.dirname(os.path.realpath(__file__))
_parent = os.path.abspath(os.path.join(_path, ".."))
_hooks = os.path.abspath(os.path.join(_parent, "hooks"))


def _add_path(path):
    if path not in sys.path:
        sys.path.insert(1, path)


_add_path(_parent)
_add_path(_hooks)


from charmhelpers.contrib.openstack.utils import (
    do_action_openstack_upgrade,
)

from hooks.glance_relations import (
from glance_relations import (
    config_changed,
    CONFIGS
)

from hooks.glance_utils import do_openstack_upgrade
from glance_utils import do_openstack_upgrade


def openstack_upgrade():
@@ -797,9 +797,9 @@ class ApacheSSLContext(OSContextGenerator):
            key_filename = 'key'

        write_file(path=os.path.join(ssl_dir, cert_filename),
                   content=b64decode(cert))
                   content=b64decode(cert), perms=0o640)
        write_file(path=os.path.join(ssl_dir, key_filename),
                   content=b64decode(key))
                   content=b64decode(key), perms=0o640)

    def configure_ca(self):
        ca_cert = get_ca_cert()
@@ -1873,10 +1873,11 @@ class EnsureDirContext(OSContextGenerator):
    context is needed to do that before rendering a template.
    '''

    def __init__(self, dirname):
    def __init__(self, dirname, **kwargs):
        '''Used merely to ensure that a given directory exists.'''
        self.dirname = dirname
        self.kwargs = kwargs

    def __call__(self):
        mkdir(self.dirname)
        mkdir(self.dirname, **self.kwargs)
        return {}
@@ -0,0 +1,5 @@
[oslo_middleware]

# Bug #1758675
enable_proxy_headers_parsing = true
@@ -5,4 +5,7 @@ transport_url = {{ transport_url }}
{% if notification_topics -%}
topics = {{ notification_topics }}
{% endif -%}
{% if notification_format -%}
notification_format = {{ notification_format }}
{% endif -%}
{% endif -%}
@@ -306,7 +306,7 @@ def get_os_codename_install_source(src):

    if src.startswith('cloud:'):
        ca_rel = src.split(':')[1]
        ca_rel = ca_rel.split('%s-' % ubuntu_rel)[1].split('/')[0]
        ca_rel = ca_rel.split('-')[1].split('/')[0]
        return ca_rel

    # Best guess match based on deb string provided
charmhelpers/contrib/openstack/vaultlocker.py (new file, 126 lines)
@@ -0,0 +1,126 @@
# Copyright 2018 Canonical Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import os

import charmhelpers.contrib.openstack.alternatives as alternatives
import charmhelpers.contrib.openstack.context as context

import charmhelpers.core.hookenv as hookenv
import charmhelpers.core.host as host
import charmhelpers.core.templating as templating
import charmhelpers.core.unitdata as unitdata

VAULTLOCKER_BACKEND = 'charm-vaultlocker'


class VaultKVContext(context.OSContextGenerator):
    """Vault KV context for interaction with vault-kv interfaces"""
    interfaces = ['secrets-storage']

    def __init__(self, secret_backend=None):
        super(context.OSContextGenerator, self).__init__()
        self.secret_backend = (
            secret_backend or 'charm-{}'.format(hookenv.service_name())
        )

    def __call__(self):
        db = unitdata.kv()
        last_token = db.get('last-token')
        secret_id = db.get('secret-id')
        for relation_id in hookenv.relation_ids(self.interfaces[0]):
            for unit in hookenv.related_units(relation_id):
                data = hookenv.relation_get(unit=unit,
                                            rid=relation_id)
                vault_url = data.get('vault_url')
                role_id = data.get('{}_role_id'.format(hookenv.local_unit()))
                token = data.get('{}_token'.format(hookenv.local_unit()))

                if all([vault_url, role_id, token]):
                    token = json.loads(token)
                    vault_url = json.loads(vault_url)

                    # Tokens may change when secret_id's are being
                    # reissued - if so use token to get new secret_id
                    if token != last_token:
                        secret_id = retrieve_secret_id(
                            url=vault_url,
                            token=token
                        )
                        db.set('secret-id', secret_id)
                        db.set('last-token', token)
                        db.flush()

                    ctxt = {
                        'vault_url': vault_url,
                        'role_id': json.loads(role_id),
                        'secret_id': secret_id,
                        'secret_backend': self.secret_backend,
                    }
                    vault_ca = data.get('vault_ca')
                    if vault_ca:
                        ctxt['vault_ca'] = json.loads(vault_ca)
                    self.complete = True
                    return ctxt
        return {}


def write_vaultlocker_conf(context, priority=100):
    """Write vaultlocker configuration to disk and install alternative

    :param context: Dict of data from vault-kv relation
    :ptype: context: dict
    :param priority: Priority of alternative configuration
    :ptype: priority: int"""
    charm_vl_path = "/var/lib/charm/{}/vaultlocker.conf".format(
        hookenv.service_name()
    )
    host.mkdir(os.path.dirname(charm_vl_path), perms=0o700)
    templating.render(source='vaultlocker.conf.j2',
                      target=charm_vl_path,
                      context=context, perms=0o600),
    alternatives.install_alternative('vaultlocker.conf',
                                     '/etc/vaultlocker/vaultlocker.conf',
                                     charm_vl_path, priority)


def vault_relation_complete(backend=None):
    """Determine whether vault relation is complete

    :param backend: Name of secrets backend requested
    :ptype backend: string
    :returns: whether the relation to vault is complete
    :rtype: bool"""
    vault_kv = VaultKVContext(secret_backend=backend or VAULTLOCKER_BACKEND)
    vault_kv()
    return vault_kv.complete


# TODO: contrib a high level unwrap method to hvac that works
def retrieve_secret_id(url, token):
    """Retrieve a response-wrapped secret_id from Vault

    :param url: URL to Vault Server
    :ptype url: str
    :param token: One shot Token to use
    :ptype token: str
    :returns: secret_id to use for Vault Access
    :rtype: str"""
    import hvac
    client = hvac.Client(url=url, token=token)
    response = client._post('/v1/sys/wrapping/unwrap')
    if response.status_code == 200:
        data = response.json()
        return data['data']['secret_id']
@@ -291,7 +291,7 @@ class Pool(object):

class ReplicatedPool(Pool):
    def __init__(self, service, name, pg_num=None, replicas=2,
                 percent_data=10.0):
                 percent_data=10.0, app_name=None):
        super(ReplicatedPool, self).__init__(service=service, name=name)
        self.replicas = replicas
        if pg_num:
@@ -301,6 +301,10 @@ class ReplicatedPool(Pool):
            self.pg_num = min(pg_num, max_pgs)
        else:
            self.pg_num = self.get_pgs(self.replicas, percent_data)
        if app_name:
            self.app_name = app_name
        else:
            self.app_name = 'unknown'

    def create(self):
        if not pool_exists(self.service, self.name):
@@ -313,6 +317,12 @@ class ReplicatedPool(Pool):
                update_pool(client=self.service,
                            pool=self.name,
                            settings={'size': str(self.replicas)})
                try:
                    set_app_name_for_pool(client=self.service,
                                          pool=self.name,
                                          name=self.app_name)
                except CalledProcessError:
                    log('Could not set app name for pool {}'.format(self.name, level=WARNING))
            except CalledProcessError:
                raise

@@ -320,10 +330,14 @@ class ReplicatedPool(Pool):
# Default jerasure erasure coded pool
class ErasurePool(Pool):
    def __init__(self, service, name, erasure_code_profile="default",
                 percent_data=10.0):
                 percent_data=10.0, app_name=None):
        super(ErasurePool, self).__init__(service=service, name=name)
        self.erasure_code_profile = erasure_code_profile
        self.percent_data = percent_data
        if app_name:
            self.app_name = app_name
        else:
            self.app_name = 'unknown'

    def create(self):
        if not pool_exists(self.service, self.name):
@@ -355,6 +369,12 @@ class ErasurePool(Pool):
               'erasure', self.erasure_code_profile]
        try:
            check_call(cmd)
            try:
                set_app_name_for_pool(client=self.service,
                                      pool=self.name,
                                      name=self.app_name)
            except CalledProcessError:
                log('Could not set app name for pool {}'.format(self.name, level=WARNING))
        except CalledProcessError:
            raise

@@ -778,6 +798,25 @@ def update_pool(client, pool, settings):
    check_call(cmd)


def set_app_name_for_pool(client, pool, name):
    """
    Calls `osd pool application enable` for the specified pool name

    :param client: Name of the ceph client to use
    :type client: str
    :param pool: Pool to set app name for
    :type pool: str
    :param name: app name for the specified pool
    :type name: str

    :raises: CalledProcessError if ceph call fails
    """
    if ceph_version() >= '12.0.0':
        cmd = ['ceph', '--id', client, 'osd', 'pool',
               'application', 'enable', pool, name]
        check_call(cmd)


def create_pool(service, name, replicas=3, pg_num=None):
    """Create a new RADOS pool."""
    if pool_exists(service, name):
@@ -290,7 +290,7 @@ class Config(dict):
        self.implicit_save = True
        self._prev_dict = None
        self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)
        if os.path.exists(self.path):
        if os.path.exists(self.path) and os.stat(self.path).st_size:
            self.load_previous()
        atexit(self._implicit_save)

@@ -310,7 +310,11 @@ class Config(dict):
        """
        self.path = path or self.path
        with open(self.path) as f:
            self._prev_dict = json.load(f)
            try:
                self._prev_dict = json.load(f)
            except ValueError as e:
                log('Unable to parse previous config data - {}'.format(str(e)),
                    level=ERROR)
        for k, v in copy.deepcopy(self._prev_dict).items():
            if k not in self:
                self[k] = v
@@ -354,22 +358,40 @@ class Config(dict):
        self.save()


@cached
_cache_config = None


def config(scope=None):
    """Juju charm configuration"""
    config_cmd_line = ['config-get']
    if scope is not None:
        config_cmd_line.append(scope)
    else:
        config_cmd_line.append('--all')
    config_cmd_line.append('--format=json')
    """
    Get the juju charm configuration (scope==None) or individual key,
    (scope=str). The returned value is a Python data structure loaded as
    JSON from the Juju config command.

    :param scope: If set, return the value for the specified key.
    :type scope: Optional[str]
    :returns: Either the whole config as a Config, or a key from it.
    :rtype: Any
    """
    global _cache_config
    config_cmd_line = ['config-get', '--all', '--format=json']
    try:
        config_data = json.loads(
            subprocess.check_output(config_cmd_line).decode('UTF-8'))
        # JSON Decode Exception for Python3.5+
        exc_json = json.decoder.JSONDecodeError
    except AttributeError:
        # JSON Decode Exception for Python2.7 through Python3.4
        exc_json = ValueError
    try:
        if _cache_config is None:
            config_data = json.loads(
                subprocess.check_output(config_cmd_line).decode('UTF-8'))
            _cache_config = Config(config_data)
        if scope is not None:
            return config_data
        return Config(config_data)
    except ValueError:
            return _cache_config.get(scope)
        return _cache_config
    except (exc_json, UnicodeDecodeError) as e:
        log('Unable to parse output from config-get: config_cmd_line="{}" '
            'message="{}"'
            .format(config_cmd_line, str(e)), level=ERROR)
        return None
@@ -307,7 +307,9 @@ class PortManagerCallback(ManagerCallback):
    """
    def __call__(self, manager, service_name, event_name):
        service = manager.get_service(service_name)
        new_ports = service.get('ports', [])
        # turn this generator into a list,
        # as we'll be going over it multiple times
        new_ports = list(service.get('ports', []))
        port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))
        if os.path.exists(port_file):
            with open(port_file) as fp:
@@ -31,18 +31,22 @@ __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
def create(sysctl_dict, sysctl_file):
    """Creates a sysctl.conf file from a YAML associative array

    :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }"
    :param sysctl_dict: a dict or YAML-formatted string of sysctl
        options eg "{ 'kernel.max_pid': 1337 }"
    :type sysctl_dict: str
    :param sysctl_file: path to the sysctl file to be saved
    :type sysctl_file: str or unicode
    :returns: None
    """
    try:
        sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
    except yaml.YAMLError:
        log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
            level=ERROR)
        return
    if type(sysctl_dict) is not dict:
        try:
            sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
        except yaml.YAMLError:
            log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
                level=ERROR)
            return
    else:
        sysctl_dict_parsed = sysctl_dict

    with open(sysctl_file, "w") as fd:
        for key, value in sysctl_dict_parsed.items():
@@ -166,6 +166,10 @@ class Storage(object):

    To support dicts, lists, integer, floats, and booleans values
    are automatically json encoded/decoded.

    Note: to facilitate unit testing, ':memory:' can be passed as the
    path parameter which causes sqlite3 to only build the db in memory.
    This should only be used for testing purposes.
    """
    def __init__(self, path=None):
        self.db_path = path
@@ -175,8 +179,9 @@ class Storage(object):
        else:
            self.db_path = os.path.join(
                os.environ.get('CHARM_DIR', ''), '.unit-state.db')
        with open(self.db_path, 'a') as f:
            os.fchmod(f.fileno(), 0o600)
        if self.db_path != ':memory:':
            with open(self.db_path, 'a') as f:
                os.fchmod(f.fileno(), 0o600)
        self.conn = sqlite3.connect('%s' % self.db_path)
        self.cursor = self.conn.cursor()
        self.revision = None
@@ -1 +0,0 @@
../charmhelpers
@@ -14,8 +14,21 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import sys

_path = os.path.dirname(os.path.realpath(__file__))
_parent = os.path.abspath(os.path.join(_path, ".."))


def _add_path(path):
    if path not in sys.path:
        sys.path.insert(1, path)


_add_path(_parent)


from subprocess import (
    call,
    check_call,
@@ -290,7 +290,7 @@ class Config(dict):
        self.implicit_save = True
        self._prev_dict = None
        self.path = os.path.join(charm_dir(), Config.CONFIG_FILE_NAME)
        if os.path.exists(self.path):
        if os.path.exists(self.path) and os.stat(self.path).st_size:
            self.load_previous()
        atexit(self._implicit_save)

@@ -310,7 +310,11 @@ class Config(dict):
        """
        self.path = path or self.path
        with open(self.path) as f:
            self._prev_dict = json.load(f)
            try:
                self._prev_dict = json.load(f)
            except ValueError as e:
                log('Unable to parse previous config data - {}'.format(str(e)),
                    level=ERROR)
        for k, v in copy.deepcopy(self._prev_dict).items():
            if k not in self:
                self[k] = v
@@ -354,22 +358,40 @@ class Config(dict):
        self.save()


@cached
_cache_config = None


def config(scope=None):
    """Juju charm configuration"""
    config_cmd_line = ['config-get']
    if scope is not None:
        config_cmd_line.append(scope)
    else:
        config_cmd_line.append('--all')
    config_cmd_line.append('--format=json')
    """
    Get the juju charm configuration (scope==None) or individual key,
    (scope=str). The returned value is a Python data structure loaded as
    JSON from the Juju config command.

    :param scope: If set, return the value for the specified key.
    :type scope: Optional[str]
    :returns: Either the whole config as a Config, or a key from it.
    :rtype: Any
    """
    global _cache_config
    config_cmd_line = ['config-get', '--all', '--format=json']
    try:
        config_data = json.loads(
            subprocess.check_output(config_cmd_line).decode('UTF-8'))
        # JSON Decode Exception for Python3.5+
        exc_json = json.decoder.JSONDecodeError
    except AttributeError:
        # JSON Decode Exception for Python2.7 through Python3.4
        exc_json = ValueError
    try:
        if _cache_config is None:
            config_data = json.loads(
                subprocess.check_output(config_cmd_line).decode('UTF-8'))
            _cache_config = Config(config_data)
        if scope is not None:
            return config_data
        return Config(config_data)
    except ValueError:
            return _cache_config.get(scope)
        return _cache_config
    except (exc_json, UnicodeDecodeError) as e:
        log('Unable to parse output from config-get: config_cmd_line="{}" '
            'message="{}"'
            .format(config_cmd_line, str(e)), level=ERROR)
        return None
@@ -307,7 +307,9 @@ class PortManagerCallback(ManagerCallback):
    """
    def __call__(self, manager, service_name, event_name):
        service = manager.get_service(service_name)
        new_ports = service.get('ports', [])
        # turn this generator into a list,
        # as we'll be going over it multiple times
        new_ports = list(service.get('ports', []))
        port_file = os.path.join(hookenv.charm_dir(), '.{}.ports'.format(service_name))
        if os.path.exists(port_file):
            with open(port_file) as fp:
@@ -31,18 +31,22 @@ __author__ = 'Jorge Niedbalski R. <jorge.niedbalski@canonical.com>'
def create(sysctl_dict, sysctl_file):
    """Creates a sysctl.conf file from a YAML associative array

    :param sysctl_dict: a YAML-formatted string of sysctl options eg "{ 'kernel.max_pid': 1337 }"
    :param sysctl_dict: a dict or YAML-formatted string of sysctl
        options eg "{ 'kernel.max_pid': 1337 }"
    :type sysctl_dict: str
    :param sysctl_file: path to the sysctl file to be saved
    :type sysctl_file: str or unicode
    :returns: None
    """
    try:
        sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
    except yaml.YAMLError:
        log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
            level=ERROR)
        return
    if type(sysctl_dict) is not dict:
        try:
            sysctl_dict_parsed = yaml.safe_load(sysctl_dict)
        except yaml.YAMLError:
            log("Error parsing YAML sysctl_dict: {}".format(sysctl_dict),
                level=ERROR)
            return
    else:
        sysctl_dict_parsed = sysctl_dict

    with open(sysctl_file, "w") as fd:
        for key, value in sysctl_dict_parsed.items():
@@ -166,6 +166,10 @@ class Storage(object):

    To support dicts, lists, integer, floats, and booleans values
    are automatically json encoded/decoded.

    Note: to facilitate unit testing, ':memory:' can be passed as the
    path parameter which causes sqlite3 to only build the db in memory.
    This should only be used for testing purposes.
    """
    def __init__(self, path=None):
        self.db_path = path
@@ -175,8 +179,9 @@ class Storage(object):
        else:
            self.db_path = os.path.join(
                os.environ.get('CHARM_DIR', ''), '.unit-state.db')
        with open(self.db_path, 'a') as f:
            os.fchmod(f.fileno(), 0o600)
        if self.db_path != ':memory:':
            with open(self.db_path, 'a') as f:
                os.fchmod(f.fileno(), 0o600)
        self.conn = sqlite3.connect('%s' % self.db_path)
        self.cursor = self.conn.cursor()
        self.revision = None
tox.ini (2 lines changed)
@@ -60,7 +60,7 @@ basepython = python2.7
deps = -r{toxinidir}/requirements.txt
       -r{toxinidir}/test-requirements.txt
commands =
    bundletester -vl DEBUG -r json -o func-results.json gate-basic-xenial-pike --no-destroy
    bundletester -vl DEBUG -r json -o func-results.json gate-basic-bionic-queens --no-destroy

[testenv:func27-dfs]
# Charm Functional Test
@@ -11,3 +11,20 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys

_path = os.path.dirname(os.path.realpath(__file__))
_parent = os.path.abspath(os.path.join(_path, ".."))
_hooks = os.path.abspath(os.path.join(_parent, "hooks"))
_actions = os.path.abspath(os.path.join(_parent, "actions"))


def _add_path(path):
    if path not in sys.path:
        sys.path.insert(1, path)


_add_path(_parent)
_add_path(_hooks)
_add_path(_actions)
@@ -19,19 +19,19 @@ import mock
from test_utils import CharmTestCase

os.environ['JUJU_UNIT_NAME'] = 'glance'
with mock.patch('actions.hooks.glance_utils.register_configs') as configs:
with mock.patch('glance_utils.register_configs') as configs:
    configs.return_value = 'test-config'
    import actions.actions
    import actions


class PauseTestCase(CharmTestCase):

    def setUp(self):
        super(PauseTestCase, self).setUp(
            actions.actions, ["pause_unit_helper"])
            actions, ["pause_unit_helper"])

    def test_pauses_services(self):
        actions.actions.pause([])
        actions.pause([])
        self.pause_unit_helper.assert_called_once_with('test-config')


@@ -39,17 +39,17 @@ class ResumeTestCase(CharmTestCase):

    def setUp(self):
        super(ResumeTestCase, self).setUp(
            actions.actions, ["resume_unit_helper"])
            actions, ["resume_unit_helper"])

    def test_pauses_services(self):
        actions.actions.resume([])
        actions.resume([])
        self.resume_unit_helper.assert_called_once_with('test-config')


class MainTestCase(CharmTestCase):

    def setUp(self):
        super(MainTestCase, self).setUp(actions.actions, ["action_fail"])
        super(MainTestCase, self).setUp(actions, ["action_fail"])

    def test_invokes_action(self):
        dummy_calls = []
@@ -57,13 +57,13 @@ class MainTestCase(CharmTestCase):
        def dummy_action(args):
            dummy_calls.append(True)

        with mock.patch.dict(actions.actions.ACTIONS, {"foo": dummy_action}):
            actions.actions.main(["foo"])
        with mock.patch.dict(actions.ACTIONS, {"foo": dummy_action}):
            actions.main(["foo"])
        self.assertEqual(dummy_calls, [True])

    def test_unknown_action(self):
        """Unknown actions aren't a traceback."""
        exit_string = actions.actions.main(["foo"])
        exit_string = actions.main(["foo"])
        self.assertEqual("Action foo undefined", exit_string)

    def test_failing_action(self):
@@ -75,6 +75,6 @@ class MainTestCase(CharmTestCase):
        def dummy_action(args):
            raise ValueError("uh oh")

        with mock.patch.dict(actions.actions.ACTIONS, {"foo": dummy_action}):
            actions.actions.main(["foo"])
        with mock.patch.dict(actions.ACTIONS, {"foo": dummy_action}):
            actions.main(["foo"])
        self.assertEqual(dummy_calls, ["uh oh"])
@@ -25,10 +25,17 @@ mock_apt = MagicMock()
sys.modules['apt'] = mock_apt
mock_apt.apt_pkg = MagicMock()

with patch('actions.hooks.glance_utils.register_configs'):
with patch('hooks.glance_utils.register_configs'):
    with patch('actions.hooks.glance_utils.restart_map'):
        from actions import openstack_upgrade
with patch('charmhelpers.contrib.hardening.harden.harden') as mock_dec, \
        patch('charmhelpers.contrib.openstack.utils.'
              'os_requires_version') as mock_os, \
        patch('glance_utils.register_configs'), \
        patch('glance_utils.restart_map'):
    mock_dec.side_effect = (lambda *dargs, **dkwargs: lambda f:
                            lambda *args, **kwargs: f(*args, **kwargs))

    mock_os.side_effect = (lambda *dargs, **dkwargs: lambda f:
                           lambda *args, **kwargs: f(*args, **kwargs))
    import openstack_upgrade

from test_utils import CharmTestCase

@@ -44,10 +51,10 @@ class TestGlanceUpgradeActions(CharmTestCase):
        super(TestGlanceUpgradeActions, self).setUp(openstack_upgrade,
                                                    TO_PATCH)

    @patch('actions.charmhelpers.contrib.openstack.utils.config')
    @patch('actions.charmhelpers.contrib.openstack.utils.action_set')
    @patch('actions.charmhelpers.contrib.openstack.utils.openstack_upgrade_available')  # noqa
    @patch('actions.charmhelpers.contrib.openstack.utils.juju_log')
    @patch('charmhelpers.contrib.openstack.utils.config')
    @patch('charmhelpers.contrib.openstack.utils.action_set')
    @patch('charmhelpers.contrib.openstack.utils.openstack_upgrade_available')  # noqa
    @patch('charmhelpers.contrib.openstack.utils.juju_log')
    @patch('subprocess.check_output')
    def test_openstack_upgrade_true(self, _check_output, log, upgrade_avail,
                                    action_set, config):
@@ -60,10 +67,10 @@ class TestGlanceUpgradeActions(CharmTestCase):
        self.assertTrue(self.do_openstack_upgrade.called)
        self.assertTrue(self.config_changed.called)

    @patch('actions.charmhelpers.contrib.openstack.utils.config')
    @patch('actions.charmhelpers.contrib.openstack.utils.action_set')
    @patch('actions.charmhelpers.contrib.openstack.utils.openstack_upgrade_available')  # noqa
    @patch('actions.charmhelpers.contrib.openstack.utils.juju_log')
    @patch('charmhelpers.contrib.openstack.utils.config')
    @patch('charmhelpers.contrib.openstack.utils.action_set')
    @patch('charmhelpers.contrib.openstack.utils.openstack_upgrade_available')  # noqa
    @patch('charmhelpers.contrib.openstack.utils.juju_log')
    @patch('subprocess.check_output')
    def test_openstack_upgrade_false(self, _check_output, log, upgrade_avail,
                                     action_set, config):
@@ -14,7 +14,7 @@

from mock import patch, MagicMock

from hooks import glance_contexts as contexts
import glance_contexts as contexts
from test_utils import (
    CharmTestCase
)
@@ -25,28 +25,27 @@ sys.modules['apt'] = mock_apt
mock_apt.apt_pkg = MagicMock()

os.environ['JUJU_UNIT_NAME'] = 'glance'
import hooks.glance_utils as utils  # noqa

_reg = utils.register_configs
_map = utils.restart_map

utils.register_configs = MagicMock()
utils.restart_map = MagicMock()

with patch('hooks.charmhelpers.contrib.hardening.harden.harden') as mock_dec:
    with patch('hooks.charmhelpers.contrib.openstack.'
               'utils.os_requires_version') as mock_os:
        mock_dec.side_effect = (lambda *dargs, **dkwargs: lambda f:
                                lambda *args, **kwargs: f(*args, **kwargs))
        mock_os.side_effect = (lambda *dargs, **dkwargs: lambda f:
                               lambda *args, **kwargs: f(*args, **kwargs))
        import hooks.glance_relations as relations
with patch('charmhelpers.contrib.openstack.utils.'
           'pausable_restart_on_change') as mock_on_change, \
        patch('charmhelpers.contrib.hardening.harden.harden') as mock_dec, \
        patch('charmhelpers.contrib.openstack.'
              'utils.os_requires_version') as mock_os, \
        patch('glance_utils.register_configs') as mock_register, \
        patch('glance_utils.restart_map') as mock_map, \
        patch('glance_utils.config'):
    mock_on_change.side_effect = (lambda *dargs, **dkwargs: lambda f:
                                  lambda *args, **kwargs: f(*args,
                                                            **kwargs))
    mock_dec.side_effect = (lambda *dargs, **dkwargs: lambda f:
                            lambda *args, **kwargs: f(*args, **kwargs))
    mock_os.side_effect = (lambda *dargs, **dkwargs: lambda f:
                           lambda *args, **kwargs: f(*args, **kwargs))
    import glance_relations as relations

relations.hooks._config_save = False

utils.register_configs = _reg
utils.restart_map = _map

TO_PATCH = [
    # charmhelpers.core.hookenv
    'Hooks',
@@ -75,7 +74,7 @@ TO_PATCH = [
    'is_clustered',
    # charmhelpers.contrib.hahelpers.cluster_utils
    'is_elected_leader',
    # hooks.glance_utils
    # glance_utils
    'restart_map',
    'register_configs',
    'do_openstack_upgrade',
@@ -84,6 +83,7 @@ TO_PATCH = [
    'ceph_config_file',
    'update_nrpe_config',
    'reinstall_paste_ini',
    'determine_packages',
    # other
    'call',
    'check_call',
@@ -104,28 +104,23 @@ class GlanceRelationTests(CharmTestCase):
    def setUp(self):
        super(GlanceRelationTests, self).setUp(relations, TO_PATCH)
        self.config.side_effect = self.test_config.get
        self.restart_on_change.return_value = None

    @patch.object(utils, 'config')
    @patch.object(utils, 'token_cache_pkgs')
    def test_install_hook(self, token_cache_pkgs, util_config):
        token_cache_pkgs.return_value = ['memcached']
    def test_install_hook(self):
        repo = 'cloud:precise-grizzly'
        _packages = ['apache2', 'glance', 'haproxy', 'memcached',
                     'python-keystone', 'python-mysqldb', 'python-psycopg2',
                     'python-six', 'python-swiftclient', 'uuid']
        self.test_config.set('openstack-origin', repo)
        self.service_stop.return_value = True
        self.determine_packages.return_value = _packages
        relations.install_hook()
        self.configure_installation_source.assert_called_with(repo)
        self.apt_update.assert_called_with(fatal=True)
        self.apt_install.assert_called_with(
            ['apache2', 'glance', 'haproxy', 'memcached', 'python-keystone',
             'python-mysqldb', 'python-psycopg2', 'python-six',
             'python-swiftclient', 'uuid'], fatal=True)
        self.apt_install.assert_called_with(_packages, fatal=True)
        self.assertTrue(self.execd_preinstall.called)

    @patch.object(utils, 'config')
    @patch.object(utils, 'token_cache_pkgs')
    def test_install_hook_precise_distro(self, token_cache_pkgs,
                                         util_config):
        token_cache_pkgs.return_value = []
    def test_install_hook_precise_distro(self):
        self.test_config.set('openstack-origin', 'distro')
        self.lsb_release.return_value = {'DISTRIB_RELEASE': 12.04,
                                         'DISTRIB_CODENAME': 'precise'}
@@ -338,9 +333,9 @@ class GlanceRelationTests(CharmTestCase):
        for c in [call('/etc/glance/glance.conf')]:
            self.assertNotIn(c, configs.write.call_args_list)

    @patch('hooks.charmhelpers.contrib.storage.linux.ceph.CephBrokerRq'
    @patch('charmhelpers.contrib.storage.linux.ceph.CephBrokerRq'
           '.add_op_request_access_to_group')
    @patch('hooks.charmhelpers.contrib.storage.linux.ceph.CephBrokerRq'
    @patch('charmhelpers.contrib.storage.linux.ceph.CephBrokerRq'
           '.add_op_create_pool')
    def test_create_pool_op(self, mock_create_pool,
                            mock_request_access):
@@ -564,11 +559,8 @@ class GlanceRelationTests(CharmTestCase):
                             configs.write.call_args_list)

    @patch.object(relations, 'update_image_location_policy')
    @patch.object(utils, 'config')
    @patch.object(utils, 'token_cache_pkgs')
    @patch.object(relations, 'CONFIGS')
    def test_upgrade_charm(self, configs, token_cache_pkgs,
                           util_config, mock_update_image_location_policy):
    def test_upgrade_charm(self, configs, mock_update_image_location_policy):
        self.filter_installed_packages.return_value = ['test']
        relations.upgrade_charm()
        self.apt_install.assert_called_with(['test'], fatal=True)
@@ -18,7 +18,7 @@ from collections import OrderedDict
from mock import patch, call, MagicMock, mock_open

os.environ['JUJU_UNIT_NAME'] = 'glance'
import hooks.glance_utils as utils
import glance_utils as utils

from test_utils import (
    CharmTestCase,
@@ -20,9 +20,9 @@ import yaml
from contextlib import contextmanager
from mock import patch, MagicMock

patch('hooks.charmhelpers.contrib.openstack.utils.'
patch('charmhelpers.contrib.openstack.utils.'
      'set_os_workload_status').start()
patch('hooks.charmhelpers.core.hookenv.status_set').start()
patch('charmhelpers.core.hookenv.status_set').start()


def load_config():