Blackify openstack.orchestration
Black is used with the '-l 79 -S' flags. A future change will ignore this
commit in git-blame history by adding a 'git-blame-ignore-revs' file.

Change-Id: I77ee83b03379efbec18ba61166fd74ce5ee0e84b
Signed-off-by: Stephen Finucane <stephenfin@redhat.com>
parent 409f648ce5
commit 874ea74103
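For reference, the '-l 79 -S' flags mentioned above map to Black's line-length and skip-string-normalization options. The snippet below is a minimal, hypothetical sketch (not part of this change) showing the equivalent invocation through Black's Python API; the sample source string is a placeholder, not code from this commit.

    # Sketch: reproduce the effect of `black -l 79 -S` via Black's Python API.
    import black

    SOURCE = "SECTIONS = (PARAMETER_DEFAULTS, PARAMETERS, RESOURCE_REGISTRY,)\n"

    mode = black.Mode(
        line_length=79,              # equivalent to `-l 79`
        string_normalization=False,  # equivalent to `-S` (keep single quotes)
    )
    print(black.format_str(SOURCE, mode=mode))

    # To hide this commit from `git blame`, its hash would be listed in a
    # .git-blame-ignore-revs file and git pointed at it, e.g.:
    #   git config blame.ignoreRevsFile .git-blame-ignore-revs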
@@ -16,13 +16,19 @@ from openstack.orchestration.util import template_format
SECTIONS = (
PARAMETER_DEFAULTS, PARAMETERS, RESOURCE_REGISTRY,
ENCRYPTED_PARAM_NAMES, EVENT_SINKS,
PARAMETER_MERGE_STRATEGIES
PARAMETER_DEFAULTS,
PARAMETERS,
RESOURCE_REGISTRY,
ENCRYPTED_PARAM_NAMES,
EVENT_SINKS,
PARAMETER_MERGE_STRATEGIES,
) = (
'parameter_defaults', 'parameters', 'resource_registry',
'encrypted_param_names', 'event_sinks',
'parameter_merge_strategies'
'parameter_defaults',
'parameters',
'resource_registry',
'encrypted_param_names',
'event_sinks',
'parameter_merge_strategies',
)

@@ -47,7 +53,8 @@ def parse(env_str):
env = {}
elif not isinstance(env, dict):
raise ValueError(
'The environment is not a valid YAML mapping data type.')
'The environment is not a valid YAML mapping data type.'
)
for param in env:
if param not in SECTIONS:
@@ -30,8 +30,8 @@ def get_events(cloud, stack_id, event_args, marker=None, limit=None):
event_args['limit'] = limit
data = cloud._orchestration_client.get(
'/stacks/{id}/events'.format(id=stack_id),
params=params)
'/stacks/{id}/events'.format(id=stack_id), params=params
)
events = meta.get_and_munchify('events', data)
# Show which stack the event comes from (for nested events)

@@ -41,7 +41,8 @@ def get_events(cloud, stack_id, event_args, marker=None, limit=None):
def poll_for_events(
cloud, stack_name, action=None, poll_period=5, marker=None):
cloud, stack_name, action=None, poll_period=5, marker=None
):
"""Continuously poll events and logs for performed action on stack."""
def stop_check_action(a):

@@ -60,20 +61,26 @@ def poll_for_events(
msg_template = "\n Stack %(name)s %(status)s \n"
def is_stack_event(event):
if (event.get('resource_name', '') != stack_name
and event.get('physical_resource_id', '') != stack_name):
if (
event.get('resource_name', '') != stack_name
and event.get('physical_resource_id', '') != stack_name
):
return False
phys_id = event.get('physical_resource_id', '')
links = dict((link.get('rel'),
link.get('href')) for link in event.get('links', []))
links = dict(
(link.get('rel'), link.get('href'))
for link in event.get('links', [])
)
stack_id = links.get('stack', phys_id).rsplit('/', 1)[-1]
return stack_id == phys_id
while True:
events = get_events(
cloud, stack_id=stack_name,
event_args={'sort_dir': 'asc', 'marker': marker})
cloud,
stack_id=stack_name,
event_args={'sort_dir': 'asc', 'marker': marker},
)
if len(events) == 0:
no_event_polls += 1

@@ -87,7 +94,8 @@ def poll_for_events(
if is_stack_event(event):
stack_status = getattr(event, 'resource_status', '')
msg = msg_template % dict(
name=stack_name, status=stack_status)
name=stack_name, status=stack_status
)
if stop_check(stack_status):
return stack_status, msg

@@ -95,8 +103,7 @@ def poll_for_events(
# after 2 polls with no events, fall back to a stack get
stack = cloud.get_stack(stack_name, resolve_outputs=False)
stack_status = stack['stack_status']
msg = msg_template % dict(
name=stack_name, status=stack_status)
msg = msg_template % dict(name=stack_name, status=stack_status)
if stop_check(stack_status):
return stack_status, msg
# go back to event polling again
@@ -36,7 +36,8 @@ HeatYamlLoader.add_constructor(u'tag:yaml.org,2002:str', _construct_yaml_str)
# openstack.common.jsonutils. Therefore, make unicode string out of timestamps
# until jsonutils can handle dates.
HeatYamlLoader.add_constructor(
u'tag:yaml.org,2002:timestamp', _construct_yaml_str)
u'tag:yaml.org,2002:timestamp', _construct_yaml_str
)
def parse(tmpl_str):

@@ -64,8 +65,10 @@ def parse(tmpl_str):
if tpl is None:
tpl = {}
# Looking for supported version keys in the loaded template
if not ('HeatTemplateFormatVersion' in tpl
or 'heat_template_version' in tpl
or 'AWSTemplateFormatVersion' in tpl):
if not (
'HeatTemplateFormatVersion' in tpl
or 'heat_template_version' in tpl
or 'AWSTemplateFormatVersion' in tpl
):
raise ValueError("Template format version not found.")
return tpl
@@ -23,9 +23,14 @@ from openstack.orchestration.util import template_format
from openstack.orchestration.util import utils
def get_template_contents(template_file=None, template_url=None,
template_object=None, object_request=None,
files=None, existing=False):
def get_template_contents(
template_file=None,
template_url=None,
template_object=None,
object_request=None,
files=None,
existing=False,
):
is_object = False
tpl = None

@@ -46,11 +51,13 @@ def get_template_contents(template_file=None, template_url=None,
else:
raise exceptions.SDKException(
'Must provide one of template_file,'
' template_url or template_object')
' template_url or template_object'
)
if not tpl:
raise exceptions.SDKException(
'Could not fetch template from %s' % template_url)
'Could not fetch template from %s' % template_url
)
try:
if isinstance(tpl, bytes):

@@ -58,35 +65,43 @@ def get_template_contents(template_file=None, template_url=None,
template = template_format.parse(tpl)
except ValueError as e:
raise exceptions.SDKException(
'Error parsing template %(url)s %(error)s' %
{'url': template_url, 'error': e})
'Error parsing template %(url)s %(error)s'
% {'url': template_url, 'error': e}
)
tmpl_base_url = utils.base_url_for_url(template_url)
if files is None:
files = {}
resolve_template_get_files(template, files, tmpl_base_url, is_object,
object_request)
resolve_template_get_files(
template, files, tmpl_base_url, is_object, object_request
)
return files, template
def resolve_template_get_files(template, files, template_base_url,
is_object=False, object_request=None):
def resolve_template_get_files(
template, files, template_base_url, is_object=False, object_request=None
):
def ignore_if(key, value):
if key != 'get_file' and key != 'type':
return True
if not isinstance(value, str):
return True
if (key == 'type'
and not value.endswith(('.yaml', '.template'))):
if key == 'type' and not value.endswith(('.yaml', '.template')):
return True
return False
def recurse_if(value):
return isinstance(value, (dict, list))
get_file_contents(template, files, template_base_url,
ignore_if, recurse_if, is_object, object_request)
get_file_contents(
template,
files,
template_base_url,
ignore_if,
recurse_if,
is_object,
object_request,
)
def is_template(file_content):

@@ -99,9 +114,15 @@ def is_template(file_content):
return True
def get_file_contents(from_data, files, base_url=None,
ignore_if=None, recurse_if=None,
is_object=False, object_request=None):
def get_file_contents(
from_data,
files,
base_url=None,
ignore_if=None,
recurse_if=None,
is_object=False,
object_request=None,
):
if recurse_if and recurse_if(from_data):
if isinstance(from_data, dict):

@@ -109,8 +130,15 @@ def get_file_contents(from_data, files, base_url=None,
else:
recurse_data = from_data
for value in recurse_data:
get_file_contents(value, files, base_url, ignore_if, recurse_if,
is_object, object_request)
get_file_contents(
value,
files,
base_url,
ignore_if,
recurse_if,
is_object,
object_request,
)
if isinstance(from_data, dict):
for key, value in from_data.items():

@@ -129,11 +157,14 @@ def get_file_contents(from_data, files, base_url=None,
if is_template(file_content):
if is_object:
template = get_template_contents(
template_object=str_url, files=files,
object_request=object_request)[1]
template_object=str_url,
files=files,
object_request=object_request,
)[1]
else:
template = get_template_contents(
template_url=str_url, files=files)[1]
template_url=str_url, files=files
)[1]
file_content = json.dumps(template)
files[str_url] = file_content
# replace the data value with the normalised absolute URL
@@ -157,11 +188,14 @@ def deep_update(old, new):
return old
def process_multiple_environments_and_files(env_paths=None, template=None,
template_url=None,
env_path_is_object=None,
object_request=None,
env_list_tracker=None):
def process_multiple_environments_and_files(
env_paths=None,
template=None,
template_url=None,
env_path_is_object=None,
object_request=None,
env_list_tracker=None,
):
"""Reads one or more environment files.
Reads in each specified environment file and returns a dictionary

@@ -204,7 +238,8 @@ def process_multiple_environments_and_files(env_paths=None, template=None,
template_url=template_url,
env_path_is_object=env_path_is_object,
object_request=object_request,
include_env_in_files=include_env_in_files)
include_env_in_files=include_env_in_files,
)
# 'files' looks like {"filename1": contents, "filename2": contents}
# so a simple update is enough for merging

@@ -221,12 +256,14 @@ def process_multiple_environments_and_files(env_paths=None, template=None,
return merged_files, merged_env
def process_environment_and_files(env_path=None,
template=None,
template_url=None,
env_path_is_object=None,
object_request=None,
include_env_in_files=False):
def process_environment_and_files(
env_path=None,
template=None,
template_url=None,
env_path_is_object=None,
object_request=None,
include_env_in_files=False,
):
"""Loads a single environment file.
Returns an entry suitable for the files dict which maps the environment

@@ -253,7 +290,10 @@ def process_environment_and_files(env_path=None,
resolve_environment_urls(
env.get('resource_registry'),
files,
env_base_url, is_object=True, object_request=object_request)
env_base_url,
is_object=True,
object_request=object_request,
)
elif env_path:
env_url = utils.normalise_file_path_to_url(env_path)

@@ -263,9 +303,8 @@ def process_environment_and_files(env_path=None,
env = environment_format.parse(raw_env)
resolve_environment_urls(
env.get('resource_registry'),
files,
env_base_url)
env.get('resource_registry'), files, env_base_url
)
if include_env_in_files:
files[env_url] = json.dumps(env)

@@ -273,8 +312,13 @@ def process_environment_and_files(env_path=None,
return files, env
def resolve_environment_urls(resource_registry, files, env_base_url,
is_object=False, object_request=None):
def resolve_environment_urls(
resource_registry,
files,
env_base_url,
is_object=False,
object_request=None,
):
"""Handles any resource URLs specified in an environment.
:param resource_registry: mapping of type name to template filename

@@ -302,11 +346,22 @@ def resolve_environment_urls(resource_registry, files, env_base_url,
if key in ['hooks', 'restricted_actions']:
return True
get_file_contents(rr, files, base_url, ignore_if,
is_object=is_object, object_request=object_request)
get_file_contents(
rr,
files,
base_url,
ignore_if,
is_object=is_object,
object_request=object_request,
)
for res_name, res_dict in rr.get('resources', {}).items():
res_base_url = res_dict.get('base_url', base_url)
get_file_contents(
res_dict, files, res_base_url, ignore_if,
is_object=is_object, object_request=object_request)
res_dict,
files,
res_base_url,
ignore_if,
is_object=is_object,
object_request=object_request,
)
@@ -40,8 +40,7 @@ def read_url_content(url):
# TODO(mordred) Use requests
content = request.urlopen(url).read()
except error.URLError:
raise exceptions.SDKException(
'Could not fetch contents for %s' % url)
raise exceptions.SDKException('Could not fetch contents for %s' % url)
if content:
try:

@@ -52,8 +51,9 @@ def read_url_content(url):
def resource_nested_identifier(rsrc):
nested_link = [link for link in rsrc.links or []
if link.get('rel') == 'nested']
nested_link = [
link for link in rsrc.links or [] if link.get('rel') == 'nested'
]
if nested_link:
nested_href = nested_link[0].get('href')
nested_identifier = nested_href.split("/")[-2:]
@@ -36,23 +36,34 @@ class Proxy(proxy.Proxy):
}
def _extract_name_consume_url_parts(self, url_parts):
if (len(url_parts) == 3 and url_parts[0] == 'software_deployments'
and url_parts[1] == 'metadata'):
if (
len(url_parts) == 3
and url_parts[0] == 'software_deployments'
and url_parts[1] == 'metadata'
):
# Another nice example of totally different URL naming scheme,
# which we need to repair /software_deployment/metadata/server_id -
# just replace server_id with metadata to keep further logic
return ['software_deployment', 'metadata']
if (url_parts[0] == 'stacks' and len(url_parts) > 2
and not url_parts[2] in ['preview', 'resources']):
if (
url_parts[0] == 'stacks'
and len(url_parts) > 2
and not url_parts[2] in ['preview', 'resources']
):
# orchestrate introduce having stack name and id part of the URL
# (/stacks/name/id/everything_else), so if on third position we
# have not a known part - discard it, not to brake further logic
del url_parts[2]
return super(Proxy, self)._extract_name_consume_url_parts(url_parts)
def read_env_and_templates(self, template_file=None, template_url=None,
template_object=None, files=None,
environment_files=None):
def read_env_and_templates(
self,
template_file=None,
template_url=None,
template_object=None,
files=None,
environment_files=None,
):
"""Read templates and environment content and prepares
corresponding stack attributes

@@ -70,16 +81,20 @@ class Proxy(proxy.Proxy):
envfiles = dict()
tpl_files = None
if environment_files:
envfiles, env = \
template_utils.process_multiple_environments_and_files(
env_paths=environment_files)
(
envfiles,
env,
) = template_utils.process_multiple_environments_and_files(
env_paths=environment_files
)
stack_attrs['environment'] = env
if template_file or template_url or template_object:
tpl_files, template = template_utils.get_template_contents(
template_file=template_file,
template_url=template_url,
template_object=template_object,
files=files)
files=files,
)
stack_attrs['template'] = template
if tpl_files or envfiles:
stack_attrs['files'] = dict(
@@ -104,8 +119,9 @@ class Proxy(proxy.Proxy):
base_path = None if not preview else '/stacks/preview'
return self._create(_stack.Stack, base_path=base_path, **attrs)
def find_stack(self, name_or_id,
ignore_missing=True, resolve_outputs=True):
def find_stack(
self, name_or_id, ignore_missing=True, resolve_outputs=True
):
"""Find a single stack
:param name_or_id: The name or ID of a stack.

@@ -116,9 +132,12 @@ class Proxy(proxy.Proxy):
attempting to find a nonexistent resource.
:returns: One :class:`~openstack.orchestration.v1.stack.Stack` or None
"""
return self._find(_stack.Stack, name_or_id,
ignore_missing=ignore_missing,
resolve_outputs=resolve_outputs)
return self._find(
_stack.Stack,
name_or_id,
ignore_missing=ignore_missing,
resolve_outputs=resolve_outputs,
)
def stacks(self, **query):
"""Return a generator of stacks

@@ -219,8 +238,12 @@ class Proxy(proxy.Proxy):
else:
obj = self._find(_stack.Stack, stack, ignore_missing=False)
return self._get(_stack_template.StackTemplate, requires_id=False,
stack_name=obj.name, stack_id=obj.id)
return self._get(
_stack_template.StackTemplate,
requires_id=False,
stack_name=obj.name,
stack_id=obj.id,
)
def get_stack_environment(self, stack):
"""Get environment used by a stack

@@ -238,9 +261,12 @@ class Proxy(proxy.Proxy):
else:
obj = self._find(_stack.Stack, stack, ignore_missing=False)
return self._get(_stack_environment.StackEnvironment,
requires_id=False, stack_name=obj.name,
stack_id=obj.id)
return self._get(
_stack_environment.StackEnvironment,
requires_id=False,
stack_name=obj.name,
stack_id=obj.id,
)
def get_stack_files(self, stack):
"""Get files used by a stack

@@ -283,8 +309,9 @@ class Proxy(proxy.Proxy):
else:
obj = self._find(_stack.Stack, stack, ignore_missing=False)
return self._list(_resource.Resource, stack_name=obj.name,
stack_id=obj.id, **query)
return self._list(
_resource.Resource, stack_name=obj.name, stack_id=obj.id, **query
)
def create_software_config(self, **attrs):
"""Create a new software config from attributes

@@ -335,8 +362,9 @@ class Proxy(proxy.Proxy):
attempting to delete a nonexistent software config.
:returns: ``None``
"""
self._delete(_sc.SoftwareConfig, software_config,
ignore_missing=ignore_missing)
self._delete(
_sc.SoftwareConfig, software_config, ignore_missing=ignore_missing
)
def create_software_deployment(self, **attrs):
"""Create a new software deployment from attributes

@@ -374,8 +402,9 @@ class Proxy(proxy.Proxy):
"""
return self._get(_sd.SoftwareDeployment, software_deployment)
def delete_software_deployment(self, software_deployment,
ignore_missing=True):
def delete_software_deployment(
self, software_deployment, ignore_missing=True
):
"""Delete a software deployment
:param software_deployment: The value can be either the ID of a

@@ -388,8 +417,11 @@ class Proxy(proxy.Proxy):
attempting to delete a nonexistent software deployment.
:returns: ``None``
"""
self._delete(_sd.SoftwareDeployment, software_deployment,
ignore_missing=ignore_missing)
self._delete(
_sd.SoftwareDeployment,
software_deployment,
ignore_missing=ignore_missing,
)
def update_software_deployment(self, software_deployment, **attrs):
"""Update a software deployment

@@ -403,11 +435,13 @@ class Proxy(proxy.Proxy):
:rtype:
:class:`~openstack.orchestration.v1.software_deployment.SoftwareDeployment`
"""
return self._update(_sd.SoftwareDeployment, software_deployment,
**attrs)
return self._update(
_sd.SoftwareDeployment, software_deployment, **attrs
)
def validate_template(self, template, environment=None, template_url=None,
ignore_errors=None):
def validate_template(
self, template, environment=None, template_url=None, ignore_errors=None
):
"""Validates a template.
:param template: The stack template on which the validation is

@@ -429,15 +463,21 @@ class Proxy(proxy.Proxy):
"""
if template is None and template_url is None:
raise exceptions.InvalidRequest(
"'template_url' must be specified when template is None")
"'template_url' must be specified when template is None"
)
tmpl = _template.Template.new()
return tmpl.validate(self, template, environment=environment,
template_url=template_url,
ignore_errors=ignore_errors)
return tmpl.validate(
self,
template,
environment=environment,
template_url=template_url,
ignore_errors=ignore_errors,
)
def wait_for_status(self, res, status='ACTIVE', failures=None,
interval=2, wait=120):
def wait_for_status(
self, res, status='ACTIVE', failures=None, interval=2, wait=120
):
"""Wait for a resource to be in a particular status.
:param res: The resource to wait on to reach the specified status.

@@ -460,7 +500,8 @@ class Proxy(proxy.Proxy):
"""
failures = [] if failures is None else failures
return resource.wait_for_status(
self, res, status, failures, interval, wait)
self, res, status, failures, interval, wait
)
def wait_for_delete(self, res, interval=2, wait=120):
"""Wait for a resource to be deleted.

@@ -478,26 +519,37 @@ class Proxy(proxy.Proxy):
return resource.wait_for_delete(self, res, interval, wait)
def get_template_contents(
self, template_file=None, template_url=None,
template_object=None, files=None):
self,
template_file=None,
template_url=None,
template_object=None,
files=None,
):
try:
return template_utils.get_template_contents(
template_file=template_file, template_url=template_url,
template_object=template_object, files=files)
template_file=template_file,
template_url=template_url,
template_object=template_object,
files=files,
)
except Exception as e:
raise exceptions.SDKException(
"Error in processing template files: %s" % str(e))
"Error in processing template files: %s" % str(e)
)
def _get_cleanup_dependencies(self):
return {
'orchestration': {
'before': ['compute', 'network', 'identity']
}
'orchestration': {'before': ['compute', 'network', 'identity']}
}
def _service_cleanup(self, dry_run=True, client_status_queue=None,
identified_resources=None,
filters=None, resource_evaluation_fn=None):
def _service_cleanup(
self,
dry_run=True,
client_status_queue=None,
identified_resources=None,
filters=None,
resource_evaluation_fn=None,
):
stacks = []
for obj in self.stacks():
need_delete = self._service_cleanup_del_res(

@@ -507,7 +559,8 @@ class Proxy(proxy.Proxy):
client_status_queue=client_status_queue,
identified_resources=identified_resources,
filters=filters,
resource_evaluation_fn=resource_evaluation_fn)
resource_evaluation_fn=resource_evaluation_fn,
)
if not dry_run and need_delete:
stacks.append(obj)
@@ -31,8 +31,9 @@ class Resource(resource.Resource):
links = resource.Body('links')
#: ID of the logical resource, usually the literal name of the resource
#: as it appears in the stack template.
logical_resource_id = resource.Body('logical_resource_id',
alternate_id=True)
logical_resource_id = resource.Body(
'logical_resource_id', alternate_id=True
)
#: Name of the resource.
name = resource.Body('resource_name')
#: ID of the physical resource (if any) that backs up the resource. For
@@ -48,5 +48,6 @@ class SoftwareConfig(resource.Resource):
def create(self, session, base_path=None):
# This overrides the default behavior of resource creation because
# heat doesn't accept resource_key in its request.
return super(SoftwareConfig, self).create(session, prepend_key=False,
base_path=base_path)
return super(SoftwareConfig, self).create(
session, prepend_key=False, base_path=base_path
)
@@ -53,10 +53,12 @@ class SoftwareDeployment(resource.Resource):
# This overrides the default behavior of resource creation because
# heat doesn't accept resource_key in its request.
return super(SoftwareDeployment, self).create(
session, prepend_key=False, base_path=base_path)
session, prepend_key=False, base_path=base_path
)
def commit(self, session, base_path=None):
# This overrides the default behavior of resource creation because
# heat doesn't accept resource_key in its request.
return super(SoftwareDeployment, self).commit(
session, prepend_key=False, base_path=base_path)
session, prepend_key=False, base_path=base_path
)
@@ -29,8 +29,12 @@ class Stack(resource.Resource):
allow_delete = True
_query_mapping = resource.QueryParameters(
'action', 'name', 'status',
'project_id', 'owner_id', 'username',
'action',
'name',
'status',
'project_id',
'owner_id',
'username',
project_id='tenant_id',
**tag.TagMixin._tag_query_parameters
)

@@ -111,14 +115,16 @@ class Stack(resource.Resource):
def create(self, session, base_path=None):
# This overrides the default behavior of resource creation because
# heat doesn't accept resource_key in its request.
return super(Stack, self).create(session, prepend_key=False,
base_path=base_path)
return super(Stack, self).create(
session, prepend_key=False, base_path=base_path
)
def commit(self, session, base_path=None):
# This overrides the default behavior of resource creation because
# heat doesn't accept resource_key in its request.
return super(Stack, self).commit(session, prepend_key=False,
has_body=False, base_path=None)
return super(Stack, self).commit(
session, prepend_key=False, has_body=False, base_path=None
)
def update(self, session, preview=False):
# This overrides the default behavior of resource update because

@@ -127,16 +133,17 @@ class Stack(resource.Resource):
if self.name and self.id:
base_path = '/stacks/%(stack_name)s/%(stack_id)s' % {
'stack_name': self.name,
'stack_id': self.id}
'stack_id': self.id,
}
elif self.name or self.id:
# We have only one of name/id. Do not try to build a stacks/NAME/ID
# path
base_path = '/stacks/%(stack_identity)s' % {
'stack_identity': self.name or self.id}
'stack_identity': self.name or self.id
}
request = self._prepare_request(
prepend_key=False,
requires_id=False,
base_path=base_path)
prepend_key=False, requires_id=False, base_path=base_path
)
microversion = self._get_microversion(session, action='commit')

@@ -145,8 +152,11 @@ class Stack(resource.Resource):
request_url = utils.urljoin(request_url, 'preview')
response = session.put(
request_url, json=request.body, headers=request.headers,
microversion=microversion)
request_url,
json=request.body,
headers=request.headers,
microversion=microversion,
)
self.microversion = microversion
self._translate_response(response, has_body=True)

@@ -162,20 +172,28 @@ class Stack(resource.Resource):
return self._action(session, {'check': ''})
def abandon(self, session):
url = utils.urljoin(self.base_path, self.name,
self._get_id(self), 'abandon')
url = utils.urljoin(
self.base_path, self.name, self._get_id(self), 'abandon'
)
resp = session.delete(url)
return resp.json()
def fetch(self, session, requires_id=True,
base_path=None, error_message=None,
skip_cache=False, resolve_outputs=True):
def fetch(
self,
session,
requires_id=True,
base_path=None,
error_message=None,
skip_cache=False,
resolve_outputs=True,
):
if not self.allow_fetch:
raise exceptions.MethodNotSupported(self, "fetch")
request = self._prepare_request(requires_id=requires_id,
base_path=base_path)
request = self._prepare_request(
requires_id=requires_id, base_path=base_path
)
# session = self._get_session(session)
microversion = self._get_microversion(session, action='fetch')
@@ -185,7 +203,8 @@ class Stack(resource.Resource):
if not resolve_outputs:
request.url = request.url + '?resolve_outputs=False'
response = session.get(
request.url, microversion=microversion, skip_cache=skip_cache)
request.url, microversion=microversion, skip_cache=skip_cache
)
kwargs = {}
if error_message:
kwargs['error_message'] = error_message

@@ -195,7 +214,8 @@ class Stack(resource.Resource):
if self and self.status in ['DELETE_COMPLETE', 'ADOPT_COMPLETE']:
raise exceptions.ResourceNotFound(
"No stack found for %s" % self.id)
"No stack found for %s" % self.id
)
return self
@classmethod

@@ -227,9 +247,8 @@ class Stack(resource.Resource):
# Try to short-circuit by looking directly for a matching ID.
try:
match = cls.existing(
id=name_or_id,
connection=session._get_connection(),
**params)
id=name_or_id, connection=session._get_connection(), **params
)
return match.fetch(session, **params)
except exceptions.NotFoundException:
pass

@@ -240,7 +259,8 @@ class Stack(resource.Resource):
if ignore_missing:
return None
raise exceptions.ResourceNotFound(
"No %s found for %s" % (cls.__name__, name_or_id))
"No %s found for %s" % (cls.__name__, name_or_id)
)
StackPreview = Stack
@@ -32,8 +32,14 @@ class Template(resource.Resource):
#: A list of parameter groups each contains a lsit of parameter names.
parameter_groups = resource.Body('ParameterGroups', type=list)
def validate(self, session, template, environment=None, template_url=None,
ignore_errors=None):
def validate(
self,
session,
template,
environment=None,
template_url=None,
ignore_errors=None,
):
url = '/validate'
body = {'template': template}
@@ -42,9 +42,8 @@ class TestStack(base.BaseFunctionalTest):
# the shade layer.
template['heat_template_version'] = '2013-05-23'
self.network, self.subnet = test_network.create_network(
self.conn,
self.NAME,
self.cidr)
self.conn, self.NAME, self.cidr
)
parameters = {
'image': image.id,
'key_name': self.NAME,

@@ -60,8 +59,11 @@ class TestStack(base.BaseFunctionalTest):
self.stack = sot
self.assertEqual(self.NAME, sot.name)
self.conn.orchestration.wait_for_status(
sot, status='CREATE_COMPLETE', failures=['CREATE_FAILED'],
wait=self._wait_for_timeout)
sot,
status='CREATE_COMPLETE',
failures=['CREATE_FAILED'],
wait=self._wait_for_timeout,
)
def tearDown(self):
self.conn.orchestration.delete_stack(self.stack, ignore_missing=False)

@@ -69,7 +71,8 @@ class TestStack(base.BaseFunctionalTest):
# Need to wait for the stack to go away before network delete
try:
self.conn.orchestration.wait_for_status(
self.stack, 'DELETE_COMPLETE', wait=self._wait_for_timeout)
self.stack, 'DELETE_COMPLETE', wait=self._wait_for_timeout
)
except exceptions.ResourceNotFound:
pass
test_network.delete_network(self.conn, self.network, self.subnet)
@@ -23,7 +23,6 @@ EXAMPLE = {
class TestVersion(base.TestCase):
def test_basic(self):
sot = version.Version()
self.assertEqual('version', sot.resource_key)
@@ -42,11 +42,15 @@ class TestOrchestrationStack(TestOrchestrationProxy):
self.proxy.create_stack,
stack.Stack,
method_kwargs={"preview": True, "x": 1, "y": 2, "z": 3},
expected_kwargs={"x": 1, "y": 2, "z": 3})
expected_kwargs={"x": 1, "y": 2, "z": 3},
)
def test_find_stack(self):
self.verify_find(self.proxy.find_stack, stack.Stack,
expected_kwargs={'resolve_outputs': True})
self.verify_find(
self.proxy.find_stack,
stack.Stack,
expected_kwargs={'resolve_outputs': True},
)
# mock_method="openstack.proxy.Proxy._find"
# test_method=self.proxy.find_stack
# method_kwargs = {

@@ -78,12 +82,15 @@ class TestOrchestrationStack(TestOrchestrationProxy):
self.verify_list(self.proxy.stacks, stack.Stack)
def test_get_stack(self):
self.verify_get(self.proxy.get_stack, stack.Stack,
method_kwargs={'resolve_outputs': False},
expected_kwargs={'resolve_outputs': False})
self.verify_get(
self.proxy.get_stack,
stack.Stack,
method_kwargs={'resolve_outputs': False},
expected_kwargs={'resolve_outputs': False},
)
self.verify_get_overrided(
self.proxy, stack.Stack,
'openstack.orchestration.v1.stack.Stack')
self.proxy, stack.Stack, 'openstack.orchestration.v1.stack.Stack'
)
def test_update_stack(self):
self._verify(

@@ -92,7 +99,8 @@ class TestOrchestrationStack(TestOrchestrationProxy):
expected_result='result',
method_args=['stack'],
method_kwargs={'preview': False},
expected_args=[self.proxy, False])
expected_args=[self.proxy, False],
)
def test_update_stack_preview(self):
self._verify(

@@ -101,7 +109,8 @@ class TestOrchestrationStack(TestOrchestrationProxy):
expected_result='result',
method_args=['stack'],
method_kwargs={'preview': True},
expected_args=[self.proxy, True])
expected_args=[self.proxy, True],
)
def test_abandon_stack(self):
self._verify(

@@ -109,7 +118,8 @@ class TestOrchestrationStack(TestOrchestrationProxy):
self.proxy.abandon_stack,
expected_result='result',
method_args=['stack'],
expected_args=[self.proxy])
expected_args=[self.proxy],
)
def test_delete_stack(self):
self.verify_delete(self.proxy.delete_stack, stack.Stack, False)

@@ -154,9 +164,12 @@ class TestOrchestrationStackEnvironment(TestOrchestrationProxy):
expected_kwargs={
'requires_id': False,
'stack_name': stack_name,
'stack_id': stack_id})
mock_find.assert_called_once_with(mock.ANY, 'IDENTITY',
ignore_missing=False)
'stack_id': stack_id,
},
)
mock_find.assert_called_once_with(
mock.ANY, 'IDENTITY', ignore_missing=False
)
def test_get_stack_environment_with_stack_object(self):
stack_id = '1234'

@@ -171,7 +184,9 @@ class TestOrchestrationStackEnvironment(TestOrchestrationProxy):
expected_kwargs={
'requires_id': False,
'stack_name': stack_name,
'stack_id': stack_id})
'stack_id': stack_id,
},
)
class TestOrchestrationStackFiles(TestOrchestrationProxy):

@@ -187,8 +202,9 @@ class TestOrchestrationStackFiles(TestOrchestrationProxy):
res = self.proxy.get_stack_files('IDENTITY')
self.assertEqual({'file': 'content'}, res)
mock_find.assert_called_once_with(mock.ANY, 'IDENTITY',
ignore_missing=False)
mock_find.assert_called_once_with(
mock.ANY, 'IDENTITY', ignore_missing=False
)
mock_fetch.assert_called_once_with(self.proxy)
@mock.patch.object(stack_files.StackFiles, 'fetch')

@@ -220,9 +236,12 @@ class TestOrchestrationStackTemplate(TestOrchestrationProxy):
expected_kwargs={
'requires_id': False,
'stack_name': stack_name,
'stack_id': stack_id})
mock_find.assert_called_once_with(mock.ANY, 'IDENTITY',
ignore_missing=False)
'stack_id': stack_id,
},
)
mock_find.assert_called_once_with(
mock.ANY, 'IDENTITY', ignore_missing=False
)
def test_get_stack_template_with_stack_object(self):
stack_id = '1234'

@@ -237,7 +256,9 @@ class TestOrchestrationStackTemplate(TestOrchestrationProxy):
expected_kwargs={
'requires_id': False,
'stack_name': stack_name,
'stack_id': stack_id})
'stack_id': stack_id,
},
)

class TestOrchestrationResource(TestOrchestrationProxy):

@@ -247,11 +268,13 @@ class TestOrchestrationResource(TestOrchestrationProxy):
stack_name = 'test_stack'
stk = stack.Stack(id=stack_id, name=stack_name)
self.verify_list(self.proxy.resources, resource.Resource,
method_args=[stk],
expected_args=[],
expected_kwargs={'stack_name': stack_name,
'stack_id': stack_id})
self.verify_list(
self.proxy.resources,
resource.Resource,
method_args=[stk],
expected_args=[],
expected_kwargs={'stack_name': stack_name, 'stack_id': stack_id},
)
self.assertEqual(0, mock_find.call_count)

@@ -262,31 +285,37 @@ class TestOrchestrationResource(TestOrchestrationProxy):
stk = stack.Stack(id=stack_id, name=stack_name)
mock_find.return_value = stk
self.verify_list(self.proxy.resources, resource.Resource,
method_args=[stack_id],
expected_args=[],
expected_kwargs={'stack_name': stack_name,
'stack_id': stack_id})
self.verify_list(
self.proxy.resources,
resource.Resource,
method_args=[stack_id],
expected_args=[],
expected_kwargs={'stack_name': stack_name, 'stack_id': stack_id},
)
mock_find.assert_called_once_with(mock.ANY, stack_id,
ignore_missing=False)
mock_find.assert_called_once_with(
mock.ANY, stack_id, ignore_missing=False
)
@mock.patch.object(stack.Stack, 'find')
@mock.patch.object(resource.Resource, 'list')
def test_resources_stack_not_found(self, mock_list, mock_find):
stack_name = 'test_stack'
mock_find.side_effect = exceptions.ResourceNotFound(
'No stack found for test_stack')
'No stack found for test_stack'
)
ex = self.assertRaises(exceptions.ResourceNotFound,
self.proxy.resources, stack_name)
ex = self.assertRaises(
exceptions.ResourceNotFound, self.proxy.resources, stack_name
)
self.assertEqual('No stack found for test_stack', str(ex))
class TestOrchestrationSoftwareConfig(TestOrchestrationProxy):
def test_create_software_config(self):
self.verify_create(self.proxy.create_software_config,
sc.SoftwareConfig)
self.verify_create(
self.proxy.create_software_config, sc.SoftwareConfig
)
def test_software_configs(self):
self.verify_list(self.proxy.software_configs, sc.SoftwareConfig)

@@ -295,34 +324,42 @@ class TestOrchestrationSoftwareConfig(TestOrchestrationProxy):
self.verify_get(self.proxy.get_software_config, sc.SoftwareConfig)
def test_delete_software_config(self):
self.verify_delete(self.proxy.delete_software_config,
sc.SoftwareConfig, True)
self.verify_delete(self.proxy.delete_software_config,
sc.SoftwareConfig, False)
self.verify_delete(
self.proxy.delete_software_config, sc.SoftwareConfig, True
)
self.verify_delete(
self.proxy.delete_software_config, sc.SoftwareConfig, False
)

class TestOrchestrationSoftwareDeployment(TestOrchestrationProxy):
def test_create_software_deployment(self):
self.verify_create(self.proxy.create_software_deployment,
sd.SoftwareDeployment)
self.verify_create(
self.proxy.create_software_deployment, sd.SoftwareDeployment
)
def test_software_deployments(self):
self.verify_list(self.proxy.software_deployments,
sd.SoftwareDeployment)
self.verify_list(
self.proxy.software_deployments, sd.SoftwareDeployment
)
def test_get_software_deployment(self):
self.verify_get(self.proxy.get_software_deployment,
sd.SoftwareDeployment)
self.verify_get(
self.proxy.get_software_deployment, sd.SoftwareDeployment
)
def test_update_software_deployment(self):
self.verify_update(self.proxy.update_software_deployment,
sd.SoftwareDeployment)
self.verify_update(
self.proxy.update_software_deployment, sd.SoftwareDeployment
)
def test_delete_software_deployment(self):
self.verify_delete(self.proxy.delete_software_deployment,
sd.SoftwareDeployment, True)
self.verify_delete(self.proxy.delete_software_deployment,
sd.SoftwareDeployment, False)
self.verify_delete(
self.proxy.delete_software_deployment, sd.SoftwareDeployment, True
)
self.verify_delete(
self.proxy.delete_software_deployment, sd.SoftwareDeployment, False
)

class TestOrchestrationTemplate(TestOrchestrationProxy):

@@ -336,8 +373,12 @@ class TestOrchestrationTemplate(TestOrchestrationProxy):
res = self.proxy.validate_template(tmpl, env, tmpl_url, ignore_errors)
mock_validate.assert_called_once_with(
self.proxy, tmpl, environment=env, template_url=tmpl_url,
ignore_errors=ignore_errors)
self.proxy,
tmpl,
environment=env,
template_url=tmpl_url,
ignore_errors=ignore_errors,
)
self.assertEqual(mock_validate.return_value, res)
def test_validate_template_no_env(self):

@@ -349,11 +390,16 @@ class TestOrchestrationTemplate(TestOrchestrationProxy):
self.assertIsInstance(res["files"], dict)
def test_validate_template_invalid_request(self):
err = self.assertRaises(exceptions.InvalidRequest,
self.proxy.validate_template,
None, template_url=None)
self.assertEqual("'template_url' must be specified when template is "
"None", str(err))
err = self.assertRaises(
exceptions.InvalidRequest,
self.proxy.validate_template,
None,
template_url=None,
)
self.assertEqual(
"'template_url' must be specified when template is " "None",
str(err),
)

class TestExtractName(TestOrchestrationProxy):

@@ -362,22 +408,47 @@ class TestExtractName(TestOrchestrationProxy):
('stacks', dict(url='/stacks', parts=['stacks'])),