Cleanup utils 2/2
Removes remaining unused utility methods. Change-Id: Ic2769b69bf4b34669886dde1ef663d561d9e6173
This commit is contained in:
parent
5a422e4f9e
commit
e1b9144816
|
@ -13,13 +13,7 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from unittest import mock
|
||||
import yaml
|
||||
import six
|
||||
from swiftclient import exceptions as swiftexceptions
|
||||
|
||||
from tripleo_common import constants
|
||||
from tripleo_common.tests import base
|
||||
|
@ -214,204 +208,6 @@ class PlanTest(base.TestCase):
|
|||
def setUp(self):
|
||||
super(PlanTest, self).setUp()
|
||||
self.container = 'overcloud'
|
||||
self.swift = mock.MagicMock()
|
||||
self.swift.get_object.return_value = ({}, PLAN_ENV_CONTENTS)
|
||||
|
||||
def test_get_env(self):
|
||||
env = plan_utils.get_env(self.swift, self.container)
|
||||
|
||||
self.swift.get_object.assert_called()
|
||||
self.assertEqual(env['template'], 'overcloud.yaml')
|
||||
|
||||
def test_get_env_not_found(self):
|
||||
self.swift.get_object.side_effect = swiftexceptions.ClientException
|
||||
|
||||
self. assertRaises(Exception, plan_utils.get_env, self.swift,
|
||||
self.container)
|
||||
|
||||
def test_get_user_env(self):
|
||||
self.swift.get_object.return_value = ({}, USER_ENV_CONTENTS)
|
||||
env = plan_utils.get_user_env(self.swift, self.container)
|
||||
|
||||
self.swift.get_object.assert_called_with(
|
||||
self.container, 'user-environment.yaml')
|
||||
self.assertEqual(
|
||||
env['resource_registry']['OS::TripleO::Foo'], 'bar.yaml')
|
||||
|
||||
def test_put_user_env(self):
|
||||
contents = {'a': 'b'}
|
||||
plan_utils.put_user_env(self.swift, self.container, contents)
|
||||
|
||||
self.swift.put_object.assert_called_with(
|
||||
self.container, 'user-environment.yaml', 'a: b\n')
|
||||
|
||||
def test_update_in_env(self):
|
||||
env = plan_utils.get_env(self.swift, self.container)
|
||||
|
||||
updated_env = plan_utils.update_in_env(
|
||||
self.swift,
|
||||
env,
|
||||
'template',
|
||||
'updated-overcloud.yaml'
|
||||
)
|
||||
self.assertEqual(updated_env['template'], 'updated-overcloud.yaml')
|
||||
|
||||
updated_env = plan_utils.update_in_env(
|
||||
self.swift,
|
||||
env,
|
||||
'parameter_defaults',
|
||||
{'another-key': 'another-value'}
|
||||
)
|
||||
self.assertEqual(updated_env['parameter_defaults'], {
|
||||
'BlockStorageCount': 42,
|
||||
'OvercloudControlFlavor': 'yummy',
|
||||
'another-key': 'another-value'
|
||||
})
|
||||
|
||||
updated_env = plan_utils.update_in_env(
|
||||
self.swift,
|
||||
env,
|
||||
'parameter_defaults',
|
||||
delete_key=True
|
||||
)
|
||||
self.assertNotIn('parameter_defaults', updated_env)
|
||||
|
||||
self.swift.get_object.assert_called()
|
||||
self.swift.put_object.assert_called()
|
||||
|
||||
def test_write_json_temp_file(self):
|
||||
name = plan_utils.write_json_temp_file({'foo': 'bar'})
|
||||
with open(name) as f:
|
||||
self.assertEqual({'foo': 'bar'}, json.load(f))
|
||||
os.remove(name)
|
||||
|
||||
@mock.patch('requests.request', autospec=True)
|
||||
def test_object_request(self, request):
|
||||
request.return_value.content = 'foo'
|
||||
|
||||
content = plan_utils.object_request('GET', '/foo/bar', 'asdf1234')
|
||||
|
||||
self.assertEqual('foo', content)
|
||||
request.assert_called_once_with(
|
||||
'GET', '/foo/bar', headers={'X-Auth-Token': 'asdf1234'})
|
||||
|
||||
@mock.patch('tripleo_common.utils.plan.object_request',
|
||||
autospec=True)
|
||||
def test_process_environments_and_files(self, object_request):
|
||||
swift_url = 'https://192.0.2.1:8443/foo'
|
||||
url = '%s/bar' % swift_url
|
||||
object_request.return_value = 'parameter_defaults: {foo: bar}'
|
||||
swift = mock.Mock()
|
||||
swift.url = swift_url
|
||||
swift.token = 'asdf1234'
|
||||
|
||||
result = plan_utils.process_environments_and_files(swift, [url])
|
||||
|
||||
self.assertEqual(
|
||||
{'parameter_defaults': {'foo': 'bar'}},
|
||||
result[1]
|
||||
)
|
||||
object_request.assert_called_once_with(
|
||||
'GET',
|
||||
'https://192.0.2.1:8443/foo/bar',
|
||||
'asdf1234'
|
||||
)
|
||||
|
||||
@mock.patch('tripleo_common.utils.plan.object_request',
|
||||
autospec=True)
|
||||
def test_get_template_contents(self, object_request):
|
||||
swift_url = 'https://192.0.2.1:8443/foo'
|
||||
url = '%s/bar' % swift_url
|
||||
object_request.return_value = 'heat_template_version: 2016-04-30'
|
||||
swift = mock.Mock()
|
||||
swift.url = swift_url
|
||||
swift.token = 'asdf1234'
|
||||
|
||||
result = plan_utils.get_template_contents(swift, url)
|
||||
|
||||
self.assertEqual(
|
||||
{'heat_template_version': '2016-04-30'},
|
||||
result[1]
|
||||
)
|
||||
object_request.assert_called_once_with(
|
||||
'GET',
|
||||
'https://192.0.2.1:8443/foo/bar',
|
||||
'asdf1234'
|
||||
)
|
||||
|
||||
def test_build_env_paths(self):
|
||||
swift = mock.Mock()
|
||||
swift.url = 'https://192.0.2.1:8443/foo'
|
||||
swift.token = 'asdf1234'
|
||||
plan = {
|
||||
'version': '1.0',
|
||||
'environments': [
|
||||
{'path': 'bar.yaml'},
|
||||
{'data': {
|
||||
'parameter_defaults': {'InlineParam': 1}}}
|
||||
],
|
||||
'passwords': {
|
||||
'ThePassword': 'password1'
|
||||
},
|
||||
'derived_parameters': {
|
||||
'DerivedParam': 'DerivedValue',
|
||||
'MergableParam': {
|
||||
'one': 'derived one',
|
||||
'two': 'derived two',
|
||||
},
|
||||
},
|
||||
'parameter_defaults': {
|
||||
'Foo': 'bar',
|
||||
'MergableParam': {
|
||||
'one': 'user one',
|
||||
'three': 'user three',
|
||||
},
|
||||
},
|
||||
'resource_registry': {
|
||||
'Foo::Bar': 'foo_bar.yaml'
|
||||
},
|
||||
}
|
||||
|
||||
env_paths, temp_env_paths = plan_utils.build_env_paths(
|
||||
swift, 'overcloud', plan)
|
||||
|
||||
self.assertEqual(3, len(temp_env_paths))
|
||||
self.assertEqual(
|
||||
['https://192.0.2.1:8443/foo/overcloud/bar.yaml'] + temp_env_paths,
|
||||
env_paths
|
||||
)
|
||||
|
||||
with open(env_paths[1]) as f:
|
||||
self.assertEqual(
|
||||
{'parameter_defaults': {'InlineParam': 1}},
|
||||
json.load(f)
|
||||
)
|
||||
|
||||
with open(env_paths[2]) as f:
|
||||
self.assertEqual(
|
||||
{'parameter_defaults': {
|
||||
'ThePassword': 'password1',
|
||||
'DerivedParam': 'DerivedValue',
|
||||
'Foo': 'bar',
|
||||
'MergableParam': {
|
||||
'one': 'user one',
|
||||
'two': 'derived two',
|
||||
'three': 'user three',
|
||||
}
|
||||
}},
|
||||
json.load(f)
|
||||
)
|
||||
|
||||
with open(env_paths[3]) as f:
|
||||
self.assertEqual(
|
||||
{'resource_registry': {
|
||||
'Foo::Bar': 'foo_bar.yaml'
|
||||
}},
|
||||
json.load(f)
|
||||
)
|
||||
|
||||
for path in temp_env_paths:
|
||||
os.remove(path)
|
||||
|
||||
def test_get_next_index(self):
|
||||
keys_map = {
|
||||
|
@ -709,154 +505,3 @@ class PlanTest(base.TestCase):
|
|||
|
||||
result = plan_utils.generate_passwords(None, mock_orchestration)
|
||||
self.assertEqual(existing_passwords, result)
|
||||
|
||||
@mock.patch("tripleo_common.utils.plan.get_role_data")
|
||||
@mock.patch("tripleo_common.utils.plan."
|
||||
"update_plan_environment")
|
||||
@mock.patch("tripleo_common.utils.plan.get_env", autospec=True)
|
||||
@mock.patch("tripleo_common.image.kolla_builder."
|
||||
"container_images_prepare_multi")
|
||||
@mock.patch("tripleo_common.image.kolla_builder.KollaImageBuilder")
|
||||
def test_update_plan_with_image_parameter(
|
||||
self, kib, prepare, get_env, mock_update_plan, grd):
|
||||
builder = kib.return_value
|
||||
builder.container_images_from_template.return_value = [{
|
||||
'imagename': 't/cb-nova-compute:liberty',
|
||||
'params': ['ContainerNovaComputeImage',
|
||||
'ContainerNovaLibvirtConfigImage']
|
||||
}, {'imagename': 't/cb-nova-libvirt:liberty',
|
||||
'params': ['ContainerNovaLibvirtImage']}]
|
||||
|
||||
plan = {
|
||||
'version': '1.0',
|
||||
'environments': [],
|
||||
'parameter_defaults': {}
|
||||
}
|
||||
role_data = [{'name': 'Controller'}]
|
||||
final_env = {'environments': [
|
||||
{'path': 'overcloud-resource-registry-puppet.yaml'},
|
||||
{'path': 'environments/containers-default-parameters.yaml'},
|
||||
{'path': 'user-environment.yaml'}
|
||||
]}
|
||||
image_params = {
|
||||
'FooContainerImage': '192.0.2.1/foo/image',
|
||||
'ContainerNovaComputeImage': 't/cb-nova-compute:liberty',
|
||||
'ContainerNovaLibvirtConfigImage': 't/cb-nova-compute:liberty',
|
||||
'ContainerNovaLibvirtImage': 't/cb-nova-libvirt:liberty',
|
||||
}
|
||||
image_env_contents = yaml.safe_dump(
|
||||
{'parameter_defaults': image_params},
|
||||
default_flow_style=False
|
||||
)
|
||||
|
||||
swift = mock.MagicMock()
|
||||
swift.get_object.return_value = role_data
|
||||
prepare.return_value = image_params
|
||||
grd.return_value = role_data
|
||||
|
||||
get_env.return_value = plan
|
||||
mock_update_plan.return_value = final_env
|
||||
result = plan_utils.update_plan_environment_with_image_parameters(
|
||||
swift, container='overcloud')
|
||||
self.assertEqual(final_env, result)
|
||||
|
||||
get_env.assert_called_once_with(swift, 'overcloud')
|
||||
prepare.assert_called_once_with({}, role_data, dry_run=True)
|
||||
swift.put_object.assert_called_once_with(
|
||||
'overcloud',
|
||||
'environments/containers-default-parameters.yaml',
|
||||
image_env_contents
|
||||
)
|
||||
|
||||
@mock.patch("tripleo_common.utils.plan."
|
||||
"update_plan_environment")
|
||||
@mock.patch("tripleo_common.image.kolla_builder.KollaImageBuilder")
|
||||
def test_update_plan_image_parameters_default(
|
||||
self, kib, mock_update_plan):
|
||||
swift = mock.MagicMock()
|
||||
builder = kib.return_value
|
||||
builder.container_images_from_template.return_value = [{
|
||||
'imagename': 't/cb-nova-compute:liberty',
|
||||
'params': ['ContainerNovaComputeImage',
|
||||
'ContainerNovaLibvirtConfigImage']
|
||||
}, {'imagename': 't/cb-nova-libvirt:liberty',
|
||||
'params': ['ContainerNovaLibvirtImage']}]
|
||||
|
||||
final_env = {'environments': [
|
||||
{'path': 'overcloud-resource-registry-puppet.yaml'},
|
||||
{'path': 'environments/containers-default-parameters.yaml'},
|
||||
{'path': 'user-environment.yaml'}
|
||||
]}
|
||||
mock_update_plan.return_value = final_env
|
||||
|
||||
result = plan_utils.update_plan_environment_with_image_parameters(
|
||||
swift, container='overcloud', with_roledata=False)
|
||||
self.assertEqual(final_env, result)
|
||||
|
||||
kib.assert_called_once_with(
|
||||
[os.path.join(sys.prefix, 'share', 'tripleo-common',
|
||||
'container-images', 'tripleo_containers.yaml.j2')]
|
||||
)
|
||||
params = {
|
||||
'ContainerNovaComputeImage': 't/cb-nova-compute:liberty',
|
||||
'ContainerNovaLibvirtConfigImage': 't/cb-nova-compute:liberty',
|
||||
'ContainerNovaLibvirtImage': 't/cb-nova-libvirt:liberty',
|
||||
}
|
||||
expected_env = yaml.safe_dump(
|
||||
{'parameter_defaults': params},
|
||||
default_flow_style=False
|
||||
)
|
||||
swift.put_object.assert_called_once_with(
|
||||
'overcloud',
|
||||
'environments/containers-default-parameters.yaml',
|
||||
expected_env
|
||||
)
|
||||
mock_update_plan.assert_called_once_with(
|
||||
swift,
|
||||
{'environments/containers-default-parameters.yaml': True},
|
||||
container='overcloud'
|
||||
)
|
||||
|
||||
def test_create_plan_container(self):
|
||||
# Setup
|
||||
container_name = 'Test-container-7'
|
||||
swift = mock.MagicMock()
|
||||
swift.get_account.return_value = [
|
||||
'', [{'name': 'test1'}, {'name': 'test2'}]]
|
||||
|
||||
# Test
|
||||
plan_utils.create_plan_container(swift, container_name)
|
||||
|
||||
# Verify
|
||||
swift.put_container.assert_called_once_with(
|
||||
container_name,
|
||||
headers={'x-container-meta-usage-tripleo': 'plan'}
|
||||
)
|
||||
|
||||
def test_container_exists(self):
|
||||
# Setup
|
||||
container_name = 'Test-container-7'
|
||||
swift = mock.MagicMock()
|
||||
swift.get_account.return_value = [
|
||||
'', [{'name': 'Test-container-7'}, {'name': 'test2'}]]
|
||||
|
||||
# Test
|
||||
error_str = ('A container with the name %s already'
|
||||
' exists.') % container_name
|
||||
err = self.assertRaises(RuntimeError,
|
||||
plan_utils.create_plan_container,
|
||||
swift, container_name)
|
||||
self.assertEquals(error_str, six.text_type(err))
|
||||
|
||||
def test_run_invalid_name(self):
|
||||
# Setup
|
||||
container_name = 'Invalid_underscore'
|
||||
swift = mock.MagicMock()
|
||||
|
||||
# Test
|
||||
error_str = ('The plan name must only contain '
|
||||
'letters, numbers or dashes')
|
||||
err = self.assertRaises(RuntimeError,
|
||||
plan_utils.create_plan_container,
|
||||
swift, container_name)
|
||||
self.assertEquals(error_str, six.text_type(err))
|
||||
|
|
|
@ -1,260 +0,0 @@
|
|||
# Copyright 2016 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import tempfile
|
||||
from unittest import mock
|
||||
|
||||
import yaml
|
||||
|
||||
from heatclient import exc as heat_exc
|
||||
from swiftclient import exceptions as swiftexceptions
|
||||
|
||||
from tripleo_common import constants
|
||||
from tripleo_common.tests import base
|
||||
from tripleo_common.utils import stack
|
||||
|
||||
|
||||
class DeployStackTest(base.TestCase):
|
||||
|
||||
@mock.patch('tripleo_common.utils.stack.time')
|
||||
@mock.patch('heatclient.common.template_utils.'
|
||||
'process_multiple_environments_and_files')
|
||||
@mock.patch('heatclient.common.template_utils.get_template_contents')
|
||||
def test_stack_deploy(
|
||||
self, mock_get_template_contents,
|
||||
mock_process_multiple_environments_and_files,
|
||||
mock_time):
|
||||
|
||||
# setup swift
|
||||
swift = mock.MagicMock(url="http://test.com")
|
||||
mock_env = yaml.safe_dump({
|
||||
'name': 'overcloud',
|
||||
'temp_environment': 'temp_environment',
|
||||
'template': 'template',
|
||||
'environments': [{u'path': u'environments/test.yaml'}],
|
||||
'parameter_defaults': {'random_existing_data': 'a_value'},
|
||||
}, default_flow_style=False)
|
||||
swift.get_object.side_effect = (
|
||||
({}, mock_env),
|
||||
({}, mock_env),
|
||||
swiftexceptions.ClientException('atest2')
|
||||
)
|
||||
heat = mock.MagicMock()
|
||||
heat.stacks.get.return_value = None
|
||||
|
||||
mock_get_template_contents.return_value = ({}, {
|
||||
'heat_template_version': '2016-04-30'
|
||||
})
|
||||
mock_process_multiple_environments_and_files.return_value = ({}, {})
|
||||
|
||||
# freeze time at datetime.datetime(2016, 9, 8, 16, 24, 24)
|
||||
mock_time.time.return_value = 1473366264
|
||||
|
||||
stack.deploy_stack(swift, heat, 'overcloud')
|
||||
|
||||
heat.stacks.create.assert_called_once_with(
|
||||
environment={},
|
||||
files={},
|
||||
stack_name='overcloud',
|
||||
template={'heat_template_version': '2016-04-30'},
|
||||
timeout_mins=240,
|
||||
)
|
||||
swift.delete_object.assert_called_once_with(
|
||||
"overcloud-swift-rings", "swift-rings.tar.gz")
|
||||
swift.copy_object.assert_called_once_with(
|
||||
"overcloud-swift-rings", "swift-rings.tar.gz",
|
||||
"overcloud-swift-rings/swift-rings.tar.gz-%d" % 1473366264)
|
||||
|
||||
@mock.patch('tripleo_common.utils.stack.time')
|
||||
@mock.patch('heatclient.common.template_utils.'
|
||||
'process_multiple_environments_and_files')
|
||||
@mock.patch('heatclient.common.template_utils.get_template_contents')
|
||||
def test_run_skip_deploy_identifier(
|
||||
self, mock_get_template_contents,
|
||||
mock_process_multiple_environments_and_files,
|
||||
mock_time):
|
||||
|
||||
# setup swift
|
||||
swift = mock.MagicMock(url="http://test.com")
|
||||
|
||||
heat = mock.MagicMock()
|
||||
heat.stacks.get.return_value = None
|
||||
|
||||
mock_env = yaml.safe_dump({
|
||||
'name': constants.DEFAULT_CONTAINER_NAME,
|
||||
'temp_environment': 'temp_environment',
|
||||
'template': 'template',
|
||||
'environments': [{u'path': u'environments/test.yaml'}],
|
||||
'parameter_defaults': {'random_existing_data': 'a_value'},
|
||||
}, default_flow_style=False)
|
||||
swift.get_object.side_effect = (
|
||||
({}, mock_env),
|
||||
({}, mock_env),
|
||||
swiftexceptions.ClientException('atest2')
|
||||
)
|
||||
|
||||
mock_get_template_contents.return_value = ({}, {
|
||||
'heat_template_version': '2016-04-30'
|
||||
})
|
||||
mock_process_multiple_environments_and_files.return_value = ({}, {})
|
||||
|
||||
# freeze time at datetime.datetime(2016, 9, 8, 16, 24, 24)
|
||||
mock_time.time.return_value = 1473366264
|
||||
|
||||
stack.deploy_stack(swift, heat,
|
||||
'overcloud', skip_deploy_identifier=True)
|
||||
|
||||
heat.stacks.create.assert_called_once_with(
|
||||
environment={},
|
||||
files={},
|
||||
stack_name='overcloud',
|
||||
template={'heat_template_version': '2016-04-30'},
|
||||
timeout_mins=240,
|
||||
)
|
||||
swift.delete_object.assert_called_once_with(
|
||||
"overcloud-swift-rings", "swift-rings.tar.gz")
|
||||
swift.copy_object.assert_called_once_with(
|
||||
"overcloud-swift-rings", "swift-rings.tar.gz",
|
||||
"overcloud-swift-rings/swift-rings.tar.gz-%d" % 1473366264)
|
||||
|
||||
@mock.patch('tripleo_common.utils.stack.time')
|
||||
@mock.patch('heatclient.common.template_utils.'
|
||||
'process_multiple_environments_and_files')
|
||||
@mock.patch('heatclient.common.template_utils.get_template_contents')
|
||||
def test_run_create_failed(
|
||||
self, mock_get_template_contents,
|
||||
mock_process_multiple_environments_and_files,
|
||||
mock_time):
|
||||
|
||||
# setup swift
|
||||
swift = mock.MagicMock(url="http://test.com")
|
||||
mock_env = yaml.safe_dump({
|
||||
'name': 'overcloud',
|
||||
'temp_environment': 'temp_environment',
|
||||
'template': 'template',
|
||||
'environments': [{u'path': u'environments/test.yaml'}],
|
||||
'parameter_defaults': {'random_existing_data': 'a_value'},
|
||||
}, default_flow_style=False)
|
||||
swift.get_object.side_effect = (
|
||||
({}, mock_env),
|
||||
({}, mock_env),
|
||||
swiftexceptions.ClientException('atest2')
|
||||
)
|
||||
|
||||
heat = mock.MagicMock()
|
||||
heat.stacks.get.return_value = None
|
||||
heat.stacks.create.side_effect = heat_exc.HTTPException("Oops")
|
||||
|
||||
mock_get_template_contents.return_value = ({}, {
|
||||
'heat_template_version': '2016-04-30'
|
||||
})
|
||||
mock_process_multiple_environments_and_files.return_value = ({}, {})
|
||||
|
||||
# freeze time at datetime.datetime(2016, 9, 8, 16, 24, 24)
|
||||
mock_time.time.return_value = 1473366264
|
||||
|
||||
self.assertRaises(RuntimeError, stack.deploy_stack,
|
||||
swift, heat, 'overcloud')
|
||||
|
||||
@mock.patch('tripleo_common.update.check_neutron_mechanism_drivers')
|
||||
@mock.patch('tripleo_common.utils.stack.time')
|
||||
@mock.patch('heatclient.common.template_utils.'
|
||||
'process_multiple_environments_and_files')
|
||||
@mock.patch('heatclient.common.template_utils.get_template_contents')
|
||||
def test_run_update_failed(
|
||||
self, mock_get_template_contents,
|
||||
mock_process_multiple_environments_and_files, mock_time,
|
||||
mock_check_neutron_drivers):
|
||||
|
||||
# setup swift
|
||||
swift = mock.MagicMock(url="http://test.com")
|
||||
mock_env = yaml.safe_dump({
|
||||
'name': 'overcloud',
|
||||
'temp_environment': 'temp_environment',
|
||||
'template': 'template',
|
||||
'environments': [{u'path': u'environments/test.yaml'}],
|
||||
'parameter_defaults': {'random_existing_data': 'a_value'},
|
||||
}, default_flow_style=False)
|
||||
swift.get_object.side_effect = (
|
||||
({}, mock_env),
|
||||
({}, mock_env),
|
||||
swiftexceptions.ClientException('atest2')
|
||||
)
|
||||
|
||||
heat = mock.MagicMock()
|
||||
heat.stacks.get.return_value = mock.Mock()
|
||||
heat.stacks.update.side_effect = heat_exc.HTTPException("Oops")
|
||||
|
||||
mock_get_template_contents.return_value = ({}, {
|
||||
'heat_template_version': '2016-04-30'
|
||||
})
|
||||
mock_process_multiple_environments_and_files.return_value = ({}, {})
|
||||
|
||||
# freeze time at datetime.datetime(2016, 9, 8, 16, 24, 24)
|
||||
mock_time.time.return_value = 1473366264
|
||||
mock_check_neutron_drivers.return_value = None
|
||||
|
||||
self.assertRaises(RuntimeError, stack.deploy_stack,
|
||||
swift, heat, 'overcloud')
|
||||
|
||||
def test_set_tls_parameters_no_ca_found(self):
|
||||
my_params = {}
|
||||
my_env = {'parameter_defaults': {}}
|
||||
stack.set_tls_parameters(
|
||||
parameters=my_params, env=my_env,
|
||||
local_ca_path='/tmp/my-unexistent-file.txt')
|
||||
self.assertEqual(my_params, {})
|
||||
|
||||
def test_set_tls_parameters_ca_found_no_camap_provided(self):
|
||||
my_params = {}
|
||||
my_env = {'parameter_defaults': {}}
|
||||
with tempfile.NamedTemporaryFile() as ca_file:
|
||||
# Write test data
|
||||
ca_file.write(b'FAKE CA CERT')
|
||||
ca_file.flush()
|
||||
|
||||
# Test
|
||||
stack.set_tls_parameters(
|
||||
parameters=my_params, env=my_env,
|
||||
local_ca_path=ca_file.name)
|
||||
self.assertIn('CAMap', my_params)
|
||||
self.assertIn('undercloud-ca', my_params['CAMap'])
|
||||
self.assertIn('content', my_params['CAMap']['undercloud-ca'])
|
||||
self.assertEqual(
|
||||
'FAKE CA CERT',
|
||||
my_params['CAMap']['undercloud-ca']['content'])
|
||||
|
||||
def test_set_tls_parameters_ca_found_camap_provided(self):
|
||||
my_params = {}
|
||||
my_env = {
|
||||
'parameter_defaults': {
|
||||
'CAMap': {'overcloud-ca': {'content': 'ANOTER FAKE CERT'}}}}
|
||||
with tempfile.NamedTemporaryFile() as ca_file:
|
||||
# Write test data
|
||||
ca_file.write(b'FAKE CA CERT')
|
||||
ca_file.flush()
|
||||
|
||||
# Test
|
||||
stack.set_tls_parameters(
|
||||
parameters=my_params, env=my_env,
|
||||
local_ca_path=ca_file.name)
|
||||
self.assertIn('CAMap', my_params)
|
||||
self.assertIn('undercloud-ca', my_params['CAMap'])
|
||||
self.assertIn('content', my_params['CAMap']['undercloud-ca'])
|
||||
self.assertEqual('FAKE CA CERT',
|
||||
my_params['CAMap']['undercloud-ca']['content'])
|
||||
self.assertIn('overcloud-ca', my_params['CAMap'])
|
||||
self.assertIn('content', my_params['CAMap']['overcloud-ca'])
|
||||
self.assertEqual('ANOTER FAKE CERT',
|
||||
my_params['CAMap']['overcloud-ca']['content'])
|
|
@ -1,120 +0,0 @@
|
|||
# Copyright (c) 2017 Red Hat, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from unittest import mock
|
||||
|
||||
from tripleo_common.tests import base
|
||||
from tripleo_common.utils import swift as swift_utils
|
||||
|
||||
|
||||
class SwiftTest(base.TestCase):
|
||||
def setUp(self):
|
||||
super(SwiftTest, self).setUp()
|
||||
self.container_name = 'overcloud'
|
||||
self.swiftclient = mock.MagicMock()
|
||||
self.swiftclient.get_account.return_value = ({}, [
|
||||
{'name': self.container_name},
|
||||
{'name': 'test'},
|
||||
])
|
||||
self.swiftclient.get_container.return_value = (
|
||||
{'x-container-meta-usage-tripleo': 'plan'}, [
|
||||
{'name': 'some-name.yaml'},
|
||||
{'name': 'some-other-name.yaml'},
|
||||
{'name': 'yet-some-other-name.yaml'},
|
||||
{'name': 'finally-another-name.yaml'}
|
||||
]
|
||||
)
|
||||
|
||||
def test_delete_container_success(self):
|
||||
swift_utils.empty_container(self.swiftclient, self.container_name)
|
||||
|
||||
mock_calls = [
|
||||
mock.call('overcloud', 'some-name.yaml'),
|
||||
mock.call('overcloud', 'some-other-name.yaml'),
|
||||
mock.call('overcloud', 'yet-some-other-name.yaml'),
|
||||
mock.call('overcloud', 'finally-another-name.yaml')
|
||||
]
|
||||
self.swiftclient.delete_object.assert_has_calls(
|
||||
mock_calls, any_order=True)
|
||||
|
||||
self.swiftclient.get_account.assert_called()
|
||||
self.swiftclient.get_container.assert_called_with(self.container_name)
|
||||
|
||||
def test_delete_container_not_found(self):
|
||||
self.assertRaises(ValueError,
|
||||
swift_utils.empty_container,
|
||||
self.swiftclient, 'idontexist')
|
||||
self.swiftclient.get_account.assert_called()
|
||||
self.swiftclient.get_container.assert_not_called()
|
||||
self.swiftclient.delete_object.assert_not_called()
|
||||
|
||||
def test_create_container(self):
|
||||
swift_utils.create_container(self.swiftclient, 'abc')
|
||||
self.swiftclient.put_container.assert_called()
|
||||
|
||||
def test_get_object_string(self):
|
||||
self.swiftclient.get_object.return_value = (1, str('foo'))
|
||||
val = swift_utils.get_object_string(self.swiftclient, 'foo', 'bar')
|
||||
self.assertEqual(str('foo'), val)
|
||||
|
||||
def test_get_object_string_from_bytes(self):
|
||||
self.swiftclient.get_object.return_value = (1, b'foo')
|
||||
val = swift_utils.get_object_string(self.swiftclient, 'foo', 'bar')
|
||||
self.assertEqual(str('foo'), val)
|
||||
|
||||
def test_put_object_string(self):
|
||||
put_mock = mock.MagicMock()
|
||||
self.swiftclient.put_object = put_mock
|
||||
swift_utils.put_object_string(self.swiftclient, 'foo', 'bar',
|
||||
str('foo'))
|
||||
put_mock.assert_called_once_with('foo', 'bar', str('foo'))
|
||||
|
||||
def test_put_object_string_from_bytes(self):
|
||||
put_mock = mock.MagicMock()
|
||||
self.swiftclient.put_object = put_mock
|
||||
swift_utils.put_object_string(self.swiftclient, 'foo', 'bar', b'foo')
|
||||
put_mock.assert_called_once_with('foo', 'bar', str('foo'))
|
||||
|
||||
@mock.patch('time.time')
|
||||
@mock.patch('uuid.uuid4')
|
||||
def _test_get_tempurl(self, secret, mock_uuid, mock_time):
|
||||
url = "http://swift:8080/v1/AUTH_test"
|
||||
swiftclient = mock.MagicMock(url=url)
|
||||
headers = {}
|
||||
if secret:
|
||||
headers['x-container-meta-temp-url-key'] = secret
|
||||
swiftclient.head_container.return_value = headers
|
||||
|
||||
mock_uuid.return_value = '1-2-3-4'
|
||||
mock_time.return_value = 1500000000
|
||||
|
||||
tempurl = swift_utils.get_temp_url(swiftclient,
|
||||
"container", "obj")
|
||||
|
||||
expected = "%s/container/obj?temp_url_sig=%s&temp_url_expires=%d" % (
|
||||
url, "ea8fdc57e2b2b1fbb7210bddd40029a7c8d5e2ed", 1500086400)
|
||||
self.assertEqual(expected, tempurl)
|
||||
|
||||
if not secret:
|
||||
swiftclient.put_container.assert_called_with(
|
||||
'container', {'X-Container-Meta-Temp-Url-Key': '1-2-3-4'})
|
||||
|
||||
def test_get_tempurl(self):
|
||||
# temp-url-key already set on the container
|
||||
self._test_get_tempurl('1-2-3-4')
|
||||
|
||||
def test_get_tempurl_no_key(self):
|
||||
# temp-url-key not yet set
|
||||
self._test_get_tempurl(None)
|
|
@ -1,614 +0,0 @@
|
|||
# Copyright 2016 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from unittest import mock
|
||||
|
||||
import jinja2
|
||||
import yaml
|
||||
|
||||
from heatclient import exc as heat_exc
|
||||
from swiftclient import exceptions as swiftexceptions
|
||||
|
||||
from tripleo_common import constants
|
||||
from tripleo_common.tests import base
|
||||
from tripleo_common.utils import template as template_utils
|
||||
|
||||
JINJA_SNIPPET = r"""
|
||||
# Jinja loop for Role in role_data.yaml
|
||||
{% for role in roles %}
|
||||
# Resources generated for {{role.name}} Role
|
||||
{{role.name}}ServiceChain:
|
||||
type: OS::TripleO::Services
|
||||
properties:
|
||||
Services:
|
||||
get_param: {{role.name}}Services
|
||||
ServiceNetMap: {get_attr: [ServiceNetMap, service_net_map]}
|
||||
EndpointMap: {get_attr: [EndpointMap, endpoint_map]}
|
||||
DefaultPasswords: {get_attr: [DefaultPasswords, passwords]}
|
||||
{% endfor %}"""
|
||||
|
||||
JINJA_RAISE_CONFIG = r"""
|
||||
# Jinja raise extension
|
||||
{{ raise('MESSAGE') }}
|
||||
"""
|
||||
|
||||
ROLE_DATA_YAML = r"""
|
||||
-
|
||||
name: CustomRole
|
||||
"""
|
||||
|
||||
NETWORK_DATA_YAML = r"""
|
||||
-
|
||||
name: InternalApi
|
||||
"""
|
||||
|
||||
EXPECTED_JINJA_RESULT = r"""
|
||||
# Jinja loop for Role in role_data.yaml
|
||||
|
||||
# Resources generated for CustomRole Role
|
||||
CustomRoleServiceChain:
|
||||
type: OS::TripleO::Services
|
||||
properties:
|
||||
Services:
|
||||
get_param: CustomRoleServices
|
||||
ServiceNetMap: {get_attr: [ServiceNetMap, service_net_map]}
|
||||
EndpointMap: {get_attr: [EndpointMap, endpoint_map]}
|
||||
DefaultPasswords: {get_attr: [DefaultPasswords, passwords]}
|
||||
"""
|
||||
|
||||
JINJA_SNIPPET_CONFIG = r"""
|
||||
outputs:
|
||||
OS::stack_id:
|
||||
description: The software config which runs puppet on the {{role}} role
|
||||
value: {get_resource: {{role}}PuppetConfigImpl}"""
|
||||
|
||||
J2_EXCLUDES = r"""
|
||||
name:
|
||||
- puppet/controller-role.yaml
|
||||
"""
|
||||
|
||||
J2_EXCLUDES_EMPTY_LIST = r"""
|
||||
name:
|
||||
"""
|
||||
|
||||
J2_EXCLUDES_EMPTY_FILE = r"""
|
||||
"""
|
||||
|
||||
ROLE_DATA_ENABLE_NETWORKS = r"""
|
||||
- name: RoleWithNetworks
|
||||
networks:
|
||||
- InternalApi
|
||||
"""
|
||||
|
||||
JINJA_SNIPPET_ROLE_NETWORKS = r"""
|
||||
{%- for network in networks %}
|
||||
{%- if network.name in role.networks%}
|
||||
{{network.name}}Port:
|
||||
type: {{role.name}}::{{network.name}}::Port
|
||||
{%- endif %}
|
||||
{% endfor %}
|
||||
"""
|
||||
|
||||
EXPECTED_JINJA_RESULT_ROLE_NETWORKS = r"""
|
||||
InternalApiPort:
|
||||
type: RoleWithNetworks::InternalApi::Port
|
||||
"""
|
||||
|
||||
|
||||
class J2SwiftLoaderTest(base.TestCase):
|
||||
@staticmethod
|
||||
def _setup_swift():
|
||||
def return_multiple_files(*args):
|
||||
if args[1] == 'bar/foo.yaml':
|
||||
return ['', 'I am foo']
|
||||
raise swiftexceptions.ClientException('not found')
|
||||
swift = mock.MagicMock()
|
||||
swift.get_object = mock.MagicMock(side_effect=return_multiple_files)
|
||||
return swift
|
||||
|
||||
def test_include_absolute_path(self):
|
||||
j2_loader = template_utils.J2SwiftLoader(self._setup_swift(), None, '')
|
||||
template = jinja2.Environment(loader=j2_loader).from_string(
|
||||
r'''
|
||||
Included this:
|
||||
{% include 'bar/foo.yaml' %}
|
||||
''')
|
||||
self.assertEqual(
|
||||
template.render(),
|
||||
'''
|
||||
Included this:
|
||||
I am foo
|
||||
''')
|
||||
|
||||
def test_include_search_path(self):
|
||||
j2_loader = template_utils.J2SwiftLoader(self._setup_swift(),
|
||||
None, 'bar')
|
||||
template = jinja2.Environment(loader=j2_loader).from_string(
|
||||
r'''
|
||||
Included this:
|
||||
{% include 'foo.yaml' %}
|
||||
''')
|
||||
self.assertEqual(
|
||||
template.render(),
|
||||
'''
|
||||
Included this:
|
||||
I am foo
|
||||
''')
|
||||
|
||||
def test_include_not_found(self):
|
||||
j2_loader = template_utils.J2SwiftLoader(self._setup_swift(), None, '')
|
||||
template = jinja2.Environment(loader=j2_loader).from_string(
|
||||
r'''
|
||||
Included this:
|
||||
{% include 'bar.yaml' %}
|
||||
''')
|
||||
self.assertRaises(
|
||||
jinja2.exceptions.TemplateNotFound,
|
||||
template.render)
|
||||
|
||||
def test_include_invalid_path(self):
|
||||
j2_loader = template_utils.J2SwiftLoader(self._setup_swift(),
|
||||
'bar', '')
|
||||
template = jinja2.Environment(loader=j2_loader).from_string(
|
||||
r'''
|
||||
Included this:
|
||||
{% include '../foo.yaml' %}
|
||||
''')
|
||||
self.assertRaises(
|
||||
jinja2.exceptions.TemplateNotFound,
|
||||
template.render)
|
||||
|
||||
|
||||
class ProcessTemplatesTest(base.TestCase):
|
||||
|
||||
@mock.patch('heatclient.common.template_utils.'
|
||||
'process_multiple_environments_and_files')
|
||||
@mock.patch('heatclient.common.template_utils.get_template_contents')
|
||||
def test_process_template(self, mock_get_template_contents,
|
||||
mock_process_multiple_environments_and_files):
|
||||
|
||||
swift = mock.MagicMock(url="http://test.com")
|
||||
heat = mock.MagicMock()
|
||||
mock_env = yaml.safe_dump({
|
||||
'temp_environment': 'temp_environment',
|
||||
'template': 'template',
|
||||
'environments': [{u'path': u'environments/test.yaml'}]
|
||||
}, default_flow_style=False)
|
||||
swift.get_object.side_effect = (
|
||||
({}, mock_env),
|
||||
swiftexceptions.ClientException('atest2')
|
||||
)
|
||||
mock_get_template_contents.return_value = ({}, {
|
||||
'heat_template_version': '2016-04-30'
|
||||
})
|
||||
mock_process_multiple_environments_and_files.return_value = ({}, {})
|
||||
|
||||
# Test
|
||||
processed_data = template_utils.process_templates(
|
||||
swift, heat, constants.DEFAULT_CONTAINER_NAME)
|
||||
|
||||
# Verify the values we get out
|
||||
self.assertEqual(processed_data, {
|
||||
'environment': {},
|
||||
'files': {},
|
||||
'stack_name': constants.DEFAULT_CONTAINER_NAME,
|
||||
'template': {
|
||||
'heat_template_version': '2016-04-30'
|
||||
}
|
||||
})
|
||||
|
||||
def _custom_roles_mock_objclient(self, snippet_name, snippet_content,
|
||||
role_data=None):
|
||||
|
||||
def return_multiple_files(*args):
|
||||
if args[1] == constants.OVERCLOUD_J2_NAME:
|
||||
return ['', JINJA_SNIPPET]
|
||||
if args[1] == snippet_name:
|
||||
return ['', snippet_content]
|
||||
if args[1] == constants.OVERCLOUD_J2_EXCLUDES:
|
||||
return ['', J2_EXCLUDES]
|
||||
if args[1] == constants.OVERCLOUD_J2_ROLES_NAME:
|
||||
return ['', role_data or ROLE_DATA_YAML]
|
||||
if args[1] == constants.OVERCLOUD_J2_NETWORKS_NAME:
|
||||
return ['', NETWORK_DATA_YAML]
|
||||
|
||||
def return_container_files(*args):
|
||||
return ('headers', [
|
||||
{'name': constants.OVERCLOUD_J2_NAME},
|
||||
{'name': snippet_name},
|
||||
{'name': constants.OVERCLOUD_J2_ROLES_NAME},
|
||||
{'name': constants.OVERCLOUD_J2_NETWORKS_NAME}])
|
||||
|
||||
# setup swift
|
||||
swift = mock.MagicMock()
|
||||
swift.get_object = mock.MagicMock(side_effect=return_multiple_files)
|
||||
swift.get_container = mock.MagicMock(
|
||||
side_effect=return_container_files)
|
||||
return swift
|
||||
|
||||
@mock.patch('tripleo_common.utils.template.heat_resource_exists')
|
||||
def test_process_custom_roles(self, resource_exists_mock):
|
||||
resource_exists_mock.return_value = False
|
||||
swift = self._custom_roles_mock_objclient(
|
||||
'foo.j2.yaml', JINJA_SNIPPET)
|
||||
heat = mock.MagicMock()
|
||||
|
||||
# Test
|
||||
template_utils.process_custom_roles(
|
||||
swift, heat, constants.DEFAULT_CONTAINER_NAME)
|
||||
|
||||
get_object_mock_calls = [
|
||||
mock.call('overcloud', constants.OVERCLOUD_J2_NAME),
|
||||
mock.call('overcloud', constants.OVERCLOUD_J2_ROLES_NAME),
|
||||
mock.call('overcloud', 'foo.j2.yaml'),
|
||||
]
|
||||
swift.get_object.assert_has_calls(
|
||||
get_object_mock_calls, any_order=True)
|
||||
|
||||
put_object_mock_calls = [
|
||||
mock.call(constants.DEFAULT_CONTAINER_NAME,
|
||||
constants.OVERCLOUD_YAML_NAME,
|
||||
EXPECTED_JINJA_RESULT),
|
||||
mock.call(constants.DEFAULT_CONTAINER_NAME,
|
||||
'foo.yaml',
|
||||
EXPECTED_JINJA_RESULT),
|
||||
]
|
||||
swift.put_object.assert_has_calls(
|
||||
put_object_mock_calls, any_order=True)
|
||||
|
||||
@mock.patch('tripleo_common.utils.template.heat_resource_exists')
|
||||
def test_custom_roles_networks(self, resource_exists_mock):
|
||||
heat = mock.MagicMock()
|
||||
resource_exists_mock.return_value = False
|
||||
swift = self._custom_roles_mock_objclient(
|
||||
'role-networks.role.j2.yaml', JINJA_SNIPPET_ROLE_NETWORKS,
|
||||
ROLE_DATA_ENABLE_NETWORKS)
|
||||
|
||||
# Test
|
||||
template_utils.process_custom_roles(
|
||||
swift, heat, constants.DEFAULT_CONTAINER_NAME)
|
||||
|
||||
expected = EXPECTED_JINJA_RESULT.replace(
|
||||
'CustomRole', 'RoleWithNetworks')
|
||||
put_object_mock_call = mock.call(
|
||||
constants.DEFAULT_CONTAINER_NAME,
|
||||
'overcloud.yaml',
|
||||
expected)
|
||||
self.assertEqual(swift.put_object.call_args_list[0],
|
||||
put_object_mock_call)
|
||||
|
||||
put_object_mock_call = mock.call(
|
||||
constants.DEFAULT_CONTAINER_NAME,
|
||||
"rolewithnetworks-role-networks.yaml",
|
||||
EXPECTED_JINJA_RESULT_ROLE_NETWORKS)
|
||||
self.assertEqual(put_object_mock_call,
|
||||
swift.put_object.call_args_list[1])
|
||||
|
||||
def test_j2_render_and_put(self):
|
||||
|
||||
# setup swift
|
||||
swift = mock.MagicMock()
|
||||
swift.get_object = mock.MagicMock()
|
||||
swift.get_container = mock.MagicMock()
|
||||
|
||||
# Test
|
||||
template_utils.j2_render_and_put(
|
||||
swift,
|
||||
JINJA_SNIPPET_CONFIG, {'role': 'CustomRole'},
|
||||
'customrole-config.yaml',
|
||||
constants.DEFAULT_CONTAINER_NAME)
|
||||
|
||||
result = swift.put_object._mock_mock_calls[0]
|
||||
|
||||
self.assertTrue("CustomRole" in str(result))
|
||||
|
||||
def test_j2_render_and_put_include(self):
|
||||
|
||||
def return_multiple_files(*args):
|
||||
if args[1] == 'foo.yaml':
|
||||
return ['', JINJA_SNIPPET_CONFIG]
|
||||
|
||||
def return_container_files(*args):
|
||||
return ('headers', [{'name': 'foo.yaml'}])
|
||||
|
||||
# setup swift
|
||||
swift = mock.MagicMock()
|
||||
swift.get_object = mock.MagicMock(side_effect=return_multiple_files)
|
||||
swift.get_container = mock.MagicMock(
|
||||
side_effect=return_container_files)
|
||||
|
||||
# Test
|
||||
template_utils.j2_render_and_put(
|
||||
swift,
|
||||
r"{% include 'foo.yaml' %}",
|
||||
{'role': 'CustomRole'},
|
||||
'customrole-config.yaml',
|
||||
constants.DEFAULT_CONTAINER_NAME)
|
||||
|
||||
result = swift.put_object._mock_mock_calls[0]
|
||||
|
||||
self.assertTrue("CustomRole" in str(result))
|
||||
|
||||
def test_j2_render_and_put_include_relative(self):
|
||||
|
||||
def return_multiple_files(*args):
|
||||
if args[1] == 'bar/foo.yaml':
|
||||
return ['', JINJA_SNIPPET_CONFIG]
|
||||
|
||||
def return_container_files(*args):
|
||||
return ('headers', [{'name': 'bar/foo.yaml'}])
|
||||
|
||||
# setup swift
|
||||
swift = mock.MagicMock()
|
||||
swift.get_object = mock.MagicMock(side_effect=return_multiple_files)
|
||||
swift.get_container = mock.MagicMock(
|
||||
side_effect=return_container_files)
|
||||
|
||||
# Test
|
||||
template_utils.j2_render_and_put(
|
||||
swift,
|
||||
r"{% include 'bar/foo.yaml' %}",
|
||||
{'role': 'CustomRole'},
|
||||
'customrole-config.yaml',
|
||||
constants.DEFAULT_CONTAINER_NAME)
|
||||
|
||||
result = swift.put_object._mock_mock_calls[0]
|
||||
|
||||
self.assertTrue("CustomRole" in str(result))
|
||||
|
||||
def test_get_j2_excludes_file(self):
|
||||
|
||||
swift = mock.MagicMock()
|
||||
|
||||
def return_multiple_files(*args):
|
||||
if args[1] == constants.OVERCLOUD_J2_EXCLUDES:
|
||||
return ['', J2_EXCLUDES]
|
||||
swift.get_object = mock.MagicMock(side_effect=return_multiple_files)
|
||||
# Test - J2 exclude file with valid templates
|
||||
self.assertTrue({'name': ['puppet/controller-role.yaml']} ==
|
||||
template_utils.get_j2_excludes_file(swift))
|
||||
|
||||
def return_multiple_files2(*args):
|
||||
if args[1] == constants.OVERCLOUD_J2_EXCLUDES:
|
||||
return ['', J2_EXCLUDES_EMPTY_LIST]
|
||||
swift.get_object = mock.MagicMock(side_effect=return_multiple_files2)
|
||||
# Test - J2 exclude file with no template to exlude
|
||||
self.assertTrue(
|
||||
{'name': []} == template_utils.get_j2_excludes_file(swift))
|
||||
|
||||
def return_multiple_files3(*args):
|
||||
if args[1] == constants.OVERCLOUD_J2_EXCLUDES:
|
||||
return ['', J2_EXCLUDES_EMPTY_FILE]
|
||||
swift.get_object = mock.MagicMock(side_effect=return_multiple_files3)
|
||||
# Test - J2 exclude file empty
|
||||
self.assertTrue(
|
||||
{'name': []} == template_utils.get_j2_excludes_file(swift))
|
||||
|
||||
def test_j2_render_and_put_raise_in_template(self):
|
||||
|
||||
# setup swift
|
||||
swift = mock.MagicMock()
|
||||
swift.get_object = mock.MagicMock()
|
||||
swift.get_container = mock.MagicMock()
|
||||
|
||||
# Test
|
||||
args = (swift, JINJA_RAISE_CONFIG, {'role': 'CustomRole'},
|
||||
'customrole-config.yaml', constants.DEFAULT_CONTAINER_NAME)
|
||||
self.assertRaises(RuntimeError,
|
||||
template_utils.j2_render_and_put,
|
||||
*args)
|
||||
|
||||
def test_heat_resource_exists(self):
|
||||
heat_client = mock.MagicMock()
|
||||
stack = mock.MagicMock(stack_name='overcloud')
|
||||
heat_client.resources.get.return_value = mock.MagicMock(
|
||||
links=[{'rel': 'stack',
|
||||
'href': 'http://192.0.2.1:8004/v1/'
|
||||
'a959ac7d6a4a475daf2428df315c41ef/'
|
||||
'stacks/overcloud/123'}],
|
||||
logical_resource_id='logical_id',
|
||||
physical_resource_id='resource_id',
|
||||
resource_type='OS::Heat::ResourceGroup',
|
||||
resource_name='InternalApiNetwork'
|
||||
)
|
||||
self.assertTrue(
|
||||
template_utils.heat_resource_exists(
|
||||
heat_client, stack, 'Networks', 'InternalNetwork'))
|
||||
|
||||
def test_no_heat_resource_exists(self):
|
||||
heat_client = mock.MagicMock()
|
||||
stack = mock.MagicMock(stack_name='overcloud')
|
||||
|
||||
def return_not_found(*args):
|
||||
raise heat_exc.HTTPNotFound()
|
||||
|
||||
heat_client.resources.get.side_effect = return_not_found
|
||||
self.assertFalse(
|
||||
template_utils.heat_resource_exists(
|
||||
heat_client, stack, 'Networks', 'InternalNetwork'))
|
||||
|
||||
@mock.patch('tripleo_common.utils.template.heat_resource_exists')
|
||||
@mock.patch('tripleo_common.utils.template.j2_render_and_put')
|
||||
def test_legacy_api_network_exists(self, j2_mock,
|
||||
resource_exists_mock):
|
||||
resource_exists_mock.return_value = True
|
||||
swift = self._custom_roles_mock_objclient(
|
||||
'role-networks.role.j2.yaml', JINJA_SNIPPET_ROLE_NETWORKS,
|
||||
ROLE_DATA_ENABLE_NETWORKS)
|
||||
heat = mock.MagicMock()
|
||||
|
||||
# Test
|
||||
template_utils.process_custom_roles(swift, heat)
|
||||
expected_j2_template = swift.get_object(
|
||||
constants.DEFAULT_CONTAINER_NAME, 'network_data.yaml')[1]
|
||||
expected_j2_data = {'roles': [{'name': 'CustomRole'}],
|
||||
'networks': [{'name': 'InternalApi',
|
||||
'compat_name': 'Internal'}]
|
||||
}
|
||||
assert j2_mock.called_with(expected_j2_template, expected_j2_data,
|
||||
'foo.yaml')
|
||||
|
||||
@mock.patch('tripleo_common.utils.template.heat_resource_exists')
|
||||
@mock.patch('tripleo_common.utils.template.j2_render_and_put')
|
||||
def test_no_legacy_api_network_exists(self, j2_mock,
|
||||
resource_exists_mock):
|
||||
resource_exists_mock.return_value = False
|
||||
swift = self._custom_roles_mock_objclient(
|
||||
'role-networks.role.j2.yaml', JINJA_SNIPPET_ROLE_NETWORKS,
|
||||
ROLE_DATA_ENABLE_NETWORKS)
|
||||
heat = mock.MagicMock()
|
||||
|
||||
# Test
|
||||
template_utils.process_custom_roles(swift, heat)
|
||||
expected_j2_template = swift.get_object(
|
||||
constants.DEFAULT_CONTAINER_NAME, 'network_data.yaml')[1]
|
||||
expected_j2_data = {'roles': [{'name': 'CustomRole'}],
|
||||
'networks': [{'name': 'InternalApi'}]
|
||||
}
|
||||
assert j2_mock.called_with(expected_j2_template, expected_j2_data,
|
||||
'foo.yaml')
|
||||
|
||||
def test_prune_unused_services(self):
|
||||
resource_registry = {
|
||||
'OS::TripleO::Services::Foo': 'bar.yaml',
|
||||
'OS::TripleO::Services::Baz': 'OS::Heat::None',
|
||||
}
|
||||
swift = mock.MagicMock()
|
||||
mock_put = mock.MagicMock()
|
||||
swift.put_object = mock_put
|
||||
test_role_data = [{
|
||||
'name': 'Controller',
|
||||
'ServicesDefault': [
|
||||
'OS::TripleO::Services::Foo',
|
||||
'OS::TripleO::Services::Baz']
|
||||
}]
|
||||
|
||||
test_role_data_result = [{
|
||||
'name': 'Controller',
|
||||
'ServicesDefault': [
|
||||
'OS::TripleO::Services::Foo']
|
||||
}]
|
||||
|
||||
template_utils.prune_unused_services(swift, test_role_data,
|
||||
resource_registry, 'overcloud')
|
||||
|
||||
data = yaml.safe_dump(test_role_data_result, default_flow_style=False)
|
||||
mock_put.assert_called_once_with('overcloud', 'roles_data.yaml', data)
|
||||
|
||||
def test_prune_unused_services_no_removal(self):
|
||||
resource_registry = {
|
||||
'OS::TripleO::Services::Foo': 'bar.yaml',
|
||||
'OS::TripleO::Services::Baz': 'biz.yaml',
|
||||
}
|
||||
swift = mock.MagicMock()
|
||||
mock_put = mock.MagicMock()
|
||||
swift.put_object = mock_put
|
||||
test_role_data = [{
|
||||
'name': 'Controller',
|
||||
'ServicesDefault': [
|
||||
'OS::TripleO::Services::Foo',
|
||||
'OS::TripleO::Services::Baz']
|
||||
}]
|
||||
|
||||
template_utils.prune_unused_services(swift, test_role_data,
|
||||
resource_registry, 'overcloud')
|
||||
mock_put.assert_not_called()
|
||||
|
||||
@mock.patch.object(template_utils, 'LOG', autospec=True)
|
||||
def test__set_tags_based_on_role_name(self, mock_log):
|
||||
role_data = [
|
||||
{'name': 'CephStorageFoo'},
|
||||
{'name': 'CephStorageBar', 'tags': ['ceph', 'storage']},
|
||||
{'name': 'ObjectStorageFoo'},
|
||||
{'name': 'ObjectStorageBar', 'tags': ['storage']},
|
||||
{'name': 'BlockStorageFoo'},
|
||||
{'name': 'BlockStorageFoo', 'tags': ['storage']},
|
||||
{'name': 'ComputeOvsDpdkFoo'},
|
||||
{'name': 'ComputeOvsDpdkBar', 'tags': ['ovsdpdk']},
|
||||
]
|
||||
template_utils._set_tags_based_on_role_name(role_data)
|
||||
expected = [
|
||||
{'name': 'CephStorageFoo', 'tags': ['ceph', 'storage']},
|
||||
{'name': 'CephStorageBar', 'tags': ['ceph', 'storage']},
|
||||
{'name': 'ObjectStorageFoo', 'tags': ['storage']},
|
||||
{'name': 'ObjectStorageBar', 'tags': ['storage']},
|
||||
{'name': 'BlockStorageFoo', 'tags': ['storage']},
|
||||
{'name': 'BlockStorageFoo', 'tags': ['storage']},
|
||||
{'name': 'ComputeOvsDpdkFoo', 'tags': ['compute', 'ovsdpdk']},
|
||||
{'name': 'ComputeOvsDpdkBar', 'tags': ['ovsdpdk', 'compute']},
|
||||
]
|
||||
self.assertEqual(expected, role_data)
|
||||
mock_log.assert_has_calls([
|
||||
mock.call.warning(
|
||||
"DEPRECATED: Role '%s' without the 'ceph' tag "
|
||||
"detected, the tag was added automatically. Please add the "
|
||||
"'ceph' tag in roles data. The function to automatically "
|
||||
"add tags based on role name will be removed in the next "
|
||||
"release.", "CephStorageFoo"),
|
||||
mock.call.warning(
|
||||
"DEPRECATED: Role '%s' without the 'storage' "
|
||||
"tag detected, the tag was added automatically. Please add "
|
||||
"the 'storage' tag in roles data. The function to "
|
||||
"automatically add tags based on role name will be removed in "
|
||||
"the next release.", "CephStorageFoo"),
|
||||
mock.call.warning(
|
||||
"DEPRECATED: Role '%s' without the 'storage' "
|
||||
"tag detected, the tag was added automatically. Please add "
|
||||
"the 'storage' tag in roles data. The function to "
|
||||
"automatically add tags based on role name will be "
|
||||
"removed in the next release.", "ObjectStorageFoo"),
|
||||
mock.call.warning(
|
||||
"DEPRECATED: Role '%s' without the 'storage' tag "
|
||||
"detected, the tag was added automatically. Please add "
|
||||
"the 'storage' tag in roles data. The function to "
|
||||
"automatically add tags based on role name will be removed "
|
||||
"in the next release.", "BlockStorageFoo"),
|
||||
mock.call.warning(
|
||||
"DEPRECATED: Role '%s' without the 'compute' "
|
||||
"tag detected, the tag was added automatically. Please add "
|
||||
"the 'compute' tag in roles data. The function to "
|
||||
"automatically add tags based on role name will be removed in "
|
||||
"the next release.", "ComputeOvsDpdkFoo"),
|
||||
mock.call.warning(
|
||||
"DEPRECATED: Role '%s' without the 'ovsdpdk' "
|
||||
"tag detected, the tag was added automatically. Please add "
|
||||
"the 'ovsdpdk' tag in roles data. The function to "
|
||||
"automatically add tags based on role name will be removed in "
|
||||
"the next release.", "ComputeOvsDpdkFoo"),
|
||||
mock.call.warning(
|
||||
"DEPRECATED: Role '%s' without the 'compute' "
|
||||
"tag detected, the tag was added automatically. Please add "
|
||||
"the 'compute' tag in roles data. The function to "
|
||||
"automatically add tags based on role name will be removed in "
|
||||
"the next release.", "ComputeOvsDpdkBar"),
|
||||
])
|
||||
|
||||
|
||||
class UploadTemplatesTest(base.TestCase):
|
||||
|
||||
@mock.patch('tempfile.NamedTemporaryFile')
|
||||
@mock.patch('tripleo_common.utils.tarball.'
|
||||
'tarball_extract_to_swift_container')
|
||||
@mock.patch('tripleo_common.utils.tarball.create_tarball')
|
||||
def test_upload_templates(self, mock_create_tar,
|
||||
mock_extract_tar, tempfile):
|
||||
tempfile.return_value.__enter__.return_value.name = "test"
|
||||
|
||||
swift = mock.MagicMock()
|
||||
template_utils.upload_templates_as_tarball(
|
||||
swift, container='tar-container')
|
||||
mock_create_tar.assert_called_once_with(
|
||||
constants.DEFAULT_TEMPLATES_PATH, 'test')
|
||||
mock_extract_tar.assert_called_once_with(
|
||||
swift, 'test', 'tar-container')
|
|
@ -14,12 +14,10 @@
|
|||
# under the License.
|
||||
|
||||
from six import iteritems
|
||||
import yaml
|
||||
|
||||
from heatclient.common import template_utils
|
||||
|
||||
from tripleo_common import constants
|
||||
from tripleo_common.utils import swift as swiftutils
|
||||
|
||||
|
||||
def add_breakpoints_cleanup_into_env(env):
|
||||
|
@ -75,23 +73,8 @@ def check_neutron_mechanism_drivers(env, stack, plan_client, container):
|
|||
if configured_drivers:
|
||||
new_driver = get_exclusive_neutron_driver(configured_drivers)
|
||||
else:
|
||||
try:
|
||||
# TODO(beagles): we need to look for a better way to
|
||||
# get the current template default value. This is fragile
|
||||
# with respect to changing filenames, etc.
|
||||
ml2_tmpl = swiftutils.get_object_string(
|
||||
plan_client,
|
||||
container,
|
||||
'puppet/services/neutron-plugin-ml2.yaml')
|
||||
ml2_def = yaml.safe_load(ml2_tmpl)
|
||||
default_drivers = ml2_def.get(
|
||||
'parameters', {}).get(driver_key, {}).get('default')
|
||||
new_driver = get_exclusive_neutron_driver(default_drivers)
|
||||
except Exception:
|
||||
# NOTE: we restructured t-h-t in stein, if this happens we
|
||||
# assume neutron-plugin-ml2.yaml has been moved and
|
||||
# thus set the most recent default (OVN)
|
||||
new_driver = 'ovn'
|
||||
# thus set the most recent default (OVN)
|
||||
new_driver = 'ovn'
|
||||
if current_driver and new_driver and current_driver != new_driver:
|
||||
msg = ("Unable to switch from {} to {} neutron "
|
||||
"mechanism drivers on upgrade. Please consult the "
|
||||
|
|
|
@ -14,224 +14,18 @@
|
|||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import requests
|
||||
import sys
|
||||
import tempfile
|
||||
import yaml
|
||||
|
||||
from heatclient.common import template_utils
|
||||
from heatclient import exc as heat_exc
|
||||
import six
|
||||
from swiftclient import exceptions as swiftexceptions
|
||||
|
||||
from tripleo_common import constants
|
||||
from tripleo_common.image import kolla_builder
|
||||
from tripleo_common.utils import passwords as password_utils
|
||||
from tripleo_common.utils import swift as swiftutils
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def update_in_env(swift, env, key, value='', delete_key=False):
|
||||
"""Update plan environment."""
|
||||
if delete_key:
|
||||
try:
|
||||
del env[key]
|
||||
except KeyError:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
env[key].update(value)
|
||||
except (KeyError, AttributeError):
|
||||
env[key] = value
|
||||
|
||||
put_env(swift, env)
|
||||
return env
|
||||
|
||||
|
||||
def get_env(swift, name):
|
||||
"""Get plan environment from Swift and convert it to a dictionary."""
|
||||
env = yaml.safe_load(
|
||||
swiftutils.get_object_string(swift, name, constants.PLAN_ENVIRONMENT)
|
||||
)
|
||||
|
||||
# Ensure the name is correct, as it will be used to update the
|
||||
# container later
|
||||
if env.get('name') != name:
|
||||
env['name'] = name
|
||||
|
||||
return env
|
||||
|
||||
|
||||
def put_env(swift, env):
|
||||
"""Convert given environment to yaml and upload it to Swift."""
|
||||
swiftutils.put_object_string(
|
||||
swift,
|
||||
env['name'],
|
||||
constants.PLAN_ENVIRONMENT,
|
||||
yaml.safe_dump(env, default_flow_style=False)
|
||||
)
|
||||
|
||||
|
||||
def get_user_env(swift, container_name):
|
||||
"""Get user environment from Swift convert it to a dictionary."""
|
||||
return yaml.safe_load(
|
||||
swiftutils.get_object_string(swift, container_name,
|
||||
constants.USER_ENVIRONMENT))
|
||||
|
||||
|
||||
def put_user_env(swift, container_name, env):
|
||||
"""Convert given user environment to yaml and upload it to Swift."""
|
||||
swiftutils.put_object_string(
|
||||
swift,
|
||||
container_name,
|
||||
constants.USER_ENVIRONMENT,
|
||||
yaml.safe_dump(env, default_flow_style=False)
|
||||
)
|
||||
|
||||
|
||||
def write_json_temp_file(data):
|
||||
"""Writes the provided data to a json file and return the filename"""
|
||||
with tempfile.NamedTemporaryFile(delete=False, mode='wb') as temp_file:
|
||||
temp_file.write(json.dumps(data).encode('utf-8'))
|
||||
return temp_file.name
|
||||
|
||||
|
||||
def object_request(method, url, token):
|
||||
"""Fetch an object with the provided token"""
|
||||
response = requests.request(
|
||||
method, url, headers={'X-Auth-Token': token})
|
||||
response.raise_for_status()
|
||||
return response.content
|
||||
|
||||
|
||||
def process_environments_and_files(swift, env_paths):
|
||||
"""Wrap process_multiple_environments_and_files with swift object fetch"""
|
||||
def _env_path_is_object(env_path):
|
||||
return env_path.startswith(swift.url)
|
||||
|
||||
# XXX this should belong in heatclient, but for the time being and backport
|
||||
# purposes, let's do that here for now.
|
||||
_cache = {}
|
||||
|
||||
def _object_request(method, url, token=swift.token):
|
||||
if url not in _cache:
|
||||
_cache[url] = object_request(method, url, token)
|
||||
return _cache[url]
|
||||
|
||||
return template_utils.process_multiple_environments_and_files(
|
||||
env_paths=env_paths,
|
||||
env_path_is_object=_env_path_is_object,
|
||||
object_request=_object_request)
|
||||
|
||||
|
||||
def get_template_contents(swift, template_object):
|
||||
"""Wrap get_template_contents with swift object fetch"""
|
||||
def _object_request(method, url, token=swift.token):
|
||||
return object_request(method, url, token)
|
||||
|
||||
return template_utils.get_template_contents(
|
||||
template_object=template_object,
|
||||
object_request=_object_request)
|
||||
|
||||
|
||||
def build_env_paths(swift, container, plan_env):
|
||||
environments = plan_env.get('environments', [])
|
||||
env_paths = []
|
||||
temp_env_paths = []
|
||||
|
||||
for env in environments:
|
||||
if env.get('path'):
|
||||
env_paths.append(os.path.join(swift.url, container, env['path']))
|
||||
elif env.get('data'):
|
||||
env_file = write_json_temp_file(env['data'])
|
||||
temp_env_paths.append(env_file)
|
||||
|
||||
# create a dict to hold all user set params and merge
|
||||
# them in the appropriate order
|
||||
merged_params = {}
|
||||
# merge generated passwords into params first
|
||||
passwords = plan_env.get('passwords', {})
|
||||
merged_params.update(passwords)
|
||||
|
||||
# derived parameters are merged before 'parameter defaults'
|
||||
# so that user-specified values can override the derived values.
|
||||
derived_params = plan_env.get('derived_parameters', {})
|
||||
merged_params.update(derived_params)
|
||||
|
||||
# handle user set parameter values next in case a user has set
|
||||
# a new value for a password parameter
|
||||
params = plan_env.get('parameter_defaults', {})
|
||||
merged_params = template_utils.deep_update(merged_params, params)
|
||||
|
||||
if merged_params:
|
||||
env_temp_file = write_json_temp_file(
|
||||
{'parameter_defaults': merged_params})
|
||||
temp_env_paths.append(env_temp_file)
|
||||
|
||||
registry = plan_env.get('resource_registry', {})
|
||||
if registry:
|
||||
env_temp_file = write_json_temp_file(
|
||||
{'resource_registry': registry})
|
||||
temp_env_paths.append(env_temp_file)
|
||||
|
||||
env_paths.extend(temp_env_paths)
|
||||
return env_paths, temp_env_paths
|
||||
|
||||
|
||||
def create_plan_container(swift, plan_name):
|
||||
if not pattern_validator(constants.PLAN_NAME_PATTERN, plan_name):
|
||||
message = ("The plan name must "
|
||||
"only contain letters, numbers or dashes")
|
||||
raise RuntimeError(message)
|
||||
|
||||
# checks to see if a container with that name exists
|
||||
if plan_name in [container["name"] for container in
|
||||
swift.get_account()[1]]:
|
||||
message = ("A container with the name %s already "
|
||||
"exists.") % plan_name
|
||||
raise RuntimeError(message)
|
||||
default_container_headers = {constants.TRIPLEO_META_USAGE_KEY: 'plan'}
|
||||
swift.put_container(plan_name, headers=default_container_headers)
|
||||
|
||||
|
||||
def update_plan_environment(swift, environments,
|
||||
container=constants.DEFAULT_CONTAINER_NAME):
|
||||
env = get_env(swift, container)
|
||||
for k, v in environments.items():
|
||||
found = False
|
||||
if {'path': k} in env['environments']:
|
||||
found = True
|
||||
if v:
|
||||
if not found:
|
||||
env['environments'].append({'path': k})
|
||||
else:
|
||||
if found:
|
||||
env['environments'].remove({'path': k})
|
||||
|
||||
put_env(swift, env)
|
||||
return env
|
||||
|
||||
|
||||
def get_role_data(swift, container=constants.DEFAULT_CONTAINER_NAME):
|
||||
try:
|
||||
j2_role_file = swiftutils.get_object_string(
|
||||
swift,
|
||||
container,
|
||||
constants.OVERCLOUD_J2_ROLES_NAME)
|
||||
role_data = yaml.safe_load(j2_role_file)
|
||||
except swiftexceptions.ClientException:
|
||||
LOG.info("No %s file found, not filtering container images by role",
|
||||
constants.OVERCLOUD_J2_ROLES_NAME)
|
||||
role_data = None
|
||||
return role_data
|
||||
|
||||
|
||||
def default_image_params():
|
||||
|
||||
|
@ -253,65 +47,6 @@ def default_image_params():
|
|||
return params
|
||||
|
||||
|
||||
def update_plan_environment_with_image_parameters(
|
||||
swift, container=constants.DEFAULT_CONTAINER_NAME,
|
||||
with_roledata=True):
|
||||
try:
|
||||
# ensure every image parameter has a default value, even if prepare
|
||||
# didn't return it
|
||||
params = default_image_params()
|
||||
|
||||
if with_roledata:
|
||||
plan_env = get_env(swift, container)
|
||||
env_paths, temp_env_paths = build_env_paths(
|
||||
swift, container, plan_env)
|
||||
env_files, env = process_environments_and_files(
|
||||
swift, env_paths)
|
||||
|
||||
role_data = get_role_data(swift)
|
||||
image_params = kolla_builder.container_images_prepare_multi(
|
||||
env, role_data, dry_run=True)
|
||||
if image_params:
|
||||
params.update(image_params)
|
||||
|
||||
except Exception as err:
|
||||
LOG.exception("Error occurred while updating plan files.")
|
||||
raise RuntimeError(six.text_type(err))
|
||||
finally:
|
||||
# cleanup any local temp files
|
||||
if with_roledata:
|
||||
for f in temp_env_paths:
|
||||
os.remove(f)
|
||||
|
||||
try:
|
||||
swiftutils.put_object_string(
|
||||
swift,
|
||||
container,
|
||||
constants.CONTAINER_DEFAULTS_ENVIRONMENT,
|
||||
yaml.safe_dump(
|
||||
{'parameter_defaults': params},
|
||||
default_flow_style=False
|
||||
)
|
||||
)
|
||||
except swiftexceptions.ClientException as err:
|
||||
err_msg = ("Error updating %s for plan %s: %s" % (
|
||||
constants.CONTAINER_DEFAULTS_ENVIRONMENT, container, err))
|
||||
LOG.exception(err_msg)
|
||||
raise RuntimeError(err_msg)
|
||||
|
||||
environments = {constants.CONTAINER_DEFAULTS_ENVIRONMENT: True}
|
||||
|
||||
try:
|
||||
env = update_plan_environment(swift, environments,
|
||||
container=container)
|
||||
except swiftexceptions.ClientException as err:
|
||||
err_msg = ("Error updating environment for plan %s: %s" % (
|
||||
container, err))
|
||||
LOG.exception(err_msg)
|
||||
raise RuntimeError(err_msg)
|
||||
return env
|
||||
|
||||
|
||||
def generate_passwords(swift=None, heat=None,
|
||||
container=constants.DEFAULT_CONTAINER_NAME,
|
||||
rotate_passwords=False, rotate_pw_list=None):
|
||||
|
@ -373,36 +108,22 @@ def generate_passwords(swift=None, heat=None,
|
|||
return passwords
|
||||
|
||||
|
||||
def update_plan_rotate_fernet_keys(swift,
|
||||
container=constants.DEFAULT_CONTAINER_NAME):
|
||||
def rotate_fernet_keys(heat,
|
||||
container=constants.DEFAULT_CONTAINER_NAME):
|
||||
try:
|
||||
env = get_env(swift, container)
|
||||
except swiftexceptions.ClientException as err:
|
||||
err_msg = ("Error retrieving environment for plan %s: %s" % (
|
||||
container, err))
|
||||
LOG.exception(err_msg)
|
||||
raise RuntimeError(err_msg)
|
||||
stack_env = heat.stacks.environment(
|
||||
stack_id=container)
|
||||
except heat_exc.HTTPNotFound:
|
||||
stack_env = None
|
||||
|
||||
parameter_defaults = env.get('parameter_defaults', {})
|
||||
passwords = get_overriden_passwords(env.get(
|
||||
'passwords', {}), parameter_defaults)
|
||||
parameter_defaults = stack_env.get('parameter_defaults', {})
|
||||
passwords = get_overriden_passwords({}, parameter_defaults)
|
||||
|
||||
next_index = get_next_index(passwords['KeystoneFernetKeys'])
|
||||
keys_map = rotate_keys(passwords['KeystoneFernetKeys'],
|
||||
next_index)
|
||||
max_keys = get_max_keys_value(parameter_defaults)
|
||||
keys_map = purge_excess_keys(max_keys, keys_map)
|
||||
|
||||
env['passwords']['KeystoneFernetKeys'] = keys_map
|
||||
|
||||
try:
|
||||
put_env(swift, env)
|
||||
except swiftexceptions.ClientException as err:
|
||||
err_msg = "Error uploading to container: %s" % err
|
||||
LOG.exception(err_msg)
|
||||
raise RuntimeError(err_msg)
|
||||
|
||||
return keys_map
|
||||
return purge_excess_keys(max_keys, keys_map)
|
||||
|
||||
|
||||
def get_overriden_passwords(env_passwords, parameter_defaults):
|
||||
|
@ -454,10 +175,3 @@ def purge_excess_keys(max_keys, keys_map):
|
|||
for key_path in key_paths[1:keys_to_be_purged + 1]:
|
||||
del keys_map[key_path]
|
||||
return keys_map
|
||||
|
||||
|
||||
def pattern_validator(pattern, value):
|
||||
LOG.debug('Validating %s with pattern %s', value, pattern)
|
||||
if not re.match(pattern, value):
|
||||
return False
|
||||
return True
|
||||
|
|
|
@ -16,16 +16,8 @@
|
|||
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
import uuid
|
||||
|
||||
from heatclient import exc as heat_exc
|
||||
from swiftclient import exceptions as swiftexceptions
|
||||
|
||||
from tripleo_common import constants
|
||||
from tripleo_common import update
|
||||
from tripleo_common.utils import plan as plan_utils
|
||||
from tripleo_common.utils import template as templates
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
@ -73,151 +65,6 @@ def _flat_it(flattened, name, data):
|
|||
return {key: value}
|
||||
|
||||
|
||||
def validate_stack_and_flatten_parameters(heat, processed_data, env):
    """Validate plan templates via Heat and flatten the resource tree.

    Sends the processed template/files/environment to Heat's stack
    validation API, then flattens the nested resource tree (via the
    module-level ``_flat_it`` helper) for easier consumption.

    :param heat: heat client
    :param processed_data: dict with 'template', 'files' and 'environment'
    :param env: plan environment; its 'parameter_defaults' are echoed back
    :returns: dict with 'heat_resource_tree' and 'environment_parameters'
    """
    validate_kwargs = {
        'template': processed_data['template'],
        'files': processed_data['files'],
        'environment': processed_data['environment'],
        'show_nested': True
    }

    result = {
        'heat_resource_tree': heat.stacks.validate(**validate_kwargs),
        'environment_parameters': env.get('parameter_defaults'),
    }

    tree = result['heat_resource_tree']
    if tree:
        flattened = {'resources': {}, 'parameters': {}}
        _flat_it(flattened, 'Root', tree)
        result['heat_resource_tree'] = flattened
    return result
|
||||
|
||||
|
||||
def deploy_stack(swift, heat, container, skip_deploy_identifier=False,
                 timeout_mins=240):
    """Create or update the Heat stack for a deployment plan.

    Looks up the existing stack (if any), stamps DeployIdentifier /
    StackAction into the plan environment, injects undercloud TLS
    parameters, processes the plan templates, and finally calls Heat's
    create or update API as appropriate.

    :param swift: swift client holding the plan container
    :param heat: heat client
    :param container: plan (and stack) name
    :param skip_deploy_identifier: when True, DeployIdentifier is set to ''
        instead of the current epoch time
    :param timeout_mins: Heat stack timeout in minutes
    :returns: the Heat create/update API response
    :raises RuntimeError: on Swift or Heat API failures
    """
    # A missing stack means this is the initial deployment (CREATE path).
    try:
        stack = heat.stacks.get(container, resolve_outputs=False)
    except heat_exc.HTTPNotFound:
        stack = None

    stack_is_new = stack is None

    # update StackAction, DeployIdentifier

    parameters = dict()
    if not skip_deploy_identifier:
        parameters['DeployIdentifier'] = int(time.time())
    else:
        parameters['DeployIdentifier'] = ''

    parameters['StackAction'] = 'CREATE' if stack_is_new else 'UPDATE'

    try:
        env = plan_utils.get_env(swift, container)
    except swiftexceptions.ClientException as err:
        err_msg = ("Error retrieving environment for plan %s: %s" % (
            container, err))
        LOG.exception(err_msg)
        raise RuntimeError(err_msg)

    # Add the undercloud CA (if present on disk) so the overcloud trusts it.
    set_tls_parameters(parameters, env)
    try:
        plan_utils.update_in_env(swift, env, 'parameter_defaults',
                                 parameters)
    except swiftexceptions.ClientException as err:
        err_msg = ("Error updating environment for plan %s: %s" % (
            container, err))
        LOG.exception(err_msg)
        raise RuntimeError(err_msg)

    if not stack_is_new:
        # Guard against updating a stack whose neutron mechanism drivers
        # would be incompatible with the new plan.
        try:
            LOG.debug('Checking for compatible neutron mechanism drivers')
            msg = update.check_neutron_mechanism_drivers(env, stack,
                                                         swift,
                                                         container)
            if msg:
                raise RuntimeError(msg)
        except swiftexceptions.ClientException as err:
            err_msg = ("Error getting template %s: %s" % (
                container, err))
            LOG.exception(err_msg)
            raise RuntimeError(err_msg)

    # process all plan files and create or update a stack
    processed_data = templates.process_templates(
        swift, heat, container=container,
        prune_services=True
    )
    stack_args = processed_data.copy()
    stack_args['timeout_mins'] = timeout_mins

    if stack_is_new:
        # Archive any leftover swift rings from a previous deployment under
        # a timestamped name; best-effort, so Swift errors are ignored.
        try:
            swift.copy_object(
                "%s-swift-rings" % container, "swift-rings.tar.gz",
                "%s-swift-rings/%s-%d" % (
                    container, "swift-rings.tar.gz", time.time()))
            swift.delete_object(
                "%s-swift-rings" % container, "swift-rings.tar.gz")
        except swiftexceptions.ClientException:
            pass
        # NOTE(review): "Perfoming" typo is in the original log message.
        LOG.info("Perfoming Heat stack create")
        try:
            return heat.stacks.create(**stack_args)
        except heat_exc.HTTPException as err:
            err_msg = "Error during stack creation: %s" % (err,)
            LOG.exception(err_msg)
            raise RuntimeError(err_msg)

    LOG.info("Performing Heat stack update")
    stack_args['existing'] = 'true'
    try:
        return heat.stacks.update(stack.id, **stack_args)
    except heat_exc.HTTPException as err:
        err_msg = "Error during stack update: %s" % (err,)
        LOG.exception(err_msg)
        raise RuntimeError(err_msg)
|
||||
|
||||
|
||||
def set_tls_parameters(parameters, env,
                       local_ca_path=constants.LOCAL_CACERT_PATH):
    """Add the undercloud CA certificate to ``parameters['CAMap']``.

    When a CA certificate exists at *local_ca_path*, merge an
    'undercloud-ca' entry into the plan's existing CAMap (mutated in
    place) and expose the result via *parameters*. No-op otherwise.
    """
    cacert_string = get_local_cacert(local_ca_path)
    if not cacert_string:
        return
    # Reuse (and update in place) whatever CAMap the plan already carries.
    ca_map = env['parameter_defaults'].get('CAMap', {})
    ca_map['undercloud-ca'] = {'content': cacert_string}
    parameters['CAMap'] = ca_map
|
||||
|
||||
|
||||
def get_local_cacert(local_ca_path):
    """Return the undercloud CA certificate as text, or None if absent.

    Since the undercloud has TLS by default, we'll add the undercloud's
    CA to be trusted by the overcloud.

    :param local_ca_path: filesystem path of the CA certificate file
    :returns: decoded file contents, or None when the file cannot be
        opened (undercloud TLS explicitly disabled or another CA in use;
        the user is expected to handle that case)
    """
    # Fix: the original ended with a dead "except Exception: raise" clause,
    # which is a no-op re-raise; removing it does not change behavior.
    try:
        with open(local_ca_path, 'rb') as ca_file:
            return ca_file.read().decode('utf-8')
    except IOError:
        # If the file wasn't found it means that the undercloud's TLS
        # was explicitly disabled or another CA is being used. So we'll
        # let the user handle this.
        return None
|
||||
|
||||
|
||||
def preview_stack_and_network_configs(heat, processed_data,
|
||||
container, role_name):
|
||||
# stacks.preview method raises validation message if stack is
|
||||
|
|
|
@ -1,214 +0,0 @@
|
|||
# Copyright 2016 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import dateutil.parser
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
import uuid
|
||||
|
||||
import six
|
||||
from six.moves import urllib
|
||||
from swiftclient import exceptions as swiftexceptions
|
||||
from swiftclient.service import SwiftError
|
||||
from swiftclient.service import SwiftUploadObject
|
||||
from swiftclient.utils import generate_temp_url
|
||||
|
||||
from tripleo_common.utils import tarball
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def empty_container(swiftclient, name):
    """Delete every object in the Swift container *name*.

    :raises ValueError: if the container does not exist
    """
    existing = [container["name"] for container
                in swiftclient.get_account()[1]]

    if name not in existing:
        error_text = "The {name} container does not exist.".format(name=name)
        raise ValueError(error_text)

    headers, objects = swiftclient.get_container(name)
    # FIXME(rbrady): remove delete_object loop when
    # LP#1615830 is fixed. See LP#1615825 for more info.
    # delete files from plan
    for obj in objects:
        swiftclient.delete_object(name, obj['name'])
|
||||
|
||||
|
||||
def delete_container(swiftclient, name):
    """Empty and delete the Swift container *name*.

    A missing container is treated as already deleted and only logged.
    """
    try:
        empty_container(swiftclient, name)
        swiftclient.delete_container(name)
    except ValueError as err:
        # ValueError is raised when we can't find the container, which means
        # that it's already deleted.
        LOG.info(six.text_type(err))
|
||||
|
||||
|
||||
def download_container(swiftclient, container, dest,
                       overwrite_only_newer=False):
    """Download the contents of a Swift container to a directory.

    :param swiftclient: swift client
    :param container: source container name
    :param dest: destination directory (subdirectories created as needed)
    :param overwrite_only_newer: when True, an existing local file is only
        overwritten if the Swift copy has a newer last-modified timestamp
    """

    objects = swiftclient.get_container(container)[1]

    for obj in objects:
        is_newer = False
        filename = obj['name']
        contents = swiftclient.get_object(container, filename)[1]
        # Normalize to bytes; already-bytes (or undecodable) content is
        # left untouched.
        try:
            contents = contents.encode('utf-8')
        except (UnicodeDecodeError, AttributeError):
            pass
        path = os.path.join(dest, filename)
        dirname = os.path.dirname(path)
        already_exists = os.path.exists(path)

        if already_exists:
            last_modified = obj.get('last_modified', None)

            if last_modified is not None:
                # Compare epoch seconds of the Swift object vs local file.
                last_mod_swift = int(dateutil.parser.parse(
                    obj['last_modified']).strftime('%s'))

                last_mod_disk = int(os.path.getmtime(path))

                if last_mod_swift > last_mod_disk:
                    is_newer = True

        # write file if `overwrite_only_newer` is not set,
        # or if file does not exist at destination,
        # or if we found a newer file at source
        if (not overwrite_only_newer or
                not already_exists or
                (overwrite_only_newer and is_newer)):
            if not os.path.exists(dirname):
                os.makedirs(dirname)

            # open in binary as the swift client returns error
            # under python3 if opened as text
            with open(path, 'wb') as f:
                f.write(contents)
|
||||
|
||||
|
||||
def create_container(swiftclient, container):
    """Ensure *container* exists.

    Swift PUT is idempotent: an existing container returns 202 and
    everything still works.
    """
    swiftclient.put_container(container)
|
||||
|
||||
|
||||
def create_and_upload_tarball(swiftservice,
                              tmp_dir,
                              container,
                              tarball_name,
                              tarball_options='-czf',
                              delete_after=3600,
                              segment_size=1048576000,
                              use_slo=True,
                              segment_container=None,
                              leave_segments=False,
                              changed=None,
                              skip_identical=False,
                              fail_fast=True,
                              dir_marker=False):
    """Create a tarball containing the tmp_dir and upload it to Swift.

    This method allows to upload files bigger than 5GB.
    It will create 2 swift containers to store the segments and
    one container to reference the manifest with the segment pointers

    :param swiftservice: swiftclient SwiftService used for the upload
    :param tmp_dir: directory whose contents are archived
    :param container: target container for the manifest object
    :param tarball_name: object name of the uploaded tarball
    :param tarball_options: options passed to tar (gzip create by default)
    :param delete_after: value for the X-Delete-After header, in seconds
    The remaining keyword arguments are forwarded as SwiftService upload
    options. Errors are logged rather than raised.
    """
    try:
        with tempfile.NamedTemporaryFile() as tmp_tarball:
            tarball.create_tarball(tmp_dir,
                                   tmp_tarball.name,
                                   tarball_options)
            objs = [SwiftUploadObject(tmp_tarball,
                                      object_name=tarball_name)]
            options = {'meta': [],
                       'header': ['X-Delete-After: ' + str(delete_after)],
                       'segment_size': segment_size,
                       'use_slo': use_slo,
                       'segment_container': segment_container,
                       'leave_segments': leave_segments,
                       'changed': changed,
                       'skip_identical': skip_identical,
                       'fail_fast': fail_fast,
                       'dir_marker': dir_marker
                       }

            for r in swiftservice.upload(container,
                                         objs,
                                         options=options):
                if r['success']:
                    if 'object' in r:
                        LOG.info(r['object'])
                    elif 'for_object' in r:
                        LOG.info(
                            '%s segment %s',
                            r['for_object'],
                            r['segment_index']
                        )
                else:
                    error = r['error']
                    if r['action'] == "create_container":
                        LOG.warning(
                            'Warning: failed to create container '
                            "'%s'%s", container, error
                        )
                    elif r['action'] == "upload_object":
                        # Fix: the object and container args were swapped
                        # relative to the format string placeholders.
                        LOG.error(
                            "Failed to upload object %s to container %s: %s",
                            r['object'], container, error
                        )
                    else:
                        LOG.error("%s", error)
    except SwiftError as e:
        LOG.error(e.value)
|
||||
|
||||
|
||||
def get_object_string(swift, container, object_name):
    """Get the object contents as a string """
    raw = swift.get_object(container, object_name)[1]
    # Bytes payloads are decoded; text payloads lack .decode and are
    # returned unchanged.
    try:
        return raw.decode('utf-8')
    except AttributeError:
        return raw
|
||||
|
||||
|
||||
def put_object_string(swift, container, object_name, contents):
    """Put the object contents as a string """
    # Accept bytes or text; bytes are decoded before upload.
    try:
        payload = contents.decode('utf-8')
    except AttributeError:
        payload = contents
    return swift.put_object(container, object_name, payload)
|
||||
|
||||
|
||||
def get_temp_url(swift, container, object_name, method='GET', valid='86400'):
    """Return a Swift temp URL for *object_name* in *container*.

    Reuses the container's existing temp-url key if one is set; otherwise
    generates a fresh key and stores it on the container.

    :param method: HTTP method the URL will be valid for
    :param valid: lifetime of the URL in seconds (string or int accepted
        by swiftclient's generate_temp_url)
    """
    try:
        cont_stat = swift.head_container(container)
    except swiftexceptions.ClientException:
        # Treat a missing/unreadable container as having no key set.
        cont_stat = {}

    key = cont_stat.get('x-container-meta-temp-url-key')
    if not key:
        # No key yet: create one and persist it on the container.
        key = str(uuid.uuid4())
        cont_stat = swift.put_container(
            container, {'X-Container-Meta-Temp-Url-Key': key})
    parsed = urllib.parse.urlparse(swift.url)
    path = "%s/%s/%s" % (parsed.path, container, object_name)
    temp_path = generate_temp_url(path, valid, key, method)
    return "%s://%s%s" % (parsed.scheme, parsed.netloc, temp_path)
|
|
@ -1,59 +0,0 @@
|
|||
# Copyright 2016 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import logging
|
||||
import os
|
||||
|
||||
from oslo_concurrency import processutils
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
DEFAULT_TARBALL_EXCLUDES = ['.git', '.tox', '*.pyc', '*.pyo']
|
||||
|
||||
|
||||
def create_tarball(directory, filename, options='-czf',
                   excludes=DEFAULT_TARBALL_EXCLUDES):
    """Create a tarball of a directory."""
    LOG.debug('Creating tarball of %s at location %s', directory, filename)
    cmd = ['/usr/bin/tar', '-C', directory, options, filename]
    for pattern in excludes:
        cmd += ['--exclude', pattern]
    cmd += ['.']
    processutils.execute(*cmd)
|
||||
|
||||
|
||||
def tarball_extract_to_swift_container(object_client, filename, container):
    """Upload a local tarball and have Swift extract it into *container*."""
    LOG.debug('Uploading filename %s to Swift container %s',
              filename, container)
    with open(filename, 'rb') as archive:
        object_client.put_object(
            container=container,
            obj='',
            contents=archive,
            query_string='extract-archive=tar.gz',
            headers={'X-Detect-Content-Type': 'true'}
        )
|
||||
|
||||
|
||||
def extract_tarball(directory, tarball, options='-xf', remove=False):
    """Extracts the tarball contained in the directory."""
    full_path = directory + '/' + tarball
    if not os.path.exists(full_path):
        LOG.debug('Tarball %s does not exist', full_path)
        return
    LOG.debug('Extracting tarball %s', full_path)
    processutils.execute('/usr/bin/tar', '-C', directory, options, full_path)
    if remove:
        LOG.debug('Removing tarball %s', full_path)
        os.remove(full_path)
|
|
@ -1,478 +0,0 @@
|
|||
# Copyright 2016 Red Hat, Inc.
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
import jinja2
|
||||
import logging
|
||||
import os
|
||||
import six
|
||||
import tempfile
|
||||
import yaml
|
||||
|
||||
from heatclient import exc as heat_exc
|
||||
from swiftclient import exceptions as swiftexceptions
|
||||
|
||||
from tripleo_common import constants
|
||||
from tripleo_common.utils import parameters
|
||||
from tripleo_common.utils import plan as plan_utils
|
||||
from tripleo_common.utils import swift as swiftutils
|
||||
from tripleo_common.utils import tarball
|
||||
|
||||
LOG = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class J2SwiftLoader(jinja2.BaseLoader):
    """Jinja2 loader to fetch included files from swift

    This attempts to fetch a template include file from the given container.
    An optional search path or list of search paths can be provided. By default
    only the absolute path relative to the container root is searched.
    """

    def __init__(self, swift, container, searchpath):
        # swift: swift client used for object fetches
        self.swift = swift
        # container: plan container the includes are read from
        self.container = container
        self.searchpath = [searchpath]
        # Always search the absolute path from the root of the swift container
        if '' not in self.searchpath:
            self.searchpath.append('')

    def get_source(self, environment, template):
        # Implements jinja2.BaseLoader.get_source: returns
        # (source, filename, uptodate). filename is None (no local file)
        # and uptodate is False so the template is never cached as fresh.
        pieces = jinja2.loaders.split_template_path(template)
        for searchpath in self.searchpath:
            template_path = os.path.join(searchpath, *pieces)
            try:
                source = swiftutils.get_object_string(self.swift,
                                                      self.container,
                                                      template_path)
                return source, None, False
            except swiftexceptions.ClientException:
                # Not found under this search path; try the next one.
                pass
        raise jinja2.exceptions.TemplateNotFound(template)
|
||||
|
||||
|
||||
def j2_render_and_put(swift, j2_template, j2_data, yaml_f,
                      container=constants.DEFAULT_CONTAINER_NAME):
    """Render a jinja2 template string and store the result in the plan.

    :param swift: swift client
    :param j2_template: template source text to render
    :param j2_data: dict of variables passed to the template
    :param yaml_f: output object name (also anchors the include search path)
    :param container: plan container written to
    :raises RuntimeError: on rendering or Swift upload failure
    """

    def raise_helper(msg):
        # Exposed to templates as the 'raise' global so a template can
        # abort rendering with a clear message.
        raise jinja2.exceptions.TemplateError(msg)

    # Search for templates relative to the current template path first
    template_base = os.path.dirname(yaml_f)
    j2_loader = J2SwiftLoader(swift, container, template_base)

    try:
        # Render the j2 template
        jinja2_env = jinja2.Environment(loader=j2_loader)
        jinja2_env.globals['raise'] = raise_helper
        template = jinja2_env.from_string(j2_template)
        r_template = template.render(**j2_data)
    except jinja2.exceptions.TemplateError as ex:
        error_msg = ("Error rendering template %s : %s"
                     % (yaml_f, six.text_type(ex)))
        LOG.error(error_msg)
        raise RuntimeError(error_msg)

    try:
        # write the template back to the plan container
        LOG.info("Writing rendered template %s", yaml_f)
        swiftutils.put_object_string(swift, container, yaml_f,
                                     r_template)
    except swiftexceptions.ClientException:
        # Fix: error_msg was previously built as a tuple
        # ("...%s...%s", yaml_f, container), so the placeholders were
        # never substituted in the log/exception text.
        error_msg = ("Error storing file %s in container %s"
                     % (yaml_f, container))
        LOG.error(error_msg)
        raise RuntimeError(error_msg)
|
||||
|
||||
|
||||
def get_j2_excludes_file(swift, container=constants.DEFAULT_CONTAINER_NAME):
    """Load the plan's j2_excludes file, defaulting to an empty list.

    :returns: dict of the form {'name': [<excluded template paths>]}
    """
    try:
        raw = swiftutils.get_object_string(
            swift, container, constants.OVERCLOUD_J2_EXCLUDES)
    except swiftexceptions.ClientException:
        j2_excl_data = {"name": []}
        LOG.info("No J2 exclude file found, defaulting "
                 "the J2 excludes list to: %s", j2_excl_data)
        return j2_excl_data

    j2_excl_data = yaml.safe_load(raw)
    if j2_excl_data is None or j2_excl_data.get('name') is None:
        j2_excl_data = {"name": []}
        LOG.info("j2_excludes.yaml is either empty or there are "
                 "no templates to exclude, defaulting the J2 "
                 "excludes list to: %s", j2_excl_data)
    return j2_excl_data
|
||||
|
||||
|
||||
def heat_resource_exists(heat, stack, nested_stack_name, resource_name):
    """Check whether *resource_name* exists inside a nested stack.

    :returns: True only when the stack, the nested stack and the resource
        all resolve; False otherwise.
    """
    if stack is None:
        LOG.debug("Resource does not exist because stack does not exist")
        return False

    try:
        nested_stack = heat.resources.get(stack.id, nested_stack_name)
    except heat_exc.HTTPNotFound:
        LOG.debug(
            "Resource does not exist because %s stack does "
            "not exist", nested_stack_name)
        return False

    try:
        heat.resources.get(nested_stack.physical_resource_id,
                           resource_name)
    except heat_exc.HTTPNotFound:
        LOG.debug("Resource does not exist: %s", resource_name)
        return False

    LOG.debug("Resource exists: %s", resource_name)
    return True
|
||||
|
||||
|
||||
def _set_tags_based_on_role_name(role_data):
    """Backfill tags implied by legacy role-name prefixes (deprecated).

    Mutates each role dict in *role_data*: ensures a 'tags' list exists
    and appends any tag implied by the role's name prefix, logging a
    deprecation warning for each auto-added tag.
    """
    # (name prefixes, implied tag); rules are applied in order and a role
    # may match several (e.g. ComputeOvsDpdk -> compute + ovsdpdk,
    # CephStorage -> ceph + storage).
    implied_tags = (
        (('Compute', 'HciCeph', 'DistributedCompute'), 'compute'),
        (('Ceph',), 'ceph'),
        (('ComputeOvsDpdk',), 'ovsdpdk'),
        (('ObjectStorage', 'BlockStorage', 'Ceph'), 'storage'),
    )
    deprecation_fmt = (
        "DEPRECATED: Role '%%s' without the '%(tag)s' tag "
        "detected, the tag was added automatically. Please "
        "add the '%(tag)s' tag in roles data. The function to "
        "automatically add tags based on role name will be "
        "removed in the next release.")
    for role in role_data:
        role['tags'] = role.get('tags', [])
        role_name = role.get('name', str())
        for prefixes, tag in implied_tags:
            if role_name.startswith(prefixes) and tag not in role['tags']:
                role['tags'].append(tag)
                LOG.warning(deprecation_fmt % {'tag': tag}, role_name)
|
||||
|
||||
|
||||
def process_custom_roles(swift, heat,
                         container=constants.DEFAULT_CONTAINER_NAME):
    """Jinja-render the plan's role/network templated files in place.

    Loads roles_data and network_data from the plan container, then walks
    every object in the container and renders *.role.j2.yaml (per role),
    *.network.j2.yaml (per network) and plain *.j2.yaml files back into
    the container, honoring the j2_excludes list.

    :returns: the parsed role data list, or None when no roles file exists
    :raises RuntimeError: when the container contents cannot be listed
    """
    try:
        j2_role_file = swiftutils.get_object_string(
            swift, container, constants.OVERCLOUD_J2_ROLES_NAME)
        role_data = yaml.safe_load(j2_role_file)
    except swiftexceptions.ClientException:
        LOG.info("No %s file found, skipping jinja templating",
                 constants.OVERCLOUD_J2_ROLES_NAME)
        return

    try:
        j2_network_file = swiftutils.get_object_string(
            swift, container, constants.OVERCLOUD_J2_NETWORKS_NAME)
        network_data = yaml.safe_load(j2_network_file)
        # Allow no networks defined in network_data
        if network_data is None:
            network_data = []
        # Set internal network index key for each network, network resources
        # are created with a tag tripleo_net_idx
        for idx, net in enumerate(network_data):
            network_data[idx].update({'idx': idx})
    except swiftexceptions.ClientException:
        # Until t-h-t contains network_data.yaml we tolerate a missing file
        LOG.warning("No %s file found, ignoring",
                    constants.OVERCLOUD_J2_ROLES_NAME)
        network_data = []

    j2_excl_data = get_j2_excludes_file(swift, container)

    try:
        # Iterate over all files in the plan container
        # we j2 render any with the .j2.yaml suffix
        container_files = swift.get_container(container)
    except swiftexceptions.ClientException as ex:
        # Fix: error_msg was previously a tuple ("...%s...%s", container,
        # ex), so the placeholders were never substituted in the message.
        error_msg = ("Error listing contents of container %s : %s"
                     % (container, six.text_type(ex)))
        LOG.error(error_msg)
        raise RuntimeError(error_msg)

    # TODO(hjensas): In next release remove the function to automatically add
    # tags based on role name.
    _set_tags_based_on_role_name(role_data)

    role_names = [r.get('name') for r in role_data]
    r_map = {}
    for r in role_data:
        r_map[r.get('name')] = r
    excl_templates = j2_excl_data.get('name')

    stack = None
    try:
        stack = heat.stacks.get(container, resolve_outputs=False)
    except heat_exc.HTTPNotFound:
        LOG.debug("Stack does not exist")

    n_map = {}
    for n in network_data:
        if n.get('enabled') is not False:
            n_map[n.get('name')] = n
            if not n.get('name_lower'):
                n_map[n.get('name')]['name_lower'] = n.get('name').lower()
            if n.get('name') == constants.API_NETWORK and 'compat_name' \
                    not in n.keys():
                # Check to see if legacy named API network exists
                # and if so we need to set compat_name
                api_net = "{}Network".format(constants.LEGACY_API_NETWORK)
                if heat_resource_exists(heat, stack, 'Networks', api_net):
                    n['compat_name'] = 'Internal'
                    LOG.info("Upgrade compatibility enabled for legacy "
                             "network resource Internal.")
        else:
            LOG.info("skipping %s network: network is disabled.",
                     n.get('name'))

    for f in [f.get('name') for f in container_files[1]]:
        # We do three templating passes here:
        # 1. *.role.j2.yaml - we template just the role name
        #    and create multiple files (one per role)
        # 2 *.network.j2.yaml - we template the network name and
        #   data and create multiple files for networks and
        #   network ports (one per network)
        # 3. *.j2.yaml - we template with all roles_data,
        #    and create one file common to all roles
        if f.endswith('.role.j2.yaml'):
            LOG.info("jinja2 rendering role template %s", f)
            j2_template = swiftutils.get_object_string(swift,
                                                       container, f)
            LOG.info("jinja2 rendering roles %s", ","
                     .join(role_names))
            for role in role_names:
                LOG.info("jinja2 rendering role %s", role)
                out_f = "-".join(
                    [role.lower(),
                     os.path.basename(f).replace('.role.j2.yaml',
                                                 '.yaml')])
                out_f_path = os.path.join(os.path.dirname(f), out_f)
                if ('network/config' in os.path.dirname(f) and
                        r_map[role].get('deprecated_nic_config_name')):
                    d_name = r_map[role].get('deprecated_nic_config_name')
                    out_f_path = os.path.join(os.path.dirname(f), d_name)
                elif ('network/config' in os.path.dirname(f)):
                    d_name = "%s.yaml" % role.lower()
                    out_f_path = os.path.join(os.path.dirname(f), d_name)
                if not (out_f_path in excl_templates):
                    if '{{role.name}}' in j2_template:
                        j2_data = {'role': r_map[role],
                                   'networks': network_data}
                        j2_render_and_put(swift, j2_template,
                                          j2_data, out_f_path,
                                          container)
                    else:
                        # Backwards compatibility with templates
                        # that specify {{role}} vs {{role.name}}
                        j2_data = {'role': role, 'networks': network_data}
                        LOG.debug("role legacy path for role %s", role)
                        j2_render_and_put(swift, j2_template,
                                          j2_data, out_f_path,
                                          container)
                else:
                    LOG.info("Skipping rendering of %s, defined in %s",
                             out_f_path, j2_excl_data)

        elif (f.endswith('.network.j2.yaml')):
            LOG.info("jinja2 rendering network template %s", f)
            j2_template = swiftutils.get_object_string(swift,
                                                       container,
                                                       f)
            LOG.info("jinja2 rendering networks %s", ",".join(n_map))
            for network in n_map:
                j2_data = {'network': n_map[network]}
                # Output file names in "<name>.yaml" format
                out_f = os.path.basename(f).replace('.network.j2.yaml',
                                                    '.yaml')
                if os.path.dirname(f).endswith('ports'):
                    out_f = out_f.replace('port',
                                          n_map[network]['name_lower'])
                else:
                    out_f = out_f.replace('network',
                                          n_map[network]['name_lower'])
                out_f_path = os.path.join(os.path.dirname(f), out_f)
                if not (out_f_path in excl_templates):
                    j2_render_and_put(swift, j2_template,
                                      j2_data, out_f_path,
                                      container)
                else:
                    LOG.info("Skipping rendering of %s, defined in %s",
                             out_f_path, j2_excl_data)

        elif f.endswith('.j2.yaml'):
            LOG.info("jinja2 rendering %s", f)
            j2_template = swiftutils.get_object_string(swift,
                                                       container,
                                                       f)
            j2_data = {'roles': role_data, 'networks': network_data}
            out_f = f.replace('.j2.yaml', '.yaml')
            j2_render_and_put(swift, j2_template,
                              j2_data, out_f,
                              container)
    return role_data
|
||||
|
||||
|
||||
def prune_unused_services(swift, role_data,
                          resource_registry,
                          container=constants.DEFAULT_CONTAINER_NAME):
    """Remove unused services from role data

    Finds the unused services in the resource registry and removes them
    from the role data in the plan so we do not create OS::Heat::None
    resources.

    :param swift: swift client
    :param role_data: list of role dicts from the plan's roles file
    :param resource_registry: tripleo resource registry dict
    :param container: name of the plan container holding the roles file
    :returns: true if we updated the roles file. else false
    """
    # Services mapped to OS::Heat::None are disabled and can be dropped.
    to_remove = set()
    for key, value in resource_registry.items():
        if (key.startswith('OS::TripleO::Services::') and
                value.startswith('OS::Heat::None')):
            to_remove.add(key)

    if not to_remove or not role_data:
        LOG.info('No unused services to prune or no role data')
        return False

    LOG.info('Removing unused services from role data')
    for role in role_data:
        role_name = role.get('name')
        for service in to_remove:
            # list.remove raises ValueError when the role does not carry
            # the service; that is expected and ignored.
            try:
                role.get('ServicesDefault', []).remove(service)
                LOG.debug('Removing %s from %s role', service, role_name)
            except ValueError:
                pass
    LOG.debug('Saving updated role data to swift')
    swift.put_object(container,
                     constants.OVERCLOUD_J2_ROLES_NAME,
                     yaml.safe_dump(role_data,
                                    default_flow_style=False))
    return True
|
||||
|
||||
|
||||
def build_heat_args(swift, heat, container=constants.DEFAULT_CONTAINER_NAME):
    """Assemble the template/environment arguments for a Heat stack call.

    Renders any jinja templated plan files, fetches the root template and
    processes the plan environments.

    :returns: tuple of (heat_args dict with 'template', 'template_files',
        'env', 'env_files'; role_data from custom-role processing)
    :raises RuntimeError: when any plan retrieval/processing step fails
    """
    error_text = None
    try:
        plan_env = plan_utils.get_env(swift, container)
    except swiftexceptions.ClientException as err:
        err_msg = ("Error retrieving environment for plan %s: %s" % (
            container, err))
        LOG.exception(err_msg)
        # Fix: this previously raised RuntimeError(error_text), but
        # error_text is still None here — the computed err_msg was lost.
        raise RuntimeError(err_msg)

    try:
        # if the jinja overcloud template exists, process it and write it
        # back to the swift container before continuing processing. The
        # method called below should handle the case where the files are
        # not found in swift, but if they are found and an exception
        # occurs during processing, then it will be raised.
        role_data = process_custom_roles(swift, heat, container)
    except Exception as err:
        LOG.exception("Error occurred while processing custom roles.")
        raise RuntimeError(six.text_type(err))

    template_name = plan_env.get('template', "")

    template_object = os.path.join(swift.url, container,
                                   template_name)
    LOG.debug('Template: %s', template_name)
    try:
        template_files, template = plan_utils.get_template_contents(
            swift, template_object)
    except Exception as err:
        error_text = six.text_type(err)
        LOG.exception("Error occurred while fetching %s", template_object)

    temp_env_paths = []
    try:
        env_paths, temp_env_paths = plan_utils.build_env_paths(
            swift, container, plan_env)
        env_files, env = plan_utils.process_environments_and_files(
            swift, env_paths)
        parameters.convert_docker_params(env)

    except Exception as err:
        error_text = six.text_type(err)
        LOG.exception("Error occurred while processing plan files.")
    finally:
        # cleanup any local temp files
        for f in temp_env_paths:
            os.remove(f)
    if error_text:
        raise RuntimeError(six.text_type(error_text))

    heat_args = {
        'template': template,
        'template_files': template_files,
        'env': env,
        'env_files': env_files
    }
    return heat_args, role_data
|
||||
|
||||
|
||||
def process_templates(swift, heat, container=constants.DEFAULT_CONTAINER_NAME,
                      prune_services=False):
    """Process all plan files into arguments for a Heat create/update call.

    :param prune_services: when True, services mapped to OS::Heat::None
        are removed from the roles file and the heat args are rebuilt.
    :returns: dict with 'stack_name', 'template', 'environment' and
        'files' (merged template and environment files)
    """
    heat_args, role_data = build_heat_args(swift, heat, container)
    if prune_services:
        try:
            # Prune OS::Heat::None resources
            resource_reg = heat_args['env'].get('resource_registry', {})
            roles_updated = prune_unused_services(
                swift, role_data, resource_reg, container)
            if roles_updated:
                # Roles file changed; re-render templates with the new roles.
                heat_args, _ = build_heat_args(swift, heat, container)

        except Exception as err:
            # NOTE(review): "prunning" typo is in the original log message.
            LOG.exception("Error occurred while prunning prune_services.")
            raise RuntimeError(six.text_type(err))

    files = dict(list(heat_args['template_files'].items()) + list(
        heat_args['env_files'].items()))

    return {
        'stack_name': container,
        'template': heat_args['template'],
        'environment': heat_args['env'],
        'files': files
    }
|
||||
|
||||
|
||||
def upload_templates_as_tarball(
        swift, dir_to_upload=constants.DEFAULT_TEMPLATES_PATH,
        container=constants.DEFAULT_CONTAINER_NAME):
    """Tar up *dir_to_upload* and extract it into the plan container."""
    with tempfile.NamedTemporaryFile() as archive:
        tarball.create_tarball(dir_to_upload, archive.name)
        tarball.tarball_extract_to_swift_container(
            swift, archive.name, container)
|
Loading…
Reference in New Issue