Remove support for Tuskar
This change removes Tuskar support from tripleo-common. Currently the only consumer of tripleo-common is python-tripleoclient, which is removing Tuskar support in the dependent change referenced below. Future work in the client will be done against Heat directly, and maintaining Tuskar support in tripleo-common would only add a large maintenance burden since it won't be used.

Depends-On: Ic6fd4568054fe91b8fc898728022a7d7ed8cf3cf
Change-Id: If8c2ab70edb5325bde2524a9a5d824605cadcc18
This commit is contained in:
parent fdbff2b7eb
commit 14193ec99b
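With the Tuskar plan gone, scaling is driven against Heat alone. Below is a minimal sketch of the new calling convention, mirroring the updated unit tests in this change; the heatclient object is a mock stand-in and the stack name, template directory, and instance ID are illustrative placeholders, not real overcloud data.

import mock

from tripleo_common import scale

# Stand-in for an authenticated python-heatclient instance (assumption for
# illustration only; mirrors the fixtures in the updated tests).
heatclient = mock.MagicMock()
heatclient.resources.list.return_value = []

manager = scale.ScaleManager(heatclient=heatclient,
                             stack_id='overcloud',
                             tht_dir='/tmp/tht')

# Without a Tuskar plan to consult, unknown instances are reported straight
# from the Heat resource listing and rejected with a ValueError.
try:
    manager.scaledown(['unknown-instance-id'])
except ValueError as exc:
    print(exc)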
@@ -2,7 +2,7 @@
 tripleo-common
 ===============================

-A common library for TripleO CLI and Tuskar UI.
+A common library for TripleO workflows.

 * Free software: Apache license
 * Documentation: http://docs.openstack.org/developer/tripleo-common
@@ -5,4 +5,3 @@
 pbr>=0.6,!=0.7,<1.0
 Babel>=1.3
 python-heatclient>=0.3.0
-python-tuskarclient>=0.1.17
@@ -1,6 +1,6 @@
 [metadata]
 name = tripleo-common
-summary = A common library for TripleO CLI and Tuskar UI.
+summary = A common library for TripleO workflows.
 description-file =
     README.rst
 author = OpenStack
@@ -1,41 +0,0 @@
-# Copyright 2015 Red Hat, Inc.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import logging
-import os
-import tempfile
-
-LOG = logging.getLogger(__name__)
-
-
-def save_templates(templates):
-    output_dir = tempfile.mkdtemp()
-
-    for template_name, template_content in templates.items():
-
-        # It's possible to organize the role templates and their dependent
-        # files into directories, in which case the template_name will carry
-        # the directory information. If that's the case, first create the
-        # directory structure (if it hasn't already been created by another
-        # file in the templates list).
-        template_dir = os.path.dirname(template_name)
-        output_template_dir = os.path.join(output_dir, template_dir)
-        if template_dir and not os.path.exists(output_template_dir):
-            os.makedirs(output_template_dir)
-
-        filename = os.path.join(output_dir, template_name)
-        with open(filename, 'w+') as template_file:
-            template_file.write(template_content)
-    return output_dir
@@ -13,48 +13,28 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+import collections
 import logging
 import os
-import shutil

 from heatclient.common import template_utils
-from tripleo_common import libutils
-from tuskarclient.common import utils as tuskarutils

 LOG = logging.getLogger(__name__)
 TEMPLATE_NAME = 'overcloud-without-mergepy.yaml'
-REGISTRY_NAME = "overcloud-resource-registry-puppet.yaml"


 class ScaleManager(object):
-    def __init__(self, heatclient, stack_id, tuskarclient=None, plan_id=None,
-                 tht_dir=None, environment_files=None):
-        self.tuskarclient = tuskarclient
+    def __init__(self, heatclient, stack_id, tht_dir=None,
+                 environment_files=None):
         self.heatclient = heatclient
         self.stack_id = stack_id
         self.tht_dir = tht_dir
         self.environment_files = environment_files
-        if self.tuskarclient:
-            self.plan = tuskarutils.find_resource(self.tuskarclient.plans,
-                                                  plan_id)
-
-    def scaleup(self, role, num):
-        LOG.debug('updating role %s count to %d', role, num)
-        param_name = '{0}::count'.format(role)
-        param = next(x for x in self.plan.parameters if
-                     x['name'] == param_name)
-        if num < int(param['value']):
-            raise ValueError("Role %s has already %s nodes, can't set lower "
-                             "value" % (role, param['value']))
-        self.plan = self.tuskarclient.plans.patch(
-            self.plan.uuid,
-            [{'name': param_name, 'value': str(num)}])
-        self._update_stack()

     def scaledown(self, instances):
         resources = self.heatclient.resources.list(self.stack_id,
                                                    nested_depth=5)
-        resources_by_role = {}
+        resources_by_role = collections.defaultdict(list)
         instance_list = list(instances)
         for res in resources:
             try:
@@ -69,13 +49,12 @@ class ScaleManager(object):
             # of nova server)
             role_resource = next(x for x in resources if
                                  x.physical_resource_id == stack_id)
-            # get tuskar role name from resource_type,
-            # resource_type is in format like "Tuskar::Compute-1"
-            role = role_resource.resource_type.rsplit('::', 1)[-1]
-            if role not in resources_by_role:
-                resources_by_role[role] = []
+            # Get the role name which is the resource_type in Heat.
+            role = role_resource.resource_type
             resources_by_role[role].append(role_resource)

+        resources_by_role = dict(resources_by_role)
+
         if instance_list:
             raise ValueError(
                 "Couldn't find following instances in stack %s: %s" %
@@ -83,26 +62,14 @@

         # decrease count for each role (or resource group) and set removal
         # policy for each resource group
-        if self.tuskarclient:
-            stack_params = self._get_removal_params_from_plan(
-                resources_by_role)
-        else:
-            stack_params = self._get_removal_params_from_heat(
-                resources_by_role)
+        stack_params = self._get_removal_params_from_heat(resources_by_role)

         self._update_stack(parameters=stack_params)

     def _update_stack(self, parameters={}):
-        if self.tuskarclient:
-            self.tht_dir = libutils.save_templates(
-                self.tuskarclient.plans.templates(self.plan.uuid))
-            tpl_name = 'plan.yaml'
-        else:
-            tpl_name = TEMPLATE_NAME

-        try:
-            tpl_files, template = template_utils.get_template_contents(
-                template_file=os.path.join(self.tht_dir, tpl_name))
-            env_paths = []
-            if self.environment_files:
-                env_paths.extend(self.environment_files)
+        tpl_files, template = template_utils.get_template_contents(
+            template_file=os.path.join(self.tht_dir, TEMPLATE_NAME))
+        env_paths = []
+        if self.environment_files:
+            env_paths.extend(self.environment_files)
@@ -121,39 +88,13 @@ class ScaleManager(object):

-            LOG.debug('stack update params: %s', fields)
-            self.heatclient.stacks.update(**fields)
-        finally:
-            if self.tuskarclient:
-                if LOG.isEnabledFor(logging.DEBUG):
-                    LOG.debug("Tuskar templates saved in %s", self.tht_dir)
-            else:
-                shutil.rmtree(self.tht_dir)
-
-    def _get_removal_params_from_plan(self, resources_by_role):
-        patch_params = []
-        stack_params = {}
-        for role, role_resources in resources_by_role.items():
-            param_name = "{0}::count".format(role)
-            old_count = next(x['value'] for x in self.plan.parameters if
-                             x['name'] == param_name)
-            count = max(int(old_count) - len(role_resources), 0)
-            patch_params.append({'name': param_name, 'value': str(count)})
-            # add instance resource names into removal_policies
-            # so heat knows which instances should be removed
-            removal_param = "{0}::removal_policies".format(role)
-            stack_params[removal_param] = [{
-                'resource_list': [r.resource_name for r in role_resources]
-            }]
-
-        LOG.debug('updating plan %s: %s', self.plan.uuid, patch_params)
-        self.plan = self.tuskarclient.plans.patch(self.plan.uuid, patch_params)
-        return stack_params
+        LOG.debug('stack update params: %s', fields)
+        self.heatclient.stacks.update(**fields)

     def _get_removal_params_from_heat(self, resources_by_role):
         stack_params = {}
         stack = self.heatclient.stacks.get(self.stack_id)
         for role, role_resources in resources_by_role.items():
             param_name = "{0}Count".format(role)
-            old_count = next(v for k, v in stack.parameters.iteritems() if
+            old_count = next(v for k, v in stack.parameters.items() if
                              k == param_name)
             count = max(int(old_count) - len(role_resources), 0)
             stack_params[param_name] = str(count)
@@ -21,18 +21,16 @@ from tripleo_common import scale
 from tripleo_common.tests import base


-def mock_plan():
-    plan = mock.Mock()
-    plan.uuid = '5'
-    plan.name = 'My Plan'
-    plan.parameters = []
-    plan.parameters.append({'name': 'compute-1::count', 'value': '2'})
-    plan.to_dict.return_value = {
+def mock_stack():
+    stack = mock.Mock()
+    stack.name = 'My Stack'
+    stack.parameters = {'ComputeCount': '2'}
+    stack.to_dict.return_value = {
         'uuid': 5,
-        'name': 'My Plan',
-        'parameters': plan.parameters,
+        'name': 'My Stack',
+        'parameters': stack.parameters,
     }
-    return plan
+    return stack


 class ScaleManagerTest(base.TestCase):
|
|||||||
def setUp(self):
|
def setUp(self):
|
||||||
super(ScaleManagerTest, self).setUp()
|
super(ScaleManagerTest, self).setUp()
|
||||||
self.image = collections.namedtuple('image', ['id'])
|
self.image = collections.namedtuple('image', ['id'])
|
||||||
self.tuskarclient = mock.MagicMock()
|
|
||||||
self.tuskarclient.plans.patch.return_value = mock_plan()
|
|
||||||
self.tuskarclient.plans.templates.return_value = {
|
|
||||||
'plan.yaml': 'template body',
|
|
||||||
'environment.yaml': 'resource_registry: {}\n',
|
|
||||||
}
|
|
||||||
self.heatclient = mock.MagicMock()
|
self.heatclient = mock.MagicMock()
|
||||||
self.heatclient.resources.list.return_value = [
|
self.heatclient.resources.list.return_value = [
|
||||||
mock.MagicMock(
|
mock.MagicMock(
|
||||||
@@ -55,7 +47,7 @@ class ScaleManagerTest(base.TestCase):
                                 'stacks/overcloud/123'}],
                 logical_resource_id='logical_id',
                 physical_resource_id='resource_id',
-                resource_type='compute-1'
+                resource_type='Compute'
             ),
             mock.MagicMock(
                 links=[{'rel': 'stack',
@@ -64,7 +56,7 @@ class ScaleManagerTest(base.TestCase):
                                 'stacks/overcloud/124'}],
                 logical_resource_id='node0',
                 physical_resource_id='123',
-                resource_type='compute-1',
+                resource_type='Compute',
                 resource_name='node0',
             )
         ]
@@ -72,69 +64,27 @@ class ScaleManagerTest(base.TestCase):
     @mock.patch('heatclient.common.template_utils.'
                 'process_multiple_environments_and_files')
     @mock.patch('heatclient.common.template_utils.get_template_contents')
-    @mock.patch('tripleo_common.libutils.open', create=True)
-    @mock.patch('tuskarclient.common.utils.find_resource')
-    def test_scaleup(self, mock_find_resource, mock_open,
-                     mock_template_contents, mock_env_files):
-        mock_find_resource.return_value = mock_plan()
-        mock_template_contents.return_value = ({}, 'template body')
+    def test_scaledown(self, mock_get_template_contents, mock_env_files):
+        mock_get_template_contents.return_value = ({}, 'template_body')
         mock_env_files.return_value = ({}, {})
-        manager = scale.ScaleManager(tuskarclient=self.tuskarclient,
-                                     heatclient=self.heatclient,
-                                     stack_id='stack',
-                                     plan_id='plan')
-        manager.scaleup(role='compute-1', num=3)
-        self.tuskarclient.plans.patch.assert_called_once_with(
-            '5', [{'name': 'compute-1::count', 'value': '3'}])
-        self.heatclient.stacks.update.assert_called_once_with(
-            stack_id='stack',
-            template='template body',
-            environment={},
-            existing=True,
-            files={},
-            parameters={})
-        mock_env_files.assert_called_once_with(env_paths=[])
-
-    @mock.patch('tuskarclient.common.utils.find_resource')
-    def test_invalid_scaleup(self, mock_find_resource):
-        mock_find_resource.return_value = mock_plan()
-        manager = scale.ScaleManager(tuskarclient=self.tuskarclient,
-                                     heatclient=self.heatclient,
-                                     stack_id='stack',
-                                     plan_id='plan')
-        self.assertRaises(ValueError, manager.scaleup, 'compute-1', 1)
-
-    @mock.patch('heatclient.common.template_utils.'
-                'process_multiple_environments_and_files')
-    @mock.patch('heatclient.common.template_utils.get_template_contents')
-    @mock.patch('tripleo_common.libutils.open', create=True)
-    @mock.patch('tuskarclient.common.utils.find_resource')
-    def test_scaledown(self, mock_find_resource, mock_open,
-                       mock_template_contents, mock_env_files):
-        mock_find_resource.return_value = mock_plan()
-        mock_template_contents.return_value = ({}, 'template body')
-        mock_env_files.return_value = ({}, {})
-        manager = scale.ScaleManager(tuskarclient=self.tuskarclient,
-                                     heatclient=self.heatclient,
-                                     stack_id='stack',
-                                     plan_id='plan')
+        self.heatclient.stacks.get.return_value = mock_stack()
+        manager = scale.ScaleManager(heatclient=self.heatclient,
+                                     stack_id='stack', tht_dir='/tmp/')
         manager.scaledown(['resource_id'])
-        self.tuskarclient.plans.patch.assert_called_once_with(
-            '5', [{'name': 'compute-1::count', 'value': '1'}])
         self.heatclient.stacks.update.assert_called_once_with(
             stack_id='stack',
-            template='template body',
+            template='template_body',
             environment={},
             existing=True,
             files={},
-            parameters={'compute-1::removal_policies': [
-                {'resource_list': ['node0']}]})
+            parameters={
+                'ComputeCount': '1',
+                'ComputeRemovalPolicies': [
+                    {'resource_list': ['node0']}
+                ]
+            })

-    @mock.patch('tuskarclient.common.utils.find_resource')
-    def test_invalid_scaledown(self, mock_find_resource):
-        mock_find_resource.return_value = mock_plan()
-        manager = scale.ScaleManager(tuskarclient=self.tuskarclient,
-                                     heatclient=self.heatclient,
-                                     stack_id='stack',
-                                     plan_id='plan')
+    def test_invalid_scaledown(self):
+        manager = scale.ScaleManager(heatclient=self.heatclient,
+                                     stack_id='stack')
         self.assertRaises(ValueError, manager.scaledown, 'invalid_resource_id')
@@ -29,38 +29,26 @@ class UpdateManagerTest(base.TestCase):
     @mock.patch('heatclient.common.template_utils.'
                 'process_multiple_environments_and_files')
     @mock.patch('heatclient.common.template_utils.get_template_contents')
-    @mock.patch('tripleo_common.libutils.open', create=True)
-    @mock.patch('tuskarclient.common.utils.find_resource')
-    def test_update(self, mock_find_resource, mock_open,
-                    mock_template_contents, mock_env_files, mock_time):
+    def test_update(self, mock_template_contents, mock_env_files, mock_time):
         heatclient = mock.MagicMock()
         novaclient = mock.MagicMock()
-        tuskarclient = mock.MagicMock()
         mock_time.return_value = 123.5
         heatclient.stacks.get.return_value = mock.MagicMock(
             stack_name='stack', id='stack_id')
-        mock_find_resource.return_value = mock.MagicMock(
-            uuid='plan',
-            parameters=[{'name': 'Compute-1::UpdateIdentifier', 'value': ''}])
         mock_template_contents.return_value = ({}, 'template body')
         mock_env_files.return_value = ({}, {})
-        tuskarclient.plans.templates.return_value = {
-            'plan.yaml': 'template body',
-            'environment.yaml': 'resource_registry: {}\n',
-        }
         update.PackageUpdateManager(
             heatclient=heatclient,
             novaclient=novaclient,
             stack_id='stack_id',
-            tuskarclient=tuskarclient,
-            plan_id='plan'
+            tht_dir='/tmp/'
         ).update()
         params = {
             'existing': True,
             'stack_id': 'stack_id',
             'template': 'template body',
             'files': {},
-            'parameters': {'Compute-1::UpdateIdentifier': '123'},
+            'parameters': {'UpdateIdentifier': 123},
             'environment': {
                 'resource_registry': {
                     'resources': {
@@ -15,31 +15,21 @@

 import logging
 import os
-import re
-import shutil
 import time

 from heatclient.common import template_utils
-from tripleo_common import libutils
 from tripleo_common import stack_update
-from tuskarclient.common import utils as tuskarutils

 LOG = logging.getLogger(__name__)
 TEMPLATE_NAME = 'overcloud-without-mergepy.yaml'
-REGISTRY_NAME = "overcloud-resource-registry-puppet.yaml"


 class PackageUpdateManager(stack_update.StackUpdateManager):
-    def __init__(self, heatclient, novaclient, stack_id, tuskarclient=None,
-                 plan_id=None, tht_dir=None, environment_files=None):
+    def __init__(self, heatclient, novaclient, stack_id,
+                 tht_dir=None, environment_files=None):
         stack = heatclient.stacks.get(stack_id)
-        self.tuskarclient = tuskarclient
-        self.plan_id = plan_id
         self.tht_dir = tht_dir
         self.hook_resource = 'UpdateDeployment'
-        if self.tuskarclient:
-            self.plan = tuskarutils.find_resource(self.tuskarclient.plans,
-                                                  self.plan_id)
         self.environment_files = environment_files
         super(PackageUpdateManager, self).__init__(
             heatclient=heatclient, novaclient=novaclient, stack=stack,
@@ -47,22 +37,13 @@ class PackageUpdateManager(stack_update.StackUpdateManager):
             hook_resource=self.hook_resource)

     def update(self):
-        # time rounded to seconds, we explicitly convert to string because of
-        # tuskar
-        timestamp = str(int(time.time()))
+        # time rounded to seconds
+        timestamp = int(time.time())

-        if self.tuskarclient:
-            stack_params = self._set_update_params(timestamp)
-            self.tht_dir = libutils.save_templates(
-                self.tuskarclient.plans.templates(self.plan.uuid))
-            tpl_name = 'plan.yaml'
-        else:
-            tpl_name = TEMPLATE_NAME
-            stack_params = {'UpdateIdentifier': timestamp}
+        stack_params = {'UpdateIdentifier': timestamp}

-        try:
-            tpl_files, template = template_utils.get_template_contents(
-                template_file=os.path.join(self.tht_dir, tpl_name))
-            env_paths = []
-            if self.environment_files:
-                env_paths.extend(self.environment_files)
+        tpl_files, template = template_utils.get_template_contents(
+            template_file=os.path.join(self.tht_dir, TEMPLATE_NAME))
+        env_paths = []
+        if self.environment_files:
+            env_paths.extend(self.environment_files)
@@ -93,17 +74,3 @@ class PackageUpdateManager(stack_update.StackUpdateManager):
-            LOG.info('updating stack: %s', self.stack.stack_name)
-            LOG.debug('stack update params: %s', fields)
-            self.heatclient.stacks.update(**fields)
-        finally:
-            if self.tuskarclient:
-                if LOG.isEnabledFor(logging.DEBUG):
-                    LOG.debug("Tuskar templates saved in %s", self.tht_dir)
-            else:
-                shutil.rmtree(self.tht_dir)
-
-    def _set_update_params(self, timestamp):
-        # set new update timestamp for each role
-        params = {}
-        for param in self.plan.parameters:
-            if re.match(r".*::UpdateIdentifier", param['name']):
-                params[param['name']] = timestamp
-        return params
+        LOG.info('updating stack: %s', self.stack.stack_name)
+        LOG.debug('stack update params: %s', fields)
+        self.heatclient.stacks.update(**fields)