Remove support for Tuskar

This change removes Tuskar support from tripleo-common. Currently the only
consumer of tripleo-common is python-tripleoclient, which removes its own
Tuskar support in the dependent change referenced below.

Future work in the client will talk to Heat directly, so keeping Tuskar
support in tripleo-common would only add a large maintenance burden for code
that won't be used.

Depends-On: Ic6fd4568054fe91b8fc898728022a7d7ed8cf3cf
Change-Id: If8c2ab70edb5325bde2524a9a5d824605cadcc18
Dougal Matthews 2015-09-15 12:14:27 +01:00
parent fdbff2b7eb
commit 14193ec99b
8 changed files with 90 additions and 286 deletions

README.rst

@@ -2,7 +2,7 @@
 tripleo-common
 ===============================
 
-A common library for TripleO CLI and Tuskar UI.
+A common library for TripleO workflows.
 
 * Free software: Apache license
 * Documentation: http://docs.openstack.org/developer/tripleo-common

requirements.txt

@@ -5,4 +5,3 @@
 pbr>=0.6,!=0.7,<1.0
 Babel>=1.3
 python-heatclient>=0.3.0
-python-tuskarclient>=0.1.17

setup.cfg

@@ -1,6 +1,6 @@
 [metadata]
 name = tripleo-common
-summary = A common library for TripleO CLI and Tuskar UI.
+summary = A common library for TripleO workflows.
 description-file =
     README.rst
 author = OpenStack

tripleo_common/libutils.py (deleted)

@@ -1,41 +0,0 @@
-# Copyright 2015 Red Hat, Inc.
-# All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import logging
-import os
-import tempfile
-
-LOG = logging.getLogger(__name__)
-
-
-def save_templates(templates):
-    output_dir = tempfile.mkdtemp()
-
-    for template_name, template_content in templates.items():
-        # It's possible to organize the role templates and their dependent
-        # files into directories, in which case the template_name will carry
-        # the directory information. If that's the case, first create the
-        # directory structure (if it hasn't already been created by another
-        # file in the templates list).
-        template_dir = os.path.dirname(template_name)
-        output_template_dir = os.path.join(output_dir, template_dir)
-        if template_dir and not os.path.exists(output_template_dir):
-            os.makedirs(output_template_dir)
-
-        filename = os.path.join(output_dir, template_name)
-        with open(filename, 'w+') as template_file:
-            template_file.write(template_content)
-
-    return output_dir
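
Note for downstream consumers: the deleted save_templates helper took a mapping of template paths to contents (the shape returned by Tuskar's plan-templates API) and wrote it into a fresh temporary directory, returning that directory. A minimal standalone sketch of the same behaviour, with an illustrative templates dict, for anyone who still needs it:

import os
import tempfile

# Illustrative stand-in for what tuskarclient's plans.templates() returned:
# keys are relative paths (possibly nested), values are file contents.
templates = {
    'plan.yaml': 'heat_template_version: 2015-04-30\n',
    'environments/base.yaml': 'resource_registry: {}\n',
}

output_dir = tempfile.mkdtemp()
for name, content in templates.items():
    path = os.path.join(output_dir, name)
    # Create nested directories before writing, as save_templates did.
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, 'w') as template_file:
        template_file.write(content)

print(output_dir)  # a tempdir such as /tmp/tmpXXXXXX; the caller cleans it up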

tripleo_common/scale.py

@@ -13,48 +13,28 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
+import collections
 import logging
 import os
-import shutil
 
 from heatclient.common import template_utils
 
-from tripleo_common import libutils
-from tuskarclient.common import utils as tuskarutils
-
 LOG = logging.getLogger(__name__)
 
 TEMPLATE_NAME = 'overcloud-without-mergepy.yaml'
 REGISTRY_NAME = "overcloud-resource-registry-puppet.yaml"
 
 
 class ScaleManager(object):
-    def __init__(self, heatclient, stack_id, tuskarclient=None, plan_id=None,
-                 tht_dir=None, environment_files=None):
-        self.tuskarclient = tuskarclient
+    def __init__(self, heatclient, stack_id, tht_dir=None,
+                 environment_files=None):
         self.heatclient = heatclient
         self.stack_id = stack_id
         self.tht_dir = tht_dir
         self.environment_files = environment_files
-        if self.tuskarclient:
-            self.plan = tuskarutils.find_resource(self.tuskarclient.plans,
-                                                  plan_id)
-
-    def scaleup(self, role, num):
-        LOG.debug('updating role %s count to %d', role, num)
-        param_name = '{0}::count'.format(role)
-        param = next(x for x in self.plan.parameters if
-                     x['name'] == param_name)
-        if num < int(param['value']):
-            raise ValueError("Role %s has already %s nodes, can't set lower "
-                             "value" % (role, param['value']))
-        self.plan = self.tuskarclient.plans.patch(
-            self.plan.uuid,
-            [{'name': param_name, 'value': str(num)}])
-        self._update_stack()
 
     def scaledown(self, instances):
         resources = self.heatclient.resources.list(self.stack_id,
                                                    nested_depth=5)
-        resources_by_role = {}
+        resources_by_role = collections.defaultdict(list)
         instance_list = list(instances)
         for res in resources:
             try:
@@ -69,13 +49,12 @@ class ScaleManager(object):
                 # of nova server)
                 role_resource = next(x for x in resources if
                                      x.physical_resource_id == stack_id)
-                # get tuskar role name from resource_type,
-                # resource_type is in format like "Tuskar::Compute-1"
-                role = role_resource.resource_type.rsplit('::', 1)[-1]
-                if role not in resources_by_role:
-                    resources_by_role[role] = []
+                # Get the role name which is the resource_type in Heat.
+                role = role_resource.resource_type
                 resources_by_role[role].append(role_resource)
 
+        resources_by_role = dict(resources_by_role)
+
         if instance_list:
             raise ValueError(
                 "Couldn't find following instances in stack %s: %s" %
@@ -83,26 +62,14 @@
         # decrease count for each role (or resource group) and set removal
         # policy for each resource group
-        if self.tuskarclient:
-            stack_params = self._get_removal_params_from_plan(
-                resources_by_role)
-        else:
-            stack_params = self._get_removal_params_from_heat(
-                resources_by_role)
+        stack_params = self._get_removal_params_from_heat(resources_by_role)
 
         self._update_stack(parameters=stack_params)
 
     def _update_stack(self, parameters={}):
-        if self.tuskarclient:
-            self.tht_dir = libutils.save_templates(
-                self.tuskarclient.plans.templates(self.plan.uuid))
-            tpl_name = 'plan.yaml'
-        else:
-            tpl_name = TEMPLATE_NAME
-
-        try:
-            tpl_files, template = template_utils.get_template_contents(
-                template_file=os.path.join(self.tht_dir, tpl_name))
+        tpl_files, template = template_utils.get_template_contents(
+            template_file=os.path.join(self.tht_dir, TEMPLATE_NAME))
 
         env_paths = []
         if self.environment_files:
             env_paths.extend(self.environment_files)
@@ -121,39 +88,13 @@
         LOG.debug('stack update params: %s', fields)
         self.heatclient.stacks.update(**fields)
-        finally:
-            if self.tuskarclient:
-                if LOG.isEnabledFor(logging.DEBUG):
-                    LOG.debug("Tuskar templates saved in %s", self.tht_dir)
-                else:
-                    shutil.rmtree(self.tht_dir)
-
-    def _get_removal_params_from_plan(self, resources_by_role):
-        patch_params = []
-        stack_params = {}
-        for role, role_resources in resources_by_role.items():
-            param_name = "{0}::count".format(role)
-            old_count = next(x['value'] for x in self.plan.parameters if
-                             x['name'] == param_name)
-            count = max(int(old_count) - len(role_resources), 0)
-            patch_params.append({'name': param_name, 'value': str(count)})
-            # add instance resource names into removal_policies
-            # so heat knows which instances should be removed
-            removal_param = "{0}::removal_policies".format(role)
-            stack_params[removal_param] = [{
-                'resource_list': [r.resource_name for r in role_resources]
-            }]
-        LOG.debug('updating plan %s: %s', self.plan.uuid, patch_params)
-        self.plan = self.tuskarclient.plans.patch(self.plan.uuid, patch_params)
-        return stack_params
 
     def _get_removal_params_from_heat(self, resources_by_role):
         stack_params = {}
         stack = self.heatclient.stacks.get(self.stack_id)
         for role, role_resources in resources_by_role.items():
             param_name = "{0}Count".format(role)
-            old_count = next(v for k, v in stack.parameters.iteritems() if
+            old_count = next(v for k, v in stack.parameters.items() if
                              k == param_name)
             count = max(int(old_count) - len(role_resources), 0)
             stack_params[param_name] = str(count)
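
With Tuskar gone, ScaleManager works against Heat alone: current counts are read from the stack's <Role>Count parameters, and scaledown sets <Role>RemovalPolicies so Heat deletes exactly the named instances. A minimal usage sketch; the function name, stack name, and template path are illustrative, and heatclient is assumed to be an already-authenticated python-heatclient instance:

from tripleo_common import scale


def remove_overcloud_nodes(heatclient, instance_ids,
                           tht_dir='/usr/share/openstack-tripleo-heat-templates'):
    # ScaleManager now needs only a heatclient and the stack to operate on.
    manager = scale.ScaleManager(heatclient=heatclient,
                                 stack_id='overcloud',
                                 tht_dir=tht_dir)
    # Looks up each instance in the stack (nested_depth=5), groups it by
    # role, then calls heatclient.stacks.update() with parameters such as
    #   {'ComputeCount': '1',
    #    'ComputeRemovalPolicies': [{'resource_list': ['node0']}]}
    manager.scaledown(instance_ids)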

tripleo_common/tests/test_scale.py

@@ -21,18 +21,16 @@ from tripleo_common import scale
 from tripleo_common.tests import base
 
 
-def mock_plan():
-    plan = mock.Mock()
-    plan.uuid = '5'
-    plan.name = 'My Plan'
-    plan.parameters = []
-    plan.parameters.append({'name': 'compute-1::count', 'value': '2'})
-    plan.to_dict.return_value = {
+def mock_stack():
+    stack = mock.Mock()
+    stack.name = 'My Stack'
+    stack.parameters = {'ComputeCount': '2'}
+    stack.to_dict.return_value = {
         'uuid': 5,
-        'name': 'My Plan',
-        'parameters': plan.parameters,
+        'name': 'My Stack',
+        'parameters': stack.parameters,
     }
-    return plan
+    return stack
 
 
 class ScaleManagerTest(base.TestCase):
@@ -40,12 +38,6 @@ class ScaleManagerTest(base.TestCase):
     def setUp(self):
         super(ScaleManagerTest, self).setUp()
         self.image = collections.namedtuple('image', ['id'])
-        self.tuskarclient = mock.MagicMock()
-        self.tuskarclient.plans.patch.return_value = mock_plan()
-        self.tuskarclient.plans.templates.return_value = {
-            'plan.yaml': 'template body',
-            'environment.yaml': 'resource_registry: {}\n',
-        }
         self.heatclient = mock.MagicMock()
         self.heatclient.resources.list.return_value = [
             mock.MagicMock(
@@ -55,7 +47,7 @@
                        'stacks/overcloud/123'}],
                 logical_resource_id='logical_id',
                 physical_resource_id='resource_id',
-                resource_type='compute-1'
+                resource_type='Compute'
             ),
             mock.MagicMock(
                 links=[{'rel': 'stack',
@@ -64,7 +56,7 @@
                        'stacks/overcloud/124'}],
                 logical_resource_id='node0',
                 physical_resource_id='123',
-                resource_type='compute-1',
+                resource_type='Compute',
                 resource_name='node0',
             )
         ]
@@ -72,69 +64,27 @@
     @mock.patch('heatclient.common.template_utils.'
                 'process_multiple_environments_and_files')
     @mock.patch('heatclient.common.template_utils.get_template_contents')
-    @mock.patch('tripleo_common.libutils.open', create=True)
-    @mock.patch('tuskarclient.common.utils.find_resource')
-    def test_scaleup(self, mock_find_resource, mock_open,
-                     mock_template_contents, mock_env_files):
-        mock_find_resource.return_value = mock_plan()
-        mock_template_contents.return_value = ({}, 'template body')
+    def test_scaledown(self, mock_get_template_contents, mock_env_files):
+        mock_get_template_contents.return_value = ({}, 'template_body')
         mock_env_files.return_value = ({}, {})
-        manager = scale.ScaleManager(tuskarclient=self.tuskarclient,
-                                     heatclient=self.heatclient,
-                                     stack_id='stack',
-                                     plan_id='plan')
-        manager.scaleup(role='compute-1', num=3)
-        self.tuskarclient.plans.patch.assert_called_once_with(
-            '5', [{'name': 'compute-1::count', 'value': '3'}])
-        self.heatclient.stacks.update.assert_called_once_with(
-            stack_id='stack',
-            template='template body',
-            environment={},
-            existing=True,
-            files={},
-            parameters={})
-        mock_env_files.assert_called_once_with(env_paths=[])
-
-    @mock.patch('tuskarclient.common.utils.find_resource')
-    def test_invalid_scaleup(self, mock_find_resource):
-        mock_find_resource.return_value = mock_plan()
-        manager = scale.ScaleManager(tuskarclient=self.tuskarclient,
-                                     heatclient=self.heatclient,
-                                     stack_id='stack',
-                                     plan_id='plan')
-        self.assertRaises(ValueError, manager.scaleup, 'compute-1', 1)
-
-    @mock.patch('heatclient.common.template_utils.'
-                'process_multiple_environments_and_files')
-    @mock.patch('heatclient.common.template_utils.get_template_contents')
-    @mock.patch('tripleo_common.libutils.open', create=True)
-    @mock.patch('tuskarclient.common.utils.find_resource')
-    def test_scaledown(self, mock_find_resource, mock_open,
-                       mock_template_contents, mock_env_files):
-        mock_find_resource.return_value = mock_plan()
-        mock_template_contents.return_value = ({}, 'template body')
-        mock_env_files.return_value = ({}, {})
-        manager = scale.ScaleManager(tuskarclient=self.tuskarclient,
-                                     heatclient=self.heatclient,
-                                     stack_id='stack',
-                                     plan_id='plan')
+        self.heatclient.stacks.get.return_value = mock_stack()
+        manager = scale.ScaleManager(heatclient=self.heatclient,
+                                     stack_id='stack', tht_dir='/tmp/')
         manager.scaledown(['resource_id'])
-        self.tuskarclient.plans.patch.assert_called_once_with(
-            '5', [{'name': 'compute-1::count', 'value': '1'}])
         self.heatclient.stacks.update.assert_called_once_with(
             stack_id='stack',
-            template='template body',
+            template='template_body',
             environment={},
             existing=True,
             files={},
-            parameters={'compute-1::removal_policies': [
-                {'resource_list': ['node0']}]})
+            parameters={
+                'ComputeCount': '1',
+                'ComputeRemovalPolicies': [
+                    {'resource_list': ['node0']}
+                ]
+            })
 
-    @mock.patch('tuskarclient.common.utils.find_resource')
-    def test_invalid_scaledown(self, mock_find_resource):
-        mock_find_resource.return_value = mock_plan()
-        manager = scale.ScaleManager(tuskarclient=self.tuskarclient,
-                                     heatclient=self.heatclient,
-                                     stack_id='stack',
-                                     plan_id='plan')
+    def test_invalid_scaledown(self):
+        manager = scale.ScaleManager(heatclient=self.heatclient,
+                                     stack_id='stack')
         self.assertRaises(ValueError, manager.scaledown, 'invalid_resource_id')

tripleo_common/tests/test_update.py

@@ -29,38 +29,26 @@ class UpdateManagerTest(base.TestCase):
     @mock.patch('heatclient.common.template_utils.'
                 'process_multiple_environments_and_files')
     @mock.patch('heatclient.common.template_utils.get_template_contents')
-    @mock.patch('tripleo_common.libutils.open', create=True)
-    @mock.patch('tuskarclient.common.utils.find_resource')
-    def test_update(self, mock_find_resource, mock_open,
-                    mock_template_contents, mock_env_files, mock_time):
+    def test_update(self, mock_template_contents, mock_env_files, mock_time):
         heatclient = mock.MagicMock()
         novaclient = mock.MagicMock()
-        tuskarclient = mock.MagicMock()
         mock_time.return_value = 123.5
         heatclient.stacks.get.return_value = mock.MagicMock(
             stack_name='stack', id='stack_id')
-        mock_find_resource.return_value = mock.MagicMock(
-            uuid='plan',
-            parameters=[{'name': 'Compute-1::UpdateIdentifier', 'value': ''}])
         mock_template_contents.return_value = ({}, 'template body')
         mock_env_files.return_value = ({}, {})
-        tuskarclient.plans.templates.return_value = {
-            'plan.yaml': 'template body',
-            'environment.yaml': 'resource_registry: {}\n',
-        }
         update.PackageUpdateManager(
             heatclient=heatclient,
             novaclient=novaclient,
             stack_id='stack_id',
-            tuskarclient=tuskarclient,
-            plan_id='plan'
+            tht_dir='/tmp/'
         ).update()
         params = {
             'existing': True,
             'stack_id': 'stack_id',
             'template': 'template body',
             'files': {},
-            'parameters': {'Compute-1::UpdateIdentifier': '123'},
+            'parameters': {'UpdateIdentifier': 123},
             'environment': {
                 'resource_registry': {
                     'resources': {

tripleo_common/update.py

@@ -15,31 +15,21 @@
 import logging
 import os
-import re
-import shutil
 import time
 
 from heatclient.common import template_utils
 
-from tripleo_common import libutils
 from tripleo_common import stack_update
-from tuskarclient.common import utils as tuskarutils
 
 LOG = logging.getLogger(__name__)
 
 TEMPLATE_NAME = 'overcloud-without-mergepy.yaml'
 REGISTRY_NAME = "overcloud-resource-registry-puppet.yaml"
 
 
 class PackageUpdateManager(stack_update.StackUpdateManager):
-    def __init__(self, heatclient, novaclient, stack_id, tuskarclient=None,
-                 plan_id=None, tht_dir=None, environment_files=None):
+    def __init__(self, heatclient, novaclient, stack_id,
+                 tht_dir=None, environment_files=None):
         stack = heatclient.stacks.get(stack_id)
-        self.tuskarclient = tuskarclient
-        self.plan_id = plan_id
         self.tht_dir = tht_dir
         self.hook_resource = 'UpdateDeployment'
-        if self.tuskarclient:
-            self.plan = tuskarutils.find_resource(self.tuskarclient.plans,
-                                                  self.plan_id)
         self.environment_files = environment_files
         super(PackageUpdateManager, self).__init__(
             heatclient=heatclient, novaclient=novaclient, stack=stack,
@@ -47,22 +37,13 @@ class PackageUpdateManager(stack_update.StackUpdateManager):
             hook_resource=self.hook_resource)
 
     def update(self):
-        # time rounded to seconds, we explicitly convert to string because of
-        # tuskar
-        timestamp = str(int(time.time()))
+        # time rounded to seconds
+        timestamp = int(time.time())
 
-        if self.tuskarclient:
-            stack_params = self._set_update_params(timestamp)
-            self.tht_dir = libutils.save_templates(
-                self.tuskarclient.plans.templates(self.plan.uuid))
-            tpl_name = 'plan.yaml'
-        else:
-            tpl_name = TEMPLATE_NAME
+        stack_params = {'UpdateIdentifier': timestamp}
 
-        try:
-            tpl_files, template = template_utils.get_template_contents(
-                template_file=os.path.join(self.tht_dir, tpl_name))
+        tpl_files, template = template_utils.get_template_contents(
+            template_file=os.path.join(self.tht_dir, TEMPLATE_NAME))
 
         env_paths = []
         if self.environment_files:
             env_paths.extend(self.environment_files)
@@ -93,17 +74,3 @@ class PackageUpdateManager(stack_update.StackUpdateManager):
         LOG.info('updating stack: %s', self.stack.stack_name)
         LOG.debug('stack update params: %s', fields)
         self.heatclient.stacks.update(**fields)
-        finally:
-            if self.tuskarclient:
-                if LOG.isEnabledFor(logging.DEBUG):
-                    LOG.debug("Tuskar templates saved in %s", self.tht_dir)
-                else:
-                    shutil.rmtree(self.tht_dir)
-
-    def _set_update_params(self, timestamp):
-        # set new update timestamp for each role
-        params = {}
-        for param in self.plan.parameters:
-            if re.match(r".*::UpdateIdentifier", param['name']):
-                params[param['name']] = timestamp
-        return params
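
After this change a package update is plain Heat: PackageUpdateManager stamps the stack with a single UpdateIdentifier parameter and re-submits the templates from tht_dir, with the UpdateDeployment hook resource left to the base StackUpdateManager to pace the rollout. A minimal usage sketch; the function name and defaults are illustrative, and both clients are assumed to be already-authenticated python-heatclient / python-novaclient instances:

from tripleo_common import update


def start_package_update(heatclient, novaclient,
                         tht_dir='/usr/share/openstack-tripleo-heat-templates'):
    manager = update.PackageUpdateManager(heatclient=heatclient,
                                          novaclient=novaclient,
                                          stack_id='overcloud',
                                          tht_dir=tht_dir)
    # update() builds stack_params = {'UpdateIdentifier': int(time.time())}
    # and calls heatclient.stacks.update() with the rendered templates.
    manager.update()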