Implement minor update workflow with config download

This change refactors the minor update to be driven by Ansible
playbooks downloaded via config-download.
It depends on a Mistral change which gathers the Ansible update
tasks and runs them via Mistral.
The user will have two ways of performing the minor update:
  - the stack update command, which performs an automatic minor
    update (or updates only a given set of nodes)
  - running it manually via Ansible on the undercloud
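
An illustrative sketch (assuming an already-initialised client manager; the
helper names and keyword arguments come from the code added by this change)
of how the two paths map onto the new workflow calls:

    import uuid

    import yaml

    from tripleoclient.workflows import package_update

    def init_minor_update(clients, stack_name, registry_file):
        # One-off step: pass the container registry data to the plan and
        # generate the minor update heat config output (--init-minor-update).
        with open(registry_file) as f:
            registry = yaml.safe_load(f)
        package_update.update(clients, container=stack_name,
                              container_registry=registry,
                              queue_name=str(uuid.uuid4()))

    def update_nodes(clients, nodes, inventory_file,
                     playbook='update_steps_playbook.yaml'):
        # Per-node step: run the downloaded update playbook on the given
        # nodes through the tripleo.package_update.v1.update_nodes workflow.
        with open(inventory_file) as f:
            inventory = f.read()
        return package_update.update_ansible(clients, nodes=nodes,
                                             inventory_file=inventory,
                                             playbook=playbook,
                                             queue_name=str(uuid.uuid4()))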

Closes-Bug: #1715557
Closes-Bug: #1723108
Change-Id: I4fcd443d975894a1da0286b19506d00682c5768c
Mathieu Bultel 2017-07-26 17:40:46 +02:00 committed by mathieu bultel
parent 9fb431463d
commit 8a7da9fe26
7 changed files with 182 additions and 442 deletions


@@ -93,7 +93,6 @@ openstack.tripleoclient.v1 =
     overcloud_role_list = tripleoclient.v1.overcloud_roles:RoleList
     overcloud_roles_generate = tripleoclient.v1.overcloud_roles:RolesGenerate
     overcloud_support_report_collect = tripleoclient.v1.overcloud_support:ReportExecute
-    overcloud_update_clear_breakpoints = tripleoclient.v1.overcloud_update:ClearBreakpointsOvercloud
     overcloud_update_stack = tripleoclient.v1.overcloud_update:UpdateOvercloud
     overcloud_execute = tripleoclient.v1.overcloud_execute:RemoteExecute
     overcloud_generate_fencing = tripleoclient.v1.overcloud_parameters:GenerateFencingParameters


@@ -1,91 +0,0 @@
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
FAKE_STACK = {
'parameters': {
'ControllerCount': 1,
'ComputeCount': 1,
'ObjectStorageCount': 0,
'BlockStorageCount': 0,
'CephStorageCount': 0,
},
'stack_name': 'overcloud',
'stack_status': "CREATE_COMPLETE",
'outputs': [
{'output_key': 'RoleConfig',
'output_value': {
'foo_config': 'foo'}},
{'output_key': 'RoleData',
'output_value': {
'FakeCompute': {
'config_settings': {'nova::compute::libvirt::services::'
'libvirt_virt_type': 'qemu'},
'global_config_settings': {},
'logging_groups': ['root', 'neutron', 'nova'],
'logging_sources': [{'path': '/var/log/nova/nova-compute.log',
'type': 'tail'}],
'monitoring_subscriptions': ['overcloud-nova-compute'],
'service_config_settings': {'horizon': {'neutron::'
'plugins': ['ovs']}
},
'service_metadata_settings': None,
'service_names': ['nova_compute', 'fake_service'],
'step_config': ['include ::tripleo::profile::base::sshd',
'include ::timezone'],
'upgrade_batch_tasks': [],
'upgrade_tasks': [{'name': 'Stop fake service',
'service': 'name=fake state=stopped',
'tags': 'step1',
'when': 'existingcondition'},
{'name': 'Stop nova-compute service',
'service': 'name=openstack-nova-compute '
'state=stopped',
'tags': 'step1',
'when': ['existing', 'list']}]
},
'FakeController': {
'config_settings': {'tripleo::haproxy::user': 'admin'},
'global_config_settings': {},
'logging_groups': ['root', 'keystone', 'neutron'],
'logging_sources': [{'path': '/var/log/keystone/keystone.log',
'type': 'tail'}],
'monitoring_subscriptions': ['overcloud-keystone'],
'service_config_settings': {'horizon': {'neutron::'
'plugins': ['ovs']}
},
'service_metadata_settings': None,
'service_names': ['pacemaker', 'fake_service'],
'step_config': ['include ::tripleo::profile::base::sshd',
'include ::timezone'],
'upgrade_batch_tasks': [],
'upgrade_tasks': [{'name': 'Stop fake service',
'service': 'name=fake state=stopped',
'tags': 'step1'}]}}}]}
def create_to_dict_mock(**kwargs):
mock_with_to_dict = mock.Mock()
mock_with_to_dict.configure_mock(**kwargs)
mock_with_to_dict.to_dict.return_value = kwargs
return mock_with_to_dict
def create_tht_stack(**kwargs):
stack = FAKE_STACK.copy()
stack.update(kwargs)
return create_to_dict_mock(**stack)


@@ -10,15 +10,10 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
import fixtures
import mock import mock
import os
from mock import call
from mock import patch
from osc_lib.tests import utils from osc_lib.tests import utils
from tripleoclient.tests.v1.overcloud_config import fakes
from tripleoclient.v1 import overcloud_config from tripleoclient.v1 import overcloud_config
@@ -32,138 +27,14 @@ class TestOvercloudConfig(utils.TestCommand):
self.app.client_manager.orchestration = mock.Mock() self.app.client_manager.orchestration = mock.Mock()
self.workflow = self.app.client_manager.workflow_engine self.workflow = self.app.client_manager.workflow_engine
@patch.object(overcloud_config.DownloadConfig, '_mkdir') @mock.patch('tripleo_common.utils.config.Config.download_config')
@patch.object(overcloud_config.DownloadConfig, '_open_file') def test_overcloud_download_config(self, mock_config):
@mock.patch('tempfile.mkdtemp', autospec=True)
def test_overcloud_config_generate_config(self,
mock_tmpdir,
mock_open,
mock_mkdir):
arglist = ['--name', 'overcloud', '--config-dir', '/tmp'] arglist = ['--name', 'overcloud', '--config-dir', '/tmp']
verifylist = [ verifylist = [
('name', 'overcloud'), ('name', 'overcloud'),
('config_dir', '/tmp') ('config_dir', '/tmp')
] ]
config_type_list = ['config_settings', 'global_config_settings',
'logging_sources', 'monitoring_subscriptions',
'service_config_settings',
'service_metadata_settings',
'service_names',
'upgrade_batch_tasks', 'upgrade_tasks']
fake_role = [role for role in
fakes.FAKE_STACK['outputs'][1]['output_value']]
parsed_args = self.check_parser(self.cmd, arglist, verifylist) parsed_args = self.check_parser(self.cmd, arglist, verifylist)
clients = self.app.client_manager
orchestration_client = clients.orchestration
orchestration_client.stacks.get.return_value = fakes.create_tht_stack()
mock_tmpdir.return_value = "/tmp/tht"
self.cmd.take_action(parsed_args) self.cmd.take_action(parsed_args)
expected_mkdir_calls = [call('/tmp/tht/%s' % r) for r in fake_role] mock_config.assert_called_once_with('overcloud', '/tmp', None)
mock_mkdir.assert_has_calls(expected_mkdir_calls, any_order=True)
expected_calls = []
for config in config_type_list:
for role in fake_role:
if config == 'step_config':
expected_calls += [call('/tmp/tht/%s/%s.pp' %
(role, config))]
else:
expected_calls += [call('/tmp/tht/%s/%s.yaml' %
(role, config))]
mock_open.assert_has_calls(expected_calls, any_order=True)
@patch.object(overcloud_config.DownloadConfig, '_mkdir')
@patch.object(overcloud_config.DownloadConfig, '_open_file')
@mock.patch('tempfile.mkdtemp', autospec=True)
def test_overcloud_config_one_config_type(self,
mock_tmpdir,
mock_open,
mock_mkdir):
arglist = ['--name', 'overcloud', '--config-dir', '/tmp',
'--config-type', ['config_settings']]
verifylist = [
('name', 'overcloud'),
('config_dir', '/tmp'),
('config_type', ['config_settings'])
]
expected_config_type = 'config_settings'
fake_role = [role for role in
fakes.FAKE_STACK['outputs'][1]['output_value']]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
clients = self.app.client_manager
orchestration_client = clients.orchestration
orchestration_client.stacks.get.return_value = fakes.create_tht_stack()
mock_tmpdir.return_value = "/tmp/tht"
self.cmd.take_action(parsed_args)
expected_mkdir_calls = [call('/tmp/tht/%s' % r) for r in fake_role]
expected_calls = [call('/tmp/tht/%s/%s.yaml'
% (r, expected_config_type))
for r in fake_role]
mock_mkdir.assert_has_calls(expected_mkdir_calls, any_order=True)
mock_open.assert_has_calls(expected_calls, any_order=True)
@mock.patch('os.mkdir')
@mock.patch('six.moves.builtins.open')
@mock.patch('tempfile.mkdtemp', autospec=True)
def test_overcloud_config_wrong_config_type(self, mock_tmpdir,
mock_open, mock_mkdir):
arglist = [
'--name', 'overcloud',
'--config-dir',
'/tmp', '--config-type', ['bad_config']]
verifylist = [
('name', 'overcloud'),
('config_dir', '/tmp'),
('config_type', ['bad_config'])
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
clients = self.app.client_manager
mock_tmpdir.return_value = "/tmp/tht"
orchestration_client = clients.orchestration
orchestration_client.stacks.get.return_value = fakes.create_tht_stack()
self.assertRaises(
KeyError,
self.cmd.take_action, parsed_args)
@mock.patch('tripleoclient.utils.get_role_data', autospec=True)
def test_overcloud_config_upgrade_tasks(self, mock_get_role_data):
clients = self.app.client_manager
orchestration_client = clients.orchestration
orchestration_client.stacks.get.return_value = fakes.create_tht_stack()
self.tmp_dir = self.useFixture(fixtures.TempDir()).path
fake_role = [role for role in
fakes.FAKE_STACK['outputs'][1]['output_value']]
expected_tasks = {'FakeController': [{'name': 'Stop fake service',
'service': 'name=fake '
'state=stopped',
'tags': 'step1',
'when': 'step|int == 1'}],
'FakeCompute': [{'name': 'Stop fake service',
'service':
'name=fake state=stopped',
'tags': 'step1',
'when': ['existingcondition',
'step|int == 1']},
{'name': 'Stop nova-'
'compute service',
'service':
'name=openstack-nova-'
'compute state=stopped',
'tags': 'step1',
'when': ['existing',
'list', 'step|int == 1']}]}
mock_get_role_data.return_value = fake_role
for role in fake_role:
filedir = os.path.join(self.tmp_dir, role)
os.makedirs(filedir)
filepath = os.path.join(filedir, "upgrade_tasks_playbook.yaml")
playbook_tasks = self.cmd._write_playbook_get_tasks(
fakes.FAKE_STACK['outputs'][1]['output_value'][role]
['upgrade_tasks'], role, filepath)
self.assertTrue(os.path.isfile(filepath))
self.assertEqual(expected_tasks[role], playbook_tasks)
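
For reference, the behaviour removed from this test module (and from the
DownloadConfig command further below) folded each task's step tag into an
Ansible 'when' clause before writing the per-role playbooks. Using the fake
upgrade task from the removed fakes module above, the transformation was:

    # Task as defined in the role's upgrade_tasks:
    task = {'name': 'Stop fake service',
            'service': 'name=fake state=stopped',
            'tags': 'step1',
            'when': 'existingcondition'}

    # Same task after the step tag has been folded into 'when':
    task = {'name': 'Stop fake service',
            'service': 'name=fake state=stopped',
            'tags': 'step1',
            'when': ['existingcondition', 'step|int == 1']}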


@@ -14,6 +14,7 @@
# #
import mock import mock
import uuid
from tripleoclient import exceptions from tripleoclient import exceptions
from tripleoclient.tests.v1.overcloud_update import fakes from tripleoclient.tests.v1.overcloud_update import fakes
@@ -30,44 +31,89 @@ class TestOvercloudUpdate(fakes.TestOvercloudUpdate):
app_args.verbose_level = 1 app_args.verbose_level = 1
self.cmd = overcloud_update.UpdateOvercloud(self.app, app_args) self.cmd = overcloud_update.UpdateOvercloud(self.app, app_args)
uuid4_patcher = mock.patch('uuid.uuid4', return_value="UUID4")
self.mock_uuid4 = uuid4_patcher.start()
self.addCleanup(self.mock_uuid4.stop)
@mock.patch('tripleoclient.utils.get_stack', @mock.patch('tripleoclient.utils.get_stack',
autospec=True) autospec=True)
@mock.patch('tripleoclient.v1.overcloud_update.UpdateOvercloud.log', @mock.patch('tripleoclient.v1.overcloud_update.UpdateOvercloud.log',
autospec=True) autospec=True)
@mock.patch('tripleoclient.workflows.package_update.update_and_wait', @mock.patch('tripleoclient.workflows.package_update.update',
autospec=True) autospec=True)
def test_update_out(self, mock_update_wait, mock_logger, mock_get_stack): @mock.patch('six.moves.builtins.open')
mock_update_wait.return_value = 'COMPLETE' @mock.patch('os.path.abspath')
@mock.patch('yaml.load')
def test_update_out(self, mock_yaml, mock_abspath, mock_open, mock_update,
mock_logger, mock_get_stack):
mock_stack = mock.Mock() mock_stack = mock.Mock()
mock_stack.stack_name = 'mystack' mock_stack.stack_name = 'mystack'
mock_get_stack.return_value = mock_stack mock_get_stack.return_value = mock_stack
# mock_logger.return_value = mock.Mock() mock_abspath.return_value = '/home/fake/my-fake-registry.yaml'
mock_yaml.return_value = {'fake_container': 'fake_value'}
argslist = ['overcloud', '-i', '--templates'] argslist = ['--stack', 'overcloud', '--init-minor-update',
'--container-registry-file', 'my-fake-registry.yaml']
verifylist = [ verifylist = [
('stack', 'overcloud'), ('stack', 'overcloud'),
('interactive', True), ('init_minor_update', True),
('templates', '/usr/share/openstack-tripleo-heat-templates/') ('container_registry_file', 'my-fake-registry.yaml')
] ]
parsed_args = self.check_parser(self.cmd, argslist, verifylist) parsed_args = self.check_parser(self.cmd, argslist, verifylist)
self.cmd.take_action(parsed_args) self.cmd.take_action(parsed_args)
mock_update_wait.assert_called_once_with( mock_update.assert_called_once_with(
mock_logger,
self.app.client_manager, self.app.client_manager,
mock_stack, 'mystack', 1, 0) container='mystack',
container_registry={'fake_container': 'fake_value'},
queue_name=str(uuid.uuid4()))
@mock.patch('tripleoclient.workflows.package_update.update_and_wait', @mock.patch('tripleoclient.workflows.package_update.update',
autospec=True) autospec=True)
def test_update_failed(self, mock_update_wait): @mock.patch('six.moves.builtins.open')
mock_update_wait.return_value = 'FAILED' @mock.patch('os.path.abspath')
argslist = ['overcloud', '-i', '--templates'] @mock.patch('yaml.load')
def test_update_failed(self, mock_yaml, mock_abspath, mock_open,
mock_update):
mock_update.side_effect = exceptions.DeploymentError()
mock_abspath.return_value = '/home/fake/my-fake-registry.yaml'
mock_yaml.return_value = {'fake_container': 'fake_value'}
argslist = ['--stack', 'overcloud', '--init-minor-update',
'--container-registry-file', 'my-fake-registry.yaml']
verifylist = [ verifylist = [
('stack', 'overcloud'), ('stack', 'overcloud'),
('interactive', True), ('init_minor_update', True),
('templates', '/usr/share/openstack-tripleo-heat-templates/') ('container_registry_file', 'my-fake-registry.yaml')
] ]
parsed_args = self.check_parser(self.cmd, argslist, verifylist) parsed_args = self.check_parser(self.cmd, argslist, verifylist)
self.assertRaises(exceptions.DeploymentError, self.assertRaises(exceptions.DeploymentError,
self.cmd.take_action, parsed_args) self.cmd.take_action, parsed_args)
@mock.patch('tripleoclient.workflows.package_update.update_ansible',
autospec=True)
@mock.patch('os.path.expanduser')
@mock.patch('oslo_concurrency.processutils.execute')
@mock.patch('six.moves.builtins.open')
def test_update_ansible(self, mock_open, mock_execute,
mock_expanduser, update_ansible):
mock_expanduser.return_value = '/home/fake/'
argslist = ['--stack', 'overcloud', '--nodes', 'Compute', '--playbook',
'fake-playbook.yaml']
verifylist = [
('stack', 'overcloud'),
('nodes', 'Compute'),
('generate_inventory', True),
('static_inventory', 'tripleo-hosts-inventory'),
('playbook', 'fake-playbook.yaml')
]
parsed_args = self.check_parser(self.cmd, argslist, verifylist)
with mock.patch('os.path.exists') as mock_exists:
mock_exists.return_value = True
self.cmd.take_action(parsed_args)
update_ansible.assert_called_once_with(
self.app.client_manager,
nodes='Compute',
inventory_file=mock_open().read(),
playbook='fake-playbook.yaml',
queue_name=str(uuid.uuid4()))


@@ -12,15 +12,11 @@
import logging import logging
import os import os
import re
import six
import tempfile
import yaml
from osc_lib.command import command from osc_lib.command import command
from osc_lib.i18n import _ from osc_lib.i18n import _
from tripleoclient import utils from tripleo_common.utils import config as ooo_config
class DownloadConfig(command.Command): class DownloadConfig(command.Command):
@@ -49,107 +45,20 @@ class DownloadConfig(command.Command):
'--config-type', '--config-type',
dest='config_type', dest='config_type',
type=list, type=list,
default=None,
help=_('Type of object config to be extract from the deployment, ' help=_('Type of object config to be extract from the deployment, '
'defaults to all keys available'), 'defaults to all keys available'),
) )
return parser return parser
@staticmethod
def _open_file(path):
return os.fdopen(os.open(path,
os.O_WRONLY | os.O_CREAT, 0o600),
'w')
def _step_tags_to_when(self, sorted_tasks):
for task in sorted_tasks:
tag = task.get('tags', '')
match = re.search('step([0-9]+)', tag)
if match:
step = match.group(1)
whenexpr = task.get('when', None)
if whenexpr:
# Handle when: foo and a list of when conditionals
if not isinstance(whenexpr, list):
whenexpr = [whenexpr]
for w in whenexpr:
when_exists = re.search('step|int == [0-9]', w)
if when_exists:
break
if when_exists:
# Skip to the next task,
# there is an existing 'step|int == N'
continue
whenexpr.append("step|int == %s" % step)
task['when'] = whenexpr
else:
task.update({"when": "step|int == %s" % step})
def _write_playbook_get_tasks(self, tasks, role, filepath):
playbook = []
sorted_tasks = sorted(tasks, key=lambda x: x.get('tags', None))
self._step_tags_to_when(sorted_tasks)
playbook.append({'name': '%s playbook' % role,
'hosts': role,
'tasks': sorted_tasks})
with self._open_file(filepath) as conf_file:
yaml.safe_dump(playbook, conf_file, default_flow_style=False)
return sorted_tasks
def _mkdir(self, dirname):
if not os.path.exists(dirname):
try:
os.mkdir(dirname, 0o700)
except OSError as e:
message = 'Failed to create: %s, error: %s' % (dirname,
str(e))
raise OSError(message)
def take_action(self, parsed_args): def take_action(self, parsed_args):
self.log.debug("take_action(%s)" % parsed_args) self.log.debug("take_action(%s)" % parsed_args)
# Get clients
clients = self.app.client_manager clients = self.app.client_manager
name = parsed_args.name name = parsed_args.name
config_dir = parsed_args.config_dir config_dir = parsed_args.config_dir
self._mkdir(config_dir) config_type = parsed_args.config_type
stack = utils.get_stack(clients.orchestration, name) # Get config
tmp_path = tempfile.mkdtemp(prefix='tripleo-', config = ooo_config.Config(clients.orchestration)
suffix='-config', return config.download_config(name, config_dir, config_type)
dir=config_dir)
self.log.info("Generating configuration under the directory: "
"%s" % tmp_path)
role_data = utils.get_role_data(stack)
for role_name, role in six.iteritems(role_data):
role_path = os.path.join(tmp_path, role_name)
self._mkdir(role_path)
for config in parsed_args.config_type or role.keys():
if config == 'step_config':
filepath = os.path.join(role_path, 'step_config.pp')
with self._open_file(filepath) as step_config:
step_config.write('\n'.join(step for step in
role[config]
if step is not None))
else:
if 'upgrade_tasks' in config:
filepath = os.path.join(role_path, '%s_playbook.yaml' %
config)
data = self._write_playbook_get_tasks(
role[config], role_name, filepath)
else:
try:
data = role[config]
except KeyError as e:
message = 'Invalid key: %s, error: %s' % (config,
str(e))
raise KeyError(message)
filepath = os.path.join(role_path, '%s.yaml' % config)
with self._open_file(filepath) as conf_file:
yaml.safe_dump(data,
conf_file,
default_flow_style=False)
role_config = utils.get_role_config(stack)
for config_name, config in six.iteritems(role_config):
conf_path = os.path.join(tmp_path, config_name + ".yaml")
with self._open_file(conf_path) as conf_file:
conf_file.write(config)
print("The TripleO configuration has been successfully generated "
"into: {0}".format(tmp_path))


@@ -14,11 +14,13 @@
# #
import logging import logging
import os
import uuid import uuid
import yaml
from osc_lib.command import command from osc_lib.command import command
from osc_lib.i18n import _ from osc_lib.i18n import _
from osc_lib import utils from oslo_concurrency import processutils
from tripleoclient import constants from tripleoclient import constants
from tripleoclient import exceptions from tripleoclient import exceptions
@@ -33,36 +35,54 @@ class UpdateOvercloud(command.Command):
def get_parser(self, prog_name): def get_parser(self, prog_name):
parser = super(UpdateOvercloud, self).get_parser(prog_name) parser = super(UpdateOvercloud, self).get_parser(prog_name)
parser.add_argument('stack', nargs='?', parser.add_argument('--stack',
nargs='?',
dest='stack',
help=_('Name or ID of heat stack to scale ' help=_('Name or ID of heat stack to scale '
'(default=Env: OVERCLOUD_STACK_NAME)'), '(default=Env: OVERCLOUD_STACK_NAME)'),
default=utils.env('OVERCLOUD_STACK_NAME')) default='overcloud'
parser.add_argument( )
'--templates', nargs='?', const=constants.TRIPLEO_HEAT_TEMPLATES, parser.add_argument('--templates',
help=_("The directory containing the Heat templates to deploy. " nargs='?',
"This argument is deprecated. The command now utilizes " default=constants.TRIPLEO_HEAT_TEMPLATES,
"a deployment plan, which should be updated prior to " help=_("The directory containing the Heat"
"running this command, should that be required. Otherwise " "templates to deploy. "),
"this argument will be silently ignored."), )
) parser.add_argument('--init-minor-update',
parser.add_argument('-i', '--interactive', dest='interactive', dest='init_minor_update',
action='store_true') action='store_true',
parser.add_argument( help=_("Init the minor update heat config output."
'-e', '--environment-file', metavar='<HEAT ENVIRONMENT FILE>', "Needs to be run only once"),
action='append', dest='environment_files', )
help=_("Environment files to be passed to the heat stack-create " parser.add_argument('--container-registry-file',
"or heat stack-update command. (Can be specified more than " dest='container_registry_file',
"once.) This argument is deprecated. The command now " default=None,
"utilizes a deployment plan, which should be updated prior " help=_("File which contains the container "
"to running this command, should that be required. " "registry data for the update"),
"Otherwise this argument will be silently ignored."), )
) parser.add_argument('--nodes',
parser.add_argument( action="store",
'--answers-file', default=None,
help=_('Path to a YAML file with arguments and parameters. ' help=_('Nodes to update.')
'DEPRECATED. Not necessary when used with a plan. Will ' )
'be silently ignored, and removed in the "P" release.') parser.add_argument('--playbook',
) action="store",
default="update_steps_playbook.yaml",
help=_('Playbook to use for update')
)
parser.add_argument('--generate-inventory',
dest='generate_inventory',
action='store_true',
default=True,
help=_("Generate inventory for the ansible "
"playbook"),
)
parser.add_argument('--static-inventory',
dest='static_inventory',
action="store",
default='tripleo-hosts-inventory',
help=_('Path to the static inventory to use')
)
return parser return parser
def take_action(self, parsed_args): def take_action(self, parsed_args):
@@ -73,47 +93,47 @@ class UpdateOvercloud(command.Command):
parsed_args.stack) parsed_args.stack)
stack_name = stack.stack_name stack_name = stack.stack_name
if parsed_args.interactive: container_registry = parsed_args.container_registry_file
timeout = 0
status = package_update.update_and_wait( if parsed_args.init_minor_update:
self.log, clients, stack, stack_name, # Update the container registry:
self.app_args.verbose_level, timeout) if container_registry:
if status not in ['COMPLETE']: with open(os.path.abspath(container_registry)) as content:
raise exceptions.DeploymentError("Package update failed.") registry = yaml.load(content.read())
else: else:
raise exceptions.InvalidConfiguration(
"You need to provide a container registry file in order "
"to update your current containers deployed.")
# Execute minor update
package_update.update(clients, container=stack_name, package_update.update(clients, container=stack_name,
container_registry=registry,
queue_name=str(uuid.uuid4())) queue_name=str(uuid.uuid4()))
print("Package update on stack {0} initiated.".format(
parsed_args.stack))
print("Minor update init on stack {0} complete.".format(
class ClearBreakpointsOvercloud(command.Command): parsed_args.stack))
"""Clears a set of breakpoints on a currently updating overcloud""" # Run ansible:
nodes = parsed_args.nodes
log = logging.getLogger(__name__ + ".ClearBreakpointsOvercloud") playbook = parsed_args.playbook
if nodes is not None:
def get_parser(self, prog_name): inventory_path = '%s/%s' % (os.path.expanduser('~'),
parser = super(ClearBreakpointsOvercloud, self).get_parser(prog_name) parsed_args.static_inventory)
parser.add_argument('stack', nargs='?', if parsed_args.generate_inventory:
help=_('Name or ID of heat stack to clear a ' try:
'breakpoint or set of breakpoints ' processutils.execute('/bin/tripleo-ansible-inventory',
'(default=Env: OVERCLOUD_STACK_NAME)'), '--static-inventory', inventory_path)
default=utils.env('OVERCLOUD_STACK_NAME')) except processutils.ProcessExecutionError as e:
parser.add_argument('--ref', message = "Failed to generate inventory file: %s" % str(e)
action='append', raise exceptions.InvalidConfiguration(message)
dest='refs', if os.path.exists(inventory_path):
help=_('Breakpoint to clear')) inventory = open(inventory_path, 'r').read()
else:
return parser raise exceptions.InvalidConfiguration(
"Inventory file missing, provide an inventory file or "
def take_action(self, parsed_args): "generate an inventory by using the --generate-inventory "
self.log.debug("take_action(%s)" % parsed_args) "option")
clients = self.app.client_manager output = package_update.update_ansible(
clients, nodes=nodes,
heat = clients.orchestration inventory_file=inventory,
playbook=playbook,
stack = oooutils.get_stack(heat, parsed_args.stack) queue_name=str(uuid.uuid4()))
print (output)
package_update.clear_breakpoints(clients, stack_id=stack.id,
refs=parsed_args.refs)


@@ -12,11 +12,13 @@
from __future__ import print_function from __future__ import print_function
import pprint import pprint
import uuid import time
from tripleo_common import update as update_common from heatclient.common import event_utils
from openstackclient import shell
from tripleoclient import exceptions
from tripleoclient import utils
from tripleoclient import utils as oooutils
from tripleoclient.workflows import base from tripleoclient.workflows import base
@@ -24,6 +26,7 @@ def update(clients, **workflow_input):
workflow_client = clients.workflow_engine workflow_client = clients.workflow_engine
tripleoclients = clients.tripleoclient tripleoclients = clients.tripleoclient
queue_name = workflow_input['queue_name'] queue_name = workflow_input['queue_name']
plan_name = workflow_input['container']
with tripleoclients.messaging_websocket(queue_name) as ws: with tripleoclients.messaging_websocket(queue_name) as ws:
execution = base.start_workflow( execution = base.start_workflow(
@@ -35,48 +38,31 @@ def update(clients, **workflow_input):
for payload in base.wait_for_messages(workflow_client, ws, execution): for payload in base.wait_for_messages(workflow_client, ws, execution):
assert payload['status'] == "SUCCESS", pprint.pformat(payload) assert payload['status'] == "SUCCESS", pprint.pformat(payload)
orchestration_client = clients.orchestration
def update_and_wait(log, clients, stack, plan_name, verbose_level, events = event_utils.get_events(orchestration_client,
timeout=None): stack_id=plan_name,
"""Start the update and wait for it to give breakpoints or finish""" event_args={'sort_dir': 'desc',
'limit': 1})
marker = events[0].id if events else None
log.info("Performing Heat stack update") time.sleep(10)
queue_name = str(uuid.uuid4()) create_result = utils.wait_for_stack_ready(
orchestration_client, plan_name, marker, 'UPDATE', 1)
workflow_input = { if not create_result:
"container": plan_name, shell.OpenStackShell().run(["stack", "failures", "list", plan_name])
"queue_name": queue_name, raise exceptions.DeploymentError("Heat Stack update failed.")
}
if timeout is not None:
workflow_input['timeout'] = timeout
update(clients, **workflow_input)
update_manager = update_common.PackageUpdateManager(
heatclient=clients.orchestration,
novaclient=clients.compute,
stack_id=plan_name,
stack_fields={})
update_manager.do_interactive_update()
stack = oooutils.get_stack(clients.orchestration,
plan_name)
return stack.status
def clear_breakpoints(clients, **workflow_input): def update_ansible(clients, **workflow_input):
workflow_client = clients.workflow_engine workflow_client = clients.workflow_engine
tripleoclients = clients.tripleoclient tripleoclients = clients.tripleoclient
workflow_input['queue_name'] = str(uuid.uuid4())
queue_name = workflow_input['queue_name'] queue_name = workflow_input['queue_name']
with tripleoclients.messaging_websocket(queue_name) as ws: with tripleoclients.messaging_websocket(queue_name) as ws:
execution = base.start_workflow( execution = base.start_workflow(
workflow_client, workflow_client,
'tripleo.package_update.v1.clear_breakpoints', 'tripleo.package_update.v1.update_nodes',
workflow_input=workflow_input workflow_input=workflow_input
) )