Clarify minor update CLI and split minor and major classes

This is the first step, which defines the minor-update-related
classes and entry points:

    openstack overcloud update prepare --container-registry-file ...

For running the no-op heat stack update to refresh the stack outputs, and

    openstack overcloud update run --nodes FOO --playbook all

For running the minor update ansible playbooks on specified
nodes.
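
As a sketch of the intended flow (the registry file name and node
roles below are illustrative, not part of this change):

    openstack overcloud update prepare --templates \
        --container-registry-file my-fake-registry.yaml
    openstack overcloud update run --nodes Controller
    openstack overcloud update run --nodes Compute

The --playbook option defaults to 'all', which runs
update_steps_playbook.yaml and then deploy_steps_playbook.yaml on the
given nodes, one node at a time.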

A child review will add the openstack overcloud upgrade prepare
and openstack overcloud upgrade run classes and entry points:
https://review.openstack.org/545365

Co-Authored-By: Marios Andreou <marios@redhat.com>
Change-Id: I32c672dffc876250b9949f4fc4ff77c648186e4d
Mathieu Bultel 2018-02-15 17:25:49 +01:00 committed by Marios Andreou
parent f55ad2c80c
commit 6956e07e59
5 changed files with 232 additions and 120 deletions

@@ -87,8 +87,9 @@ openstack.tripleoclient.v1 =
overcloud_role_list = tripleoclient.v1.overcloud_roles:RoleList
overcloud_roles_generate = tripleoclient.v1.overcloud_roles:RolesGenerate
overcloud_support_report_collect = tripleoclient.v1.overcloud_support:ReportExecute
overcloud_update_stack = tripleoclient.v1.overcloud_update:UpdateOvercloud
overcloud_upgrade = tripleoclient.v1.overcloud_update:UpgradeOvercloud
overcloud_update_prepare = tripleoclient.v1.overcloud_update:UpdatePrepare
overcloud_update_run = tripleoclient.v1.overcloud_update:UpdateRun
overcloud_upgrade_converge = tripleoclient.v1.overcloud_update:UpgradeConvergeOvercloud
overcloud_execute = tripleoclient.v1.overcloud_execute:RemoteExecute
overcloud_generate_fencing = tripleoclient.v1.overcloud_parameters:GenerateFencingParameters
undercloud_deploy = tripleoclient.v1.undercloud_deploy:DeployUndercloud

@@ -36,3 +36,7 @@ PUPPET_BASE = "/etc/puppet/"
# Update Queue
UPDATE_QUEUE = 'update'
STACK_TIMEOUT = 240
# The default minor update ansible playbooks generated from heat stack output
MINOR_UPDATE_PLAYBOOKS = ['update_steps_playbook.yaml',
'deploy_steps_playbook.yaml']

@@ -39,13 +39,23 @@ class FakeObjectClient(object):
return
class TestOvercloudUpdate(utils.TestCommand):
class TestOvercloudUpdatePrepare(utils.TestCommand):
def setUp(self):
super(TestOvercloudUpdate, self).setUp()
super(TestOvercloudUpdatePrepare, self).setUp()
self.app.client_manager.auth_ref = mock.Mock(auth_token="TOKEN")
self.app.client_manager.baremetal = mock.Mock()
self.app.client_manager.orchestration = mock.Mock()
self.app.client_manager.tripleoclient = FakeClientWrapper()
self.app.client_manager.workflow_engine = mock.Mock()
class TestOvercloudUpdateRun(utils.TestCommand):
def setUp(self):
super(TestOvercloudUpdateRun, self).setUp()
self.app.client_manager.auth_ref = mock.Mock(auth_token="TOKEN")
self.app.client_manager.tripleoclient = FakeClientWrapper()
self.app.client_manager.workflow_engine = mock.Mock()

@@ -15,21 +15,22 @@
import mock
from osc_lib.tests.utils import ParserException
from tripleoclient import constants
from tripleoclient import exceptions
from tripleoclient.tests.v1.overcloud_update import fakes
from tripleoclient.v1 import overcloud_update
class TestOvercloudUpdate(fakes.TestOvercloudUpdate):
class TestOvercloudUpdatePrepare(fakes.TestOvercloudUpdatePrepare):
def setUp(self):
super(TestOvercloudUpdate, self).setUp()
super(TestOvercloudUpdatePrepare, self).setUp()
# Get the command object to test
app_args = mock.Mock()
app_args.verbose_level = 1
self.cmd = overcloud_update.UpdateOvercloud(self.app, app_args)
self.cmd = overcloud_update.UpdatePrepare(self.app, app_args)
uuid4_patcher = mock.patch('uuid.uuid4', return_value="UUID4")
self.mock_uuid4 = uuid4_patcher.start()
@@ -37,7 +38,7 @@ class TestOvercloudUpdate(fakes.TestOvercloudUpdate):
@mock.patch('tripleoclient.utils.get_stack',
autospec=True)
@mock.patch('tripleoclient.v1.overcloud_update.UpdateOvercloud.log',
@mock.patch('tripleoclient.v1.overcloud_update.UpdatePrepare.log',
autospec=True)
@mock.patch('tripleoclient.workflows.package_update.update',
autospec=True)
@@ -56,12 +57,11 @@ class TestOvercloudUpdate(fakes.TestOvercloudUpdate):
mock_abspath.return_value = '/home/fake/my-fake-registry.yaml'
mock_yaml.return_value = {'fake_container': 'fake_value'}
argslist = ['--stack', 'overcloud', '--init-update', '--templates',
argslist = ['--stack', 'overcloud', '--templates',
'--container-registry-file', 'my-fake-registry.yaml']
verifylist = [
('stack', 'overcloud'),
('templates', constants.TRIPLEO_HEAT_TEMPLATES),
('init_update', True),
('container_registry_file', 'my-fake-registry.yaml')
]
@@ -88,12 +88,11 @@ class TestOvercloudUpdate(fakes.TestOvercloudUpdate):
mock_update.side_effect = exceptions.DeploymentError()
mock_abspath.return_value = '/home/fake/my-fake-registry.yaml'
mock_yaml.return_value = {'fake_container': 'fake_value'}
argslist = ['--stack', 'overcloud', '--init-update', '--templates',
argslist = ['--stack', 'overcloud', '--templates',
'--container-registry-file', 'my-fake-registry.yaml']
verifylist = [
('stack', 'overcloud'),
('templates', constants.TRIPLEO_HEAT_TEMPLATES),
('init_update', True),
('container_registry_file', 'my-fake-registry.yaml')
]
parsed_args = self.check_parser(self.cmd, argslist, verifylist)
@@ -101,18 +100,32 @@ class TestOvercloudUpdate(fakes.TestOvercloudUpdate):
self.assertRaises(exceptions.DeploymentError,
self.cmd.take_action, parsed_args)
class TestOvercloudUpdateRun(fakes.TestOvercloudUpdateRun):
def setUp(self):
super(TestOvercloudUpdateRun, self).setUp()
# Get the command object to test
app_args = mock.Mock()
app_args.verbose_level = 1
self.cmd = overcloud_update.UpdateRun(self.app, app_args)
uuid4_patcher = mock.patch('uuid.uuid4', return_value="UUID4")
self.mock_uuid4 = uuid4_patcher.start()
self.addCleanup(self.mock_uuid4.stop)
@mock.patch('tripleoclient.workflows.package_update.update_ansible',
autospec=True)
@mock.patch('os.path.expanduser')
@mock.patch('oslo_concurrency.processutils.execute')
@mock.patch('six.moves.builtins.open')
def test_update_ansible(self, mock_open, mock_execute,
mock_expanduser, update_ansible):
def test_update_with_playbook(self, mock_open, mock_execute,
mock_expanduser, update_ansible):
mock_expanduser.return_value = '/home/fake/'
argslist = ['--stack', 'overcloud', '--nodes', 'Compute', '--playbook',
argslist = ['--nodes', 'Compute', '--playbook',
'fake-playbook.yaml']
verifylist = [
('stack', 'overcloud'),
('nodes', 'Compute'),
('static_inventory', None),
('playbook', 'fake-playbook.yaml')
@@ -129,3 +142,74 @@ class TestOvercloudUpdate(fakes.TestOvercloudUpdate):
playbook='fake-playbook.yaml',
ansible_queue_name=constants.UPDATE_QUEUE
)
@mock.patch('tripleoclient.workflows.package_update.update_ansible',
autospec=True)
@mock.patch('os.path.expanduser')
@mock.patch('oslo_concurrency.processutils.execute')
@mock.patch('six.moves.builtins.open')
def test_update_with_all_playbooks(self, mock_open, mock_execute,
mock_expanduser, update_ansible):
mock_expanduser.return_value = '/home/fake/'
argslist = ['--nodes', 'Compute', '--playbook', 'all']
verifylist = [
('nodes', 'Compute'),
('static_inventory', None),
('playbook', 'all')
]
parsed_args = self.check_parser(self.cmd, argslist, verifylist)
with mock.patch('os.path.exists') as mock_exists:
mock_exists.return_value = True
self.cmd.take_action(parsed_args)
for book in constants.MINOR_UPDATE_PLAYBOOKS:
update_ansible.assert_any_call(
self.app.client_manager,
nodes='Compute',
inventory_file=mock_open().read(),
playbook=book,
ansible_queue_name=constants.UPDATE_QUEUE
)
@mock.patch('tripleoclient.workflows.package_update.update_ansible',
autospec=True)
@mock.patch('os.path.expanduser')
@mock.patch('oslo_concurrency.processutils.execute')
@mock.patch('six.moves.builtins.open')
def test_update_with_all_nodes_default_all_playbooks(
self, mock_open, mock_execute, mock_expanduser, update_ansible):
mock_expanduser.return_value = '/home/fake/'
argslist = ['--nodes', 'all']
verifylist = [
('static_inventory', None),
('playbook', 'all'),
('nodes', 'all')
]
parsed_args = self.check_parser(self.cmd, argslist, verifylist)
with mock.patch('os.path.exists') as mock_exists:
mock_exists.return_value = True
self.cmd.take_action(parsed_args)
for book in constants.MINOR_UPDATE_PLAYBOOKS:
update_ansible.assert_any_call(
self.app.client_manager,
nodes=None,
inventory_file=mock_open().read(),
playbook=book,
ansible_queue_name=constants.UPDATE_QUEUE
)
@mock.patch('tripleoclient.workflows.package_update.update_ansible',
autospec=True)
@mock.patch('os.path.expanduser')
@mock.patch('oslo_concurrency.processutils.execute')
@mock.patch('six.moves.builtins.open')
def test_update_with_no_nodes(self, mock_open, mock_execute,
mock_expanduser, update_ansible):
mock_expanduser.return_value = '/home/fake/'
argslist = []
verifylist = [
('static_inventory', None),
('playbook', 'all')
]
self.assertRaises(ParserException, lambda: self.check_parser(
self.cmd, argslist, verifylist))

@@ -20,6 +20,7 @@ import yaml
from osc_lib.i18n import _
from oslo_concurrency import processutils
from tripleoclient import command
from tripleoclient import constants
from tripleoclient import exceptions
from tripleoclient import utils as oooutils
@@ -27,10 +28,15 @@ from tripleoclient.v1.overcloud_deploy import DeployOvercloud
from tripleoclient.workflows import package_update
class UpdateOvercloud(DeployOvercloud):
"""Updates packages on overcloud nodes"""
class UpdatePrepare(DeployOvercloud):
"""Run heat stack update for overcloud nodes to refresh heat stack outputs.
log = logging.getLogger(__name__ + ".UpdateOvercloud")
The heat stack outputs are what we use later on to generate ansible
playbooks which deliver the minor update workflow. This is used as the
first step for a minor update of your overcloud.
"""
log = logging.getLogger(__name__ + ".MinorUpdatePrepare")
# enable preservation of all important files (plan env, user env,
# roles/network data, user files) so that we don't have to pass
@@ -38,14 +44,7 @@ class UpdateOvercloud(DeployOvercloud):
_keep_env_on_update = True
def get_parser(self, prog_name):
parser = super(UpdateOvercloud, self).get_parser(prog_name)
parser.add_argument('--init-update',
dest='init_update',
action='store_true',
help=_("Run a heat stack update to generate the "
"ansible playbooks."
"Needs to be run only once"),
)
parser = super(UpdatePrepare, self).get_parser(prog_name)
parser.add_argument('--container-registry-file',
dest='container_registry_file',
default=None,
@@ -57,20 +56,83 @@ class UpdateOvercloud(DeployOvercloud):
default="/usr/share/ceph-ansible"
"/site-docker.yml.sample",
help=_('Path to switch the ceph-ansible playbook '
'used for update. This value should be set '
'during the init-minor-update step.')
'used for update. '),
)
return parser
def take_action(self, parsed_args):
self.log.debug("take_action(%s)" % parsed_args)
clients = self.app.client_manager
stack = oooutils.get_stack(clients.orchestration,
parsed_args.stack)
stack_name = stack.stack_name
container_registry = parsed_args.container_registry_file
# Update the container registry:
if container_registry:
with open(os.path.abspath(container_registry)) as content:
registry = yaml.load(content.read())
else:
self.log.warning(
"You have not provided a container registry file. Note "
"that none of the containers on your environement will be "
"updated. If you want to update your container you have "
"to re-run this command and provide the registry file "
"with: --container-registry-file option.")
registry = None
# Run update
ceph_ansible_playbook = parsed_args.ceph_ansible_playbook
# Run Overcloud deploy (stack update)
# In case of update and upgrade we need to force the
# update_plan_only. The heat stack update is done by the
# package_update mistral action
parsed_args.update_plan_only = True
super(UpdatePrepare, self).take_action(parsed_args)
package_update.update(clients, container=stack_name,
container_registry=registry,
ceph_ansible_playbook=ceph_ansible_playbook)
package_update.get_config(clients, container=stack_name)
print("Update init on stack {0} complete.".format(
parsed_args.stack))
class UpdateRun(command.Command):
"""Run minor update ansible playbooks on Overcloud nodes"""
log = logging.getLogger(__name__ + ".MinorUpdateRun")
def get_parser(self, prog_name):
parser = super(UpdateRun, self).get_parser(prog_name)
parser.add_argument('--nodes',
action="store",
default=None,
help=_("Nodes to update. If none and the "
"--init-update set to false, it "
"will run the update on all nodes.")
required=True,
help=_("Required parameter. This specifies the "
"overcloud nodes to run the minor update "
"playbooks on. You can use the name of "
"a specific node, or the name of the role "
"(e.g. Compute). You may also use the "
"special value 'all' to run the minor "
"on all nodes. In all cases the minor "
"update ansible playbook is executed on "
"one node at a time (with serial 1)")
)
parser.add_argument('--playbook',
action="store",
default="update_steps_playbook.yaml",
help=_("Playbook to use for update/upgrade.")
default="all",
help=_("Ansible playbook to use for the minor "
"update. Defaults to the special value "
"\'all\' which causes all the update "
"playbooks to be executed. That is the "
"update_steps_playbook.yaml and then the"
"deploy_steps_playbook.yaml. "
"Set this to each of those playbooks in "
"consecutive invocations of this command "
"if you prefer to run them manually. Note: "
"make sure to run both those playbooks so "
"that all services are updated and running "
"with the target version configuration.")
)
parser.add_argument('--static-inventory',
dest='static_inventory',
@@ -87,87 +149,47 @@ class UpdateOvercloud(DeployOvercloud):
self.log.debug("take_action(%s)" % parsed_args)
clients = self.app.client_manager
stack = oooutils.get_stack(clients.orchestration,
parsed_args.stack)
stack_name = stack.stack_name
container_registry = parsed_args.container_registry_file
init_update = parsed_args.init_update
if init_update:
# Update the container registry:
if container_registry:
with open(os.path.abspath(container_registry)) as content:
registry = yaml.load(content.read())
else:
self.log.warning(
"You have not provided a container registry file. Note "
"that none of the containers on your environement will be "
"updated. If you want to update your container you have "
"to re-run this command and provide the registry file "
"with: --container-registry-file option.")
registry = None
# Run update
ceph_ansible_playbook = parsed_args.ceph_ansible_playbook
# Run Overcloud deploy (stack update)
# In case of update and upgrade we need to force the
# update_plan_only. The heat stack update is done by the
# packag_update mistral action
parsed_args.update_plan_only = True
super(UpdateOvercloud, self).take_action(parsed_args)
package_update.update(clients, container=stack_name,
container_registry=registry,
ceph_ansible_playbook=ceph_ansible_playbook)
package_update.get_config(clients, container=stack_name)
print("Update init on stack {0} complete.".format(
parsed_args.stack))
# Run ansible:
nodes = parsed_args.nodes
if nodes == 'all':
# unset this, the ansible action deals with unset 'limithosts'
nodes = None
playbook = parsed_args.playbook
inventory_file = parsed_args.static_inventory
if inventory_file is None:
inventory_file = '%s/%s' % (os.path.expanduser('~'),
'tripleo-ansible-inventory.yaml')
try:
processutils.execute(
'/usr/bin/tripleo-ansible-inventory',
'--static-yaml-inventory', inventory_file)
except processutils.ProcessExecutionError as e:
message = "Failed to generate inventory: %s" % str(e)
raise exceptions.InvalidConfiguration(message)
if os.path.exists(inventory_file):
inventory = open(inventory_file, 'r').read()
else:
# Run ansible:
nodes = parsed_args.nodes
playbook = parsed_args.playbook
inventory_file = parsed_args.static_inventory
if inventory_file is None:
inventory_file = '%s/%s' % (os.path.expanduser('~'),
'tripleo-ansible-inventory.yaml')
try:
processutils.execute(
'/usr/bin/tripleo-ansible-inventory',
'--static-yaml-inventory', inventory_file)
except processutils.ProcessExecutionError as e:
message = "Failed to generate inventory: %s" % str(e)
raise exceptions.InvalidConfiguration(message)
if os.path.exists(inventory_file):
inventory = open(inventory_file, 'r').read()
else:
raise exceptions.InvalidConfiguration(
"Inventory file %s can not be found." % inventory_file)
raise exceptions.InvalidConfiguration(
"Inventory file %s can not be found." % inventory_file)
update_playbooks = [playbook]
if playbook == "all":
update_playbooks = constants.MINOR_UPDATE_PLAYBOOKS
for book in update_playbooks:
self.log.debug("Running minor update ansible playbook %s " % book)
package_update.update_ansible(
clients, nodes=nodes,
inventory_file=inventory,
playbook=playbook,
playbook=book,
ansible_queue_name=constants.UPDATE_QUEUE)
class UpgradeOvercloud(UpdateOvercloud):
"""Upgrade Overcloud Nodes"""
class UpgradeConvergeOvercloud(DeployOvercloud):
"""Converge the upgrade on Overcloud Nodes"""
log = logging.getLogger(__name__ + ".UpgradeOvercloud")
log = logging.getLogger(__name__ + ".UpgradeConvergeOvercloud")
def get_parser(self, prog_name):
parser = super(UpgradeOvercloud, self).get_parser(prog_name)
parser.add_argument('--converge',
dest='converge',
action='store_true',
help=_("Upgrade converge step"),
)
parser.add_argument('--upgrade-converge-environment-file',
dest='upgrade_converge_file',
default="%senvironments/%s" % (
constants.TRIPLEO_HEAT_TEMPLATES,
constants.UPGRADE_CONVERGE_FILE),
help=_("Upgrade environment file which perform "
"the converge of the Overcloud"),
)
parser = super(UpgradeConvergeOvercloud, self).get_parser(prog_name)
return parser
def take_action(self, parsed_args):
@@ -176,18 +198,9 @@ class UpgradeOvercloud(UpdateOvercloud):
stack = oooutils.get_stack(clients.orchestration,
parsed_args.stack)
stack_name = stack.stack_name
converge = parsed_args.converge
if converge:
converge_file = parsed_args.upgrade_converge_file
# Add the converge file to the user environment:
if converge_file:
with open(os.path.abspath(converge_file)) as conv_content:
converge_env = yaml.load(conv_content.read())
# Run converge steps
package_update.converge_nodes(clients,
converge_env=converge_env,
container=stack_name)
else:
super(UpgradeOvercloud, self).take_action(parsed_args)
parsed_args.update_plan_only = True
super(UpgradeConvergeOvercloud, self).take_action(parsed_args)
# Run converge steps
package_update.converge_nodes(clients, container=stack_name)