Browse Source

Merge "openstack overcloud node delete --baremetal-deployment" into stable/train

changes/30/701830/7
Zuul Gerrit Code Review 2 weeks ago
parent
commit
f93fbd8c66
2 changed files with 349 additions and 5 deletions
  1. +244
    -0
      tripleoclient/tests/v1/overcloud_node/test_overcloud_node.py
  2. +105
    -5
      tripleoclient/v1/overcloud_node.py

+ 244
- 0
tripleoclient/tests/v1/overcloud_node/test_overcloud_node.py View File

@@ -165,6 +165,250 @@ class TestDeleteNode(fakes.TestDeleteNode):
self.assertRaises(exceptions.DeploymentError,
self.cmd.take_action, parsed_args)

@mock.patch('tripleoclient.workflows.baremetal.expand_roles',
            autospec=True)
@mock.patch('tripleoclient.workflows.baremetal.undeploy_roles',
            autospec=True)
def test_node_delete_baremetal_deployment(self, mock_undeploy_roles,
                                          mock_expand_roles):
    """Delete nodes described by a --baremetal-deployment file.

    Checks that the command expands the roles file twice (once to list
    the instances to unprovision, once to translate them to heat
    resources), scales down the stack with the matching resource ids,
    and finally undeploys the roles.
    """
    # Simulate a successful workflow execution over the websocket.
    self.websocket.wait_for_messages.return_value = iter([{
        "execution_id": "IDID",
        "status": "SUCCESS",
        "message": "Success.",
    }])
    roles_data = [{
        'name': 'Compute',
        'count': 5,
        'instances': [{
            'name': 'baremetal-2',
            'hostname': 'overcast-compute-0',
            'provisioned': False
        }],
    }, {
        'name': 'Controller',
        'count': 2,
        'instances': [{
            'name': 'baremetal-1',
            'hostname': 'overcast-controller-1',
            'provisioned': False
        }]
    }]

    # First expand_roles call (provisioned=False) lists the instances
    # to delete; the second (provisioned=True) yields the environment
    # used to map hostnames onto heat resource indexes.
    unprovision_expansion = {
        'instances': [{
            'name': 'baremetal-1',
            'hostname': 'overcast-controller-1'
        }, {
            'name': 'baremetal-2',
            'hostname': 'overcast-compute-0'
        }]
    }
    translate_expansion = {
        'environment': {
            'parameter_defaults': {
                'ComputeRemovalPolicies': [{
                    'resource_list': [0]
                }],
                'ControllerRemovalPolicies': [{
                    'resource_list': [1]
                }]
            }
        }
    }
    mock_expand_roles.side_effect = [
        unprovision_expansion,
        translate_expansion,
    ]

    self.stack_name.return_value = mock.Mock(stack_name="overcast")
    res_list = self.app.client_manager.orchestration.resources.list
    res_list.return_value = [
        mock.Mock(resource_type=res_type,
                  parent_resource=parent,
                  physical_resource_id=res_id)
        for res_type, parent, res_id in (
            ('OS::TripleO::ComputeServer', '0', 'aaaa'),
            ('OS::TripleO::ComputeServer', '1', 'bbbb'),
            ('OS::TripleO::ControllerServer', '0', 'cccc'),
            ('OS::TripleO::ControllerServer', '1', 'dddd'),
            ('OS::TripleO::ControllerServer', '2', 'eeee'),
        )
    ]

    with tempfile.NamedTemporaryFile(mode='w') as inp:
        yaml.dump(roles_data, inp, encoding='utf-8')
        inp.flush()

        argslist = ['--baremetal-deployment', inp.name, '--templates',
                    '--stack', 'overcast', '--timeout', '90', '--yes']
        verifylist = [
            ('stack', 'overcast'),
            ('baremetal_deployment', inp.name)
        ]
        parsed_args = self.check_parser(self.cmd, argslist, verifylist)

        self.cmd.take_action(parsed_args)

        # Verify
        res_list.assert_called_once_with('overcast', nested_depth=5)
        mock_expand_roles.assert_has_calls([
            mock.call(
                self.app.client_manager,
                provisioned=False,
                roles=roles_data,
                stackname='overcast'
            ),
            mock.call(
                self.app.client_manager,
                provisioned=True,
                roles=roles_data,
                stackname='overcast'
            )
        ])
        self.workflow.executions.create.assert_called_with(
            'tripleo.scale.v1.delete_node',
            workflow_input={
                'plan_name': 'overcast',
                'nodes': ['aaaa', 'dddd'],
                'timeout': 90
            })
        mock_undeploy_roles.assert_called_once_with(
            self.app.client_manager,
            roles=roles_data,
            plan='overcast')

@mock.patch('tripleoclient.workflows.baremetal.expand_roles',
            autospec=True)
def test_nodes_to_delete(self, mock_expand_roles):
    """_nodes_to_delete renders the unprovision candidates as a table.

    The expected literal is aligned to the column widths implied by the
    table border (23- and 13-character fields), matching cliff's
    TableFormatter output.
    """
    bm_yaml = [{
        'name': 'Compute',
        'count': 5,
        'instances': [{
            'name': 'baremetal-2',
            'hostname': 'overcast-compute-0',
            'provisioned': False
        }],
    }, {
        'name': 'Controller',
        'count': 2,
        'instances': [{
            'name': 'baremetal-1',
            'hostname': 'overcast-controller-1',
            'provisioned': False
        }]
    }]
    # The expansion with provisioned=False yields the instances that
    # the deployment file marks for removal.
    mock_expand_roles.return_value = {
        'instances': [{
            'name': 'baremetal-1',
            'hostname': 'overcast-controller-1'
        }, {
            'name': 'baremetal-2',
            'hostname': 'overcast-compute-0'
        }]
    }
    argslist = ['--baremetal-deployment', '/foo/bm_deploy.yaml']
    verifylist = [
        ('baremetal_deployment', '/foo/bm_deploy.yaml')
    ]
    parsed_args = self.check_parser(self.cmd, argslist, verifylist)
    result = self.cmd._nodes_to_delete(parsed_args, bm_yaml)
    expected = '''+-----------------------+-------------+
| hostname              | name        |
+-----------------------+-------------+
| overcast-controller-1 | baremetal-1 |
| overcast-compute-0    | baremetal-2 |
+-----------------------+-------------+
'''
    self.assertEqual(expected, result)

@mock.patch('tripleoclient.workflows.baremetal.expand_roles',
            autospec=True)
def test_translate_nodes_to_resources(self, mock_expand_roles):
    """Removal-policy indexes are mapped to physical resource ids.

    Compute index 0 -> 'aaaa' and Controller index 1 -> 'dddd' per the
    mocked removal policies and stack resource listing.
    """
    roles_data = [{
        'name': 'Compute',
        'count': 5,
        'instances': [{
            'name': 'baremetal-2',
            'hostname': 'overcast-compute-0',
            'provisioned': False
        }],
    }, {
        'name': 'Controller',
        'count': 2,
        'instances': [{
            'name': 'baremetal-1',
            'hostname': 'overcast-controller-1',
            'provisioned': False
        }]
    }]

    # Five server resources across the two roles; parent_resource is
    # the per-role index that the removal policies refer to.
    res_list = self.app.client_manager.orchestration.resources.list
    res_list.return_value = [
        mock.Mock(resource_type=res_type,
                  parent_resource=parent,
                  physical_resource_id=res_id)
        for res_type, parent, res_id in (
            ('OS::TripleO::ComputeServer', '0', 'aaaa'),
            ('OS::TripleO::ComputeServer', '1', 'bbbb'),
            ('OS::TripleO::ControllerServer', '0', 'cccc'),
            ('OS::TripleO::ControllerServer', '1', 'dddd'),
            ('OS::TripleO::ControllerServer', '2', 'eeee'),
        )
    ]

    mock_expand_roles.return_value = {
        'environment': {
            'parameter_defaults': {
                'ComputeRemovalPolicies': [{
                    'resource_list': [0]
                }],
                'ControllerRemovalPolicies': [{
                    'resource_list': [1]
                }]
            }
        }
    }

    argslist = ['--baremetal-deployment', '/foo/bm_deploy.yaml']
    verifylist = [
        ('baremetal_deployment', '/foo/bm_deploy.yaml')
    ]
    parsed_args = self.check_parser(self.cmd, argslist, verifylist)
    result = self.cmd._translate_nodes_to_resources(
        parsed_args, roles_data)
    self.assertEqual(['aaaa', 'dddd'], result)


class TestProvideNode(fakes.TestOvercloudNode):



+ 105
- 5
tripleoclient/v1/overcloud_node.py View File

@@ -23,6 +23,7 @@ from cliff.formatters import table
from osc_lib import exceptions as oscexc
from osc_lib.i18n import _
from osc_lib import utils
import six
import yaml

from tripleoclient import command
@@ -40,8 +41,16 @@ class DeleteNode(command.Command):

def get_parser(self, prog_name):
parser = super(DeleteNode, self).get_parser(prog_name)
parser.add_argument('nodes', metavar='<node>', nargs="+",
help=_('Node ID(s) to delete'))
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('nodes', metavar='<node>', nargs="*",
default=[],
help=_('Node ID(s) to delete (otherwise specified '
'in the --baremetal-deployment file)'))
group.add_argument('-b', '--baremetal-deployment',
metavar='<BAREMETAL DEPLOYMENT FILE>',
help=_('Configuration file describing the '
'baremetal deployment'))

parser.add_argument('--stack', dest='stack',
help=_('Name or ID of heat stack to scale '
'(default=Env: OVERCLOUD_STACK_NAME)'),
@@ -79,10 +88,96 @@ class DeleteNode(command.Command):
action="store_true")
return parser

def _nodes_to_delete(self, parsed_args, roles):
    """Return a table of the nodes the deployment file marks for removal.

    Expands *roles* with provisioned=False to obtain the instances to
    unprovision. Prints a notice and returns None when the expansion
    yields no instances.
    """
    expansion = baremetal.expand_roles(
        self.app.client_manager,
        roles=roles,
        stackname=parsed_args.stack,
        provisioned=False)
    instances = expansion.get('instances', [])
    if not instances:
        print('No nodes to unprovision')
        return

    # TableFormatter reads these cliff parsed-args attributes; fake
    # just the ones it needs.
    TableArgs = collections.namedtuple(
        'TableArgs', ['print_empty', 'max_width', 'fit_width'])
    table_args = TableArgs(print_empty=True, max_width=80, fit_width=True)

    rows = [(inst.get('hostname', ''), inst.get('name', ''))
            for inst in instances]

    buf = six.StringIO()
    table.TableFormatter().emit_list(
        column_names=['hostname', 'name'],
        data=rows,
        stdout=buf,
        parsed_args=table_args
    )
    return buf.getvalue()

def _translate_nodes_to_resources(self, parsed_args, roles):
    """Map unprovisioned baremetal nodes to heat server resource ids.

    Expands *roles* with provisioned=True to obtain the per-role
    RemovalPolicies parameters, then returns the physical resource ids
    of every server resource whose per-role index appears in its
    role's removal list.
    """
    # Map each role's server resource type back to the role name.
    server_types = {
        'OS::TripleO::%sServer' % role['name']: role['name']
        for role in roles
    }
    expansion = baremetal.expand_roles(
        self.app.client_manager,
        roles=roles,
        stackname=parsed_args.stack,
        provisioned=True)

    parameters = expansion.get(
        'environment', {}).get('parameter_defaults', {})

    # For each role, collect the indexes of nodes slated for removal
    # from that role's <Role>RemovalPolicies parameter.
    removal_indexes = {}
    for role_name in server_types.values():
        policies = parameters.get('%sRemovalPolicies' % role_name, [])
        if policies:
            removal_indexes[role_name] = policies[0].get(
                'resource_list', [])
        else:
            removal_indexes[role_name] = []

    clients = self.app.client_manager
    stack_resources = clients.orchestration.resources.list(
        parsed_args.stack, nested_depth=5)

    # A server resource is selected when its index (its parent
    # resource name) is in its role's removal list.
    return [
        res.physical_resource_id
        for res in stack_resources
        if res.resource_type in server_types
        and int(res.parent_resource) in removal_indexes[
            server_types[res.resource_type]]
    ]

def take_action(self, parsed_args):
self.log.debug("take_action(%s)" % parsed_args)
clients = self.app.client_manager

if parsed_args.baremetal_deployment:
with open(parsed_args.baremetal_deployment, 'r') as fp:
roles = yaml.safe_load(fp)

nodes_text = self._nodes_to_delete(parsed_args, roles)
if nodes_text:
nodes = self._translate_nodes_to_resources(
parsed_args, roles)
print(nodes_text)
else:
return
else:
nodes = parsed_args.nodes
nodes_text = '\n'.join('- %s' % node for node in nodes)
if not parsed_args.yes:
confirm = oooutils.prompt_user_for_confirmation(
message=_("Are you sure you want to delete these overcloud "
@@ -99,17 +194,22 @@ class DeleteNode(command.Command):
raise InvalidConfiguration("stack {} not found".format(
parsed_args.stack))

nodes = '\n'.join('- %s' % node for node in parsed_args.nodes)
print("Deleting the following nodes from stack {stack}:\n{nodes}"
.format(stack=stack.stack_name, nodes=nodes))
.format(stack=stack.stack_name, nodes=nodes_text))

scale.scale_down(
clients,
stack.stack_name,
parsed_args.nodes,
nodes,
parsed_args.timeout
)

if parsed_args.baremetal_deployment:
baremetal.undeploy_roles(
self.app.client_manager,
roles=roles,
plan=parsed_args.stack)


class ProvideNode(command.Command):
"""Mark nodes as available based on UUIDs or current 'manageable' state."""


Loading…
Cancel
Save