Merge "Add 'openstack overcloud node import' command"

Jenkins 2016-08-26 22:54:36 +00:00 committed by Gerrit Code Review
commit 105352b8e6
6 changed files with 258 additions and 59 deletions
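The new command registers nodes from an environment file and can optionally chain introspection and making the nodes available. A rough usage sketch based only on the options added below (file names are illustrative, not part of this change):

    openstack overcloud node import instackenv.json
    openstack overcloud node import --introspect --provide instackenv.json
    openstack overcloud node import --no-deploy-image --instance-boot-option netboot nodes.yaml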


@@ -67,6 +67,7 @@ openstack.tripleoclient.v1 =
     overcloud_image_build = tripleoclient.v1.overcloud_image:BuildOvercloudImage
     overcloud_image_upload = tripleoclient.v1.overcloud_image:UploadOvercloudImage
     overcloud_node_delete = tripleoclient.v1.overcloud_node:DeleteNode
+    overcloud_node_import = tripleoclient.v1.overcloud_node:ImportNode
     overcloud_node_introspect = tripleoclient.v1.overcloud_node:IntrospectNode
     overcloud_node_provide = tripleoclient.v1.overcloud_node:ProvideNode
     overcloud_plan_delete = tripleoclient.v1.overcloud_plan:DeletePlan


@@ -13,7 +13,6 @@
 # under the License.
 #
-import copy
 import json
 import os
 import tempfile
@@ -369,19 +368,15 @@ pxe_ssh,192.168.122.2,stack,"KEY2",00:0b:d0:69:7e:58""")
     def _check_workflow_call(self, local=True, provide=True,
                              kernel_name='bm-deploy-kernel',
                              ramdisk_name='bm-deploy-ramdisk'):
-        nodes_list = copy.deepcopy(self.nodes_list)
-        for node in nodes_list:
-            if local:
-                node['capabilities'] = {'boot_option': 'local'}
-            else:
-                node['capabilities'] = {'boot_option': 'netboot'}
         call_list = [mock.call(
             'tripleo.baremetal.v1.register_or_update', workflow_input={
                 'kernel_name': kernel_name,
-                'nodes_json': nodes_list,
+                'nodes_json': self.nodes_list,
                 'queue_name': 'UUID4',
-                'ramdisk_name': ramdisk_name}
+                'ramdisk_name': ramdisk_name,
+                'instance_boot_option': 'local' if local else 'netboot'
+            }
         )]
         if provide:


@@ -13,11 +13,14 @@
 # under the License.
 #
+import copy
+import json
 import mock
+import os
+import tempfile
 from openstackclient.tests import utils as test_utils
 from tripleoclient.tests.v1.overcloud_node import fakes
 from tripleoclient.v1 import overcloud_node
@@ -234,3 +237,133 @@ class TestIntrospectNode(fakes.TestOvercloudNode):
         self.assertRaises(test_utils.ParserException,
                           self.check_parser,
                           self.cmd, argslist, verifylist)
+class TestImportNode(fakes.TestOvercloudNode):
+    def setUp(self):
+        super(TestImportNode, self).setUp()
+        self.nodes_list = [{
+            "pm_user": "stack",
+            "pm_addr": "192.168.122.1",
+            "pm_password": "KEY1",
+            "pm_type": "pxe_ssh",
+            "mac": [
+                "00:0b:d0:69:7e:59"
+            ],
+        }, {
+            "pm_user": "stack",
+            "pm_addr": "192.168.122.2",
+            "pm_password": "KEY2",
+            "pm_type": "pxe_ssh",
+            "mac": [
+                "00:0b:d0:69:7e:58"
+            ]
+        }]
+        self.json_file = tempfile.NamedTemporaryFile(
+            mode='w', delete=False, suffix='.json')
+        json.dump(self.nodes_list, self.json_file)
+        self.json_file.close()
+        self.addCleanup(os.unlink, self.json_file.name)
+        self.workflow = self.app.client_manager.workflow_engine
+        client = self.app.client_manager.tripleoclient
+        self.websocket = client.messaging_websocket()
+        # Get the command object to test
+        self.cmd = overcloud_node.ImportNode(self.app, None)
+    def _check_workflow_call(self, parsed_args, introspect=False,
+                             provide=False, local=True, no_deploy_image=False):
+        self.websocket.wait_for_message.return_value = {
+            "status": "SUCCESS",
+            "message": "Success",
+            "registered_nodes": [{
+                "uuid": "MOCK_NODE_UUID"
+            }]
+        }
+        self.cmd.take_action(parsed_args)
+        nodes_list = copy.deepcopy(self.nodes_list)
+        call_count = 1
+        call_list = [mock.call(
+            'tripleo.baremetal.v1.register_or_update', workflow_input={
+                'nodes_json': nodes_list,
+                'queue_name': 'UUID4',
+                'kernel_name': None if no_deploy_image else 'bm-deploy-kernel',
+                'ramdisk_name': (None
+                                 if no_deploy_image else 'bm-deploy-ramdisk'),
+                'instance_boot_option': 'local' if local else 'netboot'
+            }
+        )]
+        if introspect:
+            call_count += 1
+            call_list.append(mock.call(
+                'tripleo.baremetal.v1.introspect', workflow_input={
+                    'node_uuids': ['MOCK_NODE_UUID'],
+                    'queue_name': 'UUID4'}
+            ))
+        if provide:
+            call_count += 1
+            call_list.append(mock.call(
+                'tripleo.baremetal.v1.provide', workflow_input={
+                    'node_uuids': ['MOCK_NODE_UUID'],
+                    'queue_name': 'UUID4'
+                }
+            ))
+        self.workflow.executions.create.assert_has_calls(call_list)
+        self.assertEqual(self.workflow.executions.create.call_count,
+                         call_count)
+    def test_import_only(self):
+        argslist = [self.json_file.name]
+        verifylist = [('introspect', False),
+                      ('provide', False)]
+        parsed_args = self.check_parser(self.cmd, argslist, verifylist)
+        self._check_workflow_call(parsed_args)
+    def test_import_and_introspect(self):
+        argslist = [self.json_file.name, '--introspect']
+        verifylist = [('introspect', True),
+                      ('provide', False)]
+        parsed_args = self.check_parser(self.cmd, argslist, verifylist)
+        self._check_workflow_call(parsed_args, introspect=True)
+    def test_import_and_provide(self):
+        argslist = [self.json_file.name, '--provide']
+        verifylist = [('introspect', False),
+                      ('provide', True)]
+        parsed_args = self.check_parser(self.cmd, argslist, verifylist)
+        self._check_workflow_call(parsed_args, provide=True)
+    def test_import_and_introspect_and_provide(self):
+        argslist = [self.json_file.name, '--introspect', '--provide']
+        verifylist = [('introspect', True),
+                      ('provide', True)]
+        parsed_args = self.check_parser(self.cmd, argslist, verifylist)
+        self._check_workflow_call(parsed_args, introspect=True, provide=True)
+    def test_import_with_netboot(self):
+        arglist = [self.json_file.name, '--instance-boot-option', 'netboot']
+        verifylist = [('instance_boot_option', 'netboot')]
+        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
+        self._check_workflow_call(parsed_args, local=False)
+    def test_import_with_no_deployed_image(self):
+        arglist = [self.json_file.name, '--no-deploy-image']
+        verifylist = [('no_deploy_image', True)]
+        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
+        self._check_workflow_call(parsed_args, no_deploy_image=True)
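These tests only assert the Mistral workflow executions the command creates; the workflow engine and websocket are mocked by the fakes. Assuming the repository's usual OpenStack tox/ostestr layout (not shown in this change), the class could be run on its own with something like:

    tox -e py27 -- tripleoclient.tests.v1.overcloud_node.test_overcloud_node.TestImportNode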


@@ -15,6 +15,7 @@
 from __future__ import print_function
 import base64
+import csv
 import hashlib
 import json
 import logging
@@ -26,6 +27,7 @@ import socket
 import struct
 import subprocess
 import time
+import yaml
 from heatclient.common import event_utils
 from heatclient.exc import HTTPNotFound
@@ -837,3 +839,47 @@ def get_roles_info(parsed_args):
         'swift-storage': (parsed_args.swift_storage_flavor,
                           parsed_args.swift_storage_scale)
     }
+def _csv_to_nodes_dict(nodes_csv):
+    """Convert CSV to a list of dicts formatted for os_cloud_config
+    Given a CSV file in the format below, convert it into the
+    structure expected by os_cloud_config JSON files.
+    pm_type, pm_addr, pm_user, pm_password, mac
+    """
+    data = []
+    for row in csv.reader(nodes_csv):
+        node = {
+            "pm_user": row[2],
+            "pm_addr": row[1],
+            "pm_password": row[3],
+            "pm_type": row[0],
+            "mac": [
+                row[4]
+            ]
+        }
+        data.append(node)
+    return data
+def parse_env_file(env_file, file_type=None):
+    if file_type == 'json' or env_file.name.endswith('.json'):
+        nodes_config = json.load(env_file)
+    elif file_type == 'csv' or env_file.name.endswith('.csv'):
+        nodes_config = _csv_to_nodes_dict(env_file)
+    elif env_file.name.endswith('.yaml'):
+        nodes_config = yaml.safe_load(env_file)
+    else:
+        raise exceptions.InvalidConfiguration(
+            _("Invalid file extension for %s, must be json, yaml or csv") %
+            env_file.name)
+    if 'nodes' in nodes_config:
+        nodes_config = nodes_config['nodes']
+    return nodes_config
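For reference, a rough sketch of what the relocated _csv_to_nodes_dict helper yields for a single CSV row (values borrowed from the test fixtures above; the CSV is read without a header row):

    # input row:  pxe_ssh,192.168.122.1,stack,"KEY1",00:0b:d0:69:7e:59
    # result:
    {
        "pm_type": "pxe_ssh",
        "pm_addr": "192.168.122.1",
        "pm_user": "stack",
        "pm_password": "KEY1",
        "mac": ["00:0b:d0:69:7e:59"]
    }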


@@ -16,12 +16,10 @@
 from __future__ import print_function
 import argparse
-import csv
 import json
 import logging
 import time
 import uuid
-import yaml
 from cliff import command
 from cliff import lister
@@ -35,32 +33,6 @@ from tripleoclient.workflows import baremetal
 from tripleoclient.workflows import baremetal
-def _csv_to_nodes_dict(nodes_csv):
-    """Convert CSV to a list of dicts formatted for os_cloud_config
-    Given a CSV file in the format below, convert it into the
-    structure expected by os_cloud_config JSON files.
-    pm_type, pm_addr, pm_user, pm_password, mac
-    """
-    data = []
-    for row in csv.reader(nodes_csv):
-        node = {
-            "pm_user": row[2],
-            "pm_addr": row[1],
-            "pm_password": row[3],
-            "pm_type": row[0],
-            "mac": [
-                row[4]
-            ]
-        }
-        data.append(node)
-    return data
 class ValidateInstackEnv(command.Command):
     """Validate `instackenv.json` which is used in `baremetal import`."""
@@ -188,19 +160,13 @@ class ImportBaremetal(command.Command):
         self.log.debug("take_action(%s)" % parsed_args)
-        if parsed_args.json or parsed_args.file_in.name.endswith('.json'):
-            nodes_config = json.load(parsed_args.file_in)
-        elif parsed_args.csv or parsed_args.file_in.name.endswith('.csv'):
-            nodes_config = _csv_to_nodes_dict(parsed_args.file_in)
-        elif parsed_args.file_in.name.endswith('.yaml'):
-            nodes_config = yaml.safe_load(parsed_args.file_in)
-        else:
-            raise exceptions.InvalidConfiguration(
-                _("Invalid file extension for %s, must be json, yaml or csv") %
-                parsed_args.file_in.name)
-        if 'nodes' in nodes_config:
-            nodes_config = nodes_config['nodes']
+        file_type = None
+        if parsed_args.json:
+            file_type = 'json'
+        elif parsed_args.csv:
+            file_type = 'csv'
+        nodes_config = utils.parse_env_file(parsed_args.file_in, file_type)
         client = self.app.client_manager.baremetal
         if parsed_args.initial_state == "enroll":
@@ -210,14 +176,6 @@
                     _("OS_BAREMETAL_API_VERSION must be >=1.11 for use of "
                       "'enroll' provision state; currently %s") % api_version)
-        # NOTE (dprince) move this to tripleo-common?
-        for node in nodes_config:
-            caps = node.get('capabilities', {})
-            if not isinstance(caps, dict):
-                caps = utils.capabilities_to_dict(caps)
-            caps.setdefault('boot_option', parsed_args.instance_boot_option)
-            node['capabilities'] = caps
         queue_name = str(uuid.uuid4())
         if parsed_args.no_deploy_image:
@@ -232,7 +190,8 @@
             nodes_json=nodes_config,
             queue_name=queue_name,
             kernel_name=deploy_kernel,
-            ramdisk_name=deploy_ramdisk
+            ramdisk_name=deploy_ramdisk,
+            instance_boot_option=parsed_args.instance_boot_option
         )
         node_uuids = [node['uuid'] for node in nodes]


@@ -13,6 +13,7 @@
 # under the License.
 #
+import argparse
 import logging
 import uuid
@@ -22,6 +23,7 @@ from openstackclient.i18n import _
 from tripleo_common import scale
 from tripleoclient import constants
+from tripleoclient import utils as oooutils
 from tripleoclient.workflows import baremetal
@@ -146,3 +148,66 @@ class IntrospectNode(command.Command):
         else:
             baremetal.provide_manageable_nodes(self.app.client_manager,
                                                queue_name=queue_name)
+class ImportNode(command.Command):
+    """Import baremetal nodes from a JSON, YAML or CSV file.
+    The node status will be set to 'manageable' by default.
+    """
+    log = logging.getLogger(__name__ + ".ImportNode")
+    def get_parser(self, prog_name):
+        parser = super(ImportNode, self).get_parser(prog_name)
+        parser.add_argument('--introspect',
+                            action='store_true',
+                            help=_('Introspect the imported nodes'))
+        parser.add_argument('--provide',
+                            action='store_true',
+                            help=_('Provide (make available) the nodes'))
+        parser.add_argument('--no-deploy-image', action='store_true',
+                            help=_('Skip setting the deploy kernel and '
+                                   'ramdisk.'))
+        parser.add_argument('--instance-boot-option',
+                            choices=['local', 'netboot'], default='local',
+                            help=_('Whether to set instances for booting from '
+                                   'local hard drive (local) or network '
+                                   '(netboot).'))
+        parser.add_argument('env_file', type=argparse.FileType('r'))
+        return parser
+    def take_action(self, parsed_args):
+        self.log.debug("take_action(%s)" % parsed_args)
+        nodes_config = oooutils.parse_env_file(parsed_args.env_file)
+        queue_name = str(uuid.uuid4())
+        if parsed_args.no_deploy_image:
+            deploy_kernel = None
+            deploy_ramdisk = None
+        else:
+            deploy_kernel = 'bm-deploy-kernel'
+            deploy_ramdisk = 'bm-deploy-ramdisk'
+        nodes = baremetal.register_or_update(
+            self.app.client_manager,
+            nodes_json=nodes_config,
+            queue_name=queue_name,
+            kernel_name=deploy_kernel,
+            ramdisk_name=deploy_ramdisk,
+            instance_boot_option=parsed_args.instance_boot_option
+        )
+        nodes_uuids = [node['uuid'] for node in nodes]
+        if parsed_args.introspect:
+            baremetal.introspect(self.app.client_manager,
+                                 node_uuids=nodes_uuids,
+                                 queue_name=queue_name)
+        if parsed_args.provide:
+            baremetal.provide(self.app.client_manager,
+                              node_uuids=nodes_uuids,
+                              queue_name=queue_name)
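A note on input, based on parse_env_file above: env_file may be JSON, YAML or CSV, and may contain either a bare list of node dicts or a mapping with a top-level "nodes" key. A minimal JSON sketch mirroring the test fixture (values illustrative):

    {
        "nodes": [
            {
                "pm_type": "pxe_ssh",
                "pm_addr": "192.168.122.1",
                "pm_user": "stack",
                "pm_password": "KEY1",
                "mac": ["00:0b:d0:69:7e:59"]
            }
        ]
    }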