Add API to import load metadata for SX upgrade
For SX upgrades we can skip importing the entire bootimage ISO before starting the upgrade. With duplex systems the ISO data is used to install the N+1 load on other hosts. For SX systems there are no other hosts, so transferring the ISO is not needed. For now this feature will be restricted to SX subclouds. When the metadata import is used patching will not be available for that load. If patches are required for the N+1 load they will need to be applied either via a patched ISO or with sw-patch before running the upgrade_platform playbook. Change-Id: I898786de42798191f9ec7b14d942456651f813a5 Story: 2007403 Task: 40806 Signed-off-by: David Sullivan <david.sullivan@windriver.com>
This commit is contained in:
parent
2b755b190c
commit
59b0ac7ce4
|
@ -57,9 +57,9 @@ class Manager(object):
|
|||
'POST', url, body=body, data=data)
|
||||
return resp
|
||||
|
||||
def _upload_multipart(self, url, body, data=None):
|
||||
def _upload_multipart(self, url, body, data=None, check_exceptions=False):
|
||||
resp = self.api.upload_request_with_multipart(
|
||||
'POST', url, body=body, data=data)
|
||||
'POST', url, body=body, data=data, check_exceptions=check_exceptions)
|
||||
return resp
|
||||
|
||||
def _json_get(self, url, body=None):
|
||||
|
|
|
@ -307,10 +307,17 @@ class HTTPClient(httplib2.Http):
|
|||
enc = MultipartEncoder(fields)
|
||||
headers = {'Content-Type': enc.content_type,
|
||||
"X-Auth-Token": self.auth_token}
|
||||
req = requests.post(connection_url,
|
||||
data=enc,
|
||||
headers=headers)
|
||||
return req.json()
|
||||
response = requests.post(connection_url,
|
||||
data=enc,
|
||||
headers=headers)
|
||||
|
||||
if kwargs.get('check_exceptions'):
|
||||
if response.status_code != 200:
|
||||
err_message = self._extract_error_json(response.text)
|
||||
fault_text = err_message.get('faultstring') or "Unknown Error"
|
||||
raise exceptions.HTTPBadRequest(fault_text)
|
||||
|
||||
return response.json()
|
||||
|
||||
#################
|
||||
# AUTHENTICATE
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
# -*- encoding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2015-2016 Wind River Systems, Inc.
|
||||
# Copyright (c) 2015-2020 Wind River Systems, Inc.
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
#
|
||||
|
@ -34,15 +32,19 @@ class LoadManager(base.Manager):
|
|||
except IndexError:
|
||||
return None
|
||||
|
||||
def create(self, **kwargs):
|
||||
def _create_load(self, load, path):
|
||||
if set(load.keys()) != set(CREATION_ATTRIBUTES):
|
||||
raise exc.InvalidAttribute()
|
||||
|
||||
return self._create(path, load)
|
||||
|
||||
def create(self, load):
|
||||
path = '/v1/loads/'
|
||||
new = {}
|
||||
for (key, value) in kwargs.items():
|
||||
if key in CREATION_ATTRIBUTES:
|
||||
new[key] = value
|
||||
else:
|
||||
raise exc.InvalidAttribute(key)
|
||||
return self._create(path, new)
|
||||
self._create_load(load, path)
|
||||
|
||||
def import_load_metadata(self, load):
|
||||
path = '/v1/loads/import_load_metadata'
|
||||
return self._create_load(load, path)
|
||||
|
||||
def import_load(self, **kwargs):
|
||||
path = '/v1/loads/import_load'
|
||||
|
@ -58,10 +60,9 @@ class LoadManager(base.Manager):
|
|||
else:
|
||||
raise exc.InvalidAttribute(key)
|
||||
|
||||
return self._upload_multipart(
|
||||
path,
|
||||
load_info,
|
||||
data={'active': active})
|
||||
json_data = self._upload_multipart(
|
||||
path, body=load_info, data={'active': active}, check_exceptions=True)
|
||||
return self.resource_class(self, json_data)
|
||||
|
||||
def delete(self, load_id):
|
||||
path = '/v1/loads/%s' % load_id
|
||||
|
|
|
@ -111,27 +111,13 @@ def do_load_import(cc, args):
|
|||
print("This operation will take a while. Please wait.")
|
||||
wait_task = WaitThread()
|
||||
wait_task.start()
|
||||
resp = cc.load.import_load(**patch)
|
||||
imported_load = cc.load.import_load(**patch)
|
||||
wait_task.join()
|
||||
error = resp.get('error')
|
||||
if error:
|
||||
raise exc.CommandError("%s" % error)
|
||||
except Exception as e:
|
||||
wait_task.join()
|
||||
raise exc.CommandError(_("Load import failed. Reason: %s" % e))
|
||||
else:
|
||||
new_load = resp.get('new_load')
|
||||
if new_load:
|
||||
uuid = new_load["uuid"]
|
||||
else:
|
||||
raise exc.CommandError(_("Load was not created."))
|
||||
|
||||
try:
|
||||
load = cc.load.get(uuid)
|
||||
except exc.HTTPNotFound:
|
||||
raise exc.CommandError(_("Load UUID not found: %s" % uuid))
|
||||
|
||||
_print_load_show(load)
|
||||
_print_load_show(imported_load)
|
||||
|
||||
|
||||
class WaitThread(threading.Thread):
|
||||
|
|
|
@ -25,6 +25,7 @@ from pecan import rest
|
|||
import six
|
||||
import shutil
|
||||
import socket
|
||||
import sys
|
||||
import wsme
|
||||
from wsme import types as wtypes
|
||||
import wsmeext.pecan as wsme_pecan
|
||||
|
@ -43,6 +44,7 @@ from sysinv.common import exception
|
|||
from sysinv.common import utils as cutils
|
||||
from sysinv import objects
|
||||
from sysinv.openstack.common.rpc import common
|
||||
import tsconfig.tsconfig as tsc
|
||||
|
||||
LOG = log.getLogger(__name__)
|
||||
|
||||
|
@ -144,6 +146,7 @@ class LoadController(rest.RestController):
|
|||
_custom_actions = {
|
||||
'detail': ['GET'],
|
||||
'import_load': ['POST'],
|
||||
'import_load_metadata': ['POST']
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
|
@ -284,10 +287,20 @@ class LoadController(rest.RestController):
|
|||
@expose('json')
|
||||
@cutils.synchronized(LOCK_NAME)
|
||||
def import_load(self):
|
||||
"""Create a new Load."""
|
||||
"""Import a load from iso/sig files"""
|
||||
try:
|
||||
return self._import_load()
|
||||
except Exception as e:
|
||||
# Duplicate the exception handling behavior of the wsmeext.pecan wsexpose decorator
|
||||
# This can be moved to a decorator if we need to reuse this in other modules
|
||||
exception_code = getattr(e, 'code', None)
|
||||
pecan.response.status = exception_code if wsme.utils.is_valid_code(exception_code) else 500
|
||||
return wsme.api.format_exception(sys.exc_info())
|
||||
|
||||
def _import_load(self):
|
||||
"""Create a new load from iso/sig files"""
|
||||
|
||||
LOG.info("Load import request received.")
|
||||
err_msg = None
|
||||
|
||||
system_controller_import_active = False
|
||||
data = dict((k, v) for (k, v) in request.POST.items())
|
||||
|
@ -302,53 +315,26 @@ class LoadController(rest.RestController):
|
|||
# is only installed locally and we will be booting controller-1 from
|
||||
# this load during the upgrade.
|
||||
if socket.gethostname() != constants.CONTROLLER_0_HOSTNAME:
|
||||
err_msg = _("A load can only be imported when %s is "
|
||||
"active. ") % constants.CONTROLLER_0_HOSTNAME
|
||||
raise wsme.exc.ClientSideError(
|
||||
_("A load can only be imported when %s is active. ") % constants.CONTROLLER_0_HOSTNAME)
|
||||
else:
|
||||
loads = pecan.request.dbapi.load_get_list()
|
||||
|
||||
# Only 2 loads are allowed at one time: the active load
|
||||
# and an imported load regardless of its current state
|
||||
# (e.g. importing, error, deleting).
|
||||
if len(loads) > constants.IMPORTED_LOAD_MAX_COUNT:
|
||||
for load in loads:
|
||||
if load.state == constants.ACTIVE_LOAD_STATE:
|
||||
pass
|
||||
elif load.state == constants.ERROR_LOAD_STATE:
|
||||
err_msg = _("Please remove the load in error state "
|
||||
"before importing a new one.")
|
||||
elif load.state == constants.DELETING_LOAD_STATE:
|
||||
err_msg = _("Please wait for the current load delete "
|
||||
"to complete before importing a new one.")
|
||||
else:
|
||||
# Already imported or being imported
|
||||
# For SystemController allow import of an active load
|
||||
if not system_controller_import_active:
|
||||
err_msg = _(
|
||||
"Max number of loads (2) reached. Please "
|
||||
"remove the old or unused load before "
|
||||
"importing a new one.")
|
||||
if err_msg:
|
||||
return dict(error=err_msg)
|
||||
self._check_existing_loads(active_import=system_controller_import_active)
|
||||
|
||||
load_files = dict()
|
||||
for f in constants.IMPORT_LOAD_FILES:
|
||||
if f not in request.POST:
|
||||
err_msg = _("Missing required file for %s") % f
|
||||
return dict(error=err_msg)
|
||||
raise wsme.exc.ClientSideError(_("Missing required file for %s") % f)
|
||||
|
||||
file_item = request.POST[f]
|
||||
if not file_item.filename:
|
||||
err_msg = _("No %s file uploaded") % f
|
||||
return dict(error=err_msg)
|
||||
raise wsme.exc.ClientSideError(_("No %s file uploaded") % f)
|
||||
|
||||
fn = self._upload_file(file_item)
|
||||
if fn:
|
||||
load_files.update({f: fn})
|
||||
else:
|
||||
err_msg = _("Failed to save file %s to disk. Please check "
|
||||
"sysinv logs for details." % file_item.filename)
|
||||
return dict(error=err_msg)
|
||||
raise wsme.exc.ClientSideError(_("Failed to save file %s to disk. Please check "
|
||||
"sysinv logs for details." % file_item.filename))
|
||||
|
||||
LOG.info("Load files: %s saved to disk." % load_files)
|
||||
|
||||
|
@ -358,33 +344,91 @@ class LoadController(rest.RestController):
|
|||
load_files[constants.LOAD_ISO],
|
||||
load_files[constants.LOAD_SIGNATURE],
|
||||
system_controller_import_active)
|
||||
except common.RemoteError as e:
|
||||
if os.path.isdir(constants.LOAD_FILES_STAGING_DIR):
|
||||
shutil.rmtree(constants.LOAD_FILES_STAGING_DIR)
|
||||
# Keep only the message raised originally by sysinv conductor.
|
||||
return dict(error=str(e.value))
|
||||
|
||||
if new_load is None:
|
||||
return dict(error=_("Error importing load. Load not found"))
|
||||
if new_load is None:
|
||||
raise wsme.exc.ClientSideError(_("Error importing load. Load not found"))
|
||||
|
||||
if not system_controller_import_active:
|
||||
# Signature and upgrade path checks have passed, make rpc call
|
||||
# to the conductor to run import script in the background.
|
||||
try:
|
||||
if not system_controller_import_active:
|
||||
# Signature and upgrade path checks have passed, make rpc call
|
||||
# to the conductor to run import script in the background.
|
||||
pecan.request.rpcapi.import_load(
|
||||
pecan.request.context,
|
||||
load_files[constants.LOAD_ISO],
|
||||
new_load)
|
||||
except common.RemoteError as e:
|
||||
if os.path.isdir(constants.LOAD_FILES_STAGING_DIR):
|
||||
shutil.rmtree(constants.LOAD_FILES_STAGING_DIR)
|
||||
# Keep only the message raised originally by sysinv conductor.
|
||||
return dict(error=str(e.value))
|
||||
except common.RemoteError as e:
|
||||
if os.path.isdir(constants.LOAD_FILES_STAGING_DIR):
|
||||
shutil.rmtree(constants.LOAD_FILES_STAGING_DIR)
|
||||
raise wsme.exc.ClientSideError(e.value)
|
||||
|
||||
new_load_dict = new_load.as_dict()
|
||||
load_data = new_load.as_dict()
|
||||
LOG.info("Load import request validated, returning new load data: %s"
|
||||
% new_load_dict)
|
||||
return dict(new_load=new_load_dict)
|
||||
% load_data)
|
||||
return load_data
|
||||
|
||||
@cutils.synchronized(LOCK_NAME)
|
||||
@wsme_pecan.wsexpose(Load, body=Load)
|
||||
def import_load_metadata(self, load):
|
||||
"""Import a new load using only the metadata. Only available to SX subclouds."""
|
||||
|
||||
LOG.info("Load import metadata request received.")
|
||||
err_msg = None
|
||||
|
||||
# Enforce system type restrictions
|
||||
err_msg = _("Metadata load import is only available to simplex subclouds.")
|
||||
if utils.get_system_mode() != constants.SYSTEM_MODE_SIMPLEX:
|
||||
raise wsme.exc.ClientSideError(err_msg)
|
||||
if utils.get_distributed_cloud_role() != constants.DISTRIBUTED_CLOUD_ROLE_SUBCLOUD:
|
||||
raise wsme.exc.ClientSideError(err_msg)
|
||||
|
||||
self._check_existing_loads()
|
||||
|
||||
if load.software_version == load.compatible_version:
|
||||
raise wsme.exc.ClientSideError(_("Invalid load software_version."))
|
||||
if load.compatible_version != tsc.SW_VERSION:
|
||||
raise wsme.exc.ClientSideError(_("Load compatible_version does not match SW_VERSION."))
|
||||
|
||||
patch = load.as_dict()
|
||||
self._new_load_semantic_checks(patch)
|
||||
patch['state'] = constants.IMPORTED_METADATA_LOAD_STATE
|
||||
patch['uuid'] = None
|
||||
|
||||
LOG.info("Load import metadata validated, creating new load: %s" % patch)
|
||||
try:
|
||||
new_load = pecan.request.dbapi.load_create(patch)
|
||||
except exception.SysinvException:
|
||||
LOG.exception("Failure to create load")
|
||||
raise wsme.exc.ClientSideError(_("Failure to create load"))
|
||||
|
||||
return load.convert_with_links(new_load)
|
||||
|
||||
def _check_existing_loads(self, active_import=False):
|
||||
loads = pecan.request.dbapi.load_get_list()
|
||||
|
||||
# Only 2 loads are allowed at one time: the active load
|
||||
# and an imported load regardless of its current state
|
||||
# (e.g. importing, error, deleting).
|
||||
load_state = None
|
||||
if len(loads) > constants.IMPORTED_LOAD_MAX_COUNT:
|
||||
for load in loads:
|
||||
if load.state != constants.ACTIVE_LOAD_STATE:
|
||||
load_state = load.state
|
||||
else:
|
||||
return
|
||||
|
||||
if load_state == constants.ERROR_LOAD_STATE:
|
||||
err_msg = _("Please remove the load in error state "
|
||||
"before importing a new one.")
|
||||
elif load_state == constants.DELETING_LOAD_STATE:
|
||||
err_msg = _("Please wait for the current load delete "
|
||||
"to complete before importing a new one.")
|
||||
elif not active_import:
|
||||
# Already imported or being imported
|
||||
err_msg = _("Max number of loads (2) reached. Please "
|
||||
"remove the old or unused load before "
|
||||
"importing a new one.")
|
||||
else:
|
||||
return
|
||||
raise wsme.exc.ClientSideError(err_msg)
|
||||
|
||||
@cutils.synchronized(LOCK_NAME)
|
||||
@wsme.validate(six.text_type, [LoadPatchType])
|
||||
|
|
|
@ -752,8 +752,13 @@ MNT_DIR = '/tmp/mnt'
|
|||
ACTIVE_LOAD_STATE = 'active'
|
||||
IMPORTING_LOAD_STATE = 'importing'
|
||||
IMPORTED_LOAD_STATE = 'imported'
|
||||
IMPORTED_METADATA_LOAD_STATE = 'imported-metadata'
|
||||
ERROR_LOAD_STATE = 'error'
|
||||
DELETING_LOAD_STATE = 'deleting'
|
||||
IMPORTED_LOAD_STATES = [
|
||||
IMPORTED_LOAD_STATE,
|
||||
IMPORTED_METADATA_LOAD_STATE
|
||||
]
|
||||
|
||||
DELETE_LOAD_SCRIPT = '/etc/sysinv/upgrades/delete_load.sh'
|
||||
IMPORTED_LOAD_MAX_COUNT = 1
|
||||
|
|
|
@ -1078,33 +1078,28 @@ class ISO(object):
|
|||
|
||||
|
||||
def get_active_load(loads):
|
||||
active_load = None
|
||||
for db_load in loads:
|
||||
if db_load.state == constants.ACTIVE_LOAD_STATE:
|
||||
active_load = db_load
|
||||
|
||||
if active_load is None:
|
||||
active_state = constants.ACTIVE_LOAD_STATE
|
||||
matches = [load for load in loads if load.state == active_state]
|
||||
if matches:
|
||||
return matches[0]
|
||||
else:
|
||||
raise exception.SysinvException(_("No active load found"))
|
||||
|
||||
return active_load
|
||||
|
||||
|
||||
def get_imported_load(loads):
|
||||
imported_load = None
|
||||
for db_load in loads:
|
||||
if db_load.state == constants.IMPORTED_LOAD_STATE:
|
||||
imported_load = db_load
|
||||
|
||||
if imported_load is None:
|
||||
imported_states = constants.IMPORTED_LOAD_STATES
|
||||
matches = [load for load in loads if load.state in imported_states]
|
||||
if matches:
|
||||
return matches[0]
|
||||
else:
|
||||
raise exception.SysinvException(_("No imported load found"))
|
||||
|
||||
return imported_load
|
||||
|
||||
|
||||
def validate_loads_for_import(loads):
|
||||
for db_load in loads:
|
||||
if db_load.state == constants.IMPORTED_LOAD_STATE:
|
||||
raise exception.SysinvException(_("Imported load exists."))
|
||||
imported_states = constants.IMPORTED_LOAD_STATES
|
||||
matches = [load for load in loads if load.state in imported_states]
|
||||
if matches:
|
||||
raise exception.SysinvException(_("Imported load exists."))
|
||||
|
||||
|
||||
def validate_load_for_delete(load):
|
||||
|
@ -1113,6 +1108,7 @@ def validate_load_for_delete(load):
|
|||
|
||||
valid_delete_states = [
|
||||
constants.IMPORTED_LOAD_STATE,
|
||||
constants.IMPORTED_METADATA_LOAD_STATE,
|
||||
constants.ERROR_LOAD_STATE,
|
||||
constants.DELETING_LOAD_STATE
|
||||
]
|
||||
|
|
|
@ -9388,11 +9388,6 @@ class ConductorManager(service.PeriodicService):
|
|||
(from_version, to_version))
|
||||
|
||||
try:
|
||||
# Extract N+1 packages necessary for installation of controller-1
|
||||
# (ie. installer images, kickstarts)
|
||||
subprocess.check_call(['/usr/sbin/upgrade-start-pkg-extract',
|
||||
'-r', to_version])
|
||||
|
||||
if tsc.system_mode == constants.SYSTEM_MODE_SIMPLEX:
|
||||
LOG.info("Generating agent request to create simplex upgrade "
|
||||
"data")
|
||||
|
@ -9401,6 +9396,10 @@ class ConductorManager(service.PeriodicService):
|
|||
rpcapi.create_simplex_backup(context, software_upgrade)
|
||||
return
|
||||
else:
|
||||
# Extract N+1 packages necessary for installation of controller-1
|
||||
# (ie. installer images, kickstarts)
|
||||
subprocess.check_call(['/usr/sbin/upgrade-start-pkg-extract',
|
||||
'-r', to_version])
|
||||
# get the floating management IP
|
||||
mgmt_address = self.dbapi.address_get_by_name(
|
||||
cutils.format_address_name(constants.CONTROLLER_HOSTNAME,
|
||||
|
|
Loading…
Reference in New Issue