Merge "Support application/json load-import request"
commit ac3971e337
@@ -18,6 +18,8 @@
 # Copyright (c) 2015-2021 Wind River Systems, Inc.
 #
 
+import json
+
 import jsonpatch
 import os
 import pecan
@@ -257,53 +259,50 @@ class LoadController(rest.RestController):
 
     @staticmethod
     def _upload_file(file_item):
-        dst = None
         try:
             staging_dir = constants.LOAD_FILES_STAGING_DIR
             if not os.path.isdir(staging_dir):
                 os.makedirs(staging_dir)
 
-            fn = os.path.join(staging_dir,
-                              os.path.basename(file_item.filename))
-            if hasattr(file_item.file, 'fileno'):
+            source_file = file_item.file
+            staging_file = os.path.join(staging_dir,
+                                        os.path.basename(file_item.filename))
+
+            if source_file is None:
+                LOG.error("Failed to upload load file %s, invalid file object"
+                          % staging_file)
+                return None
+
+            if hasattr(source_file, 'fileno'):
                 # Only proceed if there is space available for copying
-                file_size = os.stat(file_item.filename).st_size
+                file_size = os.fstat(source_file.fileno()).st_size
                 avail_space = psutil.disk_usage('/scratch').free
                 if (avail_space < file_size):
-                    LOG.error("Failed to upload load file %s, not enough space on /scratch "
-                              "partition: %d bytes available " % (fn, avail_space))
+                    LOG.error("Failed to upload load file %s, not enough space on /scratch"
+                              " partition: %d bytes available "
+                              % (staging_file, avail_space))
                     return None
 
-                # Large iso file
-                dst = os.open(fn, os.O_WRONLY | os.O_CREAT)
+                # Large iso file, allocate the required space
                 subprocess.check_call(["/usr/bin/fallocate",  # pylint: disable=not-callable
-                                       "-l " + str(file_size), fn])
-                src = file_item.file.fileno()
-                size = 64 * 1024
-                n = size
-                while n >= size:
-                    s = os.read(src, size)
-                    n = os.write(dst, s)
-
-                os.close(dst)
-            else:
-                # Small signature file
-                with open(fn, 'wb') as sigfile:
-                    sigfile.write(file_item.file.read())
+                                       "-l " + str(file_size), staging_file])
+
+            with open(staging_file, 'wb') as destination_file:
+                shutil.copyfileobj(source_file, destination_file)
+
         except subprocess.CalledProcessError as e:
-            LOG.error("Failed to upload load file %s, /usr/bin/fallocate error: %s "
-                      % (fn, e.output))
-            os.close(dst)
-            os.remove(fn)
+            LOG.error("Failed to upload load file %s, /usr/bin/fallocate error: %s"
+                      % (staging_file, e.output))
+            if os.path.isfile(staging_file):
+                os.remove(staging_file)
             return None
         except Exception:
-            if dst:
-                os.close(dst)
-                os.remove(fn)
+            if os.path.isfile(staging_file):
+                os.remove(staging_file)
             LOG.exception("Failed to upload load file %s" % file_item.filename)
             return None
 
-        return fn
+        return staging_file
 
     @expose('json')
     @cutils.synchronized(LOCK_NAME)
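
The rewritten _upload_file() drops the manual os.read/os.write copy loop in favour of shutil.copyfileobj, sizes the upload with os.fstat on the source file object rather than stat on the client-supplied filename, and still preallocates large ISOs with fallocate after a free-space check on /scratch. For illustration, a standalone sketch of the same pattern; the function name, staging path and argument shapes below are assumptions, not the project's API:

import os
import shutil
import subprocess

import psutil


def stage_upload(source_file, filename, staging_dir="/scratch/tmp_load"):
    """Copy an uploaded file object into staging_dir, mirroring the hunk above.

    source_file is any readable binary file object, filename is the
    client-supplied name. Returns the staged path, or None on failure.
    """
    os.makedirs(staging_dir, exist_ok=True)
    staging_file = os.path.join(staging_dir, os.path.basename(filename))

    if source_file is None:
        return None

    if hasattr(source_file, "fileno"):
        # Real file on disk: check free space first, then preallocate the target
        # so a large ISO cannot half-fill the partition midway through the copy.
        file_size = os.fstat(source_file.fileno()).st_size
        if psutil.disk_usage(staging_dir).free < file_size:
            return None
        subprocess.check_call(["/usr/bin/fallocate", "-l", str(file_size), staging_file])

    # Stream the content in chunks; works for real files and in-memory buffers alike.
    with open(staging_file, "wb") as destination_file:
        shutil.copyfileobj(source_file, destination_file)

    return staging_file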
@@ -324,38 +323,62 @@ class LoadController(rest.RestController):
         LOG.info("Load import request received.")
 
         system_controller_import_active = False
-        data = dict((k, v) for (k, v) in request.POST.items())
-        if data.get('active') == 'true':
-            if pecan.request.dbapi.isystem_get_one().\
-                    distributed_cloud_role == \
-                    constants.DISTRIBUTED_CLOUD_ROLE_SYSTEMCONTROLLER:
-                LOG.info("System Controller allow start import_load")
-                system_controller_import_active = True
 
         # Only import loads on controller-0. This is required because the load
         # is only installed locally and we will be booting controller-1 from
         # this load during the upgrade.
         if socket.gethostname() != constants.CONTROLLER_0_HOSTNAME:
-            raise wsme.exc.ClientSideError(
-                _("A load can only be imported when %s is active. ") % constants.CONTROLLER_0_HOSTNAME)
+            raise wsme.exc.ClientSideError(_("A load can only be imported when"
+                                             " %s is active.")
+                                           % constants.CONTROLLER_0_HOSTNAME)
+
+        req_content = dict()
+        load_files = dict()
+        is_multiform_req = True
+        # Request coming from dc-api-proxy is not multiform, file transfer is handled
+        # by dc-api-proxy, the request contains only the vault file location
+        if request.content_type == "application/json":
+            req_content = dict(json.loads(request.body))
+            is_multiform_req = False
+        else:
+            req_content = dict(request.POST.items())
+
+        if not req_content:
+            raise wsme.exc.ClientSideError(_("Empty request."))
+
+        if 'active' in req_content:
+            if req_content['active'] == 'true':
+                if pecan.request.dbapi.isystem_get_one().\
+                        distributed_cloud_role == \
+                        constants.DISTRIBUTED_CLOUD_ROLE_SYSTEMCONTROLLER:
+                    LOG.info("System Controller allow start import_load")
+                    system_controller_import_active = True
 
         self._check_existing_loads(active_import=system_controller_import_active)
 
-        load_files = dict()
-        for f in constants.IMPORT_LOAD_FILES:
-            if f not in request.POST:
-                raise wsme.exc.ClientSideError(_("Missing required file for %s") % f)
+        for file in constants.IMPORT_LOAD_FILES:
+            if file not in req_content:
+                raise wsme.exc.ClientSideError(_("Missing required file for %s")
+                                               % file)
 
-            file_item = request.POST[f]
-            if not file_item.filename:
-                raise wsme.exc.ClientSideError(_("No %s file uploaded") % f)
+            if not is_multiform_req:
+                load_files.update({file: req_content[file]})
+            else:
+                if file not in request.POST:
+                    raise wsme.exc.ClientSideError(_("Missing required file for %s")
+                                                   % file)
 
-            fn = self._upload_file(file_item)
-            if fn:
-                load_files.update({f: fn})
-            else:
-                raise wsme.exc.ClientSideError(_("Failed to save file %s to disk. Please check "
-                                                 "sysinv logs for details." % file_item.filename))
+                file_item = request.POST[file]
+                if not file_item.filename:
+                    raise wsme.exc.ClientSideError(_("No %s file uploaded") % file)
+
+                file_location = self._upload_file(file_item)
+                if file_location:
+                    load_files.update({file: file_location})
+                else:
+                    raise wsme.exc.ClientSideError(_("Failed to save file %s to disk."
+                                                     " Please check sysinv logs for"
+                                                     " details." % file_item.filename))
 
         LOG.info("Load files: %s saved to disk." % load_files)
 
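
With this change the import handler accepts either a multipart form request, where the ISO and signature are uploaded in the body and staged via _upload_file(), or an application/json request, where dc-api-proxy has already placed the files on disk and the body only carries their locations. A rough sketch of the two request styles follows; the endpoint URL, token handling, form-field names and paths are assumptions for illustration and are not taken from this diff:

import requests

SYSINV_URL = "http://controller-0:6385/v1/loads/import_load"  # assumed endpoint
HEADERS = {"X-Auth-Token": "<token>"}

# 1. Multipart form request: the ISO and signature travel in the request body,
#    and the API stages them on /scratch via _upload_file().
with open("bootimage.iso", "rb") as iso, open("bootimage.sig", "rb") as sig:
    requests.post(SYSINV_URL, headers=HEADERS,
                  files={"path_to_iso": iso, "path_to_sig": sig},
                  data={"active": "false"})

# 2. application/json request (the dc-api-proxy case): the files are already on
#    disk, so the body carries only their locations and the handler takes the
#    is_multiform_req == False branch.
payload = {"path_to_iso": "/opt/dc-vault/loads/bootimage.iso",  # illustrative path
           "path_to_sig": "/opt/dc-vault/loads/bootimage.sig",  # illustrative path
           "active": "true"}
requests.post(SYSINV_URL, json=payload, headers=HEADERS)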
@@ -10568,8 +10568,9 @@ class ConductorManager(service.PeriodicService):
 
         if not os.path.exists(path_to_iso):
             self._import_load_error(new_load)
-            raise exception.SysinvException(_("Specified path not found %s") %
+            raise exception.SysinvException(_("Specified path not found: %s") %
                                             path_to_iso)
 
         mounted_iso = None
+
         mntdir = tempfile.mkdtemp(dir='/tmp')
@@ -10580,8 +10581,7 @@ class ConductorManager(service.PeriodicService):
 
         except subprocess.CalledProcessError:
             self._import_load_error(new_load)
-            raise exception.SysinvException(_(
-                "Unable to mount iso"))
+            raise exception.SysinvException(_("Unable to mount iso"))
 
         # Run the upgrade script
         with open(os.devnull, "w") as fnull:
@@ -10621,14 +10621,8 @@ class ConductorManager(service.PeriodicService):
                 raise exception.SysinvException(_(
                     "Failure during sw-patch init-release"))
 
-            # TODO(tngo): a less efficient but cleaner solution is to let sysinv
-            # api proxy copy the load files directly from the request as opposed
-            # to relying on load files in sysinv staging directory being there.
-            system = self.dbapi.isystem_get_one()
-            if system.distributed_cloud_role == \
-                    constants.DISTRIBUTED_CLOUD_ROLE_SYSTEMCONTROLLER:
-                greenthread.sleep(constants.STAGING_LOAD_FILES_REMOVAL_WAIT_TIME)
-            shutil.rmtree(constants.LOAD_FILES_STAGING_DIR)
+            if os.path.exists(constants.LOAD_FILES_STAGING_DIR):
+                shutil.rmtree(constants.LOAD_FILES_STAGING_DIR)
 
             LOG.info("Load import completed.")
             return True
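
On the conductor side, the TODO about letting the sysinv API proxy copy the load files itself is resolved by this change, so the System Controller no longer sleeps before removing the staging directory; cleanup simply removes it when present. A minimal sketch of that behaviour, with an illustrative staging path standing in for the real constant:

import os
import shutil

LOAD_FILES_STAGING_DIR = "/scratch/tmp_load"  # illustrative value only


def cleanup_import_staging(staging_dir=LOAD_FILES_STAGING_DIR):
    """Drop the load staging directory unconditionally once the import is done."""
    if os.path.exists(staging_dir):
        shutil.rmtree(staging_dir)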