2014-04-09 14:31:31 -07:00
|
|
|
# Copyright 2013 Rackspace, Inc.
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2013-09-23 23:29:55 -07:00
|
|
|
|
2014-04-10 11:16:05 -07:00
|
|
|
import base64
|
|
|
|
import gzip
|
2014-01-15 12:48:33 -08:00
|
|
|
import hashlib
|
2014-01-13 16:36:55 -08:00
|
|
|
import os
|
2014-01-10 13:15:13 -08:00
|
|
|
import requests
|
2014-04-10 11:16:05 -07:00
|
|
|
import six
|
2014-01-24 14:20:14 -08:00
|
|
|
import time
|
2014-01-10 13:15:13 -08:00
|
|
|
|
2014-12-01 18:17:35 +02:00
|
|
|
from oslo_concurrency import processutils
|
2015-03-09 23:48:47 +01:00
|
|
|
from oslo_log import log
|
2014-12-01 18:17:35 +02:00
|
|
|
|
2015-02-26 11:12:36 -08:00
|
|
|
from ironic_lib import disk_utils
|
2014-03-19 16:19:52 -07:00
|
|
|
from ironic_python_agent import errors
|
2014-04-11 16:46:36 -07:00
|
|
|
from ironic_python_agent.extensions import base
|
2014-03-19 16:19:52 -07:00
|
|
|
from ironic_python_agent import hardware
|
2014-04-10 11:16:05 -07:00
|
|
|
from ironic_python_agent import utils
|
2013-12-17 15:13:30 -08:00
|
|
|
|
2014-03-17 15:17:27 -07:00
|
|
|
# Module-level logger for this extension.
LOG = log.getLogger(__name__)

# Size of each chunk read/written when streaming image data.
IMAGE_CHUNK_SIZE = 1024 * 1024  # 1MB
|
|
|
|
|
2013-09-23 23:29:55 -07:00
|
|
|
|
2014-01-17 12:40:29 -08:00
|
|
|
def _configdrive_location():
|
|
|
|
return '/tmp/configdrive'
|
|
|
|
|
|
|
|
|
2014-01-10 13:15:13 -08:00
|
|
|
def _image_location(image_info):
|
2014-03-11 13:31:19 -07:00
|
|
|
return '/tmp/{0}'.format(image_info['id'])
|
2014-01-10 13:15:13 -08:00
|
|
|
|
|
|
|
|
2014-01-17 15:59:56 -08:00
|
|
|
def _path_to_script(script):
|
|
|
|
cwd = os.path.dirname(os.path.realpath(__file__))
|
2014-04-29 14:55:45 -07:00
|
|
|
return os.path.join(cwd, '..', script)
|
2014-01-17 15:59:56 -08:00
|
|
|
|
|
|
|
|
2015-02-26 11:12:36 -08:00
|
|
|
def _write_partition_image(image, image_info, device):
    """Call disk_util to create partition and write the partition image.

    :param image: path to the local image file to write.
    :param image_info: metadata describing the image, partition sizes and
        boot options.
    :param device: block device to partition and write to.
    :raises: InvalidCommandParamsError if the image is larger than the
        requested root partition.
    :raises: ImageWriteError if the underlying disk utility fails.
    :returns: the partition UUID dictionary from disk_utils.work_on_disk.
    """
    node_uuid = image_info['id']
    preserve_ephemeral = image_info['preserve_ephemeral']
    configdrive = image_info['configdrive']
    boot_option = image_info.get('boot_option', 'netboot')
    boot_mode = image_info.get('deploy_boot_mode', 'bios')
    image_mb = disk_utils.get_image_mb(image)
    root_mb = image_info['root_mb']

    # Refuse to continue when the image cannot fit in the root partition.
    if image_mb > int(root_mb):
        msg = ('Root partition is too small for requested image. Image '
               'virtual size: {0} MB, Root size: {1} MB').format(image_mb,
                                                                 root_mb)
        raise errors.InvalidCommandParamsError(msg)

    try:
        return disk_utils.work_on_disk(
            device, root_mb,
            image_info['swap_mb'],
            image_info['ephemeral_mb'],
            image_info['ephemeral_format'],
            image, node_uuid,
            preserve_ephemeral=preserve_ephemeral,
            configdrive=configdrive,
            boot_option=boot_option,
            boot_mode=boot_mode)
    except processutils.ProcessExecutionError as e:
        raise errors.ImageWriteError(device, e.exit_code, e.stdout, e.stderr)
|
|
|
|
|
2014-01-10 16:13:47 -08:00
|
|
|
|
2015-02-26 11:12:36 -08:00
|
|
|
def _write_whole_disk_image(image, image_info, device):
    """Write a whole-disk image directly onto a block device.

    Delegates to the shell/write_image.sh helper script.

    :param image: path to the local image file.
    :param image_info: image metadata (not consulted here).
    :param device: target block device.
    :raises: ImageWriteError if the helper script exits non-zero.
    """
    script = _path_to_script('shell/write_image.sh')
    command = ['/bin/bash', script, image, device]
    LOG.info('Writing image with command: {0}'.format(' '.join(command)))
    try:
        stdout, stderr = utils.execute(*command, check_exit_code=[0])
    except processutils.ProcessExecutionError as e:
        raise errors.ImageWriteError(device, e.exit_code, e.stdout, e.stderr)
|
2015-02-26 11:12:36 -08:00
|
|
|
|
|
|
|
|
|
|
|
def _write_image(image_info, device):
    """Write the previously downloaded image to the target device.

    Dispatches to the partition-image path or the whole-disk path based
    on image_info['image_type'], and logs the elapsed time.

    :returns: partition UUID dict for partition images, otherwise {}.
    """
    starttime = time.time()
    image = _image_location(image_info)

    if image_info.get('image_type') == 'partition':
        uuids = _write_partition_image(image, image_info, device)
    else:
        _write_whole_disk_image(image, image_info, device)
        uuids = {}

    totaltime = time.time() - starttime
    LOG.info('Image {0} written to device {1} in {2} seconds'.format(
        image, device, totaltime))
    return uuids
|
2014-01-10 13:15:13 -08:00
|
|
|
|
|
|
|
|
2014-11-19 16:08:16 -08:00
|
|
|
def _configdrive_is_url(configdrive):
|
|
|
|
return (configdrive.startswith('http://')
|
|
|
|
or configdrive.startswith('https://'))
|
|
|
|
|
|
|
|
|
|
|
|
def _download_configdrive_to_file(configdrive, filename):
    """Fetch configdrive data from a URL and write it to *filename*."""
    response = requests.get(configdrive)
    _write_configdrive_to_file(response.content, filename)
|
|
|
|
|
|
|
|
|
2014-04-10 11:16:05 -07:00
|
|
|
def _write_configdrive_to_file(configdrive, filename):
    """Decode, decompress and write configdrive contents to a file.

    :param configdrive: base64-encoded, gzip-compressed configdrive data.
    :param filename: destination path for the decompressed data.
    """
    LOG.debug('Writing configdrive to {0}'.format(filename))
    # configdrive data is base64'd, decode it first. b64decode() returns
    # bytes, so the gzip stream must be wrapped in BytesIO -- StringIO
    # fails on Python 3 with binary data.
    data = six.BytesIO(base64.b64decode(configdrive))
    gunzipped = gzip.GzipFile('configdrive', 'rb', 9, data)
    # Ensure the GzipFile is closed even if reading/writing raises.
    try:
        with open(filename, 'wb') as f:
            f.write(gunzipped.read())
    finally:
        gunzipped.close()
|
|
|
|
|
|
|
|
|
|
|
|
def _write_configdrive_to_partition(configdrive, device):
    """Stage a configdrive locally and copy it onto the device.

    The configdrive may be given either as a URL (downloaded) or as
    base64-gzipped data (decoded and written) before being copied to
    disk by a helper shell script.

    :param configdrive: URL or encoded configdrive contents.
    :param device: target block device.
    :raises: ConfigDriveTooLargeError if the staged file exceeds 64 MiB.
    :raises: ConfigDriveWriteError if the copy script exits non-zero.
    """
    filename = _configdrive_location()
    if _configdrive_is_url(configdrive):
        _download_configdrive_to_file(configdrive, filename)
    else:
        _write_configdrive_to_file(configdrive, filename)

    # check configdrive size before writing it
    filesize = os.stat(filename).st_size
    if filesize > (64 * 1024 * 1024):
        raise errors.ConfigDriveTooLargeError(filename, filesize)

    starttime = time.time()
    script = _path_to_script('shell/copy_configdrive_to_disk.sh')
    command = ['/bin/bash', script, filename, device]
    LOG.info('copying configdrive to disk with command {0}'.format(
        ' '.join(command)))

    try:
        stdout, stderr = utils.execute(*command, check_exit_code=[0])
    except processutils.ProcessExecutionError as e:
        raise errors.ConfigDriveWriteError(device,
                                           e.exit_code,
                                           e.stdout,
                                           e.stderr)

    totaltime = time.time() - starttime
    LOG.info('configdrive copied from {0} to {1} in {2} seconds'.format(
        filename,
        device,
        totaltime))
|
2014-01-28 15:55:07 -08:00
|
|
|
|
|
|
|
|
2015-02-26 11:12:36 -08:00
|
|
|
def _message_format(msg, image_info, device, partition_uuids):
|
|
|
|
"""Helper method to get and populate different messages."""
|
|
|
|
message = None
|
|
|
|
result_msg = msg
|
|
|
|
if image_info.get('image_type') == 'partition':
|
|
|
|
root_uuid = partition_uuids.get('root uuid')
|
|
|
|
efi_system_partition_uuid = (
|
|
|
|
partition_uuids.get('efi system partition uuid'))
|
2016-03-21 18:02:59 +00:00
|
|
|
if (image_info.get('deploy_boot_mode') == 'uefi' and
|
|
|
|
image_info.get('boot_option') == 'local'):
|
2015-02-26 11:12:36 -08:00
|
|
|
result_msg = msg + 'root_uuid={2} efi_system_partition_uuid={3}'
|
|
|
|
message = result_msg.format(image_info['id'], device,
|
|
|
|
root_uuid,
|
|
|
|
efi_system_partition_uuid)
|
|
|
|
else:
|
|
|
|
result_msg = msg + 'root_uuid={2}'
|
|
|
|
message = result_msg.format(image_info['id'], device, root_uuid)
|
|
|
|
else:
|
|
|
|
message = result_msg.format(image_info['id'], device)
|
|
|
|
return message
|
|
|
|
|
|
|
|
|
2015-10-13 17:10:37 +01:00
|
|
|
class ImageDownload(object):
    """Helper class that opens a HTTP connection to download an image.

    This class opens a HTTP connection to download an image from a URL
    and create an iterator so the image can be downloaded in chunks. The
    MD5 hash of the image being downloaded is calculated on-the-fly.
    """

    def __init__(self, image_info, time_obj=None):
        """Attempt a streaming GET against each URL in image_info['urls'].

        :param image_info: dict with at least 'id' and 'urls' keys.
        :param time_obj: optional start timestamp used when logging how
            long a failed attempt took; defaults to the current time.
        :raises: ImageDownloadError if every URL fails.
        """
        # Running MD5 of all bytes yielded by __iter__; see md5sum().
        self._md5checksum = hashlib.md5()
        self._time = time_obj or time.time()
        self._request = None

        # Try each candidate URL in order; the for/else raises only when
        # no URL succeeded (i.e. the loop never reached `break`).
        for url in image_info['urls']:
            try:
                LOG.info("Attempting to download image from {0}".format(url))
                self._request = self._download_file(image_info, url)
            except errors.ImageDownloadError as e:
                failtime = time.time() - self._time
                log_msg = ('Image download failed. URL: {0}; time: {1} '
                           'seconds. Error: {2}')
                LOG.warning(log_msg.format(url, failtime, e.details))
                continue
            else:
                break
        else:
            msg = 'Image download failed for all URLs.'
            raise errors.ImageDownloadError(image_info['id'], msg)

    def _download_file(self, image_info, url):
        """Open a streaming HTTP GET for *url*.

        :returns: the `requests` response object, not yet consumed.
        :raises: ImageDownloadError on any non-200 status code.
        """
        no_proxy = image_info.get('no_proxy')
        if no_proxy:
            # NOTE(review): this mutates the process environment, so all
            # later requests in this process inherit the no_proxy value.
            os.environ['no_proxy'] = no_proxy
        proxies = image_info.get('proxies', {})
        # NOTE(review): no timeout is set; a stalled server could hang
        # the download indefinitely -- confirm whether that is intended.
        resp = requests.get(url, stream=True, proxies=proxies)
        if resp.status_code != 200:
            msg = ('Received status code {0} from {1}, expected 200. Response '
                   'body: {2}').format(resp.status_code, url, resp.text)
            raise errors.ImageDownloadError(image_info['id'], msg)
        return resp

    def __iter__(self):
        """Yield the image in IMAGE_CHUNK_SIZE chunks, updating the MD5."""
        for chunk in self._request.iter_content(IMAGE_CHUNK_SIZE):
            self._md5checksum.update(chunk)
            yield chunk

    def md5sum(self):
        """Return the hex MD5 digest of all chunks iterated so far."""
        return self._md5checksum.hexdigest()
|
|
|
|
|
|
|
|
|
|
|
|
def _verify_image(image_info, image_location, checksum):
    """Compare a computed MD5 checksum against the expected one.

    :param image_info: image metadata carrying the expected 'checksum'.
    :param image_location: path (or device) the image was written to,
        used only for logging and error reporting.
    :param checksum: the MD5 hex digest computed during download.
    :raises: ImageChecksumError when the checksums do not match.
    """
    LOG.debug('Verifying image at {0} against MD5 checksum '
              '{1}'.format(image_location, checksum))
    expected = image_info['checksum']
    if checksum != expected:
        LOG.error(errors.ImageChecksumError.details_str.format(
            image_location, image_info['id'], expected, checksum))
        raise errors.ImageChecksumError(image_location, image_info['id'],
                                        expected, checksum)
|
2014-01-14 16:53:46 -08:00
|
|
|
|
|
|
|
|
2014-01-10 13:15:13 -08:00
|
|
|
def _download_image(image_info):
    """Download an image to its local staging location and verify it.

    :param image_info: metadata describing the image and its URLs.
    :raises: ImageDownloadError if the image cannot be fetched or written.
    :raises: ImageChecksumError if the downloaded data fails MD5
        verification.
    """
    start = time.time()
    location = _image_location(image_info)
    download = ImageDownload(image_info, time_obj=start)

    with open(location, 'wb') as f:
        try:
            for chunk in download:
                f.write(chunk)
        except Exception as e:
            msg = 'Unable to write image to {0}. Error: {1}'.format(
                location, str(e))
            raise errors.ImageDownloadError(image_info['id'], msg)

    totaltime = time.time() - start
    LOG.info("Image downloaded from {0} in {1} seconds".format(location,
                                                               totaltime))
    _verify_image(image_info, location, download.md5sum())
|
2014-01-10 13:15:13 -08:00
|
|
|
|
|
|
|
|
2014-04-14 15:36:59 +04:00
|
|
|
def _validate_image_info(ext, image_info=None, **kwargs):
    """Validate the image_info dictionary passed to image commands.

    :param ext: the calling extension (unused; part of the validator
        signature).
    :param image_info: image metadata to validate.
    :raises: InvalidCommandParamsError if a required field is missing or
        malformed.
    """
    image_info = image_info or {}

    for field in ['id', 'urls', 'checksum']:
        if field not in image_info:
            msg = 'Image is missing \'{0}\' field.'.format(field)
            raise errors.InvalidCommandParamsError(msg)

    # isinstance() is the idiomatic type check; comparing type() directly
    # would also (needlessly) reject list subclasses.
    if not isinstance(image_info['urls'], list) or not image_info['urls']:
        raise errors.InvalidCommandParamsError(
            'Image \'urls\' must be a list with at least one element.')

    if (not isinstance(image_info['checksum'], six.string_types)
            or not image_info['checksum']):
        raise errors.InvalidCommandParamsError(
            'Image \'checksum\' must be a non-empty string.')
|
2014-02-06 16:40:01 -08:00
|
|
|
|
|
|
|
|
2014-03-25 18:00:10 +04:00
|
|
|
class StandbyExtension(base.BaseAgentExtension):
    """Agent extension implementing image caching, deploy and power commands."""

    def __init__(self, agent=None):
        super(StandbyExtension, self).__init__(agent=agent)

        # ID of the image most recently written to disk; used to skip a
        # re-download/re-write on repeated cache/prepare calls.
        self.cached_image_id = None
        # Partition UUID dict returned by the last partition-image write
        # (None until an image has been written).
        self.partition_uuids = None

    def _cache_and_write_image(self, image_info, device):
        """Download the image locally, write it to *device*, record state."""
        _download_image(image_info)
        self.partition_uuids = _write_image(image_info, device)
        self.cached_image_id = image_info['id']

    def _stream_raw_image_onto_device(self, image_info, device):
        """Stream a raw image directly onto the device, skipping local cache.

        :param image_info: validated image metadata.
        :param device: target block device path.
        :raises: ImageDownloadError if writing to the device fails.
        :raises: ImageChecksumError if the streamed data fails MD5
            verification.
        """
        starttime = time.time()
        image_download = ImageDownload(image_info, time_obj=starttime)

        with open(device, 'wb+') as f:
            try:
                for chunk in image_download:
                    f.write(chunk)
            except Exception as e:
                msg = 'Unable to write image to device {0}. Error: {1}'.format(
                    device, str(e))
                raise errors.ImageDownloadError(image_info['id'], msg)

        totaltime = time.time() - starttime
        LOG.info("Image streamed onto device {0} in {1} "
                 "seconds".format(device, totaltime))
        # Verify if the checksum of the streamed image is correct
        _verify_image(image_info, device, image_download.md5sum())

    @base.async_command('cache_image', _validate_image_info)
    def cache_image(self, image_info=None, force=False):
        """Download and write an image unless it is already cached.

        :param image_info: validated image metadata.
        :param force: when True, re-download and re-write even if the
            same image id is already cached.
        :returns: a human-readable status message.
        """
        LOG.debug('Caching image %s', image_info['id'])
        device = hardware.dispatch_to_managers('get_os_install_device')

        msg = 'image ({0}) already present on device {1} '

        if self.cached_image_id != image_info['id'] or force:
            # NOTE(review): this debug line is also emitted when nothing
            # was previously cached (cached_image_id is None) -- confirm
            # whether that is intended.
            LOG.debug('Already had %s cached, overwriting',
                      self.cached_image_id)
            self._cache_and_write_image(image_info, device)
            msg = 'image ({0}) cached to device {1} '

        result_msg = _message_format(msg, image_info, device,
                                     self.partition_uuids)

        LOG.info(result_msg)
        return result_msg

    @base.async_command('prepare_image', _validate_image_info)
    def prepare_image(self,
                      image_info=None,
                      configdrive=None):
        """Deploy an image (and optional configdrive) to the install device.

        Raw whole-disk images may be streamed straight onto the device
        when the caller opts in via image_info['stream_raw_images'];
        otherwise the image is downloaded locally first.

        :param image_info: validated image metadata.
        :param configdrive: optional configdrive URL or encoded contents
            (whole-disk images only; partition images get their
            configdrive via ironic-lib).
        :returns: a human-readable status message.
        """
        LOG.debug('Preparing image %s', image_info['id'])
        device = hardware.dispatch_to_managers('get_os_install_device')

        disk_format = image_info.get('disk_format')
        stream_raw_images = image_info.get('stream_raw_images', False)
        # don't write image again if already cached
        if self.cached_image_id != image_info['id']:

            if self.cached_image_id is not None:
                LOG.debug('Already had %s cached, overwriting',
                          self.cached_image_id)

            if (stream_raw_images and disk_format == 'raw' and
                    image_info.get('image_type') != 'partition'):
                self._stream_raw_image_onto_device(image_info, device)
            else:
                self._cache_and_write_image(image_info, device)

        # the configdrive creation is taken care by ironic-lib's
        # work_on_disk().
        if image_info.get('image_type') != 'partition':
            if configdrive is not None:
                _write_configdrive_to_partition(configdrive, device)

        msg = 'image ({0}) written to device {1} '
        result_msg = _message_format(msg, image_info, device,
                                     self.partition_uuids)
        LOG.info(result_msg)
        return result_msg

    def _run_shutdown_script(self, parameter):
        """Run the shutdown helper script with *parameter* ('-r' or '-h').

        :raises: SystemRebootError if the script exits non-zero.
        """
        script = _path_to_script('shell/shutdown.sh')
        command = ['/bin/bash', script, parameter]
        # this should never return if successful
        try:
            stdout, stderr = utils.execute(*command, check_exit_code=[0])
        except processutils.ProcessExecutionError as e:
            raise errors.SystemRebootError(e.exit_code, e.stdout, e.stderr)

    @base.async_command('run_image')
    def run_image(self):
        """Reboot the machine into the deployed image."""
        LOG.info('Rebooting system')
        self._run_shutdown_script('-r')

    @base.async_command('power_off')
    def power_off(self):
        """Power the machine off."""
        LOG.info('Powering off system')
        self._run_shutdown_script('-h')

    @base.sync_command('sync')
    def sync(self):
        """Flush file system buffers forcing changed blocks to disk.

        :raises: CommandExecutionError if flushing file system buffers fails.
        """
        LOG.debug('Flushing file system buffers')
        try:
            utils.execute('sync')
        except processutils.ProcessExecutionError as e:
            error_msg = 'Flushing file system buffers failed. Error: %s' % e
            LOG.error(error_msg)
            raise errors.CommandExecutionError(error_msg)
|