2014-04-09 14:31:31 -07:00
|
|
|
# Copyright 2013 Rackspace, Inc.
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2013-09-23 23:29:55 -07:00
|
|
|
|
2014-04-10 11:16:05 -07:00
|
|
|
import base64
|
|
|
|
import gzip
|
2014-01-15 12:48:33 -08:00
|
|
|
import hashlib
|
2014-01-13 16:36:55 -08:00
|
|
|
import os
|
2014-01-10 13:15:13 -08:00
|
|
|
import requests
|
2014-04-10 11:16:05 -07:00
|
|
|
import six
|
2014-01-24 14:20:14 -08:00
|
|
|
import time
|
2014-01-10 13:15:13 -08:00
|
|
|
|
2014-12-01 18:17:35 +02:00
|
|
|
from oslo_concurrency import processutils
|
2015-03-09 23:48:47 +01:00
|
|
|
from oslo_log import log
|
2014-12-01 18:17:35 +02:00
|
|
|
|
2014-03-19 16:19:52 -07:00
|
|
|
from ironic_python_agent import errors
|
2014-04-11 16:46:36 -07:00
|
|
|
from ironic_python_agent.extensions import base
|
2014-03-19 16:19:52 -07:00
|
|
|
from ironic_python_agent import hardware
|
2014-04-10 11:16:05 -07:00
|
|
|
from ironic_python_agent import utils
|
2013-12-17 15:13:30 -08:00
|
|
|
|
2014-03-17 15:17:27 -07:00
|
|
|
# Module-level logger for this extension.
LOG = log.getLogger(__name__)

# Chunk size used both when streaming an image download to disk and when
# reading the file back for checksum verification.
IMAGE_CHUNK_SIZE = 1024 * 1024  # 1MB
|
|
|
|
|
2013-09-23 23:29:55 -07:00
|
|
|
|
2014-01-17 12:40:29 -08:00
|
|
|
def _configdrive_location():
|
|
|
|
return '/tmp/configdrive'
|
|
|
|
|
|
|
|
|
2014-01-10 13:15:13 -08:00
|
|
|
def _image_location(image_info):
|
2014-03-11 13:31:19 -07:00
|
|
|
return '/tmp/{0}'.format(image_info['id'])
|
2014-01-10 13:15:13 -08:00
|
|
|
|
|
|
|
|
2014-01-17 15:59:56 -08:00
|
|
|
def _path_to_script(script):
|
|
|
|
cwd = os.path.dirname(os.path.realpath(__file__))
|
2014-04-29 14:55:45 -07:00
|
|
|
return os.path.join(cwd, '..', script)
|
2014-01-17 15:59:56 -08:00
|
|
|
|
|
|
|
|
2014-01-28 15:55:07 -08:00
|
|
|
def _write_image(image_info, device):
    """Write the previously staged image onto the target block device.

    Delegates the copy to the shell/write_image.sh helper script and logs
    how long the write took.

    :param image_info: image description dict; 'id' selects the staged
        file under /tmp.
    :param device: block device path to write to.
    :raises: ImageWriteError if the helper script exits non-zero.
    """
    start = time.time()
    image = _image_location(image_info)
    script = _path_to_script('shell/write_image.sh')
    command = ['/bin/bash', script, image, device]
    LOG.info('Writing image with command: {0}'.format(' '.join(command)))
    try:
        utils.execute(*command, check_exit_code=[0])
    except processutils.ProcessExecutionError as e:
        raise errors.ImageWriteError(device, e.exit_code, e.stdout, e.stderr)
    elapsed = time.time() - start
    LOG.info('Image {0} written to device {1} in {2} seconds'.format(
        image, device, elapsed))
|
2014-01-10 13:15:13 -08:00
|
|
|
|
|
|
|
|
2014-11-19 16:08:16 -08:00
|
|
|
def _configdrive_is_url(configdrive):
|
|
|
|
return (configdrive.startswith('http://')
|
|
|
|
or configdrive.startswith('https://'))
|
|
|
|
|
|
|
|
|
|
|
|
def _download_configdrive_to_file(configdrive, filename):
    """Fetch configdrive data from a URL and write it to filename.

    The downloaded payload is the same base64-encoded gzip blob callers
    may also pass inline, so it is handed to _write_configdrive_to_file
    for decoding and writing.
    """
    response = requests.get(configdrive)
    _write_configdrive_to_file(response.content, filename)
|
|
|
|
|
|
|
|
|
2014-04-10 11:16:05 -07:00
|
|
|
def _write_configdrive_to_file(configdrive, filename):
    """Decode configdrive data and write its contents to filename.

    :param configdrive: gzipped, base64-encoded configdrive contents.
    :param filename: path to write the decoded configdrive to.
    """
    LOG.debug('Writing configdrive to {0}'.format(filename))
    # configdrive data is base64'd, decode it first.  b64decode returns
    # bytes, so it must be wrapped in BytesIO: StringIO breaks on
    # Python 3 because gzip.GzipFile requires a binary file object.
    data = six.BytesIO(base64.b64decode(configdrive))
    gunzipped = gzip.GzipFile('configdrive', 'rb', 9, data)
    try:
        with open(filename, 'wb') as f:
            f.write(gunzipped.read())
    finally:
        # Close even if the write fails.
        gunzipped.close()
|
|
|
|
|
|
|
|
|
|
|
|
def _write_configdrive_to_partition(configdrive, device):
    """Stage the configdrive locally, then copy it onto the target device.

    The configdrive argument is either an HTTP(S) URL or inline
    base64-encoded gzip data; either way it is first materialized at the
    staging location, size-checked, and finally copied to disk by a
    helper script.

    :raises: ConfigDriveTooLargeError if the staged file exceeds 64MB.
    :raises: ConfigDriveWriteError if the copy script exits non-zero.
    """
    filename = _configdrive_location()
    if _configdrive_is_url(configdrive):
        stage = _download_configdrive_to_file
    else:
        stage = _write_configdrive_to_file
    stage(configdrive, filename)

    # check configdrive size before writing it
    filesize = os.stat(filename).st_size
    if filesize > (64 * 1024 * 1024):
        raise errors.ConfigDriveTooLargeError(filename, filesize)

    start = time.time()
    script = _path_to_script('shell/copy_configdrive_to_disk.sh')
    command = ['/bin/bash', script, filename, device]
    LOG.info('copying configdrive to disk with command {0}'.format(
        ' '.join(command)))

    try:
        utils.execute(*command, check_exit_code=[0])
    except processutils.ProcessExecutionError as e:
        raise errors.ConfigDriveWriteError(
            device, e.exit_code, e.stdout, e.stderr)

    LOG.info('configdrive copied from {0} to {1} in {2} seconds'.format(
        filename,
        device,
        time.time() - start))
|
2014-01-28 15:55:07 -08:00
|
|
|
|
|
|
|
|
2014-01-14 16:53:46 -08:00
|
|
|
def _request_url(image_info, url):
    """Open a streaming HTTP GET for one of the image's URLs.

    Honors the optional 'no_proxy' and 'proxies' entries in image_info.
    NOTE: 'no_proxy' is exported into this process's environment as a
    side effect.

    :returns: the streaming requests response object.
    :raises: ImageDownloadError on any non-200 status.
    """
    no_proxy = image_info.get('no_proxy')
    if no_proxy:
        os.environ['no_proxy'] = no_proxy
    response = requests.get(url, stream=True,
                            proxies=image_info.get('proxies', {}))
    if response.status_code == 200:
        return response
    msg = ('Received status code {0} from {1}, expected 200. Response '
           'body: {2}').format(response.status_code, url, response.text)
    raise errors.ImageDownloadError(image_info['id'], msg)
|
|
|
|
|
|
|
|
|
2014-01-10 13:15:13 -08:00
|
|
|
def _download_image(image_info):
    """Download the image to local storage and verify its checksum.

    The image's URLs are tried in order until one succeeds; the content
    is streamed to the staging path in IMAGE_CHUNK_SIZE pieces and then
    verified against the MD5 checksum in image_info.

    :raises: ImageDownloadError if every URL fails or the write fails.
    :raises: ImageChecksumError if the checksum does not match.
    """
    start = time.time()
    response = None
    for url in image_info['urls']:
        try:
            LOG.info("Attempting to download image from {0}".format(url))
            response = _request_url(image_info, url)
            break
        except errors.ImageDownloadError as e:
            elapsed = time.time() - start
            log_msg = ('Image download failed. URL: {0}; time: {1} seconds. '
                       'Error: {2}')
            LOG.warning(log_msg.format(url, elapsed, e.details))

    if response is None:
        msg = 'Image download failed for all URLs.'
        raise errors.ImageDownloadError(image_info['id'], msg)

    image_location = _image_location(image_info)
    with open(image_location, 'wb') as f:
        try:
            for chunk in response.iter_content(IMAGE_CHUNK_SIZE):
                f.write(chunk)
        except Exception as e:
            msg = 'Unable to write image to {0}. Error: {1}'.format(
                image_location, str(e))
            raise errors.ImageDownloadError(image_info['id'], msg)

    LOG.info("Image downloaded from {0} in {1} seconds".format(
        image_location, time.time() - start))

    _verify_image(image_info, image_location)
|
2014-01-10 13:15:13 -08:00
|
|
|
|
|
|
|
|
|
|
|
def _verify_image(image_info, image_location):
    """Verify the downloaded image against its expected MD5 checksum.

    :param image_info: dict with 'id' and 'checksum' (hex MD5) keys.
    :param image_location: path of the file to verify.
    :returns: True when the checksum matches.
    :raises: ImageChecksumError when it does not.
    """
    checksum = image_info['checksum']
    log_msg = 'Verifying image at {0} against MD5 checksum {1}'
    LOG.debug(log_msg.format(image_location, checksum))
    hash_ = hashlib.md5()
    # Open in binary mode: the image is arbitrary bytes and
    # md5.update() requires bytes; text mode would attempt (and likely
    # fail) to decode the data under Python 3.
    with open(image_location, 'rb') as image:
        while True:
            data = image.read(IMAGE_CHUNK_SIZE)
            if not data:
                break
            hash_.update(data)
    hash_digest = hash_.hexdigest()
    if hash_digest == checksum:
        return True

    LOG.error(errors.ImageChecksumError.details_str.format(
        image_location, image_info['id'], checksum, hash_digest))

    raise errors.ImageChecksumError(image_location, image_info['id'], checksum,
                                    hash_digest)
|
2014-01-10 13:15:13 -08:00
|
|
|
|
|
|
|
|
2014-04-14 15:36:59 +04:00
|
|
|
def _validate_image_info(ext, image_info=None, **kwargs):
    """Validate the image_info dict passed to a standby command.

    :param ext: the extension instance (unused; part of the validator
        signature expected by base.async_command).
    :param image_info: dict expected to carry 'id', 'urls' (a non-empty
        list) and 'checksum' (a non-empty string).
    :raises: InvalidCommandParamsError on any missing or invalid field.
    """
    image_info = image_info or {}

    for field in ['id', 'urls', 'checksum']:
        if field not in image_info:
            msg = 'Image is missing \'{0}\' field.'.format(field)
            raise errors.InvalidCommandParamsError(msg)

    # isinstance is the idiomatic type check; it additionally accepts
    # list subclasses, which is backward-compatible for callers.
    if not isinstance(image_info['urls'], list) or not image_info['urls']:
        raise errors.InvalidCommandParamsError(
            'Image \'urls\' must be a list with at least one element.')

    if (not isinstance(image_info['checksum'], six.string_types)
            or not image_info['checksum']):
        raise errors.InvalidCommandParamsError(
            'Image \'checksum\' must be a non-empty string.')
|
2014-02-06 16:40:01 -08:00
|
|
|
|
|
|
|
|
2014-03-25 18:00:10 +04:00
|
|
|
class StandbyExtension(base.BaseAgentExtension):
    """Agent extension implementing the image deploy ('standby') commands."""

    def __init__(self, agent=None):
        super(StandbyExtension, self).__init__(agent=agent)

        # id of the image most recently written to disk, so repeated
        # cache/prepare calls can skip a redundant download and write.
        self.cached_image_id = None

    @base.async_command('cache_image', _validate_image_info)
    def cache_image(self, image_info=None, force=False):
        """Download an image and write it to the OS install device.

        :param image_info: validated image description (see
            _validate_image_info).
        :param force: re-download and write even when this image id is
            already cached.
        :returns: a human-readable result message.
        """
        LOG.debug('Caching image %s', image_info['id'])
        device = hardware.dispatch_to_managers('get_os_install_device')

        result_msg = 'image ({0}) already present on device {1}'

        if self.cached_image_id != image_info['id'] or force:
            # Only claim an overwrite when something was actually cached;
            # previously this logged 'Already had None cached' on the
            # first run.
            if self.cached_image_id is not None:
                LOG.debug('Already had %s cached, overwriting',
                          self.cached_image_id)
            _download_image(image_info)
            _write_image(image_info, device)
            self.cached_image_id = image_info['id']
            result_msg = 'image ({0}) cached to device {1}'

        msg = result_msg.format(image_info['id'], device)
        LOG.info(msg)
        return msg

    @base.async_command('prepare_image', _validate_image_info)
    def prepare_image(self,
                      image_info=None,
                      configdrive=None):
        """Write an image (and optionally a configdrive) to the device.

        :param image_info: validated image description (see
            _validate_image_info).
        :param configdrive: optional configdrive, either an HTTP(S) URL
            or inline base64-encoded gzip data.
        :returns: a human-readable result message.
        """
        LOG.debug('Preparing image %s', image_info['id'])
        device = hardware.dispatch_to_managers('get_os_install_device')

        # don't write image again if already cached
        if self.cached_image_id != image_info['id']:
            # See cache_image: only log the overwrite when an image was
            # actually cached before.
            if self.cached_image_id is not None:
                LOG.debug('Already had %s cached, overwriting',
                          self.cached_image_id)
            _download_image(image_info)
            _write_image(image_info, device)
            self.cached_image_id = image_info['id']

        if configdrive is not None:
            _write_configdrive_to_partition(configdrive, device)

        msg = ('image ({0}) written to device {1}'.format(
               image_info['id'], device))
        LOG.info(msg)
        return msg

    def _run_shutdown_script(self, parameter):
        """Run shell/shutdown.sh with the given flag (e.g. '-r' or '-h').

        :raises: SystemRebootError if the script exits non-zero.
        """
        script = _path_to_script('shell/shutdown.sh')
        command = ['/bin/bash', script, parameter]
        # this should never return if successful
        try:
            utils.execute(*command, check_exit_code=[0])
        except processutils.ProcessExecutionError as e:
            raise errors.SystemRebootError(e.exit_code, e.stdout, e.stderr)

    @base.async_command('run_image')
    def run_image(self):
        """Reboot the system via the shutdown helper script."""
        LOG.info('Rebooting system')
        self._run_shutdown_script('-r')

    @base.async_command('power_off')
    def power_off(self):
        """Power the system off via the shutdown helper script."""
        LOG.info('Powering off system')
        self._run_shutdown_script('-h')
|