Re-raise same exception type in export_stream
tripleo-container-image-prepare can consume a lot of memory during the export. Previously, when a MemoryError was raised by Python, the real failure was hidden: the exception carried no message and the code re-raised it as an IOError. This patch catches MemoryError specifically, logs an accurate message, and re-raises the same exception type.

Change-Id: I6de47600f430d3af5e3052ff456cd718a0df0d76
parent 26bd0efd26
commit 9d7a9b83ed
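Why the change matters: a MemoryError usually carries an empty message, so wrapping it in an IOError produced an error with no useful detail and the wrong type. Below is a minimal, self-contained sketch of the old and new behaviour; the function names are illustrative only and are not part of the patch:

    import logging

    LOG = logging.getLogger(__name__)

    def write_old(save):
        # Old behaviour: every failure becomes an IOError. Because a
        # MemoryError typically has no message, the IOError is empty too.
        try:
            save()
        except Exception as e:
            raise IOError('Write Failure: {}'.format(e))

    def write_new(save):
        # New behaviour: catch MemoryError first, log an accurate message
        # and re-raise the same exception type.
        try:
            save()
        except MemoryError as e:
            msg = 'Memory Error: {}'.format(e)
            LOG.error(msg)
            raise MemoryError(msg)
        except Exception as e:
            msg = 'Write Failure: {}'.format(e)
            LOG.error(msg)
            raise IOError(msg)

With write_old a caller only ever sees an IOError with an empty reason; with write_new the caller sees a MemoryError and can react to the out-of-memory condition specifically.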
@@ -88,6 +88,12 @@ def export_stream(target_url, layer, layer_stream, verify_digest=True):
     length = 0
     calc_digest = hashlib.sha256()
 
+    def remove_layer(image, blob_path):
+        if os.path.isfile(blob_path):
+            os.remove(blob_path)
+            LOG.error('[%s] Broken layer found and removed %s' %
+                      (image, blob_path))
+
     try:
         with open(blob_path, 'wb') as f:
             count = 0
@@ -102,13 +108,15 @@ def export_stream(target_url, layer, layer_stream, verify_digest=True):
                 length += len(chunk)
                 LOG.debug('[%s] Written %i bytes for %s' %
                           (image, length, digest))
+    except MemoryError as e:
+        memory_error = '[{}] Memory Error: {}'.format(image, str(e))
+        LOG.error(memory_error)
+        remove_layer(image, blob_path)
+        raise MemoryError(memory_error)
     except Exception as e:
         write_error = '[{}] Write Failure: {}'.format(image, str(e))
         LOG.error(write_error)
-        if os.path.isfile(blob_path):
-            os.remove(blob_path)
-            LOG.error('[%s] Broken layer found and removed %s' %
-                      (image, blob_path))
+        remove_layer(image, blob_path)
         raise IOError(write_error)
     else:
         LOG.info('[%s] Layer written successfully %s' % (image, blob_path))
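With the patch applied, callers can tell an out-of-memory condition apart from an ordinary write failure by exception type. A hedged caller-side sketch follows; it assumes a locally configured image export location, and the input values simply mirror the new unit test:

    import hashlib
    import io
    import zlib

    from six.moves.urllib.parse import urlparse

    from tripleo_common.image import image_export

    blob_compressed = zlib.compress(b'The Blob')
    target_url = urlparse('docker://localhost:8787/t/nova-api:latest')
    layer = {'digest': 'sha256:' + hashlib.sha256(blob_compressed).hexdigest()}
    layer_stream = io.BytesIO(blob_compressed)

    try:
        image_export.export_stream(target_url, layer, layer_stream,
                                   verify_digest=False)
    except MemoryError:
        # Out of memory: retrying the same export is unlikely to help,
        # so surface the error to the operator.
        raise
    except IOError:
        # Ordinary write failure; the partial layer has already been
        # removed by export_stream before it re-raised.
        raise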
@@ -16,6 +16,7 @@
 import hashlib
 import io
 import json
+import mock
 import os
 import requests
 import shutil
@@ -105,6 +106,23 @@ class TestImageExport(base.TestCase):
         with open(blob_path, 'rb') as f:
             self.assertEqual(blob_compressed, f.read())
 
+    @mock.patch('tripleo_common.image.image_export.open',
+                side_effect=MemoryError())
+    def test_export_stream_memory_error(self, mock_open):
+        blob_data = six.b('The Blob')
+        blob_compressed = zlib.compress(blob_data)
+        calc_digest = hashlib.sha256()
+        calc_digest.update(blob_compressed)
+
+        target_url = urlparse('docker://localhost:8787/t/nova-api:latest')
+        layer = {
+            'digest': 'sha256:somethingelse'
+        }
+        calc_digest = hashlib.sha256()
+        layer_stream = io.BytesIO(blob_compressed)
+        self.assertRaises(MemoryError, image_export.export_stream,
+                          target_url, layer, layer_stream, verify_digest=False)
+
     def test_export_stream_verify_failed(self):
         blob_data = six.b('The Blob')
         blob_compressed = zlib.compress(blob_data)
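The new test drives the MemoryError path by patching open inside the image_export module so that the very first call raises via mock's side_effect. The mechanism is plain mock behaviour; a minimal standalone sketch is below (the save() helper and the 'builtins.open' patch target are illustrative, not taken from the patch):

    import mock  # the standalone mock package; unittest.mock behaves the same

    def save(path, data):
        with open(path, 'wb') as f:
            f.write(data)

    # While the patch is active, every call to open() raises the exception
    # given as side_effect instead of opening a file.
    with mock.patch('builtins.open', side_effect=MemoryError()):
        try:
            save('/tmp/blob', b'data')
        except MemoryError:
            print('open() raised MemoryError, as configured')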