Multihash Implementation for Glance

Adds the ability to compute a "multihash" (see the Glance spec
for what this is exactly).  To maintain backward compatibility,
a new store_add_to_backend_with_multihash function is added.
Backward compatibility for each store's add() method is achieved
by a back_compat_add wrapper.

Co-Authored-by: Scott McClymont <scott.mcclymont@verizonwireless.com>
Co-Authored-by: Brian Rosmaita <rosmaita.fossdev@gmail.com>

Change-Id: I063d0900b7dc7e0d94dfb685971eb9b17ed67c7b
Partially-implements: blueprint multihash
Author: Scott McClymont
Date: 2017-12-18 19:03:18 +00:00
Committed-by: Brian Rosmaita
Parent: baa663ec5c
Commit: ba9808cebb

22 changed files with 1075 additions and 259 deletions
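
For orientation, a minimal sketch of the new call path, using a mock
store in the style of the unit tests included in this change; the image
ID, data, and return values below are illustrative only, and a
glance_store with this change applied is assumed:

    import mock  # on Python 3: from unittest import mock

    from glance_store import backend

    store = mock.Mock()
    store.add.return_value = ('file:///ab/cde/fgh', 10, 'checksum',
                              'multihash', {})

    # New-style call: pass a hashlib algorithm identifier and get back
    # a 5-tuple that includes the multihash.
    (location, size, checksum, multihash,
     metadata) = backend.store_add_to_backend_with_multihash(
        'some-image-id', b'0123456789', 10, 'sha256', store)

    # Existing callers are unaffected: store_add_to_backend() still
    # returns the old 4-tuple (location, size, checksum, metadata).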


@@ -645,8 +645,9 @@ class Store(glance_store.driver.Store):
                                   "internal error."))
             return 0

+    @glance_store.driver.back_compat_add
     @capabilities.check
-    def add(self, image_id, image_file, image_size, context=None,
+    def add(self, image_id, image_file, image_size, hashing_algo, context=None,
             verifier=None):
         """
         Stores an image file with supplied identifier to the backend
@@ -656,19 +657,21 @@ class Store(glance_store.driver.Store):
         :param image_id: The opaque image identifier
         :param image_file: The image data to write, as a file-like object
         :param image_size: The size of the image data to write, in bytes
+        :param hashing_algo: A hashlib algorithm identifier (string)
         :param context: The request context
         :param verifier: An object used to verify signatures for images
-        :returns: tuple of URL in backing store, bytes written, checksum
-                  and a dictionary with storage system specific information
+        :returns: tuple of: (1) URL in backing store, (2) bytes written,
+                  (3) checksum, (4) multihash value, and (5) a dictionary
+                  with storage system specific information
         :raises: `glance_store.exceptions.Duplicate` if the image already
-                 existed
+                 exists
         """
         self._check_context(context, require_tenant=True)
         client = get_cinderclient(self.conf, context,
                                   backend=self.backend_group)
+        os_hash_value = hashlib.new(str(hashing_algo))
         checksum = hashlib.md5()
         bytes_written = 0
         size_gb = int(math.ceil(float(image_size) / units.Gi))
@@ -712,6 +715,7 @@ class Store(glance_store.driver.Store):
                         if not buf:
                             need_extend = False
                             break
+                        os_hash_value.update(buf)
                         checksum.update(buf)
                         if verifier:
                             verifier.update(buf)
@@ -757,6 +761,7 @@ class Store(glance_store.driver.Store):
         volume.update_all_metadata(metadata)
         volume.update_readonly_flag(volume, True)

+        hash_hex = os_hash_value.hexdigest()
         checksum_hex = checksum.hexdigest()

         LOG.debug("Wrote %(bytes_written)d bytes to volume %(volume_id)s "
@@ -769,8 +774,11 @@ class Store(glance_store.driver.Store):
         if self.backend_group:
             image_metadata['backend'] = u"%s" % self.backend_group

-        return ('cinder://%s' % volume.id, bytes_written,
-                checksum_hex, image_metadata)
+        return ('cinder://%s' % volume.id,
+                bytes_written,
+                checksum_hex,
+                hash_hex,
+                image_metadata)

     @capabilities.check
     def delete(self, location, context=None):


@@ -659,8 +659,9 @@ class Store(glance_store.driver.Store):

         return best_datadir

+    @glance_store.driver.back_compat_add
     @capabilities.check
-    def add(self, image_id, image_file, image_size, context=None,
+    def add(self, image_id, image_file, image_size, hashing_algo, context=None,
             verifier=None):
         """
         Stores an image file with supplied identifier to the backend
@@ -670,12 +671,15 @@ class Store(glance_store.driver.Store):
         :param image_id: The opaque image identifier
         :param image_file: The image data to write, as a file-like object
         :param image_size: The size of the image data to write, in bytes
+        :param hashing_algo: A hashlib algorithm identifier (string)
+        :param context: The request context
         :param verifier: An object used to verify signatures for images
-        :returns: tuple of URL in backing store, bytes written, checksum
-                  and a dictionary with storage system specific information
+        :returns: tuple of: (1) URL in backing store, (2) bytes written,
+                  (3) checksum, (4) multihash value, and (5) a dictionary
+                  with storage system specific information
         :raises: `glance_store.exceptions.Duplicate` if the image already
-                 existed
+                 exists

         :note:: By default, the backend writes the image data to a file
               `/<DATADIR>/<ID>`, where <DATADIR> is the value of
@@ -688,7 +692,7 @@ class Store(glance_store.driver.Store):
         if os.path.exists(filepath):
             raise exceptions.Duplicate(image=filepath)

+        os_hash_value = hashlib.new(str(hashing_algo))
         checksum = hashlib.md5()
         bytes_written = 0
         try:
@@ -696,6 +700,7 @@ class Store(glance_store.driver.Store):
             for buf in utils.chunkreadable(image_file,
                                            self.WRITE_CHUNKSIZE):
                 bytes_written += len(buf)
+                os_hash_value.update(buf)
                 checksum.update(buf)
                 if verifier:
                     verifier.update(buf)
@@ -711,14 +716,16 @@ class Store(glance_store.driver.Store):
             with excutils.save_and_reraise_exception():
                 self._delete_partial(filepath, image_id)

+        hash_hex = os_hash_value.hexdigest()
         checksum_hex = checksum.hexdigest()
         metadata = self._get_metadata(filepath)

-        LOG.debug(_("Wrote %(bytes_written)d bytes to %(filepath)s with "
-                    "checksum %(checksum_hex)s"),
+        LOG.debug(("Wrote %(bytes_written)d bytes to %(filepath)s with "
+                   "checksum %(checksum_hex)s and multihash %(hash_hex)s"),
                   {'bytes_written': bytes_written,
                    'filepath': filepath,
-                   'checksum_hex': checksum_hex})
+                   'checksum_hex': checksum_hex,
+                   'hash_hex': hash_hex})

         if self.backend_group:
             fstore_perm = getattr(
@@ -738,7 +745,11 @@ class Store(glance_store.driver.Store):
         if self.backend_group:
             metadata['backend'] = u"%s" % self.backend_group

-        return ('file://%s' % filepath, bytes_written, checksum_hex, metadata)
+        return ('file://%s' % filepath,
+                bytes_written,
+                checksum_hex,
+                hash_hex,
+                metadata)

     @staticmethod
     def _delete_partial(filepath, iid):


@@ -440,8 +440,9 @@ class Store(driver.Store):
             # Such exception is not dangerous for us so it will be just logged
             LOG.debug("Snapshot %s is unprotected already" % snap_name)

+    @driver.back_compat_add
     @capabilities.check
-    def add(self, image_id, image_file, image_size, context=None,
+    def add(self, image_id, image_file, image_size, hashing_algo, context=None,
             verifier=None):
         """
         Stores an image file with supplied identifier to the backend
@@ -451,14 +452,18 @@ class Store(driver.Store):
         :param image_id: The opaque image identifier
         :param image_file: The image data to write, as a file-like object
         :param image_size: The size of the image data to write, in bytes
+        :param hashing_algo: A hashlib algorithm identifier (string)
+        :param context: A context object
         :param verifier: An object used to verify signatures for images
-        :returns: tuple of URL in backing store, bytes written, checksum
-                  and a dictionary with storage system specific information
+        :returns: tuple of: (1) URL in backing store, (2) bytes written,
+                  (3) checksum, (4) multihash value, and (5) a dictionary
+                  with storage system specific information
         :raises: `glance_store.exceptions.Duplicate` if the image already
-                 existed
+                 exists
         """
         checksum = hashlib.md5()
+        os_hash_value = hashlib.new(str(hashing_algo))
         image_name = str(image_id)
         with self.get_connection(conffile=self.conf_file,
                                  rados_id=self.user) as conn:
@@ -502,6 +507,7 @@ class Store(driver.Store):
                             LOG.debug(_("writing chunk at offset %s") %
                                       (offset))
                             offset += image.write(chunk, offset)
+                            os_hash_value.update(chunk)
                             checksum.update(chunk)
                             if verifier:
                                 verifier.update(chunk)
@@ -534,7 +540,11 @@ class Store(driver.Store):
         if self.backend_group:
             metadata['backend'] = u"%s" % self.backend_group

-        return (loc.get_uri(), image_size, checksum.hexdigest(), metadata)
+        return (loc.get_uri(),
+                image_size,
+                checksum.hexdigest(),
+                os_hash_value.hexdigest(),
+                metadata)

     @capabilities.check
     def delete(self, location, context=None):


@@ -343,8 +343,9 @@ class Store(glance_store.driver.Store):
                   % image.name)
         return image.get_size()

+    @glance_store.driver.back_compat_add
     @capabilities.check
-    def add(self, image_id, image_file, image_size, context=None,
+    def add(self, image_id, image_file, image_size, hashing_algo, context=None,
             verifier=None):
         """
         Stores an image file with supplied identifier to the backend
@@ -354,11 +355,15 @@ class Store(glance_store.driver.Store):
         :param image_id: The opaque image identifier
         :param image_file: The image data to write, as a file-like object
         :param image_size: The size of the image data to write, in bytes
+        :param hashing_algo: A hashlib algorithm identifier (string)
+        :param context: A context object
         :param verifier: An object used to verify signatures for images
-        :returns: tuple of URL in backing store, bytes written, and checksum
+        :returns: tuple of: (1) URL in backing store, (2) bytes written,
+                  (3) checksum, (4) multihash value, and (5) a dictionary
+                  with storage system specific information
         :raises: `glance_store.exceptions.Duplicate` if the image already
-                 existed
+                 exists
         """
         image = SheepdogImage(self.addr, self.port, image_id,
@@ -377,6 +382,7 @@ class Store(glance_store.driver.Store):
         try:
             offset = 0
+            os_hash_value = hashlib.new(str(hashing_algo))
             checksum = hashlib.md5()
             chunks = utils.chunkreadable(image_file, self.WRITE_CHUNKSIZE)
             for chunk in chunks:
@@ -389,6 +395,7 @@ class Store(glance_store.driver.Store):
                     image.resize(offset + chunk_length)
                 image.write(chunk, offset, chunk_length)
                 offset += chunk_length
+                os_hash_value.update(chunk)
                 checksum.update(chunk)
                 if verifier:
                     verifier.update(chunk)
@@ -402,7 +409,11 @@ class Store(glance_store.driver.Store):
         if self.backend_group:
             metadata['backend'] = u"%s" % self.backend_group

-        return (location.get_uri(), offset, checksum.hexdigest(), metadata)
+        return (location.get_uri(),
+                offset,
+                checksum.hexdigest(),
+                os_hash_value.hexdigest(),
+                metadata)

     @capabilities.check
     def delete(self, location, context=None):


@@ -90,10 +90,12 @@ class BufferedReader(object):
     to ensure there is enough disk space available.
     """

-    def __init__(self, fd, checksum, total, verifier=None, backend_group=None):
+    def __init__(self, fd, checksum, os_hash_value, total, verifier=None,
+                 backend_group=None):
         self.fd = fd
         self.total = total
         self.checksum = checksum
+        self.os_hash_value = os_hash_value
         self.verifier = verifier
         self.backend_group = backend_group
         # maintain a pointer to use to update checksum and verifier
@@ -126,6 +128,7 @@ class BufferedReader(object):
             update = self.update_position - self._tmpfile.tell()
             if update < 0:
                 self.checksum.update(result[update:])
+                self.os_hash_value.update(result[update:])
                 if self.verifier:
                     self.verifier.update(result[update:])
                 self.update_position += abs(update)


@@ -906,9 +906,28 @@ class BaseStore(driver.Store):
                 LOG.exception(msg % {'container': container,
                                      'chunk': chunk})

+    @driver.back_compat_add
     @capabilities.check
-    def add(self, image_id, image_file, image_size,
-            context=None, verifier=None):
+    def add(self, image_id, image_file, image_size, hashing_algo,
+            context=None, verifier=None):
+        """
+        Stores an image file with supplied identifier to the backend
+        storage system and returns a tuple containing information
+        about the stored image.
+
+        :param image_id: The opaque image identifier
+        :param image_file: The image data to write, as a file-like object
+        :param image_size: The size of the image data to write, in bytes
+        :param hashing_algo: A hashlib algorithm identifier (string)
+        :param verifier: An object used to verify signatures for images
+
+        :returns: tuple of URL in backing store, bytes written, checksum,
+                  multihash value, and a dictionary with storage system
+                  specific information
+        :raises: `glance_store.exceptions.Duplicate` if something already
+                 exists at this location
+        """
+        os_hash_value = hashlib.new(str(hashing_algo))
         location = self.create_location(image_id, context=context)
         # initialize a manager with re-auth if image need to be splitted
         need_chunks = (image_size == 0) or (
@@ -925,17 +944,13 @@ class BaseStore(driver.Store):
         if not need_chunks:
             # Image size is known, and is less than large_object_size.
             # Send to Swift with regular PUT.
-            if verifier:
-                checksum = hashlib.md5()
-                reader = ChunkReader(image_file, checksum,
-                                     image_size, verifier)
-                obj_etag = manager.get_connection().put_object(
-                    location.container, location.obj,
-                    reader, content_length=image_size)
-            else:
-                obj_etag = manager.get_connection().put_object(
-                    location.container, location.obj,
-                    image_file, content_length=image_size)
+            checksum = hashlib.md5()
+            reader = ChunkReader(image_file, checksum,
+                                 os_hash_value, image_size,
+                                 verifier=verifier)
+            obj_etag = manager.get_connection().put_object(
+                location.container, location.obj,
+                reader, content_length=image_size)
         else:
             # Write the image into Swift in chunks.
             chunk_id = 1
@@ -972,7 +987,8 @@ class BaseStore(driver.Store):
                 chunk_name = "%s-%05d" % (location.obj, chunk_id)
                 with self.reader_class(
-                        image_file, checksum, chunk_size, verifier,
+                        image_file, checksum, os_hash_value,
+                        chunk_size, verifier,
                         backend_group=self.backend_group) as reader:
                     if reader.is_zero_size is True:
                         LOG.debug('Not writing zero-length chunk.')
@@ -1047,7 +1063,8 @@ class BaseStore(driver.Store):
                 metadata['backend'] = u"%s" % self.backend_group

             return (location.get_uri(credentials_included=include_creds),
-                    image_size, obj_etag, metadata)
+                    image_size, obj_etag, os_hash_value.hexdigest(),
+                    metadata)
         except swiftclient.ClientException as e:
             if e.http_status == http_client.CONFLICT:
                 msg = _("Swift already has an image at this location")
@@ -1590,10 +1607,11 @@ class MultiTenantStore(BaseStore):

 class ChunkReader(object):
-    def __init__(self, fd, checksum, total, verifier=None,
+    def __init__(self, fd, checksum, os_hash_value, total, verifier=None,
                  backend_group=None):
         self.fd = fd
         self.checksum = checksum
+        self.os_hash_value = os_hash_value
         self.total = total
         self.verifier = verifier
         self.backend_group = backend_group
@@ -1617,6 +1635,7 @@ class ChunkReader(object):
         result = self.do_read(i)
         self.bytes_read += len(result)
         self.checksum.update(result)
+        self.os_hash_value.update(result)
         if self.verifier:
             self.verifier.update(result)
         return result


@@ -258,16 +258,18 @@ def http_response_iterator(conn, response, size):

 class _Reader(object):

-    def __init__(self, data, verifier=None):
+    def __init__(self, data, hashing_algo, verifier=None):
         self._size = 0
         self.data = data
         self.checksum = hashlib.md5()
+        self.os_hash_value = hashlib.new(str(hashing_algo))
         self.verifier = verifier

     def read(self, size=None):
         result = self.data.read(size)
         self._size += len(result)
         self.checksum.update(result)
+        self.os_hash_value.update(result)
         if self.verifier:
             self.verifier.update(result)
         return result
@@ -554,8 +556,9 @@ class Store(glance_store.Store):
             cookie = list(vim_cookies)[0]
             return cookie.name + '=' + cookie.value

+    @glance_store.driver.back_compat_add
     @capabilities.check
-    def add(self, image_id, image_file, image_size, context=None,
+    def add(self, image_id, image_file, image_size, hashing_algo, context=None,
             verifier=None):
         """Stores an image file with supplied identifier to the backend
         storage system and returns a tuple containing information
@@ -564,17 +567,21 @@ class Store(glance_store.Store):
         :param image_id: The opaque image identifier
         :param image_file: The image data to write, as a file-like object
         :param image_size: The size of the image data to write, in bytes
+        :param hashing_algo: A hashlib algorithm identifier (string)
+        :param context: A context object
         :param verifier: An object used to verify signatures for images
-        :returns: tuple of URL in backing store, bytes written, checksum
-                  and a dictionary with storage system specific information
-        :raises: `glance.common.exceptions.Duplicate` if the image already
-                 existed
-                 `glance.common.exceptions.UnexpectedStatus` if the upload
-                 request returned an unexpected status. The expected responses
-                 are 201 Created and 200 OK.
+
+        :returns: tuple of: (1) URL in backing store, (2) bytes written,
+                  (3) checksum, (4) multihash value, and (5) a dictionary
+                  with storage system specific information
+        :raises: `glance_store.exceptions.Duplicate` if the image already
+                 exists
+        :raises: `glance.common.exceptions.UnexpectedStatus` if the upload
+                 request returned an unexpected status. The expected responses
+                 are 201 Created and 200 OK.
         """
         ds = self.select_datastore(image_size)
-        image_file = _Reader(image_file, verifier)
+        image_file = _Reader(image_file, hashing_algo, verifier)
         headers = {}
         if image_size > 0:
             headers.update({'Content-Length': six.text_type(image_size)})
@@ -638,8 +645,11 @@ class Store(glance_store.Store):
         if self.backend_group:
             metadata['backend'] = u"%s" % self.backend_group

-        return (loc.get_uri(), image_file.size,
-                image_file.checksum.hexdigest(), metadata)
+        return (loc.get_uri(),
+                image_file.size,
+                image_file.checksum.hexdigest(),
+                image_file.os_hash_value.hexdigest(),
+                metadata)

     @capabilities.check
     def get(self, location, offset=0, chunk_size=None, context=None):


@@ -1,4 +1,5 @@
 # Copyright 2010-2011 OpenStack Foundation
+# Copyright 2018 Verizon Wireless
 # All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -13,6 +14,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+import hashlib
 import logging

 from oslo_config import cfg
@@ -26,7 +28,6 @@ from glance_store import exceptions
 from glance_store.i18n import _
 from glance_store import location

-
 CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
@@ -438,6 +439,25 @@ def check_location_metadata(val, key=''):
               % dict(key=key, type=type(val)))


+def _check_metadata(store, metadata):
+    if not isinstance(metadata, dict):
+        msg = (_("The storage driver %(driver)s returned invalid "
+                 " metadata %(metadata)s. This must be a dictionary type")
+               % dict(driver=str(store), metadata=str(metadata)))
+        LOG.error(msg)
+        raise exceptions.BackendException(msg)
+    try:
+        check_location_metadata(metadata)
+    except exceptions.BackendException as e:
+        e_msg = (_("A bad metadata structure was returned from the "
+                   "%(driver)s storage driver: %(metadata)s. %(e)s.") %
+                 dict(driver=encodeutils.exception_to_unicode(store),
+                      metadata=encodeutils.exception_to_unicode(metadata),
+                      e=encodeutils.exception_to_unicode(e)))
+        LOG.error(e_msg)
+        raise exceptions.BackendException(e_msg)
+
+
 def store_add_to_backend(image_id, data, size, store, context=None,
                          verifier=None):
     """
@@ -461,25 +481,49 @@ def store_add_to_backend(image_id, data, size, store, context=None,
                                                      context=context,
                                                      verifier=verifier)
     if metadata is not None:
-        if not isinstance(metadata, dict):
-            msg = (_("The storage driver %(driver)s returned invalid "
-                     " metadata %(metadata)s. This must be a dictionary type")
-                   % dict(driver=str(store), metadata=str(metadata)))
-            LOG.error(msg)
-            raise exceptions.BackendException(msg)
-        try:
-            check_location_metadata(metadata)
-        except exceptions.BackendException as e:
-            e_msg = (_("A bad metadata structure was returned from the "
-                       "%(driver)s storage driver: %(metadata)s. %(e)s.") %
-                     dict(driver=encodeutils.exception_to_unicode(store),
-                          metadata=encodeutils.exception_to_unicode(metadata),
-                          e=encodeutils.exception_to_unicode(e)))
-            LOG.error(e_msg)
-            raise exceptions.BackendException(e_msg)
+        _check_metadata(store, metadata)
     return (location, size, checksum, metadata)


+def store_add_to_backend_with_multihash(
+        image_id, data, size, hashing_algo, store,
+        context=None, verifier=None):
+    """
+    A wrapper around a call to each store's add() method that requires
+    a hashing_algo identifier and returns a 5-tuple including the
+    "multihash" computed using the specified hashing_algo.  (This
+    is an enhanced version of store_add_to_backend(), which is left
+    as-is for backward compatibility.)
+
+    :param image_id: The image ID to which data is added
+    :param data: The data to be stored
+    :param size: The length of the data in bytes
+    :param store: The store to which the data is being added
+    :param hashing_algo: A hashlib algorithm identifier (string)
+    :param context: The request context
+    :param verifier: An object used to verify signatures for images
+    :return: The url location of the file,
+             the size amount of data,
+             the checksum of the data,
+             the multihash of the data,
+             the storage system's metadata dictionary for the location
+    :raises: ``glance_store.exceptions.BackendException``
+             ``glance_store.exceptions.UnknownHashingAlgo``
+    """
+    if hashing_algo not in hashlib.algorithms_available:
+        raise exceptions.UnknownHashingAlgo(algo=hashing_algo)
+
+    (location, size, checksum, multihash, metadata) = store.add(
+        image_id, data, size, hashing_algo, context=context, verifier=verifier)
+    if metadata is not None:
+        _check_metadata(store, metadata)
+    return (location, size, checksum, multihash, metadata)
+
+
 def add_to_backend(conf, image_id, data, size, scheme=None, context=None,
                    verifier=None):
     if scheme is None:


@@ -1,5 +1,6 @@
 # Copyright 2011 OpenStack Foundation
 # Copyright 2012 RedHat Inc.
+# Copyright 2018 Verizon Wireless
 # All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -16,12 +17,14 @@

 """Base class for all storage backends"""

+from functools import wraps
 import logging

 from oslo_config import cfg
 from oslo_utils import encodeutils
 from oslo_utils import importutils
 from oslo_utils import units
+import six

 from glance_store import capabilities
 from glance_store import exceptions
@@ -144,9 +147,13 @@ class Store(capabilities.StoreCapability):
         """
         raise NotImplementedError

+    # NOTE(rosmaita): use the @glance_store.driver.back_compat_add
+    # annotation on implementations for backward compatibility with
+    # pre-0.26.0 add().  Need backcompat because pre-0.26.0 returned
+    # a 4-tuple, this returns a 5-tuple
     @capabilities.check
-    def add(self, image_id, image_file, image_size, context=None,
-            verifier=None):
+    def add(self, image_id, image_file, image_size, hashing_algo,
+            context=None, verifier=None):
         """
         Stores an image file with supplied identifier to the backend
         storage system and returns a tuple containing information
@@ -155,11 +162,15 @@ class Store(capabilities.StoreCapability):
         :param image_id: The opaque image identifier
         :param image_file: The image data to write, as a file-like object
         :param image_size: The size of the image data to write, in bytes
-        :returns: tuple of URL in backing store, bytes written, checksum
-                  and a dictionary with storage system specific information
+        :param hashing_algo: A hashlib algorithm identifier (string)
+        :param context: A context object
+        :param verifier: An object used to verify signatures for images
+        :returns: tuple of: (1) URL in backing store, (2) bytes written,
+                  (3) checksum, (4) multihash value, and (5) a dictionary
+                  with storage system specific information
         :raises: `glance_store.exceptions.Duplicate` if the image already
-                 existed
+                 exists
         """
         raise NotImplementedError
@@ -190,3 +201,82 @@ class Store(capabilities.StoreCapability):
         write access for an image.
         """
         raise NotImplementedError
+
+
+def back_compat_add(store_add_fun):
+    """
+    Provides backward compatibility for the 0.26.0+ Store.add() function.
+
+    In 0.26.0, the 'hashing_algo' parameter is introduced and Store.add()
+    returns a 5-tuple containing a computed 'multihash' value.
+
+    This wrapper behaves as follows:
+
+    If no hashing_algo identifier is supplied as an argument, the response
+    is the pre-0.26.0 4-tuple of::
+
+        (backend_url, bytes_written, checksum, metadata_dict)
+
+    If a hashing_algo is supplied, the response is a 5-tuple::
+
+        (backend_url, bytes_written, checksum, multihash, metadata_dict)
+
+    The wrapper detects the presence of a 'hashing_algo' argument both
+    by examining named arguments and positionally.
+    """
+    @wraps(store_add_fun)
+    def add_adapter(*args, **kwargs):
+        """
+        Wrapper for the store 'add' function.  If no hashing_algo identifier
+        is supplied, the response is the pre-0.26.0 4-tuple of::
+
+            (backend_url, bytes_written, checksum, metadata_dict)
+
+        If a hashing_algo is supplied, the response is a 5-tuple::
+
+            (backend_url, bytes_written, checksum, multihash, metadata_dict)
+        """
+        # strategy: assume this until we determine otherwise
+        back_compat_required = True
+
+        # specify info about 0.26.0 Store.add() call (can't introspect
+        # this because the add method is wrapped by the capabilities
+        # check)
+        p_algo = 4
+        max_args = 7
+
+        num_args = len(args)
+        num_kwargs = len(kwargs)
+
+        if num_args + num_kwargs == max_args:
+            # everything is present, including hashing_algo
+            back_compat_required = False
+        elif ('hashing_algo' in kwargs or
+              (num_args >= p_algo + 1 and isinstance(args[p_algo],
+                                                     six.string_types))):
+            # there is a hashing_algo argument present
+            back_compat_required = False
+        else:
+            # this is a pre-0.26.0-style call, so let's figure out
+            # whether to insert the hashing_algo in the args or kwargs
+            if kwargs and 'image_' in ''.join(kwargs):
+                # if any of the image_* is named, everything after it
+                # must be named as well, so slap the algo into kwargs
+                kwargs['hashing_algo'] = 'md5'
+            else:
+                args = args[:p_algo] + ('md5',) + args[p_algo:]

+        # business time
+        (backend_url,
+         bytes_written,
+         checksum,
+         multihash,
+         metadata_dict) = store_add_fun(*args, **kwargs)
+
+        if back_compat_required:
+            return (backend_url, bytes_written, checksum, metadata_dict)
+
+        return (backend_url, bytes_written, checksum, multihash,
+                metadata_dict)
+
+    return add_adapter
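
As an illustration of the wrapper above, a store whose add() is decorated
with back_compat_add answers both calling conventions; the _DemoStore
below is a hypothetical stub written for this sketch, not part of the
change itself:

    import glance_store.driver as driver

    class _DemoStore(object):
        @driver.back_compat_add
        def add(self, image_id, image_file, image_size, hashing_algo,
                context=None, verifier=None):
            # stub 0.26.0-style add() returning the new 5-tuple
            return ('backend://%s' % image_id, image_size, 'checksum',
                    'multihash', {})

    store = _DemoStore()
    # pre-0.26.0 call: no hashing_algo, so the wrapper inserts 'md5'
    # and strips the multihash from the result
    assert len(store.add('id', b'', 0)) == 4
    # 0.26.0-style call: hashing_algo supplied, full 5-tuple comes back
    assert len(store.add('id', b'', 0, 'sha256')) == 5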


@@ -81,6 +81,10 @@ class NotFound(GlanceStoreException):
     message = _("Image %(image)s not found")


+class UnknownHashingAlgo(GlanceStoreException):
+    message = _("Unknown hashing algorithm identifier: %(algo)s")
+
+
 class UnknownScheme(GlanceStoreException):
     message = _("Unknown scheme '%(scheme)s' found in URI")


@@ -13,6 +13,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.

+import hashlib
 import logging

 from oslo_config import cfg
@@ -280,6 +281,25 @@ def add(conf, image_id, data, size, backend, context=None,
                              verifier)


+def _check_metadata(store, metadata):
+    if not isinstance(metadata, dict):
+        msg = (_("The storage driver %(driver)s returned invalid "
+                 " metadata %(metadata)s. This must be a dictionary type")
+               % dict(driver=str(store), metadata=str(metadata)))
+        LOG.error(msg)
+        raise exceptions.BackendException(msg)
+    try:
+        check_location_metadata(metadata)
+    except exceptions.BackendException as e:
+        e_msg = (_("A bad metadata structure was returned from the "
+                   "%(driver)s storage driver: %(metadata)s. %(e)s.") %
+                 dict(driver=encodeutils.exception_to_unicode(store),
+                      metadata=encodeutils.exception_to_unicode(metadata),
+                      e=encodeutils.exception_to_unicode(e)))
+        LOG.error(e_msg)
+        raise exceptions.BackendException(e_msg)
+
+
 def store_add_to_backend(image_id, data, size, store, context=None,
                          verifier=None):
     """
@@ -305,25 +325,49 @@ def store_add_to_backend(image_id, data, size, store, context=None,
                                                      context=context,
                                                      verifier=verifier)
     if metadata is not None:
-        if not isinstance(metadata, dict):
-            msg = (_("The storage driver %(driver)s returned invalid "
-                     " metadata %(metadata)s. This must be a dictionary type")
-                   % dict(driver=str(store), metadata=str(metadata)))
-            LOG.error(msg)
-            raise exceptions.BackendException(msg)
-        try:
-            check_location_metadata(metadata)
-        except exceptions.BackendException as e:
-            e_msg = (_("A bad metadata structure was returned from the "
-                       "%(driver)s storage driver: %(metadata)s. %(e)s.") %
-                     dict(driver=encodeutils.exception_to_unicode(store),
-                          metadata=encodeutils.exception_to_unicode(metadata),
-                          e=encodeutils.exception_to_unicode(e)))
-            LOG.error(e_msg)
-            raise exceptions.BackendException(e_msg)
+        _check_metadata(store, metadata)
     return (location, size, checksum, metadata)


+def store_add_to_backend_with_multihash(
+        image_id, data, size, hashing_algo, store,
+        context=None, verifier=None):
+    """
+    A wrapper around a call to each store's add() method that requires
+    a hashing_algo identifier and returns a 5-tuple including the
+    "multihash" computed using the specified hashing_algo.  (This
+    is an enhanced version of store_add_to_backend(), which is left
+    as-is for backward compatibility.)
+
+    :param image_id: The image ID to which data is added
+    :param data: The data to be stored
+    :param size: The length of the data in bytes
+    :param store: The store to which the data is being added
+    :param hashing_algo: A hashlib algorithm identifier (string)
+    :param context: The request context
+    :param verifier: An object used to verify signatures for images
+    :return: The url location of the file,
+             the size amount of data,
+             the checksum of the data,
+             the multihash of the data,
+             the storage system's metadata dictionary for the location
+    :raises: ``glance_store.exceptions.BackendException``
+             ``glance_store.exceptions.UnknownHashingAlgo``
+    """
+    if hashing_algo not in hashlib.algorithms_available:
+        raise exceptions.UnknownHashingAlgo(algo=hashing_algo)
+
+    (location, size, checksum, multihash, metadata) = store.add(
+        image_id, data, size, hashing_algo, context=context, verifier=verifier)
+    if metadata is not None:
+        _check_metadata(store, metadata)
+    return (location, size, checksum, multihash, metadata)
+
+
 def check_location_metadata(val, key=''):
     if isinstance(val, dict):
         for key in val:


@@ -31,11 +31,14 @@ class TestStoreAddToBackend(base.StoreBaseTest):
         self.size = len(self.data)
         self.location = "file:///ab/cde/fgh"
         self.checksum = "md5"
+        self.multihash = 'multihash'
+        self.default_hash_algo = 'md5'
+        self.hash_algo = 'sha256'

     def _bad_metadata(self, in_metadata):
         mstore = mock.Mock()
-        mstore.add.return_value = (self.location, self.size,
-                                   self.checksum, in_metadata)
+        mstore.add.return_value = (self.location, self.size, self.checksum,
+                                   in_metadata)
         mstore.__str__ = lambda self: "hello"
         mstore.__unicode__ = lambda self: "hello"
@@ -47,13 +50,31 @@ class TestStoreAddToBackend(base.StoreBaseTest):
                           mstore)

         mstore.add.assert_called_once_with(self.image_id, mock.ANY,
-                                           self.size, context=None,
-                                           verifier=None)
+                                           self.size,
+                                           context=None, verifier=None)
+
+        newstore = mock.Mock()
+        newstore.add.return_value = (self.location, self.size, self.checksum,
+                                     self.multihash, in_metadata)
+        newstore.__str__ = lambda self: "hello"
+        newstore.__unicode__ = lambda self: "hello"
+
+        self.assertRaises(exceptions.BackendException,
+                          backend.store_add_to_backend_with_multihash,
+                          self.image_id,
+                          self.data,
+                          self.size,
+                          self.hash_algo,
+                          newstore)
+
+        newstore.add.assert_called_once_with(self.image_id, mock.ANY,
+                                             self.size, self.hash_algo,
+                                             context=None, verifier=None)

     def _good_metadata(self, in_metadata):
         mstore = mock.Mock()
-        mstore.add.return_value = (self.location, self.size,
-                                   self.checksum, in_metadata)
+        mstore.add.return_value = (self.location, self.size, self.checksum,
+                                   in_metadata)

         (location,
          size,
@@ -72,6 +93,30 @@ class TestStoreAddToBackend(base.StoreBaseTest):
         self.assertEqual(self.checksum, checksum)
         self.assertEqual(in_metadata, metadata)

+        newstore = mock.Mock()
+        newstore.add.return_value = (self.location, self.size, self.checksum,
+                                     self.multihash, in_metadata)
+        (location,
+         size,
+         checksum,
+         multihash,
+         metadata) = backend.store_add_to_backend_with_multihash(
+            self.image_id,
+            self.data,
+            self.size,
+            self.hash_algo,
+            newstore)
+
+        newstore.add.assert_called_once_with(self.image_id, mock.ANY,
+                                             self.size, self.hash_algo,
+                                             context=None, verifier=None)
+        self.assertEqual(self.location, location)
+        self.assertEqual(self.size, size)
+        self.assertEqual(self.checksum, checksum)
+        self.assertEqual(self.multihash, multihash)
+        self.assertEqual(in_metadata, metadata)
+
     def test_empty(self):
         metadata = {}
         self._good_metadata(metadata)


@@ -61,6 +61,7 @@ class TestCinderStore(base.StoreBaseTest,
                                               user='fake_user',
                                               auth_token='fake_token',
                                               tenant='fake_tenant')
+        self.hash_algo = 'sha256'

     def test_get_cinderclient(self):
         cc = cinder.get_cinderclient(self.conf, self.context)
@@ -290,6 +291,7 @@ class TestCinderStore(base.StoreBaseTest,
         expected_file_contents = b"*" * expected_size
         image_file = six.BytesIO(expected_file_contents)
         expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+        expected_multihash = hashlib.sha256(expected_file_contents).hexdigest()
         expected_location = 'cinder://%s' % fake_volume.id
         fake_client = FakeObject(auth_token=None, management_url=None)
         fake_volume.manager.get.return_value = fake_volume
@@ -306,14 +308,13 @@ class TestCinderStore(base.StoreBaseTest,
                                side_effect=fake_open):
             mock_cc.return_value = FakeObject(client=fake_client,
                                               volumes=fake_volumes)
-            loc, size, checksum, _ = self.store.add(expected_image_id,
-                                                    image_file,
-                                                    expected_size,
-                                                    self.context,
-                                                    verifier)
+            loc, size, checksum, multihash, _ = self.store.add(
+                expected_image_id, image_file, expected_size, self.hash_algo,
+                self.context, verifier)
             self.assertEqual(expected_location, loc)
             self.assertEqual(expected_size, size)
             self.assertEqual(expected_checksum, checksum)
+            self.assertEqual(expected_multihash, multihash)
             fake_volumes.create.assert_called_once_with(
                 1,
                 name='image-%s' % expected_image_id,


@@ -0,0 +1,375 @@
+# Copyright 2018 Verizon Wireless
+# All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import hashlib
+
+from oslotest import base
+
+import glance_store.driver as driver
+
+
+class _FakeStore(object):
+
+    @driver.back_compat_add
+    def add(self, image_id, image_file, image_size, hashing_algo,
+            context=None, verifier=None):
+        """This is a 0.26.0+ add, returns a 5-tuple"""
+        hasher = hashlib.new(hashing_algo)
+        # assume 'image_file' will be bytes for these tests
+        hasher.update(image_file)
+        backend_url = "backend://%s" % image_id
+        bytes_written = len(image_file)
+        checksum = hashlib.md5(image_file).hexdigest()
+        multihash = hasher.hexdigest()
+        metadata_dict = {"verifier_obj":
+                         verifier.name if verifier else None,
+                         "context_obj":
+                         context.name if context else None}
+        return (backend_url, bytes_written, checksum, multihash,
+                metadata_dict)
+
+
+class _FakeContext(object):
+    name = 'context'
+
+
+class _FakeVerifier(object):
+    name = 'verifier'
+
+
+class TestBackCompatWrapper(base.BaseTestCase):
+
+    def setUp(self):
+        super(TestBackCompatWrapper, self).setUp()
+        self.fake_store = _FakeStore()
+        self.fake_context = _FakeContext()
+        self.fake_verifier = _FakeVerifier()
+        self.img_id = '1234'
+        self.img_file = b'0123456789'
+        self.img_size = 10
+        self.img_checksum = hashlib.md5(self.img_file).hexdigest()
+        self.hashing_algo = 'sha256'
+        self.img_sha256 = hashlib.sha256(self.img_file).hexdigest()
+
+    def test_old_style_3_args(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(4, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertTrue(dict, type(x[3]))
+        self.assertIsNone(x[3]['context_obj'])
+        self.assertIsNone(x[3]['verifier_obj'])
+
+    def test_old_style_4_args(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                self.fake_context)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(4, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertTrue(dict, type(x[3]))
+        self.assertEqual('context', x[3]['context_obj'])
+        self.assertIsNone(x[3]['verifier_obj'])
+
+    def test_old_style_5_args(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                self.fake_context, self.fake_verifier)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(4, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertTrue(dict, type(x[3]))
+        self.assertEqual('context', x[3]['context_obj'])
+        self.assertEqual('verifier', x[3]['verifier_obj'])
+
+    def test_old_style_3_args_kw_context(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                context=self.fake_context)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(4, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertTrue(dict, type(x[3]))
+        self.assertEqual('context', x[3]['context_obj'])
+        self.assertIsNone(x[3]['verifier_obj'])
+
+    def test_old_style_3_args_kw_verifier(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                verifier=self.fake_verifier)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(4, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertTrue(dict, type(x[3]))
+        self.assertIsNone(x[3]['context_obj'])
+        self.assertEqual('verifier', x[3]['verifier_obj'])
+
+    def test_old_style_4_args_kw_verifier(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                self.fake_context, verifier=self.fake_verifier)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(4, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertTrue(dict, type(x[3]))
+        self.assertEqual('context', x[3]['context_obj'])
+        self.assertEqual('verifier', x[3]['verifier_obj'])
+
+    def test_old_style_3_args_kws_context_verifier(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                context=self.fake_context,
+                                verifier=self.fake_verifier)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(4, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertTrue(dict, type(x[3]))
+        self.assertEqual('context', x[3]['context_obj'])
+        self.assertEqual('verifier', x[3]['verifier_obj'])
+
+    def test_old_style_all_kw_in_order(self):
+        x = self.fake_store.add(image_id=self.img_id,
+                                image_file=self.img_file,
+                                image_size=self.img_size,
+                                context=self.fake_context,
+                                verifier=self.fake_verifier)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(4, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertTrue(dict, type(x[3]))
+        self.assertEqual('context', x[3]['context_obj'])
+        self.assertEqual('verifier', x[3]['verifier_obj'])
+
+    def test_old_style_all_kw_random_order(self):
+        x = self.fake_store.add(image_file=self.img_file,
+                                context=self.fake_context,
+                                image_size=self.img_size,
+                                verifier=self.fake_verifier,
+                                image_id=self.img_id)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(4, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertTrue(dict, type(x[3]))
+        self.assertEqual('context', x[3]['context_obj'])
+        self.assertEqual('verifier', x[3]['verifier_obj'])
+
+    def test_new_style_6_args(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                self.hashing_algo, self.fake_context,
+                                self.fake_verifier)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(5, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertEqual(self.img_sha256, x[3])
+        self.assertTrue(dict, type(x[4]))
+        self.assertEqual('context', x[4]['context_obj'])
+        self.assertEqual('verifier', x[4]['verifier_obj'])
+
+    def test_new_style_3_args_kw_hash(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                hashing_algo=self.hashing_algo)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(5, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertEqual(self.img_sha256, x[3])
+        self.assertTrue(dict, type(x[4]))
+        self.assertIsNone(x[4]['context_obj'])
+        self.assertIsNone(x[4]['verifier_obj'])
+
+    def test_new_style_3_args_kws_context_hash(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                context=self.fake_context,
+                                hashing_algo=self.hashing_algo)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(5, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertEqual(self.img_sha256, x[3])
+        self.assertTrue(dict, type(x[4]))
+        self.assertEqual('context', x[4]['context_obj'])
+        self.assertIsNone(x[4]['verifier_obj'])
+
+    def test_new_style_3_args_kws_verifier_hash(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                hashing_algo=self.hashing_algo,
+                                verifier=self.fake_verifier)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(5, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertEqual(self.img_sha256, x[3])
+        self.assertTrue(dict, type(x[4]))
+        self.assertIsNone(x[4]['context_obj'])
+        self.assertEqual('verifier', x[4]['verifier_obj'])
+
+    def test_new_style_3_args_kws_hash_context_verifier(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                hashing_algo=self.hashing_algo,
+                                context=self.fake_context,
+                                verifier=self.fake_verifier)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(5, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertEqual(self.img_sha256, x[3])
+        self.assertTrue(dict, type(x[4]))
+        self.assertEqual('context', x[4]['context_obj'])
+        self.assertEqual('verifier', x[4]['verifier_obj'])
+
+    def test_new_style_4_args(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                self.hashing_algo)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(5, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertEqual(self.img_sha256, x[3])
+        self.assertTrue(dict, type(x[4]))
+        self.assertIsNone(x[4]['context_obj'])
+        self.assertIsNone(x[4]['verifier_obj'])
+
+    def test_new_style_4_args_kw_context(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                self.hashing_algo, context=self.fake_context)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(5, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertEqual(self.img_sha256, x[3])
+        self.assertTrue(dict, type(x[4]))
+        self.assertEqual('context', x[4]['context_obj'])
+        self.assertIsNone(x[4]['verifier_obj'])
+
+    def test_new_style_4_args_kws_verifier_context(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                self.hashing_algo,
+                                context=self.fake_context,
+                                verifier=self.fake_verifier)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(5, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertEqual(self.img_sha256, x[3])
+        self.assertTrue(dict, type(x[4]))
+        self.assertEqual('context', x[4]['context_obj'])
+        self.assertEqual('verifier', x[4]['verifier_obj'])
+
+    def test_new_style_5_args_kw_verifier(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                self.hashing_algo, self.fake_context,
+                                verifier=self.fake_verifier)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(5, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertEqual(self.img_sha256, x[3])
+        self.assertTrue(dict, type(x[4]))
+        self.assertEqual('context', x[4]['context_obj'])
+        self.assertEqual('verifier', x[4]['verifier_obj'])
+
+    def test_new_style_6_args_no_kw(self):
+        x = self.fake_store.add(self.img_id, self.img_file, self.img_size,
+                                self.hashing_algo, self.fake_context,
+                                self.fake_verifier)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(5, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertEqual(self.img_sha256, x[3])
+        self.assertTrue(dict, type(x[4]))
+        self.assertEqual('context', x[4]['context_obj'])
+        self.assertEqual('verifier', x[4]['verifier_obj'])
+
+    def test_new_style_all_kw_in_order(self):
+        x = self.fake_store.add(image_id=self.img_id,
+                                image_file=self.img_file,
+                                image_size=self.img_size,
+                                hashing_algo=self.hashing_algo,
+                                context=self.fake_context,
+                                verifier=self.fake_verifier)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(5, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertEqual(self.img_sha256, x[3])
+        self.assertTrue(dict, type(x[4]))
+        self.assertEqual('context', x[4]['context_obj'])
+        self.assertEqual('verifier', x[4]['verifier_obj'])
+
+    def test_new_style_all_kw_random_order(self):
+        x = self.fake_store.add(hashing_algo=self.hashing_algo,
+                                image_file=self.img_file,
+                                context=self.fake_context,
+                                image_size=self.img_size,
+                                verifier=self.fake_verifier,
+                                image_id=self.img_id)
+        self.assertEqual(tuple, type(x))
+        self.assertEqual(5, len(x))
+        self.assertIn(self.img_id, x[0])
+        self.assertEqual(self.img_size, x[1])
+        self.assertEqual(self.img_checksum, x[2])
+        self.assertEqual(self.img_sha256, x[3])
+        self.assertTrue(dict, type(x[4]))
+        self.assertEqual('context', x[4]['context_obj'])
+        self.assertEqual('verifier', x[4]['verifier_obj'])
+
+    def test_neg_too_few_args(self):
+        self.assertRaises(TypeError,
+                          self.fake_store.add,
+                          self.img_id,
+                          self.img_file)
+
+    def test_neg_too_few_kw_args(self):
+        self.assertRaises(TypeError,
+                          self.fake_store.add,
+                          self.img_file,
+                          self.img_size,
+                          self.fake_context,
+                          self.fake_verifier,
+                          image_id=self.img_id)
+
+    def test_neg_bogus_kw_args(self):
+        self.assertRaises(TypeError,
+                          self.fake_store.add,
+                          thrashing_algo=self.hashing_algo,
+                          image_file=self.img_file,
+                          context=self.fake_context,
+                          image_size=self.img_size,
+                          verifier=self.fake_verifier,
+                          image_id=self.img_id)


@@ -51,6 +51,7 @@ class TestStore(base.StoreBaseTest,
                    group="glance_store")
         self.store.configure()
         self.register_store_schemes(self.store, 'file')
+        self.hash_algo = 'sha256'

     def tearDown(self):
         """Clear the test environment."""
@@ -74,7 +75,7 @@ class TestStore(base.StoreBaseTest,
         image_file = six.BytesIO(expected_file_contents)
         self.store.FILESYSTEM_STORE_METADATA = in_metadata
         return self.store.add(expected_image_id, image_file,
-                              expected_file_size)
+                              expected_file_size, self.hash_algo)

     def test_get(self):
         """Test a "normal" retrieval of an image in chunks."""
@@ -83,9 +84,8 @@ class TestStore(base.StoreBaseTest,
         file_contents = b"chunk00000remainder"
         image_file = six.BytesIO(file_contents)
-        loc, size, checksum, _ = self.store.add(image_id,
-                                                image_file,
-                                                len(file_contents))
+        loc, size, checksum, multihash, _ = self.store.add(
+            image_id, image_file, len(file_contents), self.hash_algo)

         # Now read it back...
         uri = "file:///%s/%s" % (self.test_dir, image_id)
@@ -110,9 +110,8 @@ class TestStore(base.StoreBaseTest,
         file_contents = b"chunk00000remainder"
         image_file = six.BytesIO(file_contents)
-        loc, size, checksum, _ = self.store.add(image_id,
-                                                image_file,
-                                                len(file_contents))
+        loc, size, checksum, multihash, _ = self.store.add(
+            image_id, image_file, len(file_contents), self.hash_algo)

         # Now read it back...
         uri = "file:///%s/%s" % (self.test_dir, image_id)
@@ -157,17 +156,18 @@ class TestStore(base.StoreBaseTest,
         expected_file_size = 5 * units.Ki  # 5K
         expected_file_contents = b"*" * expected_file_size
         expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+        expected_multihash = hashlib.sha256(expected_file_contents).hexdigest()
         expected_location = "file://%s/%s" % (self.test_dir,
                                               expected_image_id)
         image_file = six.BytesIO(expected_file_contents)
-        loc, size, checksum, _ = self.store.add(expected_image_id,
-                                                image_file,
-                                                expected_file_size)
+        loc, size, checksum, multihash, _ = self.store.add(
+            expected_image_id, image_file, expected_file_size, self.hash_algo)

         self.assertEqual(expected_location, loc)
         self.assertEqual(expected_file_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)

         uri = "file:///%s/%s" % (self.test_dir, expected_image_id)
         loc = location.get_location_from_uri(uri, conf=self.conf)
@@ -191,26 +191,30 @@ class TestStore(base.StoreBaseTest,
         file_contents = b"*" * file_size
         image_file = six.BytesIO(file_contents)
-        self.store.add(image_id, image_file, file_size, verifier=verifier)
+        self.store.add(image_id, image_file, file_size, self.hash_algo,
+                       verifier=verifier)

         verifier.update.assert_called_with(file_contents)

     def test_add_check_metadata_with_invalid_mountpoint_location(self):
         in_metadata = [{'id': 'abcdefg',
                         'mountpoint': '/xyz/images'}]
-        location, size, checksum, metadata = self._store_image(in_metadata)
+        location, size, checksum, multihash, metadata = self._store_image(
+            in_metadata)
         self.assertEqual({}, metadata)

     def test_add_check_metadata_list_with_invalid_mountpoint_locations(self):
         in_metadata = [{'id': 'abcdefg', 'mountpoint': '/xyz/images'},
                        {'id': 'xyz1234', 'mountpoint': '/pqr/images'}]
-        location, size, checksum, metadata = self._store_image(in_metadata)
+        location, size, checksum, multihash, metadata = self._store_image(
+            in_metadata)
         self.assertEqual({}, metadata)

     def test_add_check_metadata_list_with_valid_mountpoint_locations(self):
         in_metadata = [{'id': 'abcdefg', 'mountpoint': '/tmp'},
                        {'id': 'xyz1234', 'mountpoint': '/xyz'}]
-        location, size, checksum, metadata = self._store_image(in_metadata)
+        location, size, checksum, multihash, metadata = self._store_image(
+            in_metadata)
         self.assertEqual(in_metadata[0], metadata)

     def test_add_check_metadata_bad_nosuch_file(self):
@@ -224,9 +228,8 @@ class TestStore(base.StoreBaseTest,
         expected_file_contents = b"*" * expected_file_size
         image_file = six.BytesIO(expected_file_contents)
-        location, size, checksum, metadata = self.store.add(expected_image_id,
-                                                            image_file,
-                                                            expected_file_size)
+        location, size, checksum, multihash, metadata = self.store.add(
+            expected_image_id, image_file, expected_file_size, self.hash_algo)
         self.assertEqual(metadata, {})
@@ -241,13 +244,12 @@ class TestStore(base.StoreBaseTest,
         file_contents = b"*" * file_size
         image_file = six.BytesIO(file_contents)
-        location, size, checksum, _ = self.store.add(image_id,
-                                                     image_file,
-                                                     file_size)
+        location, size, checksum, multihash, _ = self.store.add(
+            image_id, image_file, file_size, self.hash_algo)
         image_file = six.BytesIO(b"nevergonnamakeit")
         self.assertRaises(exceptions.Duplicate,
                           self.store.add,
-                          image_id, image_file, 0)
+                          image_id, image_file, 0, self.hash_algo)

     def _do_test_add_write_failure(self, errno, exception):
         filesystem.ChunkedFile.CHUNKSIZE = units.Ki
@@ -264,7 +266,7 @@ class TestStore(base.StoreBaseTest,
             self.assertRaises(exception,
                               self.store.add,
-                              image_id, image_file, 0)
+                              image_id, image_file, 0, self.hash_algo)
             self.assertFalse(os.path.exists(path))

     def test_add_storage_full(self):
@@ -316,7 +318,7 @@ class TestStore(base.StoreBaseTest,
             self.assertRaises(AttributeError,
                               self.store.add,
-                              image_id, image_file, 0)
+                              image_id, image_file, 0, self.hash_algo)
             self.assertFalse(os.path.exists(path))

     def test_delete(self):
@@ -329,9 +331,8 @@ class TestStore(base.StoreBaseTest,
         file_contents = b"*" * file_size
         image_file = six.BytesIO(file_contents)
-        loc, size, checksum, _ = self.store.add(image_id,
-                                                image_file,
-                                                file_size)
+        loc, size, checksum, multihash, _ = self.store.add(
+            image_id, image_file, file_size, self.hash_algo)

         # Now check that we can delete it
         uri = "file:///%s/%s" % (self.test_dir, image_id)
@@ -362,9 +363,8 @@ class TestStore(base.StoreBaseTest,
         file_contents = b"*" * file_size
         image_file = six.BytesIO(file_contents)
-        loc, size, checksum, _ = self.store.add(image_id,
-                                                image_file,
-                                                file_size)
+        loc, size, checksum, multihash, _ = self.store.add(
+            image_id, image_file, file_size, self.hash_algo)

         uri = "file:///%s/%s" % (self.test_dir, image_id)
         loc = location.get_location_from_uri(uri, conf=self.conf)
@@ -523,17 +523,18 @@ class TestStore(base.StoreBaseTest,
         expected_file_size = 5 * units.Ki  # 5K
         expected_file_contents = b"*" * expected_file_size
         expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+        expected_multihash = hashlib.sha256(expected_file_contents).hexdigest()
         expected_location = "file://%s/%s" % (store_map[1],
                                               expected_image_id)
         image_file = six.BytesIO(expected_file_contents)
-        loc, size, checksum, _ = self.store.add(expected_image_id,
-                                                image_file,
-                                                expected_file_size)
+        loc, size, checksum, multihash, _ = self.store.add(
+            expected_image_id, image_file, expected_file_size, self.hash_algo)

         self.assertEqual(expected_location, loc)
         self.assertEqual(expected_file_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)

         loc = location.get_location_from_uri(expected_location,
                                              conf=self.conf)
@@ -569,17 +570,18 @@ class TestStore(base.StoreBaseTest,
         expected_file_size = 5 * units.Ki  # 5K
         expected_file_contents = b"*" * expected_file_size
         expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+        expected_multihash = hashlib.sha256(expected_file_contents).hexdigest()
         expected_location = "file://%s/%s" % (store_map[1],
                                               expected_image_id)
         image_file = six.BytesIO(expected_file_contents)
-        loc, size, checksum, _ = self.store.add(expected_image_id,
-                                                image_file,
-                                                expected_file_size)
+        loc, size, checksum, multihash, _ = self.store.add(
+            expected_image_id, image_file, expected_file_size, self.hash_algo)

         self.assertEqual(expected_location, loc)
         self.assertEqual(expected_file_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)

         loc = location.get_location_from_uri(expected_location,
                                              conf=self.conf)
@@ -623,9 +625,12 @@ class TestStore(base.StoreBaseTest,
         expected_file_contents = b"*" * expected_file_size
         image_file = six.BytesIO(expected_file_contents)
-        self.assertRaises(exceptions.StorageFull, self.store.add,
-                          expected_image_id, image_file,
-                          expected_file_size)
+        self.assertRaises(exceptions.StorageFull,
+                          self.store.add,
+                          expected_image_id,
+                          image_file,
+                          expected_file_size,
+                          self.hash_algo)

     def test_configure_add_with_file_perm(self):
         """
@@ -675,17 +680,18 @@ class TestStore(base.StoreBaseTest,
         expected_file_size = 5 * units.Ki  # 5K
         expected_file_contents = b"*" * expected_file_size
         expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+        expected_multihash = hashlib.sha256(expected_file_contents).hexdigest()
         expected_location = "file://%s/%s" % (store,
                                               expected_image_id)
         image_file = six.BytesIO(expected_file_contents)
-        location, size, checksum, _ = self.store.add(expected_image_id,
-                                                     image_file,
-                                                     expected_file_size)
+        location, size, checksum, multihash, _ = self.store.add(
+            expected_image_id, image_file, expected_file_size, self.hash_algo)

         self.assertEqual(expected_location, location)
         self.assertEqual(expected_file_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)

         # -rwx--x--x for store directory
         self.assertEqual(0o711, stat.S_IMODE(os.stat(store)[stat.ST_MODE]))
@@ -716,17 +722,18 @@ class TestStore(base.StoreBaseTest,
         expected_file_size = 5 * units.Ki  # 5K
         expected_file_contents = b"*" * expected_file_size
         expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
+        expected_multihash = hashlib.sha256(expected_file_contents).hexdigest()
         expected_location = "file://%s/%s" % (store,
                                               expected_image_id)
         image_file = six.BytesIO(expected_file_contents)
-        location, size, checksum, _ = self.store.add(expected_image_id,
-                                                     image_file,
-                                                     expected_file_size)
+        location, size, checksum, multihash, _ = self.store.add(
+            expected_image_id, image_file, expected_file_size, self.hash_algo)

         self.assertEqual(expected_location, location)
         self.assertEqual(expected_file_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)

         # -rwx------ for store directory
         self.assertEqual(0o700, stat.S_IMODE(os.stat(store)[stat.ST_MODE]))
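The filesystem assertions above repeat a pattern common to every driver in this change: a single pass over the image data feeds both the legacy md5 checksum and the configurable multihash. A minimal sketch of that dual-digest loop (compute_checksums is an illustrative helper, not glance_store API):

    import hashlib

    def compute_checksums(chunks, hashing_algo='sha256'):
        # One pass over the image data updates both digests, mirroring
        # what the store drivers under test do chunk by chunk.
        checksum = hashlib.md5()
        os_hash_value = hashlib.new(hashing_algo)
        for chunk in chunks:
            checksum.update(chunk)
            os_hash_value.update(chunk)
        return checksum.hexdigest(), os_hash_value.hexdigest()

For the 5 KiB test payload, compute_checksums([b'*' * 5 * 1024]) returns exactly the (expected_checksum, expected_multihash) pair asserted above.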


@@ -89,6 +89,7 @@ class TestMultiStore(base.MultiStoreBaseTest,
             "vmware1": "vmware",
             "vmware2": "vmware"
         }
+        self.hash_algo = 'sha256'
         self.conf = self._CONF
         self.conf(args=[])
         self.conf.register_opt(cfg.DictOpt('enabled_backends'))
@@ -244,11 +245,11 @@ class TestMultiStore(base.MultiStoreBaseTest,
         image = six.BytesIO(contents)
         with mock.patch('requests.Session.request') as HttpConn:
             HttpConn.return_value = utils.fake_response()
-            location, size, checksum, metadata = self.store.add(
-                image_id, image, size, verifier=verifier)
+            location, size, checksum, multihash, metadata = self.store.add(
+                image_id, image, size, self.hash_algo, verifier=verifier)
         self.assertEqual("vmware1", metadata["backend"])

-        fake_reader.assert_called_with(image, verifier)
+        fake_reader.assert_called_with(image, self.hash_algo, verifier)

     @mock.patch.object(vm_store.Store, 'select_datastore')
     @mock.patch('glance_store._drivers.vmware_datastore._Reader')
@@ -261,11 +262,11 @@ class TestMultiStore(base.MultiStoreBaseTest,
         image = six.BytesIO(contents)
         with mock.patch('requests.Session.request') as HttpConn:
             HttpConn.return_value = utils.fake_response()
-            location, size, checksum, metadata = self.store.add(
-                image_id, image, 0, verifier=verifier)
+            location, size, checksum, multihash, metadata = self.store.add(
+                image_id, image, 0, self.hash_algo, verifier=verifier)
         self.assertEqual("vmware1", metadata["backend"])

-        fake_reader.assert_called_with(image, verifier)
+        fake_reader.assert_called_with(image, self.hash_algo, verifier)

     @mock.patch('oslo_vmware.api.VMwareAPISession')
     def test_delete(self, mock_api_session):
@@ -326,27 +327,31 @@ class TestMultiStore(base.MultiStoreBaseTest,
         content = b'XXX'
         image = six.BytesIO(content)
         expected_checksum = hashlib.md5(content).hexdigest()
-        reader = vm_store._Reader(image)
+        expected_multihash = hashlib.sha256(content).hexdigest()
+        reader = vm_store._Reader(image, self.hash_algo)
         ret = reader.read()
         self.assertEqual(content, ret)
         self.assertEqual(expected_checksum, reader.checksum.hexdigest())
+        self.assertEqual(expected_multihash, reader.os_hash_value.hexdigest())
         self.assertEqual(len(content), reader.size)

     def test_reader_partial(self):
         content = b'XXX'
         image = six.BytesIO(content)
         expected_checksum = hashlib.md5(b'X').hexdigest()
-        reader = vm_store._Reader(image)
+        expected_multihash = hashlib.sha256(b'X').hexdigest()
+        reader = vm_store._Reader(image, self.hash_algo)
         ret = reader.read(1)
         self.assertEqual(b'X', ret)
         self.assertEqual(expected_checksum, reader.checksum.hexdigest())
+        self.assertEqual(expected_multihash, reader.os_hash_value.hexdigest())
         self.assertEqual(1, reader.size)

     def test_reader_with_verifier(self):
         content = b'XXX'
         image = six.BytesIO(content)
         verifier = mock.MagicMock(name='mock_verifier')
-        reader = vm_store._Reader(image, verifier)
+        reader = vm_store._Reader(image, self.hash_algo, verifier)
         reader.read()
         verifier.update.assert_called_with(content)
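The _Reader assertions (checksum, os_hash_value, size) outline the wrapper's shape; a simplified stand-in consistent with these tests, not the actual vm_store._Reader, could look like this:

    import hashlib

    class HashingReader(object):
        # Forwards read() to the wrapped file while updating the md5
        # checksum, the configured multihash and, if given, a verifier.
        def __init__(self, data, hashing_algo, verifier=None):
            self.data = data
            self.checksum = hashlib.md5()
            self.os_hash_value = hashlib.new(hashing_algo)
            self.verifier = verifier
            self.size = 0

        def read(self, size=-1):
            result = self.data.read(size)
            self.checksum.update(result)
            self.os_hash_value.update(result)
            if self.verifier:
                self.verifier.update(result)
            self.size += len(result)
            return result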


@@ -13,6 +13,7 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.

+import hashlib
 import mock
 from oslo_utils import units
 import six
@@ -183,13 +184,15 @@ class TestStore(base.StoreBaseTest,
         # Provide enough data to get more than one chunk iteration.
         self.data_len = 3 * units.Ki
         self.data_iter = six.BytesIO(b'*' * self.data_len)
+        self.hash_algo = 'sha256'

     def test_add_w_image_size_zero(self):
         """Assert that correct size is returned even though 0 was provided."""
         self.store.chunk_size = units.Ki
         with mock.patch.object(rbd_store.rbd.Image, 'resize') as resize:
             with mock.patch.object(rbd_store.rbd.Image, 'write') as write:
-                ret = self.store.add('fake_image_id', self.data_iter, 0)
+                ret = self.store.add(
+                    'fake_image_id', self.data_iter, 0, self.hash_algo)

         self.assertTrue(resize.called)
         self.assertTrue(write.called)
@@ -216,8 +219,10 @@ class TestStore(base.StoreBaseTest,
             delete.side_effect = _fake_delete_image
             enter.side_effect = _fake_enter

-            self.assertRaises(exceptions.NotFound, self.store.add,
-                              'fake_image_id', self.data_iter, self.data_len)
+            self.assertRaises(exceptions.NotFound,
+                              self.store.add,
+                              'fake_image_id', self.data_iter, self.data_len,
+                              self.hash_algo)

         self.called_commands_expected = ['create', 'delete']
@@ -230,8 +235,10 @@ class TestStore(base.StoreBaseTest,
         with mock.patch.object(self.store, '_create_image') as create_image:
             create_image.side_effect = _fake_create_image

-            self.assertRaises(exceptions.Duplicate, self.store.add,
-                              'fake_image_id', self.data_iter, self.data_len)
+            self.assertRaises(exceptions.Duplicate,
+                              self.store.add,
+                              'fake_image_id', self.data_iter, self.data_len,
+                              self.hash_algo)

         self.called_commands_expected = ['create']

     def test_add_with_verifier(self):
@@ -244,10 +251,27 @@ class TestStore(base.StoreBaseTest,
         image_file = six.BytesIO(file_contents)

         with mock.patch.object(rbd_store.rbd.Image, 'write'):
-            self.store.add(image_id, image_file, file_size, verifier=verifier)
+            self.store.add(image_id, image_file, file_size, self.hash_algo,
+                           verifier=verifier)

         verifier.update.assert_called_with(file_contents)

+    def test_add_checksums(self):
+        self.store.chunk_size = units.Ki
+        image_id = 'fake_image_id'
+        file_size = 5 * units.Ki  # 5K
+        file_contents = b"*" * file_size
+        image_file = six.BytesIO(file_contents)
+        expected_checksum = hashlib.md5(file_contents).hexdigest()
+        expected_multihash = hashlib.sha256(file_contents).hexdigest()
+
+        with mock.patch.object(rbd_store.rbd.Image, 'write'):
+            loc, size, checksum, multihash, _ = self.store.add(
+                image_id, image_file, file_size, self.hash_algo)
+
+        self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)
+
     def test_delete(self):
         def _fake_remove(*args, **kwargs):
             self.called_commands_actual.append('remove')


@@ -13,6 +13,7 @@
 #    License for the specific language governing permissions and limitations
 #    under the License.

+import hashlib
 import mock
 from oslo_concurrency import processutils
 from oslo_utils import units
@@ -87,19 +88,26 @@ class TestSheepdogStore(base.StoreBaseTest,
         self.store_specs = {'image': '6bd59e6e-c410-11e5-ab67-0a73f1fda51b',
                             'addr': '127.0.0.1',
                             'port': 7000}
+        self.hash_algo = 'sha256'

     @mock.patch.object(sheepdog.SheepdogImage, 'write')
     @mock.patch.object(sheepdog.SheepdogImage, 'create')
     @mock.patch.object(sheepdog.SheepdogImage, 'exist')
     def test_add_image(self, mock_exist, mock_create, mock_write):
-        data = six.BytesIO(b'xx')
+        content = b'xx'
+        data = six.BytesIO(content)
         mock_exist.return_value = False
+        expected_checksum = hashlib.md5(content).hexdigest()
+        expected_multihash = hashlib.sha256(content).hexdigest()

-        (uri, size, checksum, loc) = self.store.add('fake_image_id', data, 2)
+        (uri, size, checksum, multihash, loc) = self.store.add(
+            'fake_image_id', data, 2, self.hash_algo)

         mock_exist.assert_called_once_with()
         mock_create.assert_called_once_with(2)
         mock_write.assert_called_once_with(b'xx', 0, 2)
+        self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)

     @mock.patch.object(sheepdog.SheepdogImage, 'write')
     @mock.patch.object(sheepdog.SheepdogImage, 'exist')
@@ -108,7 +116,7 @@ class TestSheepdogStore(base.StoreBaseTest,
         mock_exist.return_value = False
         self.assertRaises(exceptions.Forbidden, self.store.add,
-                          'fake_image_id', data, 'test')
+                          'fake_image_id', data, 'test', self.hash_algo)
         mock_exist.assert_called_once_with()
         self.assertEqual(mock_write.call_count, 0)
@@ -124,7 +132,7 @@ class TestSheepdogStore(base.StoreBaseTest,
         mock_write.side_effect = exceptions.BackendException
         self.assertRaises(exceptions.BackendException, self.store.add,
-                          'fake_image_id', data, 2)
+                          'fake_image_id', data, 2, self.hash_algo)
         mock_exist.assert_called_once_with()
         mock_create.assert_called_once_with(2)
@@ -140,7 +148,7 @@ class TestSheepdogStore(base.StoreBaseTest,
             cmd.side_effect = _fake_run_command
             data = six.BytesIO(b'xx')
             self.assertRaises(exceptions.Duplicate, self.store.add,
-                              'fake_image_id', data, 2)
+                              'fake_image_id', data, 2, self.hash_algo)

     def test_get(self):
         def _fake_run_command(command, data, *params):
@@ -204,6 +212,7 @@ class TestSheepdogStore(base.StoreBaseTest,
         with mock.patch.object(sheepdog.SheepdogImage, '_run_command') as cmd:
             cmd.side_effect = _fake_run_command
-            self.store.add(image_id, image_file, file_size, verifier=verifier)
+            self.store.add(image_id, image_file, file_size, self.hash_algo,
+                           verifier=verifier)

         verifier.update.assert_called_with(file_contents)


@@ -54,6 +54,7 @@ FAKE_UUID2 = lambda: str(uuid.uuid4())
 Store = swift.Store
 FIVE_KB = 5 * units.Ki
 FIVE_GB = 5 * units.Gi
+HASH_ALGO = 'sha256'
 MAX_SWIFT_OBJECT_SIZE = FIVE_GB
 SWIFT_PUT_OBJECT_CALLS = 0
 SWIFT_CONF = {'swift_store_auth_address': 'localhost:8080',
@@ -389,6 +390,8 @@ class SwiftTests(object):
         expected_swift_size = FIVE_KB
         expected_swift_contents = b"*" * expected_swift_size
         expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+        expected_multihash = hashlib.sha256(
+            expected_swift_contents).hexdigest()
         expected_image_id = str(uuid.uuid4())
         loc = "swift+https://tenant%%3Auser1:key@localhost:8080/glance/%s"
         expected_location = loc % (expected_image_id)
@@ -397,13 +400,14 @@ class SwiftTests(object):
         global SWIFT_PUT_OBJECT_CALLS
         SWIFT_PUT_OBJECT_CALLS = 0

-        loc, size, checksum, _ = self.store.add(expected_image_id,
-                                                image_swift,
-                                                expected_swift_size)
+        loc, size, checksum, multihash, _ = self.store.add(
+            expected_image_id, image_swift, expected_swift_size,
+            HASH_ALGO)

         self.assertEqual(expected_location, loc)
         self.assertEqual(expected_swift_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)
         # Expecting a single object to be created on Swift i.e. no chunking.
         self.assertEqual(1, SWIFT_PUT_OBJECT_CALLS)
@@ -435,9 +439,8 @@ class SwiftTests(object):
         expected_location = loc % (expected_image_id)

-        location, size, checksum, arg = self.store.add(expected_image_id,
-                                                       image_swift,
-                                                       expected_swift_size)
+        location, size, checksum, multihash, arg = self.store.add(
+            expected_image_id, image_swift, expected_swift_size, HASH_ALGO)
         self.assertEqual(expected_location, location)

     @mock.patch('glance_store._drivers.swift.utils'
@@ -478,10 +481,9 @@ class SwiftTests(object):
                                  service_catalog=service_catalog)
         store = swift.MultiTenantStore(self.conf)
         store.configure()
-        loc, size, checksum, _ = store.add(expected_image_id,
-                                           image_swift,
-                                           expected_swift_size,
-                                           context=ctxt)
+        loc, size, checksum, multihash, _ = store.add(
+            expected_image_id, image_swift, expected_swift_size, HASH_ALGO,
+            context=ctxt)
         # ensure that image add uses user's context
         self.assertEqual(expected_location, loc)
@@ -509,6 +511,8 @@ class SwiftTests(object):
         expected_swift_contents = b"*" * expected_swift_size
         expected_checksum = \
             hashlib.md5(expected_swift_contents).hexdigest()
+        expected_multihash = \
+            hashlib.sha256(expected_swift_contents).hexdigest()

         image_swift = six.BytesIO(expected_swift_contents)
@@ -520,13 +524,13 @@ class SwiftTests(object):
         self.mock_keystone_client()
         self.store = Store(self.conf)
         self.store.configure()
-        loc, size, checksum, _ = self.store.add(image_id,
-                                                image_swift,
-                                                expected_swift_size)
+        loc, size, checksum, multihash, _ = self.store.add(
+            image_id, image_swift, expected_swift_size, HASH_ALGO)

         self.assertEqual(expected_location, loc)
         self.assertEqual(expected_swift_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)
         self.assertEqual(1, SWIFT_PUT_OBJECT_CALLS)

         loc = location.get_location_from_uri(expected_location,
@@ -564,7 +568,7 @@ class SwiftTests(object):
         # simply used self.assertRaises here
         exception_caught = False
         try:
-            self.store.add(str(uuid.uuid4()), image_swift, 0)
+            self.store.add(str(uuid.uuid4()), image_swift, 0, HASH_ALGO)
         except exceptions.BackendException as e:
             exception_caught = True
             self.assertIn("container noexist does not exist in Swift",
@@ -583,6 +587,8 @@ class SwiftTests(object):
         expected_swift_size = FIVE_KB
         expected_swift_contents = b"*" * expected_swift_size
         expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+        expected_multihash = \
+            hashlib.sha256(expected_swift_contents).hexdigest()
         expected_image_id = str(uuid.uuid4())
         loc = 'swift+config://ref1/noexist/%s'
         expected_location = loc % (expected_image_id)
@@ -599,13 +605,13 @@ class SwiftTests(object):
         self.mock_keystone_client()
         self.store = Store(self.conf)
         self.store.configure()
-        loc, size, checksum, _ = self.store.add(expected_image_id,
-                                                image_swift,
-                                                expected_swift_size)
+        loc, size, checksum, multihash, _ = self.store.add(
+            expected_image_id, image_swift, expected_swift_size, HASH_ALGO)

         self.assertEqual(expected_location, loc)
         self.assertEqual(expected_swift_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)
         self.assertEqual(1, SWIFT_PUT_OBJECT_CALLS)

         loc = location.get_location_from_uri(expected_location, conf=self.conf)
@@ -627,6 +633,8 @@ class SwiftTests(object):
         expected_swift_size = FIVE_KB
         expected_swift_contents = b"*" * expected_swift_size
         expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+        expected_multihash = \
+            hashlib.sha256(expected_swift_contents).hexdigest()
         expected_image_id = str(uuid.uuid4())
         container = 'randomname_' + expected_image_id[:2]
         loc = 'swift+config://ref1/%s/%s'
@@ -646,13 +654,13 @@ class SwiftTests(object):
         self.store = Store(self.conf)
         self.store.configure()

-        loc, size, checksum, _ = self.store.add(expected_image_id,
-                                                image_swift,
-                                                expected_swift_size)
+        loc, size, checksum, multihash, _ = self.store.add(
+            expected_image_id, image_swift, expected_swift_size, HASH_ALGO)

         self.assertEqual(expected_location, loc)
         self.assertEqual(expected_swift_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)
         self.assertEqual(1, SWIFT_PUT_OBJECT_CALLS)

         loc = location.get_location_from_uri(expected_location, conf=self.conf)
@@ -696,7 +704,7 @@ class SwiftTests(object):
         # simply used self.assertRaises here
         exception_caught = False
         try:
-            self.store.add(expected_image_id, image_swift, 0)
+            self.store.add(expected_image_id, image_swift, 0, HASH_ALGO)
         except exceptions.BackendException as e:
             exception_caught = True
             expected_msg = "container %s does not exist in Swift"
@@ -726,7 +734,7 @@ class SwiftTests(object):
         try:
             self.store.large_object_size = custom_size
             self.store.large_object_chunk_size = custom_size
-            self.store.add(image_id, image_swift, swift_size,
+            self.store.add(image_id, image_swift, swift_size, HASH_ALGO,
                            verifier=verifier)
         finally:
             self.store.large_object_chunk_size = orig_temp_size
@@ -773,7 +781,7 @@ class SwiftTests(object):
         try:
             self.store.large_object_size = custom_size
             self.store.large_object_chunk_size = custom_size
-            self.store.add(image_id, image_swift, swift_size,
+            self.store.add(image_id, image_swift, swift_size, HASH_ALGO,
                            verifier=verifier)
         finally:
             self.store.large_object_chunk_size = orig_temp_size
@@ -828,10 +836,9 @@ class SwiftTests(object):
                                  service_catalog=service_catalog)
         store = swift.MultiTenantStore(self.conf)
         store.configure()
-        location, size, checksum, _ = store.add(expected_image_id,
-                                                image_swift,
-                                                expected_swift_size,
-                                                context=ctxt)
+        location, size, checksum, multihash, _ = store.add(
+            expected_image_id, image_swift, expected_swift_size, HASH_ALGO,
+            context=ctxt)
         self.assertEqual(expected_location, location)

     @mock.patch('glance_store._drivers.swift.utils'
@@ -847,6 +854,8 @@ class SwiftTests(object):
         expected_swift_size = FIVE_KB
         expected_swift_contents = b"*" * expected_swift_size
         expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+        expected_multihash = \
+            hashlib.sha256(expected_swift_contents).hexdigest()
         expected_image_id = str(uuid.uuid4())
         loc = 'swift+config://ref1/glance/%s'
         expected_location = loc % (expected_image_id)
@@ -862,9 +871,8 @@ class SwiftTests(object):
         try:
             self.store.large_object_size = units.Ki
             self.store.large_object_chunk_size = units.Ki
-            loc, size, checksum, _ = self.store.add(expected_image_id,
-                                                    image_swift,
-                                                    expected_swift_size)
+            loc, size, checksum, multihash, _ = self.store.add(
+                expected_image_id, image_swift, expected_swift_size, HASH_ALGO)
         finally:
             self.store.large_object_chunk_size = orig_temp_size
             self.store.large_object_size = orig_max_size
@@ -872,6 +880,7 @@ class SwiftTests(object):
         self.assertEqual(expected_location, loc)
         self.assertEqual(expected_swift_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)
         # Expecting 6 objects to be created on Swift -- 5 chunks and 1
         # manifest.
         self.assertEqual(6, SWIFT_PUT_OBJECT_CALLS)
@@ -899,6 +908,8 @@ class SwiftTests(object):
         expected_swift_size = FIVE_KB
         expected_swift_contents = b"*" * expected_swift_size
         expected_checksum = hashlib.md5(expected_swift_contents).hexdigest()
+        expected_multihash = \
+            hashlib.sha256(expected_swift_contents).hexdigest()
         expected_image_id = str(uuid.uuid4())
         loc = 'swift+config://ref1/glance/%s'
         expected_location = loc % (expected_image_id)
@@ -920,9 +931,8 @@ class SwiftTests(object):
             MAX_SWIFT_OBJECT_SIZE = units.Ki
             self.store.large_object_size = units.Ki
             self.store.large_object_chunk_size = units.Ki
-            loc, size, checksum, _ = self.store.add(expected_image_id,
-                                                    image_swift,
-                                                    0)
+            loc, size, checksum, multihash, _ = self.store.add(
+                expected_image_id, image_swift, 0, HASH_ALGO)
         finally:
             self.store.large_object_chunk_size = orig_temp_size
             self.store.large_object_size = orig_max_size
@@ -931,6 +941,7 @@ class SwiftTests(object):
         self.assertEqual(expected_location, loc)
         self.assertEqual(expected_swift_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)
         # Expecting 6 calls to put_object -- 5 chunks, and the manifest.
         self.assertEqual(6, SWIFT_PUT_OBJECT_CALLS)
@@ -952,7 +963,7 @@ class SwiftTests(object):
         image_swift = six.BytesIO(b"nevergonnamakeit")
         self.assertRaises(exceptions.Duplicate,
                           self.store.add,
-                          FAKE_UUID, image_swift, 0)
+                          FAKE_UUID, image_swift, 0, HASH_ALGO)

     def _option_required(self, key):
         conf = self.getConfig()
@@ -1743,7 +1754,7 @@ class TestMultiTenantStoreContext(base.StoreBaseTest):
         store.configure()
         content = b'Some data'
         pseudo_file = six.BytesIO(content)
-        store.add('123', pseudo_file, len(content),
+        store.add('123', pseudo_file, len(content), HASH_ALGO,
                   context=self.ctx)
         self.assertEqual(b'0123',
                          head_req.last_request.headers['X-Auth-Token'])
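The "6 objects" expectation in the large-object tests above follows directly from the chunking arithmetic; a quick sketch with constants chosen to match the test configuration:

    import math

    from oslo_utils import units

    swift_size = 5 * units.Ki   # image payload in the tests
    chunk_size = units.Ki       # large_object_chunk_size
    # ceil(5120 / 1024) segment uploads, plus one manifest object
    expected_put_calls = int(math.ceil(float(swift_size) / chunk_size)) + 1
    assert expected_put_calls == 6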
@@ -1878,22 +1889,28 @@ class TestChunkReader(base.StoreBaseTest):
         repeated creation of the ChunkReader object
         """
         CHUNKSIZE = 100
-        checksum = hashlib.md5()
+        data = b'*' * units.Ki
+        expected_checksum = hashlib.md5(data).hexdigest()
+        expected_multihash = hashlib.sha256(data).hexdigest()
         data_file = tempfile.NamedTemporaryFile()
-        data_file.write(b'*' * units.Ki)
+        data_file.write(data)
         data_file.flush()
         infile = open(data_file.name, 'rb')
         bytes_read = 0
+        checksum = hashlib.md5()
+        os_hash_value = hashlib.sha256()
         while True:
-            cr = swift.ChunkReader(infile, checksum, CHUNKSIZE)
+            cr = swift.ChunkReader(infile, checksum, os_hash_value, CHUNKSIZE)
             chunk = cr.read(CHUNKSIZE)
             if len(chunk) == 0:
                 self.assertEqual(True, cr.is_zero_size)
                 break
             bytes_read += len(chunk)
         self.assertEqual(units.Ki, bytes_read)
-        self.assertEqual('fb10c6486390bec8414be90a93dfff3b',
-                         cr.checksum.hexdigest())
+        self.assertEqual(expected_checksum,
+                         cr.checksum.hexdigest())
+        self.assertEqual(expected_multihash,
+                         cr.os_hash_value.hexdigest())
         data_file.close()
         infile.close()
@@ -1902,21 +1919,24 @@ class TestChunkReader(base.StoreBaseTest):
         Replicate what goes on in the Swift driver with the
         repeated creation of the ChunkReader object
         """
+        expected_checksum = hashlib.md5(b'').hexdigest()
+        expected_multihash = hashlib.sha256(b'').hexdigest()
         CHUNKSIZE = 100
         checksum = hashlib.md5()
+        os_hash_value = hashlib.sha256()
         data_file = tempfile.NamedTemporaryFile()
         infile = open(data_file.name, 'rb')
         bytes_read = 0
         while True:
-            cr = swift.ChunkReader(infile, checksum, CHUNKSIZE)
+            cr = swift.ChunkReader(infile, checksum, os_hash_value, CHUNKSIZE)
             chunk = cr.read(CHUNKSIZE)
             if len(chunk) == 0:
                 break
             bytes_read += len(chunk)
         self.assertEqual(True, cr.is_zero_size)
         self.assertEqual(0, bytes_read)
-        self.assertEqual('d41d8cd98f00b204e9800998ecf8427e',
-                         cr.checksum.hexdigest())
+        self.assertEqual(expected_checksum, cr.checksum.hexdigest())
+        self.assertEqual(expected_multihash, cr.os_hash_value.hexdigest())
         data_file.close()
         infile.close()
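Both tests re-create the ChunkReader for every segment while sharing the digest objects, so the md5 and sha256 state accumulates across readers. A simplified sketch consistent with that usage (not the driver's actual class):

    import hashlib

    class ChunkReader(object):
        # The caller owns checksum and os_hash_value; each reader only
        # feeds them the bytes of its own segment.
        def __init__(self, fd, checksum, os_hash_value, total):
            self.fd = fd
            self.checksum = checksum
            self.os_hash_value = os_hash_value
            self.total = total
            self.bytes_read = 0
            self.is_zero_size = False

        def read(self, size):
            to_read = min(size, self.total - self.bytes_read)
            chunk = self.fd.read(to_read)
            if self.bytes_read == 0 and len(chunk) == 0:
                self.is_zero_size = True   # fresh reader hit EOF at once
            self.bytes_read += len(chunk)
            self.checksum.update(chunk)
            self.os_hash_value.update(chunk)
            return chunk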
@@ -1999,10 +2019,13 @@ class TestBufferedReader(base.StoreBaseTest):
         self.infile.seek(0)

         self.checksum = hashlib.md5()
+        self.hash_algo = HASH_ALGO
+        self.os_hash_value = hashlib.sha256()
         self.verifier = mock.MagicMock(name='mock_verifier')
         total = 7  # not the full 10 byte string - defines segment boundary
-        self.reader = buffered.BufferedReader(self.infile, self.checksum,
-                                              total, self.verifier)
+        self.reader = buffered.BufferedReader(self.infile, self.checksum,
+                                              self.os_hash_value, total,
+                                              self.verifier)
         self.addCleanup(self.conf.reset)

     def tearDown(self):
@@ -2053,51 +2076,76 @@ class TestBufferedReader(base.StoreBaseTest):
         self.reader.seek(2)
         self.assertEqual(b'34567', self.reader.read(10))

-    def test_checksum(self):
-        # the md5 checksum is updated only once on a full segment read
+    def test_checksums(self):
+        # checksums are updated only once on a full segment read
         expected_csum = hashlib.md5()
         expected_csum.update(b'1234567')
+        expected_multihash = hashlib.sha256()
+        expected_multihash.update(b'1234567')
         self.reader.read(7)
         self.assertEqual(expected_csum.hexdigest(), self.checksum.hexdigest())
+        self.assertEqual(expected_multihash.hexdigest(),
+                         self.os_hash_value.hexdigest())

     def test_checksum_updated_only_once_w_full_segment_read(self):
-        # Test that the checksum is updated only once when a full segment read
+        # Test that checksums are updated only once when a full segment read
         # is followed by a seek and partial reads.
         expected_csum = hashlib.md5()
         expected_csum.update(b'1234567')
+        expected_multihash = hashlib.sha256()
+        expected_multihash.update(b'1234567')
         self.reader.read(7)  # attempted read of the entire chunk
         self.reader.seek(4)  # seek back due to possible partial failure
         self.reader.read(1)  # read one more byte
         # checksum was updated just once during the first attempted full read
         self.assertEqual(expected_csum.hexdigest(), self.checksum.hexdigest())
+        self.assertEqual(expected_multihash.hexdigest(),
+                         self.os_hash_value.hexdigest())

     def test_checksum_updates_during_partial_segment_reads(self):
-        # Test to check that checksum is updated with only the bytes it has
+        # Test to check that checksums are updated with only the bytes
         # not seen when the number of bytes being read is changed
         expected_csum = hashlib.md5()
+        expected_multihash = hashlib.sha256()
         self.reader.read(4)
         expected_csum.update(b'1234')
+        expected_multihash.update(b'1234')
         self.assertEqual(expected_csum.hexdigest(), self.checksum.hexdigest())
+        self.assertEqual(expected_multihash.hexdigest(),
+                         self.os_hash_value.hexdigest())
         self.reader.seek(0)  # possible failure
         self.reader.read(2)
         self.assertEqual(expected_csum.hexdigest(), self.checksum.hexdigest())
+        self.assertEqual(expected_multihash.hexdigest(),
+                         self.os_hash_value.hexdigest())
         self.reader.read(4)  # checksum missing two bytes
         expected_csum.update(b'56')
+        expected_multihash.update(b'56')
         # checksum updated with only the bytes it did not see
         self.assertEqual(expected_csum.hexdigest(), self.checksum.hexdigest())
+        self.assertEqual(expected_multihash.hexdigest(),
+                         self.os_hash_value.hexdigest())

     def test_checksum_rolling_calls(self):
         # Test that the checksum continues on to the next segment
         expected_csum = hashlib.md5()
+        expected_multihash = hashlib.sha256()
         self.reader.read(7)
         expected_csum.update(b'1234567')
+        expected_multihash.update(b'1234567')
         self.assertEqual(expected_csum.hexdigest(), self.checksum.hexdigest())
+        self.assertEqual(expected_multihash.hexdigest(),
+                         self.os_hash_value.hexdigest())
         # another reader to complete reading the image file
-        reader1 = buffered.BufferedReader(self.infile, self.checksum, 3,
-                                          self.reader.verifier)
+        reader1 = buffered.BufferedReader(self.infile, self.checksum,
+                                          self.os_hash_value, 3,
+                                          self.reader.verifier)
         reader1.read(3)
         expected_csum.update(b'890')
+        expected_multihash.update(b'890')
         self.assertEqual(expected_csum.hexdigest(), self.checksum.hexdigest())
+        self.assertEqual(expected_multihash.hexdigest(),
+                         self.os_hash_value.hexdigest())

     def test_verifier(self):
         # Test that the verifier is updated only once on a full segment read.
@@ -2132,7 +2180,10 @@ class TestBufferedReader(base.StoreBaseTest):
         self.verifier.update.assert_called_once_with(b'1234567')
         self.assertEqual(1, self.verifier.update.call_count)
         # another reader to complete reading the image file
-        reader1 = buffered.BufferedReader(self.infile, self.checksum, 3,
+        reader1 = buffered.BufferedReader(self.infile,
+                                          self.checksum,
+                                          self.os_hash_value,
+                                          3,
                                           self.reader.verifier)
         reader1.read(3)
         self.verifier.update.assert_called_with(b'890')
@@ -2147,7 +2198,9 @@ class TestBufferedReader(base.StoreBaseTest):
         infile.seek(0)
         total = 7
         checksum = hashlib.md5()
-        self.reader = buffered.BufferedReader(infile, checksum, total)
+        os_hash_value = hashlib.sha256()
+        self.reader = buffered.BufferedReader(
+            infile, checksum, os_hash_value, total)
         self.reader.read(0)  # read into buffer
         self.assertEqual(b'12', self.reader.read(7))
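The BufferedReader tests encode a subtler rule: after a failed segment upload the reader may seek back and re-read, and the digests must only ever see each byte once. A minimal sketch of that high-water-mark scheme, under the assumption that hashing happens as bytes first enter the buffer (the real buffered.BufferedReader differs in details, e.g. it appears to buffer a segment eagerly):

    import hashlib

    class BufferedReader(object):
        def __init__(self, fd, checksum, os_hash_value, total, verifier=None):
            self.fd = fd
            self.checksum = checksum
            self.os_hash_value = os_hash_value
            self.total = total        # segment size
            self.verifier = verifier
            self._buffer = b''
            self._pos = 0             # read position within the segment
            self._hashed = 0          # bytes already fed to the digests

        def read(self, size):
            end = min(self._pos + size, self.total)
            if end > len(self._buffer):
                self._buffer += self.fd.read(end - len(self._buffer))
            new = self._buffer[self._hashed:]
            if new:                   # hash only bytes not yet seen
                self.checksum.update(new)
                self.os_hash_value.update(new)
                if self.verifier:
                    self.verifier.update(new)
                self._hashed = len(self._buffer)
            result = self._buffer[self._pos:end]
            self._pos = end
            return result

        def seek(self, offset):
            self._pos = offset        # digests deliberately untouched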


@@ -2065,22 +2065,28 @@ class TestChunkReader(base.MultiStoreBaseTest):
         repeated creation of the ChunkReader object
         """
         CHUNKSIZE = 100
-        checksum = hashlib.md5()
+        data = b'*' * units.Ki
+        expected_checksum = hashlib.md5(data).hexdigest()
+        expected_multihash = hashlib.sha256(data).hexdigest()
         data_file = tempfile.NamedTemporaryFile()
-        data_file.write(b'*' * units.Ki)
+        data_file.write(data)
         data_file.flush()
         infile = open(data_file.name, 'rb')
         bytes_read = 0
+        checksum = hashlib.md5()
+        os_hash_value = hashlib.sha256()
         while True:
-            cr = swift.ChunkReader(infile, checksum, CHUNKSIZE)
+            cr = swift.ChunkReader(infile, checksum, os_hash_value, CHUNKSIZE)
             chunk = cr.read(CHUNKSIZE)
             if len(chunk) == 0:
                 self.assertEqual(True, cr.is_zero_size)
                 break
             bytes_read += len(chunk)
         self.assertEqual(units.Ki, bytes_read)
-        self.assertEqual('fb10c6486390bec8414be90a93dfff3b',
-                         cr.checksum.hexdigest())
+        self.assertEqual(expected_checksum,
+                         cr.checksum.hexdigest())
+        self.assertEqual(expected_multihash,
+                         cr.os_hash_value.hexdigest())
         data_file.close()
         infile.close()
@@ -2089,21 +2095,24 @@ class TestChunkReader(base.MultiStoreBaseTest):
         Replicate what goes on in the Swift driver with the
         repeated creation of the ChunkReader object
         """
+        expected_checksum = hashlib.md5(b'').hexdigest()
+        expected_multihash = hashlib.sha256(b'').hexdigest()
         CHUNKSIZE = 100
         checksum = hashlib.md5()
+        os_hash_value = hashlib.sha256()
         data_file = tempfile.NamedTemporaryFile()
         infile = open(data_file.name, 'rb')
         bytes_read = 0
         while True:
-            cr = swift.ChunkReader(infile, checksum, CHUNKSIZE)
+            cr = swift.ChunkReader(infile, checksum, os_hash_value, CHUNKSIZE)
             chunk = cr.read(CHUNKSIZE)
             if len(chunk) == 0:
                 break
             bytes_read += len(chunk)
         self.assertEqual(True, cr.is_zero_size)
         self.assertEqual(0, bytes_read)
-        self.assertEqual('d41d8cd98f00b204e9800998ecf8427e',
-                         cr.checksum.hexdigest())
+        self.assertEqual(expected_checksum, cr.checksum.hexdigest())
+        self.assertEqual(expected_multihash, cr.os_hash_value.hexdigest())
         data_file.close()
         infile.close()


@@ -101,6 +101,8 @@ class TestStore(base.StoreBaseTest,
self.store.store_image_dir = ( self.store.store_image_dir = (
VMWARE_DS['vmware_store_image_dir']) VMWARE_DS['vmware_store_image_dir'])
self.hash_algo = 'sha256'
def _mock_http_connection(self): def _mock_http_connection(self):
return mock.patch('six.moves.http_client.HTTPConnection') return mock.patch('six.moves.http_client.HTTPConnection')
@@ -145,30 +147,35 @@ class TestStore(base.StoreBaseTest,
         expected_contents = b"*" * expected_size
         hash_code = hashlib.md5(expected_contents)
         expected_checksum = hash_code.hexdigest()
+        sha256_code = hashlib.sha256(expected_contents)
+        expected_multihash = sha256_code.hexdigest()
         fake_size.__get__ = mock.Mock(return_value=expected_size)
         expected_cookie = 'vmware_soap_session=fake-uuid'
         fake_cookie.return_value = expected_cookie
         expected_headers = {'Content-Length': six.text_type(expected_size),
                             'Cookie': expected_cookie}
         with mock.patch('hashlib.md5') as md5:
-            md5.return_value = hash_code
-            expected_location = format_location(
-                VMWARE_DS['vmware_server_host'],
-                VMWARE_DS['vmware_store_image_dir'],
-                expected_image_id,
-                VMWARE_DS['vmware_datastores'])
-            image = six.BytesIO(expected_contents)
-            with mock.patch('requests.Session.request') as HttpConn:
-                HttpConn.return_value = utils.fake_response()
-                location, size, checksum, _ = self.store.add(expected_image_id,
-                                                             image,
-                                                             expected_size)
-                _, kwargs = HttpConn.call_args
-                self.assertEqual(expected_headers, kwargs['headers'])
+            with mock.patch('hashlib.new') as fake_new:
+                md5.return_value = hash_code
+                fake_new.return_value = sha256_code
+                expected_location = format_location(
+                    VMWARE_DS['vmware_server_host'],
+                    VMWARE_DS['vmware_store_image_dir'],
+                    expected_image_id,
+                    VMWARE_DS['vmware_datastores'])
+                image = six.BytesIO(expected_contents)
+                with mock.patch('requests.Session.request') as HttpConn:
+                    HttpConn.return_value = utils.fake_response()
+                    location, size, checksum, multihash, _ = self.store.add(
+                        expected_image_id, image, expected_size,
+                        self.hash_algo)
+                    _, kwargs = HttpConn.call_args
+                    self.assertEqual(expected_headers, kwargs['headers'])
         self.assertEqual(utils.sort_url_by_qs_keys(expected_location),
                          utils.sort_url_by_qs_keys(location))
         self.assertEqual(expected_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)

     @mock.patch.object(vm_store.Store, 'select_datastore')
     @mock.patch.object(vm_store._Reader, 'size')
@@ -185,23 +192,28 @@ class TestStore(base.StoreBaseTest,
         expected_contents = b"*" * expected_size
         hash_code = hashlib.md5(expected_contents)
         expected_checksum = hash_code.hexdigest()
+        sha256_code = hashlib.sha256(expected_contents)
+        expected_multihash = sha256_code.hexdigest()
         fake_size.__get__ = mock.Mock(return_value=expected_size)
         with mock.patch('hashlib.md5') as md5:
-            md5.return_value = hash_code
-            expected_location = format_location(
-                VMWARE_DS['vmware_server_host'],
-                VMWARE_DS['vmware_store_image_dir'],
-                expected_image_id,
-                VMWARE_DS['vmware_datastores'])
-            image = six.BytesIO(expected_contents)
-            with mock.patch('requests.Session.request') as HttpConn:
-                HttpConn.return_value = utils.fake_response()
-                location, size, checksum, _ = self.store.add(expected_image_id,
-                                                             image, 0)
+            with mock.patch('hashlib.new') as fake_new:
+                md5.return_value = hash_code
+                fake_new.return_value = sha256_code
+                expected_location = format_location(
+                    VMWARE_DS['vmware_server_host'],
+                    VMWARE_DS['vmware_store_image_dir'],
+                    expected_image_id,
+                    VMWARE_DS['vmware_datastores'])
+                image = six.BytesIO(expected_contents)
+                with mock.patch('requests.Session.request') as HttpConn:
+                    HttpConn.return_value = utils.fake_response()
+                    location, size, checksum, multihash, _ = self.store.add(
+                        expected_image_id, image, 0, self.hash_algo)
         self.assertEqual(utils.sort_url_by_qs_keys(expected_location),
                          utils.sort_url_by_qs_keys(location))
         self.assertEqual(expected_size, size)
         self.assertEqual(expected_checksum, checksum)
+        self.assertEqual(expected_multihash, multihash)

     @mock.patch.object(vm_store.Store, 'select_datastore')
     @mock.patch('glance_store._drivers.vmware_datastore._Reader')
@@ -214,9 +226,10 @@ class TestStore(base.StoreBaseTest,
         image = six.BytesIO(contents)
         with mock.patch('requests.Session.request') as HttpConn:
             HttpConn.return_value = utils.fake_response()
-            self.store.add(image_id, image, size, verifier=verifier)
+            self.store.add(image_id, image, size, self.hash_algo,
+                           verifier=verifier)

-        fake_reader.assert_called_with(image, verifier)
+        fake_reader.assert_called_with(image, self.hash_algo, verifier)

     @mock.patch.object(vm_store.Store, 'select_datastore')
     @mock.patch('glance_store._drivers.vmware_datastore._Reader')
@@ -229,9 +242,10 @@ class TestStore(base.StoreBaseTest,
         image = six.BytesIO(contents)
         with mock.patch('requests.Session.request') as HttpConn:
             HttpConn.return_value = utils.fake_response()
-            self.store.add(image_id, image, 0, verifier=verifier)
+            self.store.add(image_id, image, 0, self.hash_algo,
+                           verifier=verifier)

-        fake_reader.assert_called_with(image, verifier)
+        fake_reader.assert_called_with(image, self.hash_algo, verifier)

     @mock.patch('oslo_vmware.api.VMwareAPISession')
     def test_delete(self, mock_api_session):
@@ -290,27 +304,31 @@ class TestStore(base.StoreBaseTest,
         content = b'XXX'
         image = six.BytesIO(content)
         expected_checksum = hashlib.md5(content).hexdigest()
-        reader = vm_store._Reader(image)
+        expected_multihash = hashlib.sha256(content).hexdigest()
+        reader = vm_store._Reader(image, self.hash_algo)
         ret = reader.read()
         self.assertEqual(content, ret)
         self.assertEqual(expected_checksum, reader.checksum.hexdigest())
+        self.assertEqual(expected_multihash, reader.os_hash_value.hexdigest())
         self.assertEqual(len(content), reader.size)

     def test_reader_partial(self):
         content = b'XXX'
         image = six.BytesIO(content)
         expected_checksum = hashlib.md5(b'X').hexdigest()
-        reader = vm_store._Reader(image)
+        expected_multihash = hashlib.sha256(b'X').hexdigest()
+        reader = vm_store._Reader(image, self.hash_algo)
         ret = reader.read(1)
         self.assertEqual(b'X', ret)
         self.assertEqual(expected_checksum, reader.checksum.hexdigest())
+        self.assertEqual(expected_multihash, reader.os_hash_value.hexdigest())
         self.assertEqual(1, reader.size)

     def test_reader_with_verifier(self):
         content = b'XXX'
         image = six.BytesIO(content)
         verifier = mock.MagicMock(name='mock_verifier')
-        reader = vm_store._Reader(image, verifier)
+        reader = vm_store._Reader(image, self.hash_algo, verifier)
         reader.read()
         verifier.update.assert_called_with(content)
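
Taken together, the three reader tests above pin down the contract of the
vmware driver's _Reader: every read() updates the legacy md5 checksum, the
multihash digest selected by hashing_algo, and the optional signature
verifier, while accumulating the byte count in size.  A minimal sketch that
satisfies exactly those assertions (the real driver code may differ in
detail) looks like this:

import hashlib


class _Reader(object):
    def __init__(self, data, hashing_algo, verifier=None):
        self._file = data
        self.checksum = hashlib.md5()                        # legacy digest
        self.os_hash_value = hashlib.new(str(hashing_algo))  # multihash digest
        self.verifier = verifier
        self.size = 0

    def read(self, size=None):
        result = self._file.read(size)
        # feed every byte read to all interested consumers
        self.checksum.update(result)
        self.os_hash_value.update(result)
        if self.verifier:
            self.verifier.update(result)
        self.size += len(result)
        return result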
@@ -399,7 +417,8 @@ class TestStore(base.StoreBaseTest,
             HttpConn.return_value = utils.fake_response(status_code=401)
             self.assertRaises(exceptions.BackendException,
                               self.store.add,
-                              expected_image_id, image, expected_size)
+                              expected_image_id, image, expected_size,
+                              self.hash_algo)

     @mock.patch.object(vm_store.Store, 'select_datastore')
     @mock.patch.object(api, 'VMwareAPISession')
@@ -415,7 +434,8 @@ class TestStore(base.StoreBaseTest,
                 no_response_body=True)
             self.assertRaises(exceptions.BackendException,
                               self.store.add,
-                              expected_image_id, image, expected_size)
+                              expected_image_id, image, expected_size,
+                              self.hash_algo)

     @mock.patch.object(api, 'VMwareAPISession')
     def test_reset_session(self, mock_api_session):
@@ -456,7 +476,8 @@ class TestStore(base.StoreBaseTest,
             HttpConn.request.side_effect = IOError
             self.assertRaises(exceptions.BackendException,
                               self.store.add,
-                              expected_image_id, image, expected_size)
+                              expected_image_id, image, expected_size,
+                              self.hash_algo)

     def test_qs_sort_with_literal_question_mark(self):
         url = 'scheme://example.com/path?key2=val2&key1=val1?sort=true'

View File

@@ -0,0 +1,13 @@
---
prelude: >
This release adds support for Glance multihash computation.
features:
- |
A new function, ``store_add_to_backend_with_multihash``, has been
added. This function wraps each store's ``add`` method to provide
consumers with a constant interface. It is similar to the existing
``store_add_to_backend`` function but requires the caller to
specify an additional ``hashing_algo`` argument whose value is
a hashlib algorithm identifier. The function returns a 5-tuple
containing a ``multihash`` value, which is a hexdigest of the
stored data computed using the specified hashing algorithm.
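
A hedged usage sketch of the function the release note describes: the
positional signature (image_id, data, size, hashing_algo, store) is
inferred from this note and the tests in this change, the import path is
assumed to be glance_store.backend, and save_image and its payload are
hypothetical names introduced only for illustration.

import hashlib
import io

from glance_store import backend


def save_image(store, image_id, payload, hashing_algo='sha512'):
    # Hypothetical helper: store the payload and unpack the new 5-tuple.
    (location, bytes_written, checksum, multihash,
     metadata) = backend.store_add_to_backend_with_multihash(
        image_id, io.BytesIO(payload), len(payload), hashing_algo, store)
    # The multihash is a plain hexdigest of the stored bytes, so a consumer
    # can recompute and verify it independently:
    assert multihash == hashlib.new(hashing_algo, payload).hexdigest()
    return location, checksum, multihash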