Use oslo_utils.units where appropriate
Use units from oslo_utils where appropriate instead of using magic
numbers.

Changes from patch Ib08b8d8843b72966e2cf87f741b1cc0eea0672e5 are also
included.

Co-Authored-By: Gorka Eguileor <geguileo@redhat.com>
Co-Authored-By: James Page <james.page@canonical.com>
Change-Id: I082ea91cb06e49659495cae9072b263eccda76a1
commit 886f7f8419
parent c1e8697f12
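For reference (not part of the patch), the oslo_utils.units constants that
replace the magic numbers below have the following values; a minimal sketch:

    # Values as defined by oslo_utils.units; the asserts mirror the
    # substitutions made throughout this change.
    from oslo_utils import units

    assert units.Ki == 1024        # replaces 1024
    assert units.Mi == 1024 ** 2   # replaces (1024 * 1024)
    assert units.Gi == 1024 ** 3   # replaces (1024 ** 3)
    assert units.k == 1000         # decimal kilo, used for ONE_MB below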
@@ -18,6 +18,7 @@ from cinderclient import exceptions as cinder_exception
 from cinderclient import service_catalog
 from cinderclient.v2 import client as cinderclient
 from oslo_config import cfg
+from oslo_utils import units
 
 from glance_store import capabilities
 from glance_store.common import utils
@@ -173,7 +174,7 @@ class Store(glance_store.driver.Store):
             volume = get_cinderclient(self.conf,
                                       context).volumes.get(loc.volume_id)
             # GB unit convert to byte
-            return volume.size * (1024 ** 3)
+            return volume.size * units.Gi
         except cinder_exception.NotFound as e:
             reason = _("Failed to get image size due to "
                        "volume can not be found: %s") % self.volume_id
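The hunk above converts the Cinder-reported volume size (in GB) to bytes. A
quick equivalence check, assuming a hypothetical 2 GB volume:

    from oslo_utils import units

    volume_size_gb = 2  # hypothetical volume size reported by Cinder
    assert volume_size_gb * units.Gi == volume_size_gb * (1024 ** 3)  # bytes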
@@ -24,6 +24,7 @@ import math
 import urllib
 
 from oslo_config import cfg
+from oslo_utils import units
 
 from glance_store import capabilities
 from glance_store.common import utils
@@ -193,7 +194,7 @@ class Store(driver.Store):
         """
         try:
             chunk = self.conf.glance_store.rbd_store_chunk_size
-            self.chunk_size = chunk * (1024 ** 2)
+            self.chunk_size = chunk * units.Mi
             self.READ_CHUNKSIZE = self.chunk_size
             self.WRITE_CHUNKSIZE = self.READ_CHUNKSIZE
 
@@ -369,7 +370,7 @@ class Store(driver.Store):
                         length = offset + chunk_length
                         bytes_written += chunk_length
                         LOG.debug(_("resizing image to %s KiB") %
-                                  (length / 1024))
+                                  (length / units.Ki))
                         image.resize(length)
                         LOG.debug(_("writing chunk at offset %s") %
                                   (offset))
@@ -22,6 +22,7 @@ import math
 
 from oslo.config import cfg
 from oslo.utils import excutils
+from oslo_utils import units
 import six
 import six.moves.urllib.parse as urlparse
 import swiftclient
@@ -43,9 +44,9 @@ LOG = logging.getLogger(__name__)
 _LI = i18n._LI
 
 DEFAULT_CONTAINER = 'glance'
-DEFAULT_LARGE_OBJECT_SIZE = 5 * 1024  # 5GB
+DEFAULT_LARGE_OBJECT_SIZE = 5 * units.Ki  # 5GB
 DEFAULT_LARGE_OBJECT_CHUNK_SIZE = 200  # 200M
-ONE_MB = 1000 * 1024
+ONE_MB = units.k * units.Ki  # Here we used the mixed meaning of MB
 
 _SWIFT_OPTS = [
     cfg.StrOpt('swift_store_auth_version', default='2',
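Note that the Swift defaults above stay numerically identical; only the
spelling changes, with ONE_MB keeping the mixed 1000 * 1024 meaning called
out in the new comment. A minimal check of the equivalences:

    from oslo_utils import units

    assert 5 * units.Ki == 5 * 1024           # DEFAULT_LARGE_OBJECT_SIZE
    assert units.k * units.Ki == 1000 * 1024  # ONE_MB (mixed-meaning MB)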
@@ -20,6 +20,7 @@ import logging
 
 from oslo.config import cfg
 from oslo.utils import importutils
+from oslo_utils import units
 
 from glance_store import capabilities
 from glance_store.common import utils
@@ -33,7 +34,7 @@ LOG = logging.getLogger(__name__)
 class Store(capabilities.StoreCapability):
 
     OPTIONS = None
-    READ_CHUNKSIZE = 16 * (1024 * 1024)  # 16M
+    READ_CHUNKSIZE = 16 * units.Mi  # 16M
     WRITE_CHUNKSIZE = READ_CHUNKSIZE
 
     def __init__(self, conf):
@@ -15,6 +15,8 @@
 
 import mock
 
+from oslo_utils import units
+
 import glance_store
 from glance_store._drivers import cinder
 from glance_store import exceptions
@@ -70,7 +72,7 @@ class TestCinderStore(base.StoreBaseTest,
             loc = location.get_location_from_uri(uri, conf=self.conf)
             image_size = self.store.get_size(loc, context=fake_context)
             self.assertEqual(image_size,
-                             fake_volumes.values()[0].size * (1024 ** 3))
+                             fake_volumes.values()[0].size * units.Gi)
 
     def test_cinder_delete_raise_error(self):
         uri = 'cinder://12345678-9012-3455-6789-012345678901'
@@ -39,9 +39,6 @@ from glance_store.tests import base
 from tests.unit import test_store_capabilities
 
 
-KB = 1024
-
-
 class TestStore(base.StoreBaseTest,
                 test_store_capabilities.TestStoreCapabilitiesChecking):
 
@@ -155,9 +152,9 @@ class TestStore(base.StoreBaseTest,
 
     def test_add(self):
         """Test that we can add an image via the filesystem backend."""
-        ChunkedFile.CHUNKSIZE = 1024
+        ChunkedFile.CHUNKSIZE = units.Ki
         expected_image_id = str(uuid.uuid4())
-        expected_file_size = 5 * KB  # 5K
+        expected_file_size = 5 * units.Ki  # 5K
         expected_file_contents = "*" * expected_file_size
         expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
         expected_location = "file://%s/%s" % (self.test_dir,
@@ -225,9 +222,9 @@ class TestStore(base.StoreBaseTest,
         Tests that adding an image with an existing identifier
         raises an appropriate exception
         """
-        ChunkedFile.CHUNKSIZE = 1024
+        ChunkedFile.CHUNKSIZE = units.Ki
         image_id = str(uuid.uuid4())
-        file_size = 5 * KB  # 5K
+        file_size = 5 * units.Ki  # 5K
         file_contents = "*" * file_size
         image_file = StringIO.StringIO(file_contents)
 
@@ -240,9 +237,9 @@ class TestStore(base.StoreBaseTest,
                           image_id, image_file, 0)
 
     def _do_test_add_write_failure(self, errno, exception):
-        ChunkedFile.CHUNKSIZE = 1024
+        ChunkedFile.CHUNKSIZE = units.Ki
         image_id = str(uuid.uuid4())
-        file_size = 5 * KB  # 5K
+        file_size = 5 * units.Ki  # 5K
         file_contents = "*" * file_size
         path = os.path.join(self.test_dir, image_id)
         image_file = StringIO.StringIO(file_contents)
@@ -291,9 +288,9 @@ class TestStore(base.StoreBaseTest,
         Tests the partial image file is cleaned up after a read
         failure.
         """
-        ChunkedFile.CHUNKSIZE = 1024
+        ChunkedFile.CHUNKSIZE = units.Ki
         image_id = str(uuid.uuid4())
-        file_size = 5 * KB  # 5K
+        file_size = 5 * units.Ki  # 5K
         file_contents = "*" * file_size
         path = os.path.join(self.test_dir, image_id)
         image_file = StringIO.StringIO(file_contents)
@@ -315,7 +312,7 @@ class TestStore(base.StoreBaseTest,
         """
         # First add an image
         image_id = str(uuid.uuid4())
-        file_size = 5 * KB  # 5K
+        file_size = 5 * units.Ki  # 5K
         file_contents = "*" * file_size
         image_file = StringIO.StringIO(file_contents)
 
@@ -468,7 +465,7 @@ class TestStore(base.StoreBaseTest,
         self.store.configure()
 
         """Test that we can add an image via the filesystem backend"""
-        ChunkedFile.CHUNKSIZE = 1024
+        ChunkedFile.CHUNKSIZE = units.Ki
         expected_image_id = str(uuid.uuid4())
         expected_file_size = 5 * units.Ki  # 5K
         expected_file_contents = "*" * expected_file_size
@@ -520,7 +517,7 @@ class TestStore(base.StoreBaseTest,
         with mock.patch.object(self.store, '_get_capacity_info') as capacity:
             capacity.return_value = 0
 
-            ChunkedFile.CHUNKSIZE = 1024
+            ChunkedFile.CHUNKSIZE = units.Ki
             expected_image_id = str(uuid.uuid4())
             expected_file_size = 5 * units.Ki  # 5K
             expected_file_contents = "*" * expected_file_size
@@ -573,7 +570,7 @@ class TestStore(base.StoreBaseTest,
 
         self.store.configure_add()
 
-        Store.WRITE_CHUNKSIZE = 1024
+        Store.WRITE_CHUNKSIZE = units.Ki
        expected_image_id = str(uuid.uuid4())
         expected_file_size = 5 * units.Ki  # 5K
         expected_file_contents = "*" * expected_file_size
@@ -614,7 +611,7 @@ class TestStore(base.StoreBaseTest,
 
         self.store.configure_add()
 
-        Store.WRITE_CHUNKSIZE = 1024
+        Store.WRITE_CHUNKSIZE = units.Ki
         expected_image_id = str(uuid.uuid4())
         expected_file_size = 5 * units.Ki  # 5K
         expected_file_contents = "*" * expected_file_size
@@ -16,6 +16,7 @@
 import StringIO
 
 import mock
+from oslo_utils import units
 
 from glance_store._drivers import rbd as rbd_store
 from glance_store import exceptions
@@ -163,12 +164,12 @@ class TestStore(base.StoreBaseTest,
         self.location = rbd_store.StoreLocation(self.store_specs,
                                                 self.conf)
         # Provide enough data to get more than one chunk iteration.
-        self.data_len = 3 * 1024
+        self.data_len = 3 * units.Ki
         self.data_iter = StringIO.StringIO('*' * self.data_len)
 
     def test_add_w_image_size_zero(self):
         """Assert that correct size is returned even though 0 was provided."""
-        self.store.chunk_size = 1024
+        self.store.chunk_size = units.Ki
         with mock.patch.object(rbd_store.rbd.Image, 'resize') as resize:
             with mock.patch.object(rbd_store.rbd.Image, 'write') as write:
                 ret = self.store.add('fake_image_id', self.data_iter, 0)
|
@ -56,7 +56,7 @@ class FakeKey(object):
|
|||||||
self.data = None
|
self.data = None
|
||||||
self.size = 0
|
self.size = 0
|
||||||
self.etag = None
|
self.etag = None
|
||||||
self.BufferSize = 1024
|
self.BufferSize = units.Ki
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
pass
|
pass
|
||||||
|
@@ -662,8 +662,8 @@ class SwiftTests(object):
         orig_max_size = self.store.large_object_size
         orig_temp_size = self.store.large_object_chunk_size
         try:
-            self.store.large_object_size = 1024
-            self.store.large_object_chunk_size = 1024
+            self.store.large_object_size = units.Ki
+            self.store.large_object_chunk_size = units.Ki
             loc, size, checksum, _ = self.store.add(expected_image_id,
                                                     image_swift,
                                                     expected_swift_size)
@@ -719,9 +719,9 @@
         global MAX_SWIFT_OBJECT_SIZE
         orig_max_swift_object_size = MAX_SWIFT_OBJECT_SIZE
         try:
-            MAX_SWIFT_OBJECT_SIZE = 1024
-            self.store.large_object_size = 1024
-            self.store.large_object_chunk_size = 1024
+            MAX_SWIFT_OBJECT_SIZE = units.Ki
+            self.store.large_object_size = units.Ki
+            self.store.large_object_chunk_size = units.Ki
             loc, size, checksum, _ = self.store.add(expected_image_id,
                                                     image_swift, 0)
         finally:
@@ -1446,7 +1446,7 @@ class TestChunkReader(base.StoreBaseTest):
             bytes_read += len(chunk)
             if not chunk:
                 break
-        self.assertEqual(1024, bytes_read)
+        self.assertEqual(units.Ki, bytes_read)
         data_file.close()
 
 