Add tests w/ some fixes, although they don't run yet
This commit is contained in:
parent
50fca911c1
commit
28d0258873
@ -18,10 +18,11 @@ import sys
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from glance.common import exception
|
||||
from glance.common import utils
|
||||
from glance.store.common import exception
|
||||
from glance.store.common import utils
|
||||
import glance.context
|
||||
import glance.domain.proxy
|
||||
from glance.openstack.common.gettextutils import _
|
||||
from glance.openstack.common import importutils
|
||||
import glance.openstack.common.log as logging
|
||||
from glance.store import location
|
||||
|
@ -24,8 +24,8 @@ import urlparse
|
||||
|
||||
from oslo.config import cfg
|
||||
|
||||
from glance.common import exception
|
||||
from glance.common import utils
|
||||
from glance.store.common import exception
|
||||
from glance.store.common import utils
|
||||
from glance.openstack.common import jsonutils
|
||||
import glance.openstack.common.log as logging
|
||||
import glance.store
|
||||
@ -49,8 +49,7 @@ CONF.register_opts(filesystem_opts)
|
||||
|
||||
|
||||
class StoreLocation(glance.store.location.StoreLocation):
|
||||
|
||||
"""Class describing a Filesystem URI"""
|
||||
"""Class describing a Filesystem URI."""
|
||||
|
||||
def process_specs(self):
|
||||
self.scheme = self.specs.get('scheme', 'file')
|
||||
|
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
0
tests/unit/__init__.py
Normal file
0
tests/unit/__init__.py
Normal file
53
tests/unit/base.py
Normal file
53
tests/unit/base.py
Normal file
@ -0,0 +1,53 @@
|
||||
# Copyright 2011 OpenStack Foundation
|
||||
# Copyright 2014 Red Hat, Inc
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import fixtures
|
||||
from oslo.config import cfg
|
||||
import testtools
|
||||
|
||||
import glance.store as store
|
||||
from glance.store import location
|
||||
|
||||
|
||||
class StoreBaseTest(testtools.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(StoreBaseTest, self).setUp()
|
||||
self.conf = cfg.ConfigOpts()
|
||||
self.conf.parse_args(args=[])
|
||||
|
||||
# Ensure stores + locations cleared
|
||||
location.SCHEME_TO_CLS_MAP = {}
|
||||
|
||||
store.create_stores()
|
||||
self.addCleanup(setattr, location, 'SCHEME_TO_CLS_MAP', dict())
|
||||
self.test_dir = self.useFixture(fixtures.TempDir()).path
|
||||
|
||||
def config(self, **kw):
|
||||
"""Override some configuration values.
|
||||
|
||||
The keyword arguments are the names of configuration options to
|
||||
override and their values.
|
||||
|
||||
If a group argument is supplied, the overrides are applied to
|
||||
the specified configuration option group.
|
||||
|
||||
All overrides are automatically cleared at the end of the current
|
||||
test by the fixtures cleanup process.
|
||||
"""
|
||||
group = kw.pop('group', None)
|
||||
for k, v in kw.iteritems():
|
||||
self.conf.set_override(k, v, group)
|
83
tests/unit/test_cinder_store.py
Normal file
83
tests/unit/test_cinder_store.py
Normal file
@ -0,0 +1,83 @@
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import stubout
|
||||
|
||||
from cinderclient.v2 import client as cinderclient
|
||||
|
||||
from glance.store.common import exception
|
||||
from glance.openstack.common import units
|
||||
import glance.store.cinder as cinder
|
||||
from glance.store.location import get_location_from_uri
|
||||
from glance.tests.unit import base
|
||||
|
||||
|
||||
class FakeObject(object):
|
||||
def __init__(self, **kwargs):
|
||||
for name, value in kwargs.iteritems():
|
||||
setattr(self, name, value)
|
||||
|
||||
|
||||
class TestCinderStore(base.StoreClearingUnitTest):
|
||||
|
||||
def setUp(self):
|
||||
self.config(default_store='cinder',
|
||||
known_stores=['glance.store.cinder.Store'])
|
||||
super(TestCinderStore, self).setUp()
|
||||
self.stubs = stubout.StubOutForTesting()
|
||||
|
||||
def test_cinder_configure_add(self):
|
||||
store = cinder.Store()
|
||||
self.assertRaises(exception.BadStoreConfiguration,
|
||||
store.configure_add)
|
||||
store = cinder.Store(context=None)
|
||||
self.assertRaises(exception.BadStoreConfiguration,
|
||||
store.configure_add)
|
||||
store = cinder.Store(context=FakeObject(service_catalog=None))
|
||||
self.assertRaises(exception.BadStoreConfiguration,
|
||||
store.configure_add)
|
||||
store = cinder.Store(context=FakeObject(service_catalog=
|
||||
'fake_service_catalog'))
|
||||
store.configure_add()
|
||||
|
||||
def test_cinder_get_size(self):
|
||||
fake_client = FakeObject(auth_token=None, management_url=None)
|
||||
fake_volumes = {'12345678-9012-3455-6789-012345678901':
|
||||
FakeObject(size=5)}
|
||||
|
||||
class FakeCinderClient(FakeObject):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(FakeCinderClient, self).__init__(client=fake_client,
|
||||
volumes=fake_volumes)
|
||||
|
||||
self.stubs.Set(cinderclient, 'Client', FakeCinderClient)
|
||||
|
||||
fake_sc = [{u'endpoints': [{u'publicURL': u'foo_public_url'}],
|
||||
u'endpoints_links': [],
|
||||
u'name': u'cinder',
|
||||
u'type': u'volume'}]
|
||||
fake_context = FakeObject(service_catalog=fake_sc,
|
||||
user='fake_uer',
|
||||
auth_tok='fake_token',
|
||||
tenant='fake_tenant')
|
||||
|
||||
uri = 'cinder://%s' % fake_volumes.keys()[0]
|
||||
loc = get_location_from_uri(uri)
|
||||
store = cinder.Store(context=fake_context)
|
||||
image_size = store.get_size(loc)
|
||||
self.assertEqual(image_size,
|
||||
fake_volumes.values()[0].size * units.Gi)
|
||||
self.assertEqual(fake_client.auth_token, 'fake_token')
|
||||
self.assertEqual(fake_client.management_url, 'foo_public_url')
|
295
tests/unit/test_filesystem_store.py
Normal file
295
tests/unit/test_filesystem_store.py
Normal file
@ -0,0 +1,295 @@
|
||||
# Copyright 2011 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Tests the filesystem backend store"""
|
||||
|
||||
import __builtin__
|
||||
import errno
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import StringIO
|
||||
import uuid
|
||||
|
||||
|
||||
from glance.store.common import exception
|
||||
|
||||
from glance.store.filesystem import ChunkedFile
|
||||
from glance.store.filesystem import Store
|
||||
from glance.store.location import get_location_from_uri
|
||||
from glance.store.tests.unit import base
|
||||
|
||||
|
||||
class TestStore(base.StoreBaseTest):
|
||||
|
||||
def setUp(self):
|
||||
"""Establish a clean test environment."""
|
||||
super(TestStore, self).setUp()
|
||||
self.orig_chunksize = ChunkedFile.CHUNKSIZE
|
||||
ChunkedFile.CHUNKSIZE = 10
|
||||
self.store = Store()
|
||||
|
||||
def tearDown(self):
|
||||
"""Clear the test environment."""
|
||||
super(TestStore, self).tearDown()
|
||||
ChunkedFile.CHUNKSIZE = self.orig_chunksize
|
||||
|
||||
def test_get(self):
|
||||
"""Test a "normal" retrieval of an image in chunks."""
|
||||
# First add an image...
|
||||
image_id = str(uuid.uuid4())
|
||||
file_contents = "chunk00000remainder"
|
||||
image_file = StringIO.StringIO(file_contents)
|
||||
|
||||
location, size, checksum, _ = self.store.add(image_id,
|
||||
image_file,
|
||||
len(file_contents))
|
||||
|
||||
# Now read it back...
|
||||
uri = "file:///%s/%s" % (self.test_dir, image_id)
|
||||
loc = get_location_from_uri(uri)
|
||||
(image_file, image_size) = self.store.get(loc)
|
||||
|
||||
expected_data = "chunk00000remainder"
|
||||
expected_num_chunks = 2
|
||||
data = ""
|
||||
num_chunks = 0
|
||||
|
||||
for chunk in image_file:
|
||||
num_chunks += 1
|
||||
data += chunk
|
||||
self.assertEqual(expected_data, data)
|
||||
self.assertEqual(expected_num_chunks, num_chunks)
|
||||
|
||||
def test_get_non_existing(self):
|
||||
"""
|
||||
Test that trying to retrieve a file that doesn't exist
|
||||
raises an error
|
||||
"""
|
||||
loc = get_location_from_uri("file:///%s/non-existing" % self.test_dir)
|
||||
self.assertRaises(exception.NotFound,
|
||||
self.store.get,
|
||||
loc)
|
||||
|
||||
def test_add(self):
|
||||
"""Test that we can add an image via the filesystem backend"""
|
||||
ChunkedFile.CHUNKSIZE = 1024
|
||||
expected_image_id = str(uuid.uuid4())
|
||||
expected_file_size = 5 * units.Ki # 5K
|
||||
expected_file_contents = "*" * expected_file_size
|
||||
expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
|
||||
expected_location = "file://%s/%s" % (self.test_dir,
|
||||
expected_image_id)
|
||||
image_file = StringIO.StringIO(expected_file_contents)
|
||||
|
||||
location, size, checksum, _ = self.store.add(expected_image_id,
|
||||
image_file,
|
||||
expected_file_size)
|
||||
|
||||
self.assertEqual(expected_location, location)
|
||||
self.assertEqual(expected_file_size, size)
|
||||
self.assertEqual(expected_checksum, checksum)
|
||||
|
||||
uri = "file:///%s/%s" % (self.test_dir, expected_image_id)
|
||||
loc = get_location_from_uri(uri)
|
||||
(new_image_file, new_image_size) = self.store.get(loc)
|
||||
new_image_contents = ""
|
||||
new_image_file_size = 0
|
||||
|
||||
for chunk in new_image_file:
|
||||
new_image_file_size += len(chunk)
|
||||
new_image_contents += chunk
|
||||
|
||||
self.assertEqual(expected_file_contents, new_image_contents)
|
||||
self.assertEqual(expected_file_size, new_image_file_size)
|
||||
|
||||
def test_add_check_metadata_success(self):
|
||||
expected_image_id = str(uuid.uuid4())
|
||||
in_metadata = {'akey': u'some value', 'list': [u'1', u'2', u'3']}
|
||||
jsonfilename = os.path.join(self.test_dir,
|
||||
"storage_metadata.%s" % expected_image_id)
|
||||
|
||||
self.config(filesystem_store_metadata_file=jsonfilename)
|
||||
with open(jsonfilename, 'w') as fptr:
|
||||
json.dump(in_metadata, fptr)
|
||||
expected_file_size = 10
|
||||
expected_file_contents = "*" * expected_file_size
|
||||
image_file = StringIO.StringIO(expected_file_contents)
|
||||
|
||||
location, size, checksum, metadata = self.store.add(expected_image_id,
|
||||
image_file,
|
||||
expected_file_size)
|
||||
|
||||
self.assertEqual(metadata, in_metadata)
|
||||
|
||||
def test_add_check_metadata_bad_data(self):
|
||||
expected_image_id = str(uuid.uuid4())
|
||||
in_metadata = {'akey': 10} # only unicode is allowed
|
||||
jsonfilename = os.path.join(self.test_dir,
|
||||
"storage_metadata.%s" % expected_image_id)
|
||||
|
||||
self.config(filesystem_store_metadata_file=jsonfilename)
|
||||
with open(jsonfilename, 'w') as fptr:
|
||||
json.dump(in_metadata, fptr)
|
||||
expected_file_size = 10
|
||||
expected_file_contents = "*" * expected_file_size
|
||||
image_file = StringIO.StringIO(expected_file_contents)
|
||||
|
||||
location, size, checksum, metadata = self.store.add(expected_image_id,
|
||||
image_file,
|
||||
expected_file_size)
|
||||
|
||||
self.assertEqual(metadata, {})
|
||||
|
||||
def test_add_check_metadata_bad_nosuch_file(self):
|
||||
expected_image_id = str(uuid.uuid4())
|
||||
jsonfilename = os.path.join(self.test_dir,
|
||||
"storage_metadata.%s" % expected_image_id)
|
||||
|
||||
self.config(filesystem_store_metadata_file=jsonfilename)
|
||||
expected_file_size = 10
|
||||
expected_file_contents = "*" * expected_file_size
|
||||
image_file = StringIO.StringIO(expected_file_contents)
|
||||
|
||||
location, size, checksum, metadata = self.store.add(expected_image_id,
|
||||
image_file,
|
||||
expected_file_size)
|
||||
|
||||
self.assertEqual(metadata, {})
|
||||
|
||||
def test_add_already_existing(self):
|
||||
"""
|
||||
Tests that adding an image with an existing identifier
|
||||
raises an appropriate exception
|
||||
"""
|
||||
ChunkedFile.CHUNKSIZE = 1024
|
||||
image_id = str(uuid.uuid4())
|
||||
file_size = 5 * units.Ki # 5K
|
||||
file_contents = "*" * file_size
|
||||
image_file = StringIO.StringIO(file_contents)
|
||||
|
||||
location, size, checksum, _ = self.store.add(image_id,
|
||||
image_file,
|
||||
file_size)
|
||||
image_file = StringIO.StringIO("nevergonnamakeit")
|
||||
self.assertRaises(exception.Duplicate,
|
||||
self.store.add,
|
||||
image_id, image_file, 0)
|
||||
|
||||
def _do_test_add_write_failure(self, errno, exception):
|
||||
ChunkedFile.CHUNKSIZE = 1024
|
||||
image_id = str(uuid.uuid4())
|
||||
file_size = 5 * units.Ki # 5K
|
||||
file_contents = "*" * file_size
|
||||
path = os.path.join(self.test_dir, image_id)
|
||||
image_file = StringIO.StringIO(file_contents)
|
||||
|
||||
m = mox.Mox()
|
||||
m.StubOutWithMock(__builtin__, 'open')
|
||||
e = IOError()
|
||||
e.errno = errno
|
||||
open(path, 'wb').AndRaise(e)
|
||||
m.ReplayAll()
|
||||
|
||||
try:
|
||||
self.assertRaises(exception,
|
||||
self.store.add,
|
||||
image_id, image_file, 0)
|
||||
self.assertFalse(os.path.exists(path))
|
||||
finally:
|
||||
m.VerifyAll()
|
||||
m.UnsetStubs()
|
||||
|
||||
def test_add_storage_full(self):
|
||||
"""
|
||||
Tests that adding an image without enough space on disk
|
||||
raises an appropriate exception
|
||||
"""
|
||||
self._do_test_add_write_failure(errno.ENOSPC, exception.StorageFull)
|
||||
|
||||
def test_add_file_too_big(self):
|
||||
"""
|
||||
Tests that adding an excessively large image file
|
||||
raises an appropriate exception
|
||||
"""
|
||||
self._do_test_add_write_failure(errno.EFBIG, exception.StorageFull)
|
||||
|
||||
def test_add_storage_write_denied(self):
|
||||
"""
|
||||
Tests that adding an image with insufficient filestore permissions
|
||||
raises an appropriate exception
|
||||
"""
|
||||
self._do_test_add_write_failure(errno.EACCES,
|
||||
exception.StorageWriteDenied)
|
||||
|
||||
def test_add_other_failure(self):
|
||||
"""
|
||||
Tests that a non-space-related IOError does not raise a
|
||||
StorageFull exception.
|
||||
"""
|
||||
self._do_test_add_write_failure(errno.ENOTDIR, IOError)
|
||||
|
||||
def test_add_cleanup_on_read_failure(self):
|
||||
"""
|
||||
Tests the partial image file is cleaned up after a read
|
||||
failure.
|
||||
"""
|
||||
ChunkedFile.CHUNKSIZE = 1024
|
||||
image_id = str(uuid.uuid4())
|
||||
file_size = 5 * units.Ki # 5K
|
||||
file_contents = "*" * file_size
|
||||
path = os.path.join(self.test_dir, image_id)
|
||||
image_file = StringIO.StringIO(file_contents)
|
||||
|
||||
def fake_Error(size):
|
||||
raise AttributeError()
|
||||
|
||||
self.stubs.Set(image_file, 'read', fake_Error)
|
||||
|
||||
self.assertRaises(AttributeError,
|
||||
self.store.add,
|
||||
image_id, image_file, 0)
|
||||
self.assertFalse(os.path.exists(path))
|
||||
|
||||
def test_delete(self):
|
||||
"""
|
||||
Test we can delete an existing image in the filesystem store
|
||||
"""
|
||||
# First add an image
|
||||
image_id = str(uuid.uuid4())
|
||||
file_size = 5 * units.Ki # 5K
|
||||
file_contents = "*" * file_size
|
||||
image_file = StringIO.StringIO(file_contents)
|
||||
|
||||
location, size, checksum, _ = self.store.add(image_id,
|
||||
image_file,
|
||||
file_size)
|
||||
|
||||
# Now check that we can delete it
|
||||
uri = "file:///%s/%s" % (self.test_dir, image_id)
|
||||
loc = get_location_from_uri(uri)
|
||||
self.store.delete(loc)
|
||||
|
||||
self.assertRaises(exception.NotFound, self.store.get, loc)
|
||||
|
||||
def test_delete_non_existing(self):
|
||||
"""
|
||||
Test that trying to delete a file that doesn't exist
|
||||
raises an error
|
||||
"""
|
||||
loc = get_location_from_uri("file:///tmp/glance-tests/non-existing")
|
||||
self.assertRaises(exception.NotFound,
|
||||
self.store.delete,
|
||||
loc)
|
97
tests/unit/test_gridfs_store.py
Normal file
97
tests/unit/test_gridfs_store.py
Normal file
@ -0,0 +1,97 @@
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import StringIO
|
||||
|
||||
import stubout
|
||||
|
||||
from glance.store.common import exception
|
||||
from glance.store.common import utils
|
||||
from glance.store.gridfs import Store
|
||||
from glance.tests.unit import base
|
||||
try:
|
||||
import gridfs
|
||||
import pymongo
|
||||
except ImportError:
|
||||
pymongo = None
|
||||
|
||||
|
||||
GRIDFS_CONF = {'verbose': True,
|
||||
'debug': True,
|
||||
'default_store': 'gridfs',
|
||||
'mongodb_store_uri': 'mongodb://fake_store_uri',
|
||||
'mongodb_store_db': 'fake_store_db'}
|
||||
|
||||
|
||||
def stub_out_gridfs(stubs):
|
||||
class FakeMongoClient(object):
|
||||
def __init__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def __getitem__(self, key):
|
||||
return None
|
||||
|
||||
class FakeGridFS(object):
|
||||
image_data = {}
|
||||
called_commands = []
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def exists(self, image_id):
|
||||
self.called_commands.append('exists')
|
||||
return False
|
||||
|
||||
def put(self, image_file, _id):
|
||||
self.called_commands.append('put')
|
||||
data = None
|
||||
while True:
|
||||
data = image_file.read(64)
|
||||
if data:
|
||||
self.image_data[_id] = \
|
||||
self.image_data.setdefault(_id, '') + data
|
||||
else:
|
||||
break
|
||||
|
||||
def delete(self, _id):
|
||||
self.called_commands.append('delete')
|
||||
|
||||
if pymongo is not None:
|
||||
stubs.Set(pymongo, 'MongoClient', FakeMongoClient)
|
||||
stubs.Set(gridfs, 'GridFS', FakeGridFS)
|
||||
|
||||
|
||||
class TestStore(base.StoreClearingUnitTest):
|
||||
def setUp(self):
|
||||
"""Establish a clean test environment"""
|
||||
self.config(**GRIDFS_CONF)
|
||||
super(TestStore, self).setUp()
|
||||
self.stubs = stubout.StubOutForTesting()
|
||||
stub_out_gridfs(self.stubs)
|
||||
self.store = Store()
|
||||
self.addCleanup(self.stubs.UnsetAll)
|
||||
|
||||
def test_cleanup_when_add_image_exception(self):
|
||||
if pymongo is None:
|
||||
msg = 'GridFS store can not add images, skip test.'
|
||||
self.skipTest(msg)
|
||||
|
||||
self.assertRaises(exception.ImageSizeLimitExceeded,
|
||||
self.store.add,
|
||||
'fake_image_id',
|
||||
utils.LimitingReader(StringIO.StringIO('xx'), 1),
|
||||
2)
|
||||
self.assertEqual(self.store.fs.called_commands,
|
||||
['exists', 'put', 'delete'])
|
188
tests/unit/test_http_store.py
Normal file
188
tests/unit/test_http_store.py
Normal file
@ -0,0 +1,188 @@
|
||||
# Copyright 2010-2011 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from six.moves import xrange
|
||||
import stubout
|
||||
|
||||
from glance.store.common import exception
|
||||
from glance import context
|
||||
from glance.db.sqlalchemy import api as db_api
|
||||
from glance.registry.client.v1.api import configure_registry_client
|
||||
from glance.store import (delete_from_backend,
|
||||
safe_delete_from_backend)
|
||||
from glance.store.http import Store, MAX_REDIRECTS
|
||||
from glance.store.location import get_location_from_uri
|
||||
from glance.tests.unit import base
|
||||
from glance.tests import utils, stubs as test_stubs
|
||||
|
||||
|
||||
# The response stack is used to return designated responses in order;
|
||||
# however when it's empty a default 200 OK response is returned from
|
||||
# FakeHTTPConnection below.
|
||||
FAKE_RESPONSE_STACK = []
|
||||
|
||||
|
||||
def stub_out_http_backend(stubs):
|
||||
"""
|
||||
Stubs out the httplib.HTTPRequest.getresponse to return
|
||||
faked-out data instead of grabbing actual contents of a resource
|
||||
|
||||
The stubbed getresponse() returns an iterator over
|
||||
the data "I am a teapot, short and stout\n"
|
||||
|
||||
:param stubs: Set of stubout stubs
|
||||
"""
|
||||
|
||||
class FakeHTTPConnection(object):
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def getresponse(self):
|
||||
if len(FAKE_RESPONSE_STACK):
|
||||
return FAKE_RESPONSE_STACK.pop()
|
||||
return utils.FakeHTTPResponse()
|
||||
|
||||
def request(self, *_args, **_kwargs):
|
||||
pass
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
def fake_get_conn_class(self, *args, **kwargs):
|
||||
return FakeHTTPConnection
|
||||
|
||||
stubs.Set(Store, '_get_conn_class', fake_get_conn_class)
|
||||
|
||||
|
||||
def stub_out_registry_image_update(stubs):
|
||||
"""
|
||||
Stubs an image update on the registry.
|
||||
|
||||
:param stubs: Set of stubout stubs
|
||||
"""
|
||||
test_stubs.stub_out_registry_server(stubs)
|
||||
|
||||
def fake_image_update(ctx, image_id, values, purge_props=False):
|
||||
return {'properties': {}}
|
||||
|
||||
stubs.Set(db_api, 'image_update', fake_image_update)
|
||||
|
||||
|
||||
class TestHttpStore(base.StoreClearingUnitTest):
|
||||
|
||||
def setUp(self):
|
||||
global FAKE_RESPONSE_STACK
|
||||
FAKE_RESPONSE_STACK = []
|
||||
self.config(default_store='http',
|
||||
known_stores=['glance.store.http.Store'])
|
||||
super(TestHttpStore, self).setUp()
|
||||
self.stubs = stubout.StubOutForTesting()
|
||||
stub_out_http_backend(self.stubs)
|
||||
Store.CHUNKSIZE = 2
|
||||
self.store = Store()
|
||||
configure_registry_client()
|
||||
|
||||
def test_http_get(self):
|
||||
uri = "http://netloc/path/to/file.tar.gz"
|
||||
expected_returns = ['I ', 'am', ' a', ' t', 'ea', 'po', 't,', ' s',
|
||||
'ho', 'rt', ' a', 'nd', ' s', 'to', 'ut', '\n']
|
||||
loc = get_location_from_uri(uri)
|
||||
(image_file, image_size) = self.store.get(loc)
|
||||
self.assertEqual(image_size, 31)
|
||||
chunks = [c for c in image_file]
|
||||
self.assertEqual(chunks, expected_returns)
|
||||
|
||||
def test_http_get_redirect(self):
|
||||
# Add two layers of redirects to the response stack, which will
|
||||
# return the default 200 OK with the expected data after resolving
|
||||
# both redirects.
|
||||
redirect_headers_1 = {"location": "http://example.com/teapot.img"}
|
||||
redirect_resp_1 = utils.FakeHTTPResponse(status=302,
|
||||
headers=redirect_headers_1)
|
||||
redirect_headers_2 = {"location": "http://example.com/teapot_real.img"}
|
||||
redirect_resp_2 = utils.FakeHTTPResponse(status=301,
|
||||
headers=redirect_headers_2)
|
||||
FAKE_RESPONSE_STACK.append(redirect_resp_1)
|
||||
FAKE_RESPONSE_STACK.append(redirect_resp_2)
|
||||
|
||||
uri = "http://netloc/path/to/file.tar.gz"
|
||||
expected_returns = ['I ', 'am', ' a', ' t', 'ea', 'po', 't,', ' s',
|
||||
'ho', 'rt', ' a', 'nd', ' s', 'to', 'ut', '\n']
|
||||
loc = get_location_from_uri(uri)
|
||||
(image_file, image_size) = self.store.get(loc)
|
||||
self.assertEqual(image_size, 31)
|
||||
|
||||
chunks = [c for c in image_file]
|
||||
self.assertEqual(chunks, expected_returns)
|
||||
|
||||
def test_http_get_max_redirects(self):
|
||||
# Add more than MAX_REDIRECTS redirects to the response stack
|
||||
redirect_headers = {"location": "http://example.com/teapot.img"}
|
||||
redirect_resp = utils.FakeHTTPResponse(status=302,
|
||||
headers=redirect_headers)
|
||||
for i in xrange(MAX_REDIRECTS + 2):
|
||||
FAKE_RESPONSE_STACK.append(redirect_resp)
|
||||
|
||||
uri = "http://netloc/path/to/file.tar.gz"
|
||||
loc = get_location_from_uri(uri)
|
||||
self.assertRaises(exception.MaxRedirectsExceeded, self.store.get, loc)
|
||||
|
||||
def test_http_get_redirect_invalid(self):
|
||||
redirect_headers = {"location": "http://example.com/teapot.img"}
|
||||
redirect_resp = utils.FakeHTTPResponse(status=307,
|
||||
headers=redirect_headers)
|
||||
FAKE_RESPONSE_STACK.append(redirect_resp)
|
||||
|
||||
uri = "http://netloc/path/to/file.tar.gz"
|
||||
loc = get_location_from_uri(uri)
|
||||
self.assertRaises(exception.BadStoreUri, self.store.get, loc)
|
||||
|
||||
def test_http_get_not_found(self):
|
||||
not_found_resp = utils.FakeHTTPResponse(status=404,
|
||||
data="404 Not Found")
|
||||
FAKE_RESPONSE_STACK.append(not_found_resp)
|
||||
|
||||
uri = "http://netloc/path/to/file.tar.gz"
|
||||
loc = get_location_from_uri(uri)
|
||||
self.assertRaises(exception.BadStoreUri, self.store.get, loc)
|
||||
|
||||
def test_https_get(self):
|
||||
uri = "https://netloc/path/to/file.tar.gz"
|
||||
expected_returns = ['I ', 'am', ' a', ' t', 'ea', 'po', 't,', ' s',
|
||||
'ho', 'rt', ' a', 'nd', ' s', 'to', 'ut', '\n']
|
||||
loc = get_location_from_uri(uri)
|
||||
(image_file, image_size) = self.store.get(loc)
|
||||
self.assertEqual(image_size, 31)
|
||||
|
||||
chunks = [c for c in image_file]
|
||||
self.assertEqual(chunks, expected_returns)
|
||||
|
||||
def test_http_delete_raise_error(self):
|
||||
uri = "https://netloc/path/to/file.tar.gz"
|
||||
loc = get_location_from_uri(uri)
|
||||
ctx = context.RequestContext()
|
||||
self.assertRaises(NotImplementedError, self.store.delete, loc)
|
||||
self.assertRaises(exception.StoreDeleteNotSupported,
|
||||
delete_from_backend, ctx, uri)
|
||||
|
||||
def test_http_schedule_delete_swallows_error(self):
|
||||
uri = "https://netloc/path/to/file.tar.gz"
|
||||
ctx = context.RequestContext()
|
||||
stub_out_registry_image_update(self.stubs)
|
||||
try:
|
||||
safe_delete_from_backend(ctx, uri, 'image_id')
|
||||
except exception.StoreDeleteNotSupported:
|
||||
self.fail('StoreDeleteNotSupported should be swallowed')
|
167
tests/unit/test_rbd_store.py
Normal file
167
tests/unit/test_rbd_store.py
Normal file
@ -0,0 +1,167 @@
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import StringIO
|
||||
|
||||
import mock
|
||||
|
||||
from glance.store.common import exception
|
||||
from glance.store.common import utils
|
||||
from glance.openstack.common import units
|
||||
from glance.store.location import Location
|
||||
import glance.store.rbd as rbd_store
|
||||
from glance.store.rbd import StoreLocation
|
||||
from glance.tests.unit import base
|
||||
from glance.tests.unit.fake_rados import mock_rados
|
||||
from glance.tests.unit.fake_rados import mock_rbd
|
||||
|
||||
|
||||
class TestStore(base.StoreClearingUnitTest):
|
||||
def setUp(self):
|
||||
"""Establish a clean test environment"""
|
||||
super(TestStore, self).setUp()
|
||||
self.stubs.Set(rbd_store, 'rados', mock_rados)
|
||||
self.stubs.Set(rbd_store, 'rbd', mock_rbd)
|
||||
self.store = rbd_store.Store()
|
||||
self.store.chunk_size = 2
|
||||
self.called_commands_actual = []
|
||||
self.called_commands_expected = []
|
||||
self.store_specs = {'image': 'fake_image',
|
||||
'snapshot': 'fake_snapshot'}
|
||||
self.location = StoreLocation(self.store_specs)
|
||||
# Provide enough data to get more than one chunk iteration.
|
||||
self.data_len = 3 * units.Ki
|
||||
self.data_iter = StringIO.StringIO('*' * self.data_len)
|
||||
|
||||
def test_add_w_image_size_zero(self):
|
||||
"""Assert that correct size is returned even though 0 was provided."""
|
||||
self.store.chunk_size = units.Ki
|
||||
with mock.patch.object(rbd_store.rbd.Image, 'resize') as resize:
|
||||
with mock.patch.object(rbd_store.rbd.Image, 'write') as write:
|
||||
ret = self.store.add('fake_image_id', self.data_iter, 0)
|
||||
|
||||
resize.assert_called()
|
||||
write.assert_called()
|
||||
self.assertEqual(ret[1], self.data_len)
|
||||
|
||||
def test_add_w_rbd_image_exception(self):
|
||||
def _fake_create_image(*args, **kwargs):
|
||||
self.called_commands_actual.append('create')
|
||||
return self.location
|
||||
|
||||
def _fake_delete_image(*args, **kwargs):
|
||||
self.called_commands_actual.append('delete')
|
||||
|
||||
def _fake_enter(*args, **kwargs):
|
||||
raise exception.NotFound("")
|
||||
|
||||
self.stubs.Set(self.store, '_create_image', _fake_create_image)
|
||||
self.stubs.Set(self.store, '_delete_image', _fake_delete_image)
|
||||
self.stubs.Set(mock_rbd.Image, '__enter__', _fake_enter)
|
||||
|
||||
self.assertRaises(exception.NotFound, self.store.add,
|
||||
'fake_image_id', self.data_iter, self.data_len)
|
||||
|
||||
self.called_commands_expected = ['create', 'delete']
|
||||
|
||||
def test_add_duplicate_image(self):
|
||||
def _fake_create_image(*args, **kwargs):
|
||||
self.called_commands_actual.append('create')
|
||||
raise mock_rbd.ImageExists()
|
||||
|
||||
self.stubs.Set(self.store, '_create_image', _fake_create_image)
|
||||
self.assertRaises(exception.Duplicate, self.store.add,
|
||||
'fake_image_id', self.data_iter, self.data_len)
|
||||
self.called_commands_expected = ['create']
|
||||
|
||||
def test_delete(self):
|
||||
def _fake_remove(*args, **kwargs):
|
||||
self.called_commands_actual.append('remove')
|
||||
|
||||
self.stubs.Set(mock_rbd.RBD, 'remove', _fake_remove)
|
||||
self.store.delete(Location('test_rbd_store', StoreLocation,
|
||||
self.location.get_uri()))
|
||||
self.called_commands_expected = ['remove']
|
||||
|
||||
def test__delete_image(self):
    """_delete_image with no snapshot only calls RBD remove."""
    def _fake_remove(*args, **kwargs):
        self.called_commands_actual.append('remove')

    self.stubs.Set(mock_rbd.RBD, 'remove', _fake_remove)
    self.store._delete_image(self.location)
    # tearDown asserts that exactly this call sequence happened.
    self.called_commands_expected = ['remove']
|
||||
|
||||
def test__delete_image_w_snap(self):
    """With a snapshot, deletion unprotects and removes the snap first."""
    def _fake_unprotect_snap(*args, **kwargs):
        self.called_commands_actual.append('unprotect_snap')

    def _fake_remove_snap(*args, **kwargs):
        self.called_commands_actual.append('remove_snap')

    def _fake_remove(*args, **kwargs):
        self.called_commands_actual.append('remove')

    self.stubs.Set(mock_rbd.RBD, 'remove', _fake_remove)
    self.stubs.Set(mock_rbd.Image, 'unprotect_snap', _fake_unprotect_snap)
    self.stubs.Set(mock_rbd.Image, 'remove_snap', _fake_remove_snap)
    self.store._delete_image(self.location, snapshot_name='snap')

    # tearDown asserts that exactly this ordered sequence happened.
    self.called_commands_expected = ['unprotect_snap', 'remove_snap',
                                     'remove']
|
||||
|
||||
def test__delete_image_w_snap_exc_image_not_found(self):
    """An ImageNotFound while unprotecting the snap maps to NotFound."""
    def _fake_unprotect_snap(*args, **kwargs):
        self.called_commands_actual.append('unprotect_snap')
        raise mock_rbd.ImageNotFound()

    self.stubs.Set(mock_rbd.Image, 'unprotect_snap', _fake_unprotect_snap)
    self.assertRaises(exception.NotFound, self.store._delete_image,
                      self.location, snapshot_name='snap')

    # Deletion stops at the first failing step; no remove should follow.
    self.called_commands_expected = ['unprotect_snap']
|
||||
|
||||
def test__delete_image_exc_image_not_found(self):
    """An ImageNotFound during remove maps to NotFound."""
    def _fake_remove(*args, **kwargs):
        self.called_commands_actual.append('remove')
        raise mock_rbd.ImageNotFound()

    self.stubs.Set(mock_rbd.RBD, 'remove', _fake_remove)
    self.assertRaises(exception.NotFound, self.store._delete_image,
                      self.location, snapshot_name='snap')

    # tearDown asserts that exactly this call sequence happened.
    self.called_commands_expected = ['remove']
|
||||
|
||||
def test_image_size_exceeded_exception(self):
    """Exceeding the size limit during write triggers cleanup."""
    def _fake_write(*args, **kwargs):
        # Fail only on the first write so the sequence stays ['write'].
        if 'write' not in self.called_commands_actual:
            self.called_commands_actual.append('write')
        raise exception.ImageSizeLimitExceeded

    def _fake_delete_image(*args, **kwargs):
        self.called_commands_actual.append('delete')

    self.stubs.Set(mock_rbd.Image, 'write', _fake_write)
    self.stubs.Set(self.store, '_delete_image', _fake_delete_image)
    # Claim a max length one byte larger than the data actually is.
    data = utils.LimitingReader(self.data_iter, self.data_len)
    self.assertRaises(exception.ImageSizeLimitExceeded,
                      self.store.add, 'fake_image_id',
                      data, self.data_len + 1)

    # The partial image must be deleted after the failed write.
    self.called_commands_expected = ['write', 'delete']
|
||||
|
||||
def tearDown(self):
    """Assert the stubbed RBD calls matched each test's expectation."""
    self.assertEqual(self.called_commands_actual,
                     self.called_commands_expected)
    super(TestStore, self).tearDown()
|
413
tests/unit/test_s3_store.py
Normal file
413
tests/unit/test_s3_store.py
Normal file
@ -0,0 +1,413 @@
|
||||
# Copyright 2011 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Tests the S3 backend store"""
|
||||
|
||||
import hashlib
|
||||
import StringIO
|
||||
import uuid
|
||||
|
||||
import boto.s3.connection
|
||||
import stubout
|
||||
|
||||
from glance.store.common import exception
|
||||
from glance.openstack.common import units
|
||||
|
||||
from glance.store.location import get_location_from_uri
|
||||
import glance.store.s3
|
||||
from glance.store.s3 import Store, get_s3_location
|
||||
from glance.store import UnsupportedBackend
|
||||
from glance.tests.unit import base
|
||||
|
||||
|
||||
# Identifier of the image pre-loaded into the fake 'glance' bucket below.
FAKE_UUID = str(uuid.uuid4())

# Size of the canned fixture object.
FIVE_KB = 5 * units.Ki
# Baseline S3 store configuration shared by the tests below.
S3_CONF = {'verbose': True,
           'debug': True,
           'default_store': 's3',
           's3_store_access_key': 'user',
           's3_store_secret_key': 'key',
           's3_store_host': 'localhost:8080',
           's3_store_bucket': 'glance'}
|
||||
|
||||
|
||||
# We stub out as little as possible to ensure that the code paths
|
||||
# between glance.store.s3 and boto.s3.connection are tested
|
||||
# thoroughly
|
||||
def stub_out_s3(stubs):
    """Replace boto's S3Connection with in-memory fakes.

    Pre-populates a 'glance' bucket containing one FIVE_KB object keyed
    by FAKE_UUID, then stubs S3Connection.__init__ and get_bucket so the
    tests never touch the network.
    """

    class FakeKey:
        """Acts like a ``boto.s3.key.Key``."""
        def __init__(self, bucket, name):
            self.bucket = bucket
            self.name = name
            self.data = None
            self.size = 0
            # Chunk size used by compute_md5, mirroring boto's attribute.
            self.BufferSize = 1024

        def close(self):
            pass

        def exists(self):
            return self.bucket.exists(self.name)

        def delete(self):
            self.bucket.delete(self.name)

        def compute_md5(self, data):
            # Stream through the data in BufferSize chunks; boto's real
            # compute_md5 returns (hex_digest, base64_digest) — the
            # second element is unused by the code under test.
            chunk = data.read(self.BufferSize)
            checksum = hashlib.md5()
            while chunk:
                checksum.update(chunk)
                chunk = data.read(self.BufferSize)
            checksum_hex = checksum.hexdigest()
            return checksum_hex, None

        def set_contents_from_file(self, fp, replace=False, **kwargs):
            # Buffer everything in memory and expose a file-like read().
            self.data = StringIO.StringIO()
            for bytes in fp:
                self.data.write(bytes)
            self.size = self.data.len
            # Reset the buffer to start
            self.data.seek(0)
            self.read = self.data.read

        def get_file(self):
            return self.data

    class FakeBucket:
        """Acts like a ``boto.s3.bucket.Bucket``."""
        def __init__(self, name, keys=None):
            self.name = name
            self.keys = keys or {}

        def __str__(self):
            return self.name

        def exists(self, key):
            return key in self.keys

        def delete(self, key):
            del self.keys[key]

        def get_key(self, key_name, **kwargs):
            # Like boto, hand back a fresh (empty) key when absent.
            key = self.keys.get(key_name)
            if not key:
                return FakeKey(self, key_name)
            return key

        def new_key(self, key_name):
            new_key = FakeKey(self, key_name)
            self.keys[key_name] = new_key
            return new_key

    # Seed the fixture bucket with one known object.
    fixture_buckets = {'glance': FakeBucket('glance')}
    b = fixture_buckets['glance']
    k = b.new_key(FAKE_UUID)
    k.set_contents_from_file(StringIO.StringIO("*" * FIVE_KB))

    def fake_connection_constructor(self, *args, **kwargs):
        # The store must strip any scheme before connecting; a scheme
        # reaching here is a bug, surfaced as UnsupportedBackend.
        host = kwargs.get('host')
        if host.startswith('http://') or host.startswith('https://'):
            raise UnsupportedBackend(host)

    def fake_get_bucket(conn, bucket_id):
        bucket = fixture_buckets.get(bucket_id)
        if not bucket:
            bucket = FakeBucket(bucket_id)
        return bucket

    stubs.Set(boto.s3.connection.S3Connection,
              '__init__', fake_connection_constructor)
    stubs.Set(boto.s3.connection.S3Connection,
              'get_bucket', fake_get_bucket)
|
||||
|
||||
|
||||
def format_s3_location(user, key, authurl, bucket, obj):
    """Build an S3 store URI from its component pieces.

    The scheme is ``s3+https`` when the auth URL is https, otherwise
    plain ``s3``; any explicit scheme prefix and surrounding slashes
    are stripped from the auth URL itself.
    """
    if authurl.startswith('https://'):
        scheme = 's3+https'
        authurl = authurl[len('https://'):]
    else:
        scheme = 's3'
        if authurl.startswith('http://'):
            authurl = authurl[len('http://'):]
    parts = (scheme, user, key, authurl.strip('/'), bucket, obj)
    return "%s://%s:%s@%s/%s/%s" % parts
|
||||
|
||||
|
||||
class TestStore(base.StoreClearingUnitTest):
    """Exercises glance.store.s3.Store against the fake boto stubs."""

    def setUp(self):
        """Establish a clean test environment."""
        self.config(**S3_CONF)
        super(TestStore, self).setUp()
        self.stubs = stubout.StubOutForTesting()
        stub_out_s3(self.stubs)
        self.store = Store()
        self.addCleanup(self.stubs.UnsetAll)

    def test_get(self):
        """Test a "normal" retrieval of an image in chunks."""
        loc = get_location_from_uri(
            "s3://user:key@auth_address/glance/%s" % FAKE_UUID)
        (image_s3, image_size) = self.store.get(loc)

        self.assertEqual(image_size, FIVE_KB)

        expected_data = "*" * FIVE_KB
        data = ""

        for chunk in image_s3:
            data += chunk
        self.assertEqual(expected_data, data)

    def test_get_calling_format_path(self):
        """Retrieval uses OrdinaryCallingFormat for the 'path' URL format."""
        self.config(s3_store_bucket_url_format='path')

        def fake_S3Connection_init(*args, **kwargs):
            # The assertion runs inside the stubbed constructor.
            expected_cls = boto.s3.connection.OrdinaryCallingFormat
            self.assertIsInstance(kwargs.get('calling_format'), expected_cls)

        self.stubs.Set(boto.s3.connection.S3Connection, '__init__',
                       fake_S3Connection_init)

        loc = get_location_from_uri(
            "s3://user:key@auth_address/glance/%s" % FAKE_UUID)
        (image_s3, image_size) = self.store.get(loc)

    def test_get_calling_format_default(self):
        """Retrieval uses SubdomainCallingFormat by default."""
        def fake_S3Connection_init(*args, **kwargs):
            expected_cls = boto.s3.connection.SubdomainCallingFormat
            self.assertIsInstance(kwargs.get('calling_format'), expected_cls)

        self.stubs.Set(boto.s3.connection.S3Connection, '__init__',
                       fake_S3Connection_init)

        loc = get_location_from_uri(
            "s3://user:key@auth_address/glance/%s" % FAKE_UUID)
        (image_s3, image_size) = self.store.get(loc)

    def test_get_non_existing(self):
        """A missing bucket or a missing key both raise NotFound."""
        uri = "s3://user:key@auth_address/badbucket/%s" % FAKE_UUID
        loc = get_location_from_uri(uri)
        self.assertRaises(exception.NotFound, self.store.get, loc)

        uri = "s3://user:key@auth_address/glance/noexist"
        loc = get_location_from_uri(uri)
        self.assertRaises(exception.NotFound, self.store.get, loc)

    def test_add(self):
        """Test that we can add an image via the s3 backend."""
        expected_image_id = str(uuid.uuid4())
        expected_s3_size = FIVE_KB
        expected_s3_contents = "*" * expected_s3_size
        expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
        expected_location = format_s3_location(
            S3_CONF['s3_store_access_key'],
            S3_CONF['s3_store_secret_key'],
            S3_CONF['s3_store_host'],
            S3_CONF['s3_store_bucket'],
            expected_image_id)
        image_s3 = StringIO.StringIO(expected_s3_contents)

        location, size, checksum, _ = self.store.add(expected_image_id,
                                                     image_s3,
                                                     expected_s3_size)

        self.assertEqual(expected_location, location)
        self.assertEqual(expected_s3_size, size)
        self.assertEqual(expected_checksum, checksum)

        # Round-trip: read the object back and verify the payload.
        loc = get_location_from_uri(expected_location)
        (new_image_s3, new_image_size) = self.store.get(loc)
        new_image_contents = StringIO.StringIO()
        for chunk in new_image_s3:
            new_image_contents.write(chunk)
        new_image_s3_size = new_image_contents.len

        self.assertEqual(expected_s3_contents, new_image_contents.getvalue())
        self.assertEqual(expected_s3_size, new_image_s3_size)

    def test_add_host_variations(self):
        """
        Test that having http(s):// in the s3serviceurl in config
        options works as expected.
        """
        variations = ['http://localhost:80',
                      'http://localhost',
                      'http://localhost/v1',
                      'http://localhost/v1/',
                      'https://localhost',
                      'https://localhost:8080',
                      'https://localhost/v1',
                      'https://localhost/v1/',
                      'localhost',
                      'localhost:8080/v1']
        for variation in variations:
            expected_image_id = str(uuid.uuid4())
            expected_s3_size = FIVE_KB
            expected_s3_contents = "*" * expected_s3_size
            expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
            new_conf = S3_CONF.copy()
            new_conf['s3_store_host'] = variation
            expected_location = format_s3_location(
                new_conf['s3_store_access_key'],
                new_conf['s3_store_secret_key'],
                new_conf['s3_store_host'],
                new_conf['s3_store_bucket'],
                expected_image_id)
            image_s3 = StringIO.StringIO(expected_s3_contents)

            self.config(**new_conf)
            self.store = Store()
            location, size, checksum, _ = self.store.add(expected_image_id,
                                                         image_s3,
                                                         expected_s3_size)

            self.assertEqual(expected_location, location)
            self.assertEqual(expected_s3_size, size)
            self.assertEqual(expected_checksum, checksum)

            loc = get_location_from_uri(expected_location)
            (new_image_s3, new_image_size) = self.store.get(loc)
            # NOTE(review): unlike test_add this calls getvalue()/len()
            # directly on the store.get() result instead of iterating
            # chunks — confirm store.get returns a StringIO-like here.
            new_image_contents = new_image_s3.getvalue()
            new_image_s3_size = len(new_image_s3)

            self.assertEqual(expected_s3_contents, new_image_contents)
            self.assertEqual(expected_s3_size, new_image_s3_size)

    def test_add_already_existing(self):
        """
        Tests that adding an image with an existing identifier
        raises an appropriate exception.
        """
        image_s3 = StringIO.StringIO("nevergonnamakeit")
        self.assertRaises(exception.Duplicate,
                          self.store.add,
                          FAKE_UUID, image_s3, 0)

    def _option_required(self, key):
        """Return True iff nulling config *key* leaves add() disabled."""
        conf = S3_CONF.copy()
        conf[key] = None

        try:
            self.config(**conf)
            self.store = Store()
            return self.store.add == self.store.add_disabled
        except Exception:
            return False
        # NOTE: a trailing unreachable ``return False`` was removed;
        # both the try and except paths above already return.

    def test_no_access_key(self):
        """
        Tests that options without access key disables the add method.
        """
        self.assertTrue(self._option_required('s3_store_access_key'))

    def test_no_secret_key(self):
        """
        Tests that options without secret key disables the add method.
        """
        self.assertTrue(self._option_required('s3_store_secret_key'))

    def test_no_host(self):
        """
        Tests that options without host disables the add method.
        """
        self.assertTrue(self._option_required('s3_store_host'))

    def test_delete(self):
        """
        Test we can delete an existing image in the s3 store.
        """
        uri = "s3://user:key@auth_address/glance/%s" % FAKE_UUID
        loc = get_location_from_uri(uri)
        self.store.delete(loc)

        self.assertRaises(exception.NotFound, self.store.get, loc)

    def test_delete_non_existing(self):
        """
        Test that trying to delete a s3 that doesn't exist
        raises an error.
        """
        uri = "s3://user:key@auth_address/glance/noexist"
        loc = get_location_from_uri(uri)
        self.assertRaises(exception.NotFound, self.store.delete, loc)

    def _do_test_get_s3_location(self, host, loc):
        # Every scheme/port spelling of the host must map to the same
        # S3 region string.
        self.assertEqual(get_s3_location(host), loc)
        self.assertEqual(get_s3_location(host + ':80'), loc)
        self.assertEqual(get_s3_location('http://' + host), loc)
        self.assertEqual(get_s3_location('http://' + host + ':80'), loc)
        self.assertEqual(get_s3_location('https://' + host), loc)
        self.assertEqual(get_s3_location('https://' + host + ':80'), loc)

    def test_get_s3_good_location(self):
        """
        Test that the s3 location can be derived from the host.
        """
        good_locations = [
            ('s3.amazonaws.com', ''),
            ('s3-eu-west-1.amazonaws.com', 'EU'),
            ('s3-us-west-1.amazonaws.com', 'us-west-1'),
            ('s3-ap-southeast-1.amazonaws.com', 'ap-southeast-1'),
            ('s3-ap-northeast-1.amazonaws.com', 'ap-northeast-1'),
        ]
        for (url, expected) in good_locations:
            self._do_test_get_s3_location(url, expected)

    def test_get_s3_bad_location(self):
        """
        Test that the s3 location cannot be derived from an unexpected host.
        """
        bad_locations = [
            ('', ''),
            ('s3.amazon.co.uk', ''),
            ('s3-govcloud.amazonaws.com', ''),
            ('cloudfiles.rackspace.com', ''),
        ]
        for (url, expected) in bad_locations:
            self._do_test_get_s3_location(url, expected)

    def test_calling_format_path(self):
        self.config(s3_store_bucket_url_format='path')
        self.assertIsInstance(glance.store.s3.get_calling_format(),
                              boto.s3.connection.OrdinaryCallingFormat)

    def test_calling_format_subdomain(self):
        self.config(s3_store_bucket_url_format='subdomain')
        self.assertIsInstance(glance.store.s3.get_calling_format(),
                              boto.s3.connection.SubdomainCallingFormat)

    def test_calling_format_default(self):
        self.assertIsInstance(glance.store.s3.get_calling_format(),
                              boto.s3.connection.SubdomainCallingFormat)
|
60
tests/unit/test_sheepdog_store.py
Normal file
60
tests/unit/test_sheepdog_store.py
Normal file
@ -0,0 +1,60 @@
|
||||
# Copyright 2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import StringIO
|
||||
|
||||
import stubout
|
||||
|
||||
from glance.store.common import exception
|
||||
from glance.store.common import utils
|
||||
from glance.openstack.common import processutils
|
||||
import glance.store.sheepdog
|
||||
from glance.store.sheepdog import Store
|
||||
from glance.tests.unit import base
|
||||
|
||||
|
||||
# Baseline store configuration used by the sheepdog tests below.
SHEEPDOG_CONF = {'verbose': True,
                 'debug': True,
                 'default_store': 'sheepdog'}
|
||||
|
||||
|
||||
class TestStore(base.StoreClearingUnitTest):
    """Exercises glance.store.sheepdog.Store with a stubbed executor."""

    def setUp(self):
        """Establish a clean test environment"""
        def _fake_execute(*cmd, **kwargs):
            # Swallow the 'collie' shell-outs the store makes on init.
            pass

        self.config(**SHEEPDOG_CONF)
        super(TestStore, self).setUp()
        self.stubs = stubout.StubOutForTesting()
        self.stubs.Set(processutils, 'execute', _fake_execute)
        self.store = Store()
        self.addCleanup(self.stubs.UnsetAll)

    def test_cleanup_when_add_image_exception(self):
        """A size-limit failure during add must delete the new image."""
        called_commands = []

        def _fake_run_command(self, command, data, *params):
            called_commands.append(command)

        self.stubs.Set(glance.store.sheepdog.SheepdogImage,
                       '_run_command', _fake_run_command)

        # LimitingReader caps at 1 byte but 2 bytes are supplied, so
        # the write must fail partway through.
        self.assertRaises(exception.ImageSizeLimitExceeded,
                          self.store.add,
                          'fake_image_id',
                          utils.LimitingReader(StringIO.StringIO('xx'), 1),
                          2)
        # The store probed for existence, created, then cleaned up.
        self.assertEqual(called_commands, ['list -r', 'create', 'delete'])
|
59
tests/unit/test_store_base.py
Normal file
59
tests/unit/test_store_base.py
Normal file
@ -0,0 +1,59 @@
|
||||
# Copyright 2011-2013 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from glance.store.common import exception
|
||||
from glance import store
|
||||
from glance.store import base as store_base
|
||||
from glance.tests.unit import base as test_base
|
||||
|
||||
|
||||
class FakeUnconfigurableStoreDriver(store_base.Store):
    """Store driver whose configure step always fails."""

    def configure(self):
        msg = "Unconfigurable store driver."
        raise exception.BadStoreConfiguration(msg)
|
||||
|
||||
|
||||
class TestStoreBase(test_base.StoreClearingUnitTest):
    """Tests for helpers in glance.store.base and store creation."""

    def setUp(self):
        self.config(default_store='file')
        super(TestStoreBase, self).setUp()

    def test_exception_to_unicode(self):
        """_exception_to_unicode must always yield a usable unicode msg."""
        class FakeException(Exception):
            def __str__(self):
                # Force the undecodable-message fallback path.
                raise UnicodeError()

        # Plain ASCII message passes through unchanged.
        exc = Exception('error message')
        ret = store_base._exception_to_unicode(exc)
        self.assertIsInstance(ret, unicode)
        self.assertEqual(ret, 'error message')

        # Non-UTF8 bytes are dropped during decoding.
        exc = Exception('\xa5 error message')
        ret = store_base._exception_to_unicode(exc)
        self.assertIsInstance(ret, unicode)
        self.assertEqual(ret, ' error message')

        # When str() itself fails, a generic message is produced.
        # NOTE(review): relies on ``_`` being installed as a builtin by
        # gettext at test run time — confirm against the test runner setup.
        exc = FakeException('\xa5 error message')
        ret = store_base._exception_to_unicode(exc)
        self.assertIsInstance(ret, unicode)
        self.assertEqual(ret, _("Caught '%(exception)s' exception.") %
                         {'exception': 'FakeException'})

    def test_create_store_exclude_unconfigurable_drivers(self):
        """Drivers that fail to configure are skipped, not fatal."""
        self.config(known_stores=[
            "glance.tests.unit.test_store_base.FakeUnconfigurableStoreDriver",
            "glance.store.filesystem.Store"])
        count = store.create_stores()
        # Only the filesystem store should have been registered.
        self.assertEqual(count, 1)
|
977
tests/unit/test_swift_store.py
Normal file
977
tests/unit/test_swift_store.py
Normal file
@ -0,0 +1,977 @@
|
||||
# Copyright 2011 OpenStack Foundation
|
||||
# All Rights Reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""Tests the Swift backend store"""
|
||||
|
||||
import hashlib
|
||||
import httplib
|
||||
import mock
|
||||
import StringIO
|
||||
import tempfile
|
||||
import urllib
|
||||
import uuid
|
||||
|
||||
from oslo.config import cfg
|
||||
import stubout
|
||||
import swiftclient
|
||||
|
||||
import glance.store.common.auth
|
||||
from glance.store.common import exception
|
||||
from glance.openstack.common import units
|
||||
|
||||
from glance.store import BackendException
|
||||
from glance.store.location import get_location_from_uri
|
||||
import glance.store.swift
|
||||
from glance.tests.unit import base
|
||||
|
||||
CONF = cfg.CONF

# NOTE(review): unlike the s3 tests' FAKE_UUID (a str), this is a
# callable, yet the fixtures below interpolate it with %s without
# calling it — confirm whether FAKE_UUID() was intended.
FAKE_UUID = lambda: str(uuid.uuid4())

# Shorthand for the store class under test.
Store = glance.store.swift.Store
FIVE_KB = 5 * units.Ki
FIVE_GB = 5 * units.Gi
# The fake put_object rejects payloads beyond this size.
MAX_SWIFT_OBJECT_SIZE = FIVE_GB
# Global counter bumped by the fake put_object stub; tests reset and
# inspect it to check chunking behavior.
SWIFT_PUT_OBJECT_CALLS = 0
# Baseline single-tenant Swift store configuration for these tests.
SWIFT_CONF = {'verbose': True,
              'debug': True,
              'known_stores': ['glance.store.swift.Store'],
              'default_store': 'swift',
              'swift_store_user': 'user',
              'swift_store_key': 'key',
              'swift_store_auth_address': 'localhost:8080',
              'swift_store_container': 'glance'}
|
||||
|
||||
|
||||
# We stub out as little as possible to ensure that the code paths
|
||||
# between glance.store.swift and swiftclient are tested
|
||||
# thoroughly
|
||||
def stub_out_swiftclient(stubs, swift_store_auth_version):
    """Replace swiftclient.client functions with in-memory fakes.

    Seeds one 'glance' container holding a FIVE_KB object keyed by
    FAKE_UUID, then stubs the container/object/auth entry points so the
    code paths between glance.store.swift and swiftclient run without
    a real cluster.
    """
    fixture_containers = ['glance']
    fixture_container_headers = {}
    fixture_headers = {
        'glance/%s' % FAKE_UUID: {
            'content-length': FIVE_KB,
            'etag': 'c2e5db72bd7fd153f53ede5da5a06de3'
        }
    }
    fixture_objects = {'glance/%s' % FAKE_UUID:
                       StringIO.StringIO("*" * FIVE_KB)}

    def fake_head_container(url, token, container, **kwargs):
        if container not in fixture_containers:
            msg = "No container %s found" % container
            raise swiftclient.ClientException(msg,
                                              http_status=httplib.NOT_FOUND)
        return fixture_container_headers

    def fake_put_container(url, token, container, **kwargs):
        fixture_containers.append(container)

    def fake_post_container(url, token, container, headers, http_conn=None):
        for key, value in headers.iteritems():
            fixture_container_headers[key] = value

    def fake_put_object(url, token, container, name, contents, **kwargs):
        # PUT returns the ETag header for the newly-added object
        # Large object manifest...
        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS += 1
        CHUNKSIZE = 64 * units.Ki
        fixture_key = "%s/%s" % (container, name)
        if fixture_key not in fixture_headers:
            if kwargs.get('headers'):
                # A PUT with headers is a manifest (DLO) creation.
                etag = kwargs['headers']['ETag']
                fixture_headers[fixture_key] = {'manifest': True,
                                                'etag': etag}
                return etag
            if hasattr(contents, 'read'):
                # File-like payload: stream it in CHUNKSIZE pieces.
                fixture_object = StringIO.StringIO()
                chunk = contents.read(CHUNKSIZE)
                checksum = hashlib.md5()
                while chunk:
                    fixture_object.write(chunk)
                    checksum.update(chunk)
                    chunk = contents.read(CHUNKSIZE)
                etag = checksum.hexdigest()
            else:
                fixture_object = StringIO.StringIO(contents)
                etag = hashlib.md5(fixture_object.getvalue()).hexdigest()
            read_len = fixture_object.len
            if read_len > MAX_SWIFT_OBJECT_SIZE:
                msg = ('Image size:%d exceeds Swift max:%d' %
                       (read_len, MAX_SWIFT_OBJECT_SIZE))
                raise swiftclient.ClientException(
                    msg, http_status=httplib.REQUEST_ENTITY_TOO_LARGE)
            fixture_objects[fixture_key] = fixture_object
            fixture_headers[fixture_key] = {
                'content-length': read_len,
                'etag': etag}
            return etag
        else:
            msg = ("Object PUT failed - Object with key %s already exists"
                   % fixture_key)
            raise swiftclient.ClientException(msg,
                                              http_status=httplib.CONFLICT)

    def fake_get_object(url, token, container, name, **kwargs):
        # GET returns the tuple (list of headers, file object)
        fixture_key = "%s/%s" % (container, name)
        if fixture_key not in fixture_headers:
            msg = "Object GET failed"
            raise swiftclient.ClientException(msg,
                                              http_status=httplib.NOT_FOUND)

        fixture = fixture_headers[fixture_key]
        if 'manifest' in fixture:
            # Large object manifest... we return a file containing
            # all objects with prefix of this fixture key
            chunk_keys = sorted([k for k in fixture_headers.keys()
                                 if k.startswith(fixture_key) and
                                 k != fixture_key])
            result = StringIO.StringIO()
            for key in chunk_keys:
                result.write(fixture_objects[key].getvalue())
            return fixture_headers[fixture_key], result

        else:
            return fixture_headers[fixture_key], fixture_objects[fixture_key]

    def fake_head_object(url, token, container, name, **kwargs):
        # HEAD returns the list of headers for an object
        try:
            fixture_key = "%s/%s" % (container, name)
            return fixture_headers[fixture_key]
        except KeyError:
            msg = "Object HEAD failed - Object does not exist"
            raise swiftclient.ClientException(msg,
                                              http_status=httplib.NOT_FOUND)

    def fake_delete_object(url, token, container, name, **kwargs):
        # DELETE returns nothing
        fixture_key = "%s/%s" % (container, name)
        if fixture_key not in fixture_headers:
            msg = "Object DELETE failed - Object does not exist"
            raise swiftclient.ClientException(msg,
                                              http_status=httplib.NOT_FOUND)
        else:
            del fixture_headers[fixture_key]
            del fixture_objects[fixture_key]

    def fake_http_connection(*args, **kwargs):
        return None

    def fake_get_auth(url, user, key, snet, auth_version, **kwargs):
        if url is None:
            return None, None
        if 'http' in url and '://' not in url:
            raise ValueError('Invalid url %s' % url)
        # Check the auth version against the configured value
        if swift_store_auth_version != auth_version:
            msg = 'AUTHENTICATION failed (version mismatch)'
            raise swiftclient.ClientException(msg)
        return None, None

    stubs.Set(swiftclient.client,
              'head_container', fake_head_container)
    stubs.Set(swiftclient.client,
              'put_container', fake_put_container)
    stubs.Set(swiftclient.client,
              'post_container', fake_post_container)
    stubs.Set(swiftclient.client,
              'put_object', fake_put_object)
    stubs.Set(swiftclient.client,
              'delete_object', fake_delete_object)
    stubs.Set(swiftclient.client,
              'head_object', fake_head_object)
    stubs.Set(swiftclient.client,
              'get_object', fake_get_object)
    stubs.Set(swiftclient.client,
              'get_auth', fake_get_auth)
    stubs.Set(swiftclient.client,
              'http_connection', fake_http_connection)
|
||||
|
||||
|
||||
class SwiftTests(object):
|
||||
|
||||
@property
def swift_store_user(self):
    # URL-quote the configured user so it is safe to embed in URIs.
    return urllib.quote(CONF.swift_store_user)
|
||||
|
||||
def test_get_size(self):
    """
    Test that we can get the size of an object in the swift store.
    """
    uri = "swift://%s:key@auth_address/glance/%s" % (
        self.swift_store_user, FAKE_UUID)
    loc = get_location_from_uri(uri)
    image_size = self.store.get_size(loc)
    # 5120 == FIVE_KB, the size of the canned fixture object.
    self.assertEqual(image_size, 5120)
|
||||
|
||||
def test_get_size_with_multi_tenant_on(self):
    """Test that single tenant uris work with multi tenant on."""
    uri = ("swift://%s:key@auth_address/glance/%s" %
           (self.swift_store_user, FAKE_UUID))
    self.config(swift_store_multi_tenant=True)
    #NOTE(markwash): ensure the image is found
    context = glance.context.RequestContext()
    size = glance.store.get_size_from_backend(context, uri)
    self.assertEqual(size, 5120)
|
||||
|
||||
def test_get(self):
    """Test a "normal" retrieval of an image in chunks"""
    uri = "swift://%s:key@auth_address/glance/%s" % (
        self.swift_store_user, FAKE_UUID)
    loc = get_location_from_uri(uri)
    (image_swift, image_size) = self.store.get(loc)
    self.assertEqual(image_size, 5120)

    expected_data = "*" * FIVE_KB
    data = ""

    # Reassemble the chunk iterator and compare with the fixture.
    for chunk in image_swift:
        data += chunk
    self.assertEqual(expected_data, data)
|
||||
|
||||
def test_get_with_http_auth(self):
    """
    Test a retrieval from Swift with an HTTP authurl. This is
    specified either via a Location header with swift+http:// or using
    http:// in the swift_store_auth_address config value
    """
    loc = get_location_from_uri("swift+http://%s:key@auth_address/"
                                "glance/%s" %
                                (self.swift_store_user, FAKE_UUID))
    (image_swift, image_size) = self.store.get(loc)
    self.assertEqual(image_size, 5120)

    expected_data = "*" * FIVE_KB
    data = ""

    for chunk in image_swift:
        data += chunk
    self.assertEqual(expected_data, data)
|
||||
|
||||
def test_get_non_existing(self):
    """Fetching a nonexistent swift object must raise NotFound."""
    loc = get_location_from_uri(
        "swift://%s:key@authurl/glance/noexist" % (self.swift_store_user))
    self.assertRaises(exception.NotFound, self.store.get, loc)
|
||||
|
||||
def test_add(self):
    """Test that we can add an image via the swift backend"""
    image_id = str(uuid.uuid4())
    contents = "*" * FIVE_KB
    contents_checksum = hashlib.md5(contents).hexdigest()
    expected_location = (
        'swift+https://%s:key@localhost:8080/glance/%s' % (
            self.swift_store_user, image_id))
    image_swift = StringIO.StringIO(contents)

    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0

    location, size, checksum, _ = self.store.add(image_id, image_swift,
                                                 FIVE_KB)

    self.assertEqual(expected_location, location)
    self.assertEqual(FIVE_KB, size)
    self.assertEqual(contents_checksum, checksum)
    # A single put_object call means the image was not chunked.
    self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)

    # Round-trip the image back out of the store.
    loc = get_location_from_uri(expected_location)
    (new_image_swift, new_image_size) = self.store.get(loc)
    self.assertEqual(contents, new_image_swift.getvalue())
    self.assertEqual(FIVE_KB, len(new_image_swift))
|
||||
|
||||
def test_add_auth_url_variations(self):
    """
    Adding an image must succeed for many swift_store_auth_address
    formats: scheme present or missing, explicit ports, and
    versioned paths with or without a trailing slash.
    """
    variations = {
        'http://localhost:80': 'swift+http://%s:key@localhost:80'
                               '/glance/%s',
        'http://localhost': 'swift+http://%s:key@localhost/glance/%s',
        'http://localhost/v1': 'swift+http://%s:key@localhost'
                               '/v1/glance/%s',
        'http://localhost/v1/': 'swift+http://%s:key@localhost'
                                '/v1/glance/%s',
        'https://localhost': 'swift+https://%s:key@localhost/glance/%s',
        'https://localhost:8080': 'swift+https://%s:key@localhost:8080'
                                  '/glance/%s',
        'https://localhost/v1': 'swift+https://%s:key@localhost'
                                '/v1/glance/%s',
        'https://localhost/v1/': 'swift+https://%s:key@localhost'
                                 '/v1/glance/%s',
        'localhost': 'swift+https://%s:key@localhost/glance/%s',
        'localhost:8080/v1': 'swift+https://%s:key@localhost:8080'
                             '/v1/glance/%s',
    }

    for auth_address, location_template in variations.items():
        image_id = str(uuid.uuid4())
        expected_location = location_template % (
            self.swift_store_user, image_id)
        contents = "*" * FIVE_KB
        contents_checksum = hashlib.md5(contents).hexdigest()
        image_swift = StringIO.StringIO(contents)

        global SWIFT_PUT_OBJECT_CALLS
        SWIFT_PUT_OBJECT_CALLS = 0

        # Reconfigure and rebuild the store for each auth address.
        self.config(swift_store_auth_address=auth_address)
        self.store = Store()
        location, size, checksum, _ = self.store.add(image_id,
                                                     image_swift,
                                                     FIVE_KB)

        self.assertEqual(expected_location, location)
        self.assertEqual(FIVE_KB, size)
        self.assertEqual(contents_checksum, checksum)
        self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)

        # Round-trip the image back out of the store.
        loc = get_location_from_uri(expected_location)
        (new_image_swift, new_image_size) = self.store.get(loc)
        self.assertEqual(contents, new_image_swift.getvalue())
        self.assertEqual(FIVE_KB, len(new_image_swift))
|
||||
|
||||
def test_add_no_container_no_create(self):
    """
    Adding to a missing container with create-on-put disabled must
    raise a BackendException naming the missing container.
    """
    self.config(swift_store_create_container_on_put=False,
                swift_store_container='noexist')
    self.store = Store()

    image_swift = StringIO.StringIO("nevergonnamakeit")

    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0

    # assertRaises alone cannot inspect the message, so catch the
    # exception manually and check the container name appears in it.
    try:
        self.store.add(str(uuid.uuid4()), image_swift, 0)
    except BackendException as e:
        self.assertTrue("container noexist does not exist "
                        "in Swift" in str(e))
    else:
        self.fail("BackendException was not raised")
    self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 0)
|
||||
|
||||
def test_add_no_container_and_create(self):
    """
    With create-on-put enabled, adding to a missing container
    creates the container automatically and the add succeeds.
    """
    image_id = str(uuid.uuid4())
    contents = "*" * FIVE_KB
    contents_checksum = hashlib.md5(contents).hexdigest()
    expected_location = (
        'swift+https://%s:key@localhost:8080/noexist/%s' % (
            self.swift_store_user, image_id))
    image_swift = StringIO.StringIO(contents)

    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0

    self.config(swift_store_create_container_on_put=True,
                swift_store_container='noexist')
    self.store = Store()
    location, size, checksum, _ = self.store.add(image_id, image_swift,
                                                 FIVE_KB)

    self.assertEqual(expected_location, location)
    self.assertEqual(FIVE_KB, size)
    self.assertEqual(contents_checksum, checksum)
    self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 1)

    # Round-trip the image back out of the store.
    loc = get_location_from_uri(expected_location)
    (new_image_swift, new_image_size) = self.store.get(loc)
    self.assertEqual(contents, new_image_swift.getvalue())
    self.assertEqual(FIVE_KB, len(new_image_swift))
|
||||
|
||||
def test_add_large_object(self):
    """
    Force a chunked upload by shrinking the store's
    large_object_size/large_object_chunk_size, then verify that
    multiple put_object() calls were made.
    """
    image_id = str(uuid.uuid4())
    contents = "*" * FIVE_KB
    contents_checksum = hashlib.md5(contents).hexdigest()
    expected_location = (
        'swift+https://%s:key@localhost:8080/glance/%s' % (
            self.swift_store_user, image_id))
    image_swift = StringIO.StringIO(contents)

    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0

    self.config(swift_store_container='glance')
    self.store = Store()
    saved_max_size = self.store.large_object_size
    saved_chunk_size = self.store.large_object_chunk_size
    try:
        self.store.large_object_size = 1024
        self.store.large_object_chunk_size = 1024
        location, size, checksum, _ = self.store.add(image_id,
                                                     image_swift,
                                                     FIVE_KB)
    finally:
        # Always restore the store's original thresholds.
        self.store.large_object_chunk_size = saved_chunk_size
        self.store.large_object_size = saved_max_size

    self.assertEqual(expected_location, location)
    self.assertEqual(FIVE_KB, size)
    self.assertEqual(contents_checksum, checksum)
    # Expecting 6 objects to be created on Swift -- 5 chunks and 1
    # manifest.
    self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 6)

    # Round-trip the image back out of the store.
    loc = get_location_from_uri(expected_location)
    (new_image_swift, new_image_size) = self.store.get(loc)
    self.assertEqual(contents, new_image_swift.getvalue())
    self.assertEqual(FIVE_KB, len(new_image_swift))
|
||||
|
||||
def test_add_large_object_zero_size(self):
    """
    Tests that adding an image to Swift which has both an unknown size
    and exceeds Swift's maximum limit of 5GB is correctly uploaded.

    We avoid the overhead of creating a 5GB object for this test by
    temporarily setting MAX_SWIFT_OBJECT_SIZE to 1KB, and then adding
    an object of 5KB.

    Bug lp:891738
    """
    # Set up a 'large' image of 5KB.
    image_id = str(uuid.uuid4())
    contents = "*" * FIVE_KB
    contents_checksum = hashlib.md5(contents).hexdigest()
    expected_location = (
        'swift+https://%s:key@localhost:8080/glance/%s' % (
            self.swift_store_user, image_id))
    image_swift = StringIO.StringIO(contents)

    global SWIFT_PUT_OBJECT_CALLS
    SWIFT_PUT_OBJECT_CALLS = 0

    # Temporarily drop MAX_SWIFT_OBJECT_SIZE to 1KB and add the image
    # with an explicit image_length of 0 (unknown size).
    self.config(swift_store_container='glance')
    self.store = Store()
    saved_max_size = self.store.large_object_size
    saved_chunk_size = self.store.large_object_chunk_size
    global MAX_SWIFT_OBJECT_SIZE
    saved_swift_max = MAX_SWIFT_OBJECT_SIZE
    try:
        MAX_SWIFT_OBJECT_SIZE = 1024
        self.store.large_object_size = 1024
        self.store.large_object_chunk_size = 1024
        location, size, checksum, _ = self.store.add(image_id,
                                                     image_swift, 0)
    finally:
        # Always restore the patched globals and store thresholds.
        self.store.large_object_chunk_size = saved_chunk_size
        self.store.large_object_size = saved_max_size
        MAX_SWIFT_OBJECT_SIZE = saved_swift_max

    self.assertEqual(expected_location, location)
    self.assertEqual(FIVE_KB, size)
    self.assertEqual(contents_checksum, checksum)
    # Expecting 7 calls to put_object -- 5 chunks, a zero chunk which is
    # then deleted, and the manifest. Note the difference with above
    # where the image_size is specified in advance (there's no zero
    # chunk in that case).
    self.assertEqual(SWIFT_PUT_OBJECT_CALLS, 7)

    # Round-trip the image back out of the store.
    loc = get_location_from_uri(expected_location)
    (new_image_swift, new_image_size) = self.store.get(loc)
    self.assertEqual(contents, new_image_swift.getvalue())
    self.assertEqual(FIVE_KB, len(new_image_swift))
|
||||
|
||||
def test_add_already_existing(self):
    """Re-adding an existing image identifier must raise Duplicate."""
    image_swift = StringIO.StringIO("nevergonnamakeit")
    self.assertRaises(exception.Duplicate,
                      self.store.add,
                      FAKE_UUID, image_swift, 0)
|
||||
|
||||
def test_add_saves_and_reraises_and_not_uses_wildcard_raise(self):
    """
    A failing put_object must propagate the ORIGINAL exception to the
    caller, even though the stale-chunk cleanup runs (and swallows its
    own internal failure) in between.
    """
    image_id = str(uuid.uuid4())
    swift_size = self.store.large_object_size = 1024
    swift_contents = "*" * swift_size
    connection = mock.Mock()

    def fake_delete_chunk(connection, container, chunks):
        # Simulate a cleanup helper that fails internally but swallows
        # its own error; the store must still re-raise the upload error.
        try:
            raise Exception()
        except Exception:
            pass

    image_swift = StringIO.StringIO(swift_contents)
    connection.put_object.side_effect = exception.ClientConnectionError
    self.store._delete_stale_chunks = fake_delete_chunk

    self.assertRaises(exception.ClientConnectionError,
                      self.store.add,
                      image_id,
                      image_swift,
                      swift_size,
                      connection)
|
||||
|
||||
def _option_required(self, key):
    """
    Return True when blanking config option *key* disables the store's
    add() method (the backend refuses uploads without that option),
    False otherwise or when store construction fails outright.
    """
    conf = self.getConfig()
    conf[key] = None

    try:
        self.config(**conf)
        self.store = Store()
        return self.store.add == self.store.add_disabled
    except Exception:
        # Store construction itself blew up; option was not merely
        # disabled, so report failure.
        return False
|
||||
|
||||
def test_no_user(self):
    """Omitting swift_store_user must disable the add method."""
    self.assertTrue(self._option_required('swift_store_user'))
|
||||
|
||||
def test_no_key(self):
    """Omitting swift_store_key must disable the add method."""
    self.assertTrue(self._option_required('swift_store_key'))
|
||||
|
||||
def test_no_auth_address(self):
    """Omitting swift_store_auth_address must disable the add method."""
    self.assertTrue(self._option_required('swift_store_auth_address'))
|
||||
|
||||
def test_delete(self):
    """Deleting an existing image removes it from the swift store."""
    loc = get_location_from_uri("swift://%s:key@authurl/glance/%s" % (
        self.swift_store_user, FAKE_UUID))
    self.store.delete(loc)

    # A subsequent fetch of the same location must now fail.
    self.assertRaises(exception.NotFound, self.store.get, loc)
|
||||
|
||||
def test_delete_non_existing(self):
    """Deleting a nonexistent swift object must raise NotFound."""
    loc = get_location_from_uri(
        "swift://%s:key@authurl/glance/noexist" % (self.swift_store_user))
    self.assertRaises(exception.NotFound, self.store.delete, loc)
|
||||
|
||||
def test_read_acl_public(self):
    """A public read ACL stores the '.r:*,.rlistings' header."""
    self.config(swift_store_multi_tenant=True)
    store = Store(glance.context.RequestContext())
    loc = get_location_from_uri(
        "swift+http://storeurl/glance/%s" % FAKE_UUID)
    store.set_acls(loc, public=True)
    headers = swiftclient.client.head_container('x', 'y', 'glance')
    self.assertEqual(".r:*,.rlistings", headers['X-Container-Read'])
|
||||
|
||||
def test_read_acl_tenants(self):
    """Tenant read ACLs become 'tenant:*' entries in the header."""
    self.config(swift_store_multi_tenant=True)
    store = Store(glance.context.RequestContext())
    loc = get_location_from_uri(
        "swift+http://storeurl/glance/%s" % FAKE_UUID)
    store.set_acls(loc, read_tenants=['matt', 'mark'])
    headers = swiftclient.client.head_container('x', 'y', 'glance')
    self.assertEqual('matt:*,mark:*', headers['X-Container-Read'])
|
||||
|
||||
def test_write_acls(self):
    """Tenant write ACLs become 'tenant:*' entries in the header."""
    self.config(swift_store_multi_tenant=True)
    store = Store(glance.context.RequestContext())
    loc = get_location_from_uri(
        "swift+http://storeurl/glance/%s" % FAKE_UUID)
    store.set_acls(loc, write_tenants=['frank', 'jim'])
    headers = swiftclient.client.head_container('x', 'y', 'glance')
    self.assertEqual('frank:*,jim:*', headers['X-Container-Write'])
|
||||
|
||||
|
||||
class TestStoreAuthV1(base.StoreClearingUnitTest, SwiftTests):
    """Run the shared SwiftTests against a v1-auth configuration."""

    def getConfig(self):
        # Start from the module-wide defaults and pin v1 credentials.
        options = SWIFT_CONF.copy()
        options['swift_store_user'] = 'user'
        options['swift_store_auth_version'] = '1'
        return options

    def setUp(self):
        """Establish a clean test environment"""
        options = self.getConfig()
        self.config(**options)
        super(TestStoreAuthV1, self).setUp()
        self.stubs = stubout.StubOutForTesting()
        stub_out_swiftclient(self.stubs,
                             options['swift_store_auth_version'])
        self.store = Store()
        self.addCleanup(self.stubs.UnsetAll)
|
||||
|
||||
|
||||
class TestStoreAuthV2(TestStoreAuthV1):
    """Re-run the auth tests with Keystone (v2) style credentials."""

    def getConfig(self):
        options = super(TestStoreAuthV2, self).getConfig()
        options['swift_store_user'] = 'tenant:user'
        options['swift_store_auth_version'] = '2'
        return options

    def test_v2_with_no_tenant(self):
        """A v2 user without a tenant part makes the URI unusable."""
        options = self.getConfig()
        options['swift_store_user'] = 'failme'
        loc = get_location_from_uri(
            "swift://%s:key@auth_address/glance/%s" % (
                options['swift_store_user'], FAKE_UUID))
        self.assertRaises(exception.BadStoreUri, self.store.get, loc)

    def test_v2_multi_tenant_location(self):
        """Multi-tenant URIs still resolve to the swift store."""
        options = self.getConfig()
        options['swift_store_multi_tenant'] = True
        loc = get_location_from_uri(
            "swift://auth_address/glance/%s" % (FAKE_UUID))
        self.assertEqual('swift', loc.store_name)
|
||||
|
||||
|
||||
class FakeConnection(object):
    """Record-only stand-in for swiftclient's Connection.

    Stores the constructor arguments so tests can assert on how a
    store built its connection; performs no network activity.
    """

    def __init__(self, authurl, user, key, retries=5, preauthurl=None,
                 preauthtoken=None, snet=False, starting_backoff=1,
                 tenant_name=None, os_options=None, auth_version="1",
                 insecure=False, ssl_compression=True):
        # NOTE: os_options used to default to a mutable {} shared by all
        # instances; use a None sentinel and build a fresh dict instead.
        self.authurl = authurl
        self.user = user
        self.key = key
        self.preauthurl = preauthurl
        self.preauthtoken = preauthtoken
        self.snet = snet
        self.tenant_name = tenant_name
        self.os_options = os_options if os_options is not None else {}
        self.auth_version = auth_version
        self.insecure = insecure
|
||||
|
||||
|
||||
class TestSingleTenantStoreConnections(base.IsolatedUnitTest):
    """Exercise SingleTenantStore.get_connection parameter handling."""

    def setUp(self):
        super(TestSingleTenantStoreConnections, self).setUp()
        self.stubs.Set(swiftclient, 'Connection', FakeConnection)
        self.store = glance.store.swift.SingleTenantStore()
        self.location = glance.store.swift.StoreLocation({
            'scheme': 'swift',
            'auth_or_store_url': 'example.com/v2/',
            'user': 'tenant:user',
            'key': 'abcdefg',
            'container': 'cont',
            'obj': 'object'})

    def test_basic_connection(self):
        """Default config yields an https v2 connection from the URI."""
        connection = self.store.get_connection(self.location)
        self.assertEqual('https://example.com/v2/', connection.authurl)
        self.assertEqual('2', connection.auth_version)
        self.assertEqual('user', connection.user)
        self.assertEqual('tenant', connection.tenant_name)
        self.assertEqual('abcdefg', connection.key)
        self.assertFalse(connection.snet)
        self.assertEqual(None, connection.preauthurl)
        self.assertEqual(None, connection.preauthtoken)
        self.assertFalse(connection.insecure)
        self.assertEqual({'service_type': 'object-store',
                          'endpoint_type': 'publicURL'},
                         connection.os_options)

    def test_connection_with_no_trailing_slash(self):
        """A missing trailing slash on the auth URL is supplied."""
        self.location.auth_or_store_url = 'example.com/v2'
        connection = self.store.get_connection(self.location)
        self.assertEqual('https://example.com/v2/', connection.authurl)

    def test_connection_insecure(self):
        """swift_store_auth_insecure propagates to the connection."""
        self.config(swift_store_auth_insecure=True)
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertTrue(connection.insecure)

    def test_connection_with_auth_v1(self):
        """v1 auth keeps the raw user name and no tenant."""
        self.config(swift_store_auth_version='1')
        self.store.configure()
        self.location.user = 'auth_v1_user'
        connection = self.store.get_connection(self.location)
        self.assertEqual('1', connection.auth_version)
        self.assertEqual('auth_v1_user', connection.user)
        self.assertEqual(None, connection.tenant_name)

    def test_connection_invalid_user(self):
        """Too many ':' separators in the user is a bad URI."""
        self.store.configure()
        self.location.user = 'invalid:format:user'
        self.assertRaises(exception.BadStoreUri,
                          self.store.get_connection, self.location)

    def test_connection_missing_user(self):
        """A location with no user at all is a bad URI."""
        self.store.configure()
        self.location.user = None
        self.assertRaises(exception.BadStoreUri,
                          self.store.get_connection, self.location)

    def test_connection_with_region(self):
        """swift_store_region is forwarded in os_options."""
        self.config(swift_store_region='Sahara')
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual({'region_name': 'Sahara',
                          'service_type': 'object-store',
                          'endpoint_type': 'publicURL'},
                         connection.os_options)

    def test_connection_with_service_type(self):
        """swift_store_service_type is forwarded in os_options."""
        self.config(swift_store_service_type='shoe-store')
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual({'service_type': 'shoe-store',
                          'endpoint_type': 'publicURL'},
                         connection.os_options)

    def test_connection_with_endpoint_type(self):
        """swift_store_endpoint_type is forwarded in os_options."""
        self.config(swift_store_endpoint_type='internalURL')
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual({'service_type': 'object-store',
                          'endpoint_type': 'internalURL'},
                         connection.os_options)

    def test_connection_with_snet(self):
        """swift_enable_snet flips the connection's snet flag."""
        self.config(swift_enable_snet=True)
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertTrue(connection.snet)
|
||||
|
||||
|
||||
class TestMultiTenantStoreConnections(base.IsolatedUnitTest):
    """Exercise MultiTenantStore.get_connection parameter handling."""

    def setUp(self):
        super(TestMultiTenantStoreConnections, self).setUp()
        self.stubs.Set(swiftclient, 'Connection', FakeConnection)
        self.context = glance.context.RequestContext(
            user='user', tenant='tenant', auth_tok='0123')
        self.store = glance.store.swift.MultiTenantStore(self.context)
        self.location = glance.store.swift.StoreLocation({
            'scheme': 'swift',
            'auth_or_store_url': 'example.com',
            'container': 'cont',
            'obj': 'object'})

    def test_basic_connection(self):
        """The context's token and store URL seed the connection."""
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertEqual(None, connection.authurl)
        self.assertEqual('2', connection.auth_version)
        self.assertEqual('user', connection.user)
        self.assertEqual('tenant', connection.tenant_name)
        self.assertEqual(None, connection.key)
        self.assertFalse(connection.snet)
        self.assertEqual('https://example.com', connection.preauthurl)
        self.assertEqual('0123', connection.preauthtoken)
        self.assertEqual({}, connection.os_options)

    def test_connection_with_snet(self):
        """swift_enable_snet flips the connection's snet flag."""
        self.config(swift_enable_snet=True)
        self.store.configure()
        connection = self.store.get_connection(self.location)
        self.assertTrue(connection.snet)
|
||||
|
||||
|
||||
class FakeGetEndpoint(object):
    """Callable stub for auth.get_endpoint that records its arguments.

    Returns the canned *response* and keeps the keyword arguments it
    was called with so tests can assert on the requested endpoint.
    """

    def __init__(self, response):
        self.response = response

    def __call__(self, service_catalog, service_type=None,
                 endpoint_region=None, endpoint_type=None):
        # Remember exactly what was asked for.
        self.service_type = service_type
        self.endpoint_region = endpoint_region
        self.endpoint_type = endpoint_type
        return self.response
|
||||
|
||||
|
||||
class TestCreatingLocations(base.IsolatedUnitTest):
    """Check the store location objects each store type builds."""

    def _stub_get_endpoint(self, response):
        # Install a recording stub for the service-catalog lookup and
        # return it so tests can assert on the recorded arguments.
        fake_get_endpoint = FakeGetEndpoint(response)
        self.stubs.Set(glance.store.common.auth, 'get_endpoint',
                       fake_get_endpoint)
        return fake_get_endpoint

    def _make_multi_tenant_store(self):
        # Build a MultiTenantStore from a minimal request context.
        context = glance.context.RequestContext(
            user='user', tenant='tenant', auth_tok='123',
            service_catalog={})
        return glance.store.swift.MultiTenantStore(context)

    def test_single_tenant_location(self):
        """A fully-configured single-tenant store fills every field."""
        self.config(swift_store_auth_address='example.com/v2',
                    swift_store_container='container',
                    swift_store_user='tenant:user',
                    swift_store_key='auth_key')
        store = glance.store.swift.SingleTenantStore()
        location = store.create_location('image-id')
        self.assertEqual('swift+https', location.scheme)
        self.assertEqual('https://example.com/v2', location.swift_url)
        self.assertEqual('container', location.container)
        self.assertEqual('image-id', location.obj)
        self.assertEqual('tenant:user', location.user)
        self.assertEqual('auth_key', location.key)

    def test_single_tenant_location_http(self):
        """An http:// auth address yields a swift+http scheme."""
        self.config(swift_store_auth_address='http://example.com/v2',
                    swift_store_container='container',
                    swift_store_user='tenant:user',
                    swift_store_key='auth_key')
        store = glance.store.swift.SingleTenantStore()
        location = store.create_location('image-id')
        self.assertEqual('swift+http', location.scheme)
        self.assertEqual('http://example.com/v2', location.swift_url)

    def test_multi_tenant_location(self):
        """Multi-tenant locations come from the service catalog."""
        self.config(swift_store_container='container')
        fake_get_endpoint = self._stub_get_endpoint('https://some_endpoint')
        store = self._make_multi_tenant_store()
        location = store.create_location('image-id')
        self.assertEqual('swift+https', location.scheme)
        self.assertEqual('https://some_endpoint', location.swift_url)
        self.assertEqual('container_image-id', location.container)
        self.assertEqual('image-id', location.obj)
        self.assertEqual(None, location.user)
        self.assertEqual(None, location.key)
        self.assertEqual('object-store', fake_get_endpoint.service_type)

    def test_multi_tenant_location_http(self):
        """An http endpoint yields a swift+http scheme."""
        fake_get_endpoint = self._stub_get_endpoint('http://some_endpoint')
        store = self._make_multi_tenant_store()
        location = store.create_location('image-id')
        self.assertEqual('swift+http', location.scheme)
        self.assertEqual('http://some_endpoint', location.swift_url)

    def test_multi_tenant_location_with_region(self):
        """swift_store_region is passed through to the endpoint lookup."""
        self.config(swift_store_region='WestCarolina')
        fake_get_endpoint = self._stub_get_endpoint('https://some_endpoint')
        self._make_multi_tenant_store()
        self.assertEqual('WestCarolina', fake_get_endpoint.endpoint_region)

    def test_multi_tenant_location_custom_service_type(self):
        """swift_store_service_type overrides the catalog lookup type."""
        self.config(swift_store_service_type='toy-store')
        fake_get_endpoint = self._stub_get_endpoint('https://some_endpoint')
        self._make_multi_tenant_store()
        self.assertEqual('toy-store', fake_get_endpoint.service_type)

    def test_multi_tenant_location_custom_endpoint_type(self):
        """swift_store_endpoint_type overrides the endpoint type."""
        self.config(swift_store_endpoint_type='InternalURL')
        fake_get_endpoint = self._stub_get_endpoint('https://some_endpoint')
        self._make_multi_tenant_store()
        self.assertEqual('InternalURL', fake_get_endpoint.endpoint_type)
|
||||
|
||||
|
||||
class TestChunkReader(base.StoreClearingUnitTest):
    """Unit tests for the swift driver's ChunkReader helper."""

    def test_read_all_data(self):
        """
        Replicate what goes on in the Swift driver with the repeated
        creation of the ChunkReader object: repeatedly wrapping the
        same file must still consume every byte exactly once.
        """
        CHUNKSIZE = 100
        checksum = hashlib.md5()
        data_file = tempfile.NamedTemporaryFile()
        data_file.write('*' * units.Ki)
        data_file.flush()
        infile = open(data_file.name, 'rb')
        bytes_read = 0
        try:
            while True:
                # A fresh reader per chunk, as the driver does.
                cr = glance.store.swift.ChunkReader(infile, checksum,
                                                    CHUNKSIZE)
                chunk = cr.read(CHUNKSIZE)
                if not chunk:
                    break
                bytes_read += len(chunk)
        finally:
            # The original leaked the file handle; always close both.
            infile.close()
            data_file.close()
        self.assertEqual(1024, bytes_read)
|
Loading…
x
Reference in New Issue
Block a user