Add tests w/ some fixes, although they don't run yet

Flavio Percoco 2014-01-30 11:21:22 +01:00
parent 50fca911c1
commit 28d0258873
14 changed files with 2398 additions and 6 deletions

View File

@@ -18,10 +18,11 @@ import sys
from oslo.config import cfg
from glance.common import exception
from glance.common import utils
from glance.store.common import exception
from glance.store.common import utils
import glance.context
import glance.domain.proxy
from glance.openstack.common.gettextutils import _
from glance.openstack.common import importutils
import glance.openstack.common.log as logging
from glance.store import location

View File

@@ -24,8 +24,8 @@ import urlparse
from oslo.config import cfg
from glance.common import exception
from glance.common import utils
from glance.store.common import exception
from glance.store.common import utils
from glance.openstack.common import jsonutils
import glance.openstack.common.log as logging
import glance.store
@@ -49,8 +49,7 @@ CONF.register_opts(filesystem_opts)
class StoreLocation(glance.store.location.StoreLocation):
"""Class describing a Filesystem URI"""
"""Class describing a Filesystem URI."""
def process_specs(self):
self.scheme = self.specs.get('scheme', 'file')

tests/__init__.py Normal file
View File

tests/unit/__init__.py Normal file
View File

tests/unit/base.py Normal file
View File

@@ -0,0 +1,53 @@
# Copyright 2011 OpenStack Foundation
# Copyright 2014 Red Hat, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
from oslo.config import cfg
import testtools
import glance.store as store
from glance.store import location
class StoreBaseTest(testtools.TestCase):
def setUp(self):
super(StoreBaseTest, self).setUp()
self.conf = cfg.ConfigOpts()
        # ConfigOpts instances are callable; this parses an empty argument list.
        self.conf(args=[])
# Ensure stores + locations cleared
location.SCHEME_TO_CLS_MAP = {}
store.create_stores()
self.addCleanup(setattr, location, 'SCHEME_TO_CLS_MAP', dict())
self.test_dir = self.useFixture(fixtures.TempDir()).path
def config(self, **kw):
"""Override some configuration values.
The keyword arguments are the names of configuration options to
override and their values.
If a group argument is supplied, the overrides are applied to
the specified configuration option group.
All overrides are automatically cleared at the end of the current
test by the fixtures cleanup process.
"""
group = kw.pop('group', None)
for k, v in kw.iteritems():
self.conf.set_override(k, v, group)
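
A minimal usage sketch (illustrative only, not part of this commit) of how a concrete store test is expected to build on StoreBaseTest: subclass it and use self.config() to override options for the store under test, as the test modules below do. The class name and the override here are placeholders.

class ExampleStoreTest(StoreBaseTest):
    def test_metadata_file_override(self):
        # Override a registered option for this test only; the keyword
        # names must match options registered on self.conf.
        self.config(filesystem_store_metadata_file='/tmp/metadata.json')
        self.assertEqual('/tmp/metadata.json',
                         self.conf.filesystem_store_metadata_file)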

tests/unit/test_cinder_store.py Normal file
View File

@@ -0,0 +1,83 @@
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import stubout
from cinderclient.v2 import client as cinderclient
from glance.store.common import exception
from glance.openstack.common import units
import glance.store.cinder as cinder
from glance.store.location import get_location_from_uri
from glance.tests.unit import base
class FakeObject(object):
def __init__(self, **kwargs):
for name, value in kwargs.iteritems():
setattr(self, name, value)
class TestCinderStore(base.StoreClearingUnitTest):
def setUp(self):
self.config(default_store='cinder',
known_stores=['glance.store.cinder.Store'])
super(TestCinderStore, self).setUp()
self.stubs = stubout.StubOutForTesting()
def test_cinder_configure_add(self):
store = cinder.Store()
self.assertRaises(exception.BadStoreConfiguration,
store.configure_add)
store = cinder.Store(context=None)
self.assertRaises(exception.BadStoreConfiguration,
store.configure_add)
store = cinder.Store(context=FakeObject(service_catalog=None))
self.assertRaises(exception.BadStoreConfiguration,
store.configure_add)
store = cinder.Store(context=FakeObject(service_catalog=
'fake_service_catalog'))
store.configure_add()
def test_cinder_get_size(self):
fake_client = FakeObject(auth_token=None, management_url=None)
fake_volumes = {'12345678-9012-3455-6789-012345678901':
FakeObject(size=5)}
class FakeCinderClient(FakeObject):
def __init__(self, *args, **kwargs):
super(FakeCinderClient, self).__init__(client=fake_client,
volumes=fake_volumes)
self.stubs.Set(cinderclient, 'Client', FakeCinderClient)
fake_sc = [{u'endpoints': [{u'publicURL': u'foo_public_url'}],
u'endpoints_links': [],
u'name': u'cinder',
u'type': u'volume'}]
fake_context = FakeObject(service_catalog=fake_sc,
                                  user='fake_user',
auth_tok='fake_token',
tenant='fake_tenant')
uri = 'cinder://%s' % fake_volumes.keys()[0]
loc = get_location_from_uri(uri)
store = cinder.Store(context=fake_context)
image_size = store.get_size(loc)
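        # Cinder reports volume sizes in GiB, so get_size() is expected to
        # return the size scaled to bytes (5 * units.Gi here).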
self.assertEqual(image_size,
fake_volumes.values()[0].size * units.Gi)
self.assertEqual(fake_client.auth_token, 'fake_token')
self.assertEqual(fake_client.management_url, 'foo_public_url')

tests/unit/test_filesystem_store.py Normal file
View File

@@ -0,0 +1,295 @@
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests the filesystem backend store"""
import __builtin__
import errno
import hashlib
import json
import os
import StringIO
import uuid
import mox
import stubout
from glance.openstack.common import units
from glance.store.common import exception
from glance.store.filesystem import ChunkedFile
from glance.store.filesystem import Store
from glance.store.location import get_location_from_uri
from glance.store.tests.unit import base
class TestStore(base.StoreBaseTest):
    def setUp(self):
        """Establish a clean test environment."""
        super(TestStore, self).setUp()
        self.orig_chunksize = ChunkedFile.CHUNKSIZE
        ChunkedFile.CHUNKSIZE = 10
        self.stubs = stubout.StubOutForTesting()
        self.addCleanup(self.stubs.UnsetAll)
        # Write image files under the per-test temporary directory.
        self.config(filesystem_store_datadir=self.test_dir)
        self.store = Store()
def tearDown(self):
"""Clear the test environment."""
super(TestStore, self).tearDown()
ChunkedFile.CHUNKSIZE = self.orig_chunksize
def test_get(self):
"""Test a "normal" retrieval of an image in chunks."""
# First add an image...
image_id = str(uuid.uuid4())
file_contents = "chunk00000remainder"
image_file = StringIO.StringIO(file_contents)
location, size, checksum, _ = self.store.add(image_id,
image_file,
len(file_contents))
# Now read it back...
uri = "file:///%s/%s" % (self.test_dir, image_id)
loc = get_location_from_uri(uri)
(image_file, image_size) = self.store.get(loc)
expected_data = "chunk00000remainder"
expected_num_chunks = 2
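        # ChunkedFile.CHUNKSIZE is set to 10 in setUp and the payload is
        # 19 bytes, so the image should come back in exactly two chunks.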
data = ""
num_chunks = 0
for chunk in image_file:
num_chunks += 1
data += chunk
self.assertEqual(expected_data, data)
self.assertEqual(expected_num_chunks, num_chunks)
def test_get_non_existing(self):
"""
Test that trying to retrieve a file that doesn't exist
raises an error
"""
loc = get_location_from_uri("file:///%s/non-existing" % self.test_dir)
self.assertRaises(exception.NotFound,
self.store.get,
loc)
def test_add(self):
"""Test that we can add an image via the filesystem backend"""
ChunkedFile.CHUNKSIZE = 1024
expected_image_id = str(uuid.uuid4())
expected_file_size = 5 * units.Ki # 5K
expected_file_contents = "*" * expected_file_size
expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
expected_location = "file://%s/%s" % (self.test_dir,
expected_image_id)
image_file = StringIO.StringIO(expected_file_contents)
location, size, checksum, _ = self.store.add(expected_image_id,
image_file,
expected_file_size)
self.assertEqual(expected_location, location)
self.assertEqual(expected_file_size, size)
self.assertEqual(expected_checksum, checksum)
uri = "file:///%s/%s" % (self.test_dir, expected_image_id)
loc = get_location_from_uri(uri)
(new_image_file, new_image_size) = self.store.get(loc)
new_image_contents = ""
new_image_file_size = 0
for chunk in new_image_file:
new_image_file_size += len(chunk)
new_image_contents += chunk
self.assertEqual(expected_file_contents, new_image_contents)
self.assertEqual(expected_file_size, new_image_file_size)
def test_add_check_metadata_success(self):
expected_image_id = str(uuid.uuid4())
in_metadata = {'akey': u'some value', 'list': [u'1', u'2', u'3']}
jsonfilename = os.path.join(self.test_dir,
"storage_metadata.%s" % expected_image_id)
self.config(filesystem_store_metadata_file=jsonfilename)
with open(jsonfilename, 'w') as fptr:
json.dump(in_metadata, fptr)
expected_file_size = 10
expected_file_contents = "*" * expected_file_size
image_file = StringIO.StringIO(expected_file_contents)
location, size, checksum, metadata = self.store.add(expected_image_id,
image_file,
expected_file_size)
self.assertEqual(metadata, in_metadata)
def test_add_check_metadata_bad_data(self):
expected_image_id = str(uuid.uuid4())
in_metadata = {'akey': 10} # only unicode is allowed
jsonfilename = os.path.join(self.test_dir,
"storage_metadata.%s" % expected_image_id)
self.config(filesystem_store_metadata_file=jsonfilename)
with open(jsonfilename, 'w') as fptr:
json.dump(in_metadata, fptr)
expected_file_size = 10
expected_file_contents = "*" * expected_file_size
image_file = StringIO.StringIO(expected_file_contents)
location, size, checksum, metadata = self.store.add(expected_image_id,
image_file,
expected_file_size)
self.assertEqual(metadata, {})
def test_add_check_metadata_bad_nosuch_file(self):
expected_image_id = str(uuid.uuid4())
jsonfilename = os.path.join(self.test_dir,
"storage_metadata.%s" % expected_image_id)
self.config(filesystem_store_metadata_file=jsonfilename)
expected_file_size = 10
expected_file_contents = "*" * expected_file_size
image_file = StringIO.StringIO(expected_file_contents)
location, size, checksum, metadata = self.store.add(expected_image_id,
image_file,
expected_file_size)
self.assertEqual(metadata, {})
def test_add_already_existing(self):
"""
Tests that adding an image with an existing identifier
raises an appropriate exception
"""
ChunkedFile.CHUNKSIZE = 1024
image_id = str(uuid.uuid4())
file_size = 5 * units.Ki # 5K
file_contents = "*" * file_size
image_file = StringIO.StringIO(file_contents)
location, size, checksum, _ = self.store.add(image_id,
image_file,
file_size)
image_file = StringIO.StringIO("nevergonnamakeit")
self.assertRaises(exception.Duplicate,
self.store.add,
image_id, image_file, 0)
def _do_test_add_write_failure(self, errno, exception):
ChunkedFile.CHUNKSIZE = 1024
image_id = str(uuid.uuid4())
file_size = 5 * units.Ki # 5K
file_contents = "*" * file_size
path = os.path.join(self.test_dir, image_id)
image_file = StringIO.StringIO(file_contents)
m = mox.Mox()
m.StubOutWithMock(__builtin__, 'open')
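        # mox record/replay: the open() call below records an expectation
        # that, once ReplayAll() switches to replay mode, makes the stubbed
        # open() raise the prepared IOError when the store writes the image.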
e = IOError()
e.errno = errno
open(path, 'wb').AndRaise(e)
m.ReplayAll()
try:
self.assertRaises(exception,
self.store.add,
image_id, image_file, 0)
self.assertFalse(os.path.exists(path))
finally:
m.VerifyAll()
m.UnsetStubs()
def test_add_storage_full(self):
"""
Tests that adding an image without enough space on disk
raises an appropriate exception
"""
self._do_test_add_write_failure(errno.ENOSPC, exception.StorageFull)
def test_add_file_too_big(self):
"""
Tests that adding an excessively large image file
raises an appropriate exception
"""
self._do_test_add_write_failure(errno.EFBIG, exception.StorageFull)
def test_add_storage_write_denied(self):
"""
Tests that adding an image with insufficient filestore permissions
raises an appropriate exception
"""
self._do_test_add_write_failure(errno.EACCES,
exception.StorageWriteDenied)
def test_add_other_failure(self):
"""
Tests that a non-space-related IOError does not raise a
StorageFull exception.
"""
self._do_test_add_write_failure(errno.ENOTDIR, IOError)
def test_add_cleanup_on_read_failure(self):
"""
        Tests that the partial image file is cleaned up after a read
        failure.
"""
ChunkedFile.CHUNKSIZE = 1024
image_id = str(uuid.uuid4())
file_size = 5 * units.Ki # 5K
file_contents = "*" * file_size
path = os.path.join(self.test_dir, image_id)
image_file = StringIO.StringIO(file_contents)
def fake_Error(size):
raise AttributeError()
self.stubs.Set(image_file, 'read', fake_Error)
self.assertRaises(AttributeError,
self.store.add,
image_id, image_file, 0)
self.assertFalse(os.path.exists(path))
def test_delete(self):
"""
Test we can delete an existing image in the filesystem store
"""
# First add an image
image_id = str(uuid.uuid4())
file_size = 5 * units.Ki # 5K
file_contents = "*" * file_size
image_file = StringIO.StringIO(file_contents)
location, size, checksum, _ = self.store.add(image_id,
image_file,
file_size)
# Now check that we can delete it
uri = "file:///%s/%s" % (self.test_dir, image_id)
loc = get_location_from_uri(uri)
self.store.delete(loc)
self.assertRaises(exception.NotFound, self.store.get, loc)
def test_delete_non_existing(self):
"""
Test that trying to delete a file that doesn't exist
raises an error
"""
loc = get_location_from_uri("file:///tmp/glance-tests/non-existing")
self.assertRaises(exception.NotFound,
self.store.delete,
loc)

tests/unit/test_gridfs_store.py Normal file
View File

@@ -0,0 +1,97 @@
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import StringIO
import stubout
from glance.store.common import exception
from glance.store.common import utils
from glance.store.gridfs import Store
from glance.tests.unit import base
try:
import gridfs
import pymongo
except ImportError:
pymongo = None
GRIDFS_CONF = {'verbose': True,
'debug': True,
'default_store': 'gridfs',
'mongodb_store_uri': 'mongodb://fake_store_uri',
'mongodb_store_db': 'fake_store_db'}
def stub_out_gridfs(stubs):
class FakeMongoClient(object):
def __init__(self, *args, **kwargs):
pass
def __getitem__(self, key):
return None
class FakeGridFS(object):
image_data = {}
called_commands = []
def __init__(self, *args, **kwargs):
pass
def exists(self, image_id):
self.called_commands.append('exists')
return False
def put(self, image_file, _id):
self.called_commands.append('put')
data = None
while True:
data = image_file.read(64)
if data:
self.image_data[_id] = \
self.image_data.setdefault(_id, '') + data
else:
break
def delete(self, _id):
self.called_commands.append('delete')
if pymongo is not None:
stubs.Set(pymongo, 'MongoClient', FakeMongoClient)
stubs.Set(gridfs, 'GridFS', FakeGridFS)
class TestStore(base.StoreClearingUnitTest):
def setUp(self):
"""Establish a clean test environment"""
self.config(**GRIDFS_CONF)
super(TestStore, self).setUp()
self.stubs = stubout.StubOutForTesting()
stub_out_gridfs(self.stubs)
self.store = Store()
self.addCleanup(self.stubs.UnsetAll)
def test_cleanup_when_add_image_exception(self):
if pymongo is None:
msg = 'GridFS store can not add images, skip test.'
self.skipTest(msg)
self.assertRaises(exception.ImageSizeLimitExceeded,
self.store.add,
'fake_image_id',
utils.LimitingReader(StringIO.StringIO('xx'), 1),
2)
self.assertEqual(self.store.fs.called_commands,
['exists', 'put', 'delete'])

tests/unit/test_http_store.py Normal file
View File

@@ -0,0 +1,188 @@
# Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six.moves import xrange
import stubout
from glance.store.common import exception
from glance import context
from glance.db.sqlalchemy import api as db_api
from glance.registry.client.v1.api import configure_registry_client
from glance.store import (delete_from_backend,
safe_delete_from_backend)
from glance.store.http import Store, MAX_REDIRECTS
from glance.store.location import get_location_from_uri
from glance.tests.unit import base
from glance.tests import utils, stubs as test_stubs
# The response stack is used to return designated responses in order;
# however when it's empty a default 200 OK response is returned from
# FakeHTTPConnection below.
FAKE_RESPONSE_STACK = []
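# Note that responses are consumed with list.pop(), so the most recently
# appended response is returned first.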
def stub_out_http_backend(stubs):
"""
    Stubs out httplib.HTTPConnection.getresponse() to return
    faked-out data instead of grabbing actual contents of a resource.
    The stubbed getresponse() returns an iterator over
    the data "I am a teapot, short and stout\n"
:param stubs: Set of stubout stubs
"""
class FakeHTTPConnection(object):
def __init__(self, *args, **kwargs):
pass
def getresponse(self):
if len(FAKE_RESPONSE_STACK):
return FAKE_RESPONSE_STACK.pop()
return utils.FakeHTTPResponse()
def request(self, *_args, **_kwargs):
pass
def close(self):
pass
def fake_get_conn_class(self, *args, **kwargs):
return FakeHTTPConnection
stubs.Set(Store, '_get_conn_class', fake_get_conn_class)
def stub_out_registry_image_update(stubs):
"""
Stubs an image update on the registry.
:param stubs: Set of stubout stubs
"""
test_stubs.stub_out_registry_server(stubs)
def fake_image_update(ctx, image_id, values, purge_props=False):
return {'properties': {}}
stubs.Set(db_api, 'image_update', fake_image_update)
class TestHttpStore(base.StoreClearingUnitTest):
def setUp(self):
global FAKE_RESPONSE_STACK
FAKE_RESPONSE_STACK = []
self.config(default_store='http',
known_stores=['glance.store.http.Store'])
super(TestHttpStore, self).setUp()
self.stubs = stubout.StubOutForTesting()
stub_out_http_backend(self.stubs)
Store.CHUNKSIZE = 2
self.store = Store()
configure_registry_client()
def test_http_get(self):
uri = "http://netloc/path/to/file.tar.gz"
expected_returns = ['I ', 'am', ' a', ' t', 'ea', 'po', 't,', ' s',
'ho', 'rt', ' a', 'nd', ' s', 'to', 'ut', '\n']
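        # Store.CHUNKSIZE is set to 2 in setUp, so the 31-byte payload is
        # expected back as fifteen 2-byte chunks plus the final '\n'.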
loc = get_location_from_uri(uri)
(image_file, image_size) = self.store.get(loc)
self.assertEqual(image_size, 31)
chunks = [c for c in image_file]
self.assertEqual(chunks, expected_returns)
def test_http_get_redirect(self):
# Add two layers of redirects to the response stack, which will
# return the default 200 OK with the expected data after resolving
# both redirects.
redirect_headers_1 = {"location": "http://example.com/teapot.img"}
redirect_resp_1 = utils.FakeHTTPResponse(status=302,
headers=redirect_headers_1)
redirect_headers_2 = {"location": "http://example.com/teapot_real.img"}
redirect_resp_2 = utils.FakeHTTPResponse(status=301,
headers=redirect_headers_2)
FAKE_RESPONSE_STACK.append(redirect_resp_1)
FAKE_RESPONSE_STACK.append(redirect_resp_2)
uri = "http://netloc/path/to/file.tar.gz"
expected_returns = ['I ', 'am', ' a', ' t', 'ea', 'po', 't,', ' s',
'ho', 'rt', ' a', 'nd', ' s', 'to', 'ut', '\n']
loc = get_location_from_uri(uri)
(image_file, image_size) = self.store.get(loc)
self.assertEqual(image_size, 31)
chunks = [c for c in image_file]
self.assertEqual(chunks, expected_returns)
def test_http_get_max_redirects(self):
# Add more than MAX_REDIRECTS redirects to the response stack
redirect_headers = {"location": "http://example.com/teapot.img"}
redirect_resp = utils.FakeHTTPResponse(status=302,
headers=redirect_headers)
for i in xrange(MAX_REDIRECTS + 2):
FAKE_RESPONSE_STACK.append(redirect_resp)
uri = "http://netloc/path/to/file.tar.gz"
loc = get_location_from_uri(uri)
self.assertRaises(exception.MaxRedirectsExceeded, self.store.get, loc)
def test_http_get_redirect_invalid(self):
redirect_headers = {"location": "http://example.com/teapot.img"}
redirect_resp = utils.FakeHTTPResponse(status=307,
headers=redirect_headers)
FAKE_RESPONSE_STACK.append(redirect_resp)
uri = "http://netloc/path/to/file.tar.gz"
loc = get_location_from_uri(uri)
self.assertRaises(exception.BadStoreUri, self.store.get, loc)
def test_http_get_not_found(self):
not_found_resp = utils.FakeHTTPResponse(status=404,
data="404 Not Found")
FAKE_RESPONSE_STACK.append(not_found_resp)
uri = "http://netloc/path/to/file.tar.gz"
loc = get_location_from_uri(uri)
self.assertRaises(exception.BadStoreUri, self.store.get, loc)
def test_https_get(self):
uri = "https://netloc/path/to/file.tar.gz"
expected_returns = ['I ', 'am', ' a', ' t', 'ea', 'po', 't,', ' s',
'ho', 'rt', ' a', 'nd', ' s', 'to', 'ut', '\n']
loc = get_location_from_uri(uri)
(image_file, image_size) = self.store.get(loc)
self.assertEqual(image_size, 31)
chunks = [c for c in image_file]
self.assertEqual(chunks, expected_returns)
def test_http_delete_raise_error(self):
uri = "https://netloc/path/to/file.tar.gz"
loc = get_location_from_uri(uri)
ctx = context.RequestContext()
self.assertRaises(NotImplementedError, self.store.delete, loc)
self.assertRaises(exception.StoreDeleteNotSupported,
delete_from_backend, ctx, uri)
def test_http_schedule_delete_swallows_error(self):
uri = "https://netloc/path/to/file.tar.gz"
ctx = context.RequestContext()
stub_out_registry_image_update(self.stubs)
try:
safe_delete_from_backend(ctx, uri, 'image_id')
except exception.StoreDeleteNotSupported:
self.fail('StoreDeleteNotSupported should be swallowed')

tests/unit/test_rbd_store.py Normal file
View File

@@ -0,0 +1,167 @@
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import StringIO
import mock
import stubout
from glance.store.common import exception
from glance.store.common import utils
from glance.openstack.common import units
from glance.store.location import Location
import glance.store.rbd as rbd_store
from glance.store.rbd import StoreLocation
from glance.tests.unit import base
from glance.tests.unit.fake_rados import mock_rados
from glance.tests.unit.fake_rados import mock_rbd
class TestStore(base.StoreClearingUnitTest):
    def setUp(self):
        """Establish a clean test environment."""
        super(TestStore, self).setUp()
        self.stubs = stubout.StubOutForTesting()
        self.addCleanup(self.stubs.UnsetAll)
        self.stubs.Set(rbd_store, 'rados', mock_rados)
self.stubs.Set(rbd_store, 'rbd', mock_rbd)
self.store = rbd_store.Store()
self.store.chunk_size = 2
self.called_commands_actual = []
self.called_commands_expected = []
self.store_specs = {'image': 'fake_image',
'snapshot': 'fake_snapshot'}
self.location = StoreLocation(self.store_specs)
# Provide enough data to get more than one chunk iteration.
self.data_len = 3 * units.Ki
self.data_iter = StringIO.StringIO('*' * self.data_len)
def test_add_w_image_size_zero(self):
"""Assert that correct size is returned even though 0 was provided."""
self.store.chunk_size = units.Ki
with mock.patch.object(rbd_store.rbd.Image, 'resize') as resize:
with mock.patch.object(rbd_store.rbd.Image, 'write') as write:
ret = self.store.add('fake_image_id', self.data_iter, 0)
resize.assert_called()
write.assert_called()
self.assertEqual(ret[1], self.data_len)
def test_add_w_rbd_image_exception(self):
def _fake_create_image(*args, **kwargs):
self.called_commands_actual.append('create')
return self.location
def _fake_delete_image(*args, **kwargs):
self.called_commands_actual.append('delete')
def _fake_enter(*args, **kwargs):
raise exception.NotFound("")
self.stubs.Set(self.store, '_create_image', _fake_create_image)
self.stubs.Set(self.store, '_delete_image', _fake_delete_image)
self.stubs.Set(mock_rbd.Image, '__enter__', _fake_enter)
self.assertRaises(exception.NotFound, self.store.add,
'fake_image_id', self.data_iter, self.data_len)
self.called_commands_expected = ['create', 'delete']
def test_add_duplicate_image(self):
def _fake_create_image(*args, **kwargs):
self.called_commands_actual.append('create')
raise mock_rbd.ImageExists()
self.stubs.Set(self.store, '_create_image', _fake_create_image)
self.assertRaises(exception.Duplicate, self.store.add,
'fake_image_id', self.data_iter, self.data_len)
self.called_commands_expected = ['create']
def test_delete(self):
def _fake_remove(*args, **kwargs):
self.called_commands_actual.append('remove')
self.stubs.Set(mock_rbd.RBD, 'remove', _fake_remove)
self.store.delete(Location('test_rbd_store', StoreLocation,
self.location.get_uri()))
self.called_commands_expected = ['remove']
def test__delete_image(self):
def _fake_remove(*args, **kwargs):
self.called_commands_actual.append('remove')
self.stubs.Set(mock_rbd.RBD, 'remove', _fake_remove)
self.store._delete_image(self.location)
self.called_commands_expected = ['remove']
def test__delete_image_w_snap(self):
def _fake_unprotect_snap(*args, **kwargs):
self.called_commands_actual.append('unprotect_snap')
def _fake_remove_snap(*args, **kwargs):
self.called_commands_actual.append('remove_snap')
def _fake_remove(*args, **kwargs):
self.called_commands_actual.append('remove')
self.stubs.Set(mock_rbd.RBD, 'remove', _fake_remove)
self.stubs.Set(mock_rbd.Image, 'unprotect_snap', _fake_unprotect_snap)
self.stubs.Set(mock_rbd.Image, 'remove_snap', _fake_remove_snap)
self.store._delete_image(self.location, snapshot_name='snap')
self.called_commands_expected = ['unprotect_snap', 'remove_snap',
'remove']
def test__delete_image_w_snap_exc_image_not_found(self):
def _fake_unprotect_snap(*args, **kwargs):
self.called_commands_actual.append('unprotect_snap')
raise mock_rbd.ImageNotFound()
self.stubs.Set(mock_rbd.Image, 'unprotect_snap', _fake_unprotect_snap)
self.assertRaises(exception.NotFound, self.store._delete_image,
self.location, snapshot_name='snap')
self.called_commands_expected = ['unprotect_snap']
def test__delete_image_exc_image_not_found(self):
def _fake_remove(*args, **kwargs):
self.called_commands_actual.append('remove')
raise mock_rbd.ImageNotFound()
self.stubs.Set(mock_rbd.RBD, 'remove', _fake_remove)
self.assertRaises(exception.NotFound, self.store._delete_image,
self.location, snapshot_name='snap')
self.called_commands_expected = ['remove']
def test_image_size_exceeded_exception(self):
def _fake_write(*args, **kwargs):
if 'write' not in self.called_commands_actual:
self.called_commands_actual.append('write')
raise exception.ImageSizeLimitExceeded
def _fake_delete_image(*args, **kwargs):
self.called_commands_actual.append('delete')
self.stubs.Set(mock_rbd.Image, 'write', _fake_write)
self.stubs.Set(self.store, '_delete_image', _fake_delete_image)
data = utils.LimitingReader(self.data_iter, self.data_len)
self.assertRaises(exception.ImageSizeLimitExceeded,
self.store.add, 'fake_image_id',
data, self.data_len + 1)
self.called_commands_expected = ['write', 'delete']
def tearDown(self):
self.assertEqual(self.called_commands_actual,
self.called_commands_expected)
super(TestStore, self).tearDown()

tests/unit/test_s3_store.py Normal file
View File

@@ -0,0 +1,413 @@
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests the S3 backend store"""
import hashlib
import StringIO
import uuid
import boto.s3.connection
import stubout
from glance.store.common import exception
from glance.openstack.common import units
from glance.store.location import get_location_from_uri
import glance.store.s3
from glance.store.s3 import Store, get_s3_location
from glance.store import UnsupportedBackend
from glance.tests.unit import base
FAKE_UUID = str(uuid.uuid4())
FIVE_KB = 5 * units.Ki
S3_CONF = {'verbose': True,
'debug': True,
'default_store': 's3',
's3_store_access_key': 'user',
's3_store_secret_key': 'key',
's3_store_host': 'localhost:8080',
's3_store_bucket': 'glance'}
# We stub out as little as possible to ensure that the code paths
# between glance.store.s3 and boto.s3.connection are tested
# thoroughly
def stub_out_s3(stubs):
class FakeKey:
"""
Acts like a ``boto.s3.key.Key``
"""
def __init__(self, bucket, name):
self.bucket = bucket
self.name = name
self.data = None
self.size = 0
self.BufferSize = 1024
def close(self):
pass
def exists(self):
return self.bucket.exists(self.name)
def delete(self):
self.bucket.delete(self.name)
def compute_md5(self, data):
chunk = data.read(self.BufferSize)
checksum = hashlib.md5()
while chunk:
checksum.update(chunk)
chunk = data.read(self.BufferSize)
checksum_hex = checksum.hexdigest()
return checksum_hex, None
def set_contents_from_file(self, fp, replace=False, **kwargs):
self.data = StringIO.StringIO()
for bytes in fp:
self.data.write(bytes)
self.size = self.data.len
# Reset the buffer to start
self.data.seek(0)
self.read = self.data.read
def get_file(self):
return self.data
class FakeBucket:
"""
Acts like a ``boto.s3.bucket.Bucket``
"""
def __init__(self, name, keys=None):
self.name = name
self.keys = keys or {}
def __str__(self):
return self.name
def exists(self, key):
return key in self.keys
def delete(self, key):
del self.keys[key]
def get_key(self, key_name, **kwargs):
key = self.keys.get(key_name)
if not key:
return FakeKey(self, key_name)
return key
def new_key(self, key_name):
new_key = FakeKey(self, key_name)
self.keys[key_name] = new_key
return new_key
fixture_buckets = {'glance': FakeBucket('glance')}
b = fixture_buckets['glance']
k = b.new_key(FAKE_UUID)
k.set_contents_from_file(StringIO.StringIO("*" * FIVE_KB))
def fake_connection_constructor(self, *args, **kwargs):
host = kwargs.get('host')
if host.startswith('http://') or host.startswith('https://'):
raise UnsupportedBackend(host)
def fake_get_bucket(conn, bucket_id):
bucket = fixture_buckets.get(bucket_id)
if not bucket:
bucket = FakeBucket(bucket_id)
return bucket
stubs.Set(boto.s3.connection.S3Connection,
'__init__', fake_connection_constructor)
stubs.Set(boto.s3.connection.S3Connection,
'get_bucket', fake_get_bucket)
def format_s3_location(user, key, authurl, bucket, obj):
"""
    Helper method that returns an S3 store URI given
the component pieces.
"""
scheme = 's3'
if authurl.startswith('https://'):
scheme = 's3+https'
authurl = authurl[8:]
elif authurl.startswith('http://'):
authurl = authurl[7:]
authurl = authurl.strip('/')
return "%s://%s:%s@%s/%s/%s" % (scheme, user, key, authurl,
bucket, obj)
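# For illustration (not part of the original test code): with these pieces,
# format_s3_location('user', 'key', 'https://localhost:8080', 'glance', 'abc')
# returns 's3+https://user:key@localhost:8080/glance/abc'.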
class TestStore(base.StoreClearingUnitTest):
def setUp(self):
"""Establish a clean test environment"""
self.config(**S3_CONF)
super(TestStore, self).setUp()
self.stubs = stubout.StubOutForTesting()
stub_out_s3(self.stubs)
self.store = Store()
self.addCleanup(self.stubs.UnsetAll)
def test_get(self):
"""Test a "normal" retrieval of an image in chunks"""
loc = get_location_from_uri(
"s3://user:key@auth_address/glance/%s" % FAKE_UUID)
(image_s3, image_size) = self.store.get(loc)
self.assertEqual(image_size, FIVE_KB)
expected_data = "*" * FIVE_KB
data = ""
for chunk in image_s3:
data += chunk
self.assertEqual(expected_data, data)
def test_get_calling_format_path(self):
"""Test a "normal" retrieval of an image in chunks"""
self.config(s3_store_bucket_url_format='path')
def fake_S3Connection_init(*args, **kwargs):
expected_cls = boto.s3.connection.OrdinaryCallingFormat
self.assertIsInstance(kwargs.get('calling_format'), expected_cls)
self.stubs.Set(boto.s3.connection.S3Connection, '__init__',
fake_S3Connection_init)
loc = get_location_from_uri(
"s3://user:key@auth_address/glance/%s" % FAKE_UUID)
(image_s3, image_size) = self.store.get(loc)
def test_get_calling_format_default(self):
"""Test a "normal" retrieval of an image in chunks"""
def fake_S3Connection_init(*args, **kwargs):
expected_cls = boto.s3.connection.SubdomainCallingFormat
self.assertIsInstance(kwargs.get('calling_format'), expected_cls)
self.stubs.Set(boto.s3.connection.S3Connection, '__init__',
fake_S3Connection_init)
loc = get_location_from_uri(
"s3://user:key@auth_address/glance/%s" % FAKE_UUID)
(image_s3, image_size) = self.store.get(loc)
def test_get_non_existing(self):
"""
        Test that trying to retrieve an S3 object that doesn't exist
raises an error
"""
uri = "s3://user:key@auth_address/badbucket/%s" % FAKE_UUID
loc = get_location_from_uri(uri)
self.assertRaises(exception.NotFound, self.store.get, loc)
uri = "s3://user:key@auth_address/glance/noexist"
loc = get_location_from_uri(uri)
self.assertRaises(exception.NotFound, self.store.get, loc)
def test_add(self):
"""Test that we can add an image via the s3 backend"""
expected_image_id = str(uuid.uuid4())
expected_s3_size = FIVE_KB
expected_s3_contents = "*" * expected_s3_size
expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
expected_location = format_s3_location(
S3_CONF['s3_store_access_key'],
S3_CONF['s3_store_secret_key'],
S3_CONF['s3_store_host'],
S3_CONF['s3_store_bucket'],
expected_image_id)
image_s3 = StringIO.StringIO(expected_s3_contents)
location, size, checksum, _ = self.store.add(expected_image_id,
image_s3,
expected_s3_size)
self.assertEqual(expected_location, location)
self.assertEqual(expected_s3_size, size)
self.assertEqual(expected_checksum, checksum)
loc = get_location_from_uri(expected_location)
(new_image_s3, new_image_size) = self.store.get(loc)
new_image_contents = StringIO.StringIO()
for chunk in new_image_s3:
new_image_contents.write(chunk)
new_image_s3_size = new_image_contents.len
self.assertEqual(expected_s3_contents, new_image_contents.getvalue())
self.assertEqual(expected_s3_size, new_image_s3_size)
def test_add_host_variations(self):
"""
        Test that having http(s):// in the s3_store_host config
        option works as expected.
"""
variations = ['http://localhost:80',
'http://localhost',
'http://localhost/v1',
'http://localhost/v1/',
'https://localhost',
'https://localhost:8080',
'https://localhost/v1',
'https://localhost/v1/',
'localhost',
'localhost:8080/v1']
for variation in variations:
expected_image_id = str(uuid.uuid4())
expected_s3_size = FIVE_KB
expected_s3_contents = "*" * expected_s3_size
expected_checksum = hashlib.md5(expected_s3_contents).hexdigest()
new_conf = S3_CONF.copy()
new_conf['s3_store_host'] = variation
expected_location = format_s3_location(
new_conf['s3_store_access_key'],
new_conf['s3_store_secret_key'],
new_conf['s3_store_host'],
new_conf['s3_store_bucket'],
expected_image_id)
image_s3 = StringIO.StringIO(expected_s3_contents)
self.config(**new_conf)
self.store = Store()
location, size, checksum, _ = self.store.add(expected_image_id,
image_s3,
expected_s3_size)
self.assertEqual(expected_location, location)
self.assertEqual(expected_s3_size, size)
self.assertEqual(expected_checksum, checksum)
loc = get_location_from_uri(expected_location)
(new_image_s3, new_image_size) = self.store.get(loc)
new_image_contents = new_image_s3.getvalue()
new_image_s3_size = len(new_image_s3)
self.assertEqual(expected_s3_contents, new_image_contents)
self.assertEqual(expected_s3_size, new_image_s3_size)
def test_add_already_existing(self):
"""
Tests that adding an image with an existing identifier
raises an appropriate exception
"""
image_s3 = StringIO.StringIO("nevergonnamakeit")
self.assertRaises(exception.Duplicate,
self.store.add,
FAKE_UUID, image_s3, 0)
def _option_required(self, key):
conf = S3_CONF.copy()
conf[key] = None
try:
self.config(**conf)
self.store = Store()
return self.store.add == self.store.add_disabled
except Exception:
return False
return False
def test_no_access_key(self):
"""
        Tests that a missing access key disables the add method
"""
self.assertTrue(self._option_required('s3_store_access_key'))
def test_no_secret_key(self):
"""
        Tests that a missing secret key disables the add method
"""
self.assertTrue(self._option_required('s3_store_secret_key'))
def test_no_host(self):
"""
        Tests that a missing host disables the add method
"""
self.assertTrue(self._option_required('s3_store_host'))
def test_delete(self):
"""
Test we can delete an existing image in the s3 store
"""
uri = "s3://user:key@auth_address/glance/%s" % FAKE_UUID
loc = get_location_from_uri(uri)
self.store.delete(loc)
self.assertRaises(exception.NotFound, self.store.get, loc)
def test_delete_non_existing(self):
"""
        Test that trying to delete an S3 object that doesn't exist
raises an error
"""
uri = "s3://user:key@auth_address/glance/noexist"
loc = get_location_from_uri(uri)
self.assertRaises(exception.NotFound, self.store.delete, loc)
def _do_test_get_s3_location(self, host, loc):
self.assertEqual(get_s3_location(host), loc)
self.assertEqual(get_s3_location(host + ':80'), loc)
self.assertEqual(get_s3_location('http://' + host), loc)
self.assertEqual(get_s3_location('http://' + host + ':80'), loc)
self.assertEqual(get_s3_location('https://' + host), loc)
self.assertEqual(get_s3_location('https://' + host + ':80'), loc)
def test_get_s3_good_location(self):
"""
Test that the s3 location can be derived from the host
"""
good_locations = [
('s3.amazonaws.com', ''),
('s3-eu-west-1.amazonaws.com', 'EU'),
('s3-us-west-1.amazonaws.com', 'us-west-1'),
('s3-ap-southeast-1.amazonaws.com', 'ap-southeast-1'),
('s3-ap-northeast-1.amazonaws.com', 'ap-northeast-1'),
]
for (url, expected) in good_locations:
self._do_test_get_s3_location(url, expected)
def test_get_s3_bad_location(self):
"""
Test that the s3 location cannot be derived from an unexpected host
"""
bad_locations = [
('', ''),
('s3.amazon.co.uk', ''),
('s3-govcloud.amazonaws.com', ''),
('cloudfiles.rackspace.com', ''),
]
for (url, expected) in bad_locations:
self._do_test_get_s3_location(url, expected)
def test_calling_format_path(self):
self.config(s3_store_bucket_url_format='path')
self.assertIsInstance(glance.store.s3.get_calling_format(),
boto.s3.connection.OrdinaryCallingFormat)
def test_calling_format_subdomain(self):
self.config(s3_store_bucket_url_format='subdomain')
self.assertIsInstance(glance.store.s3.get_calling_format(),
boto.s3.connection.SubdomainCallingFormat)
def test_calling_format_default(self):
self.assertIsInstance(glance.store.s3.get_calling_format(),
boto.s3.connection.SubdomainCallingFormat)

tests/unit/test_sheepdog_store.py Normal file
View File

@@ -0,0 +1,60 @@
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import StringIO
import stubout
from glance.store.common import exception
from glance.store.common import utils
from glance.openstack.common import processutils
import glance.store.sheepdog
from glance.store.sheepdog import Store
from glance.tests.unit import base
SHEEPDOG_CONF = {'verbose': True,
'debug': True,
'default_store': 'sheepdog'}
class TestStore(base.StoreClearingUnitTest):
def setUp(self):
"""Establish a clean test environment"""
def _fake_execute(*cmd, **kwargs):
pass
self.config(**SHEEPDOG_CONF)
super(TestStore, self).setUp()
self.stubs = stubout.StubOutForTesting()
self.stubs.Set(processutils, 'execute', _fake_execute)
self.store = Store()
self.addCleanup(self.stubs.UnsetAll)
def test_cleanup_when_add_image_exception(self):
called_commands = []
def _fake_run_command(self, command, data, *params):
called_commands.append(command)
self.stubs.Set(glance.store.sheepdog.SheepdogImage,
'_run_command', _fake_run_command)
self.assertRaises(exception.ImageSizeLimitExceeded,
self.store.add,
'fake_image_id',
utils.LimitingReader(StringIO.StringIO('xx'), 1),
2)
self.assertEqual(called_commands, ['list -r', 'create', 'delete'])

tests/unit/test_store_base.py Normal file
View File

@@ -0,0 +1,59 @@
# Copyright 2011-2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.store.common import exception
from glance import store
from glance.store import base as store_base
from glance.tests.unit import base as test_base
class FakeUnconfigurableStoreDriver(store_base.Store):
def configure(self):
raise exception.BadStoreConfiguration("Unconfigurable store driver.")
class TestStoreBase(test_base.StoreClearingUnitTest):
def setUp(self):
self.config(default_store='file')
super(TestStoreBase, self).setUp()
def test_exception_to_unicode(self):