Only warn on duplicate path on fs backend
Currently, if the same path is used in two filesystem_store_datadirs entries, a BadStoreConfiguration exception is raised by the filesystem driver. This is the desired behavior when the same path appears with different priorities, since we cannot know which one is correct; but when it appears with the same priority (i.e. it is an exact duplicate), a warning in the log should be enough. DocImpact Related-Bug: #1422699 Closes-Bug: #1428257 Change-Id: I4b0685f3c74851de6a47f1fdfdb6109b3eceb8ef
This commit is contained in:
@@ -335,10 +335,12 @@ class Store(glance_store.driver.Store):
|
||||
for datadir in self.conf.glance_store.filesystem_store_datadirs:
|
||||
(datadir_path,
|
||||
priority) = self._get_datadir_path_and_priority(datadir)
|
||||
self._check_directory_paths(datadir_path, directory_paths)
|
||||
priority_paths = self.priority_data_map.setdefault(
|
||||
int(priority), [])
|
||||
self._check_directory_paths(datadir_path, directory_paths,
|
||||
priority_paths)
|
||||
directory_paths.add(datadir_path)
|
||||
self.priority_data_map.setdefault(int(priority),
|
||||
[]).append(datadir_path)
|
||||
priority_paths.append(datadir_path)
|
||||
|
||||
self.priority_list = sorted(self.priority_data_map,
|
||||
reverse=True)
|
||||
@@ -349,7 +351,8 @@ class Store(glance_store.driver.Store):
|
||||
if metadata_file:
|
||||
self._validate_metadata(metadata_file)
|
||||
|
||||
def _check_directory_paths(self, datadir_path, directory_paths):
|
||||
def _check_directory_paths(self, datadir_path, directory_paths,
|
||||
priority_paths):
|
||||
"""
|
||||
Checks if directory_path is already present in directory_paths.
|
||||
|
||||
@@ -363,9 +366,15 @@ class Store(glance_store.driver.Store):
|
||||
"multiple times in filesystem_store_datadirs "
|
||||
"option of filesystem configuration") %
|
||||
{'datadir_path': datadir_path})
|
||||
LOG.exception(msg)
|
||||
raise exceptions.BadStoreConfiguration(
|
||||
store_name="filesystem", reason=msg)
|
||||
|
||||
# If present with a different priority, it's a bad configuration
|
||||
if datadir_path not in priority_paths:
|
||||
LOG.exception(msg)
|
||||
raise exceptions.BadStoreConfiguration(
|
||||
store_name="filesystem", reason=msg)
|
||||
|
||||
# Present with the same priority (an exact duplicate) only deserves a warning
|
||||
LOG.warning(msg)
|
||||
|
||||
def _get_datadir_path_and_priority(self, datadir):
|
||||
"""
|
||||
|
||||
@@ -453,6 +453,56 @@ class TestStore(base.StoreBaseTest,
|
||||
self.assertRaises(exceptions.BadStoreConfiguration,
|
||||
self.store.configure_add)
|
||||
|
||||
def test_configure_add_same_dir_multiple_times_same_priority(self):
|
||||
"""
|
||||
Tests that BadStoreConfiguration is NOT raised when the same directory
|
||||
is specified multiple times with the same priority in filesystem_store_datadirs.
|
||||
"""
|
||||
store_map = [self.useFixture(fixtures.TempDir()).path,
|
||||
self.useFixture(fixtures.TempDir()).path]
|
||||
self.conf.clear_override('filesystem_store_datadir',
|
||||
group='glance_store')
|
||||
self.conf.set_override('filesystem_store_datadirs',
|
||||
[store_map[0] + ":100",
|
||||
store_map[1] + ":200",
|
||||
store_map[0] + ":100"],
|
||||
group='glance_store')
|
||||
try:
|
||||
self.store.configure()
|
||||
except exceptions.BadStoreConfiguration:
|
||||
self.fail("configure() raised BadStoreConfiguration unexpectedly!")
|
||||
|
||||
# Test that we can add an image via the filesystem backend
|
||||
ChunkedFile.CHUNKSIZE = 1024
|
||||
expected_image_id = str(uuid.uuid4())
|
||||
expected_file_size = 5 * units.Ki # 5K
|
||||
expected_file_contents = "*" * expected_file_size
|
||||
expected_checksum = hashlib.md5(expected_file_contents).hexdigest()
|
||||
expected_location = "file://%s/%s" % (store_map[1],
|
||||
expected_image_id)
|
||||
image_file = six.StringIO(expected_file_contents)
|
||||
|
||||
loc, size, checksum, _ = self.store.add(expected_image_id,
|
||||
image_file,
|
||||
expected_file_size)
|
||||
|
||||
self.assertEqual(expected_location, loc)
|
||||
self.assertEqual(expected_file_size, size)
|
||||
self.assertEqual(expected_checksum, checksum)
|
||||
|
||||
loc = location.get_location_from_uri(expected_location,
|
||||
conf=self.conf)
|
||||
(new_image_file, new_image_size) = self.store.get(loc)
|
||||
new_image_contents = ""
|
||||
new_image_file_size = 0
|
||||
|
||||
for chunk in new_image_file:
|
||||
new_image_file_size += len(chunk)
|
||||
new_image_contents += chunk
|
||||
|
||||
self.assertEqual(expected_file_contents, new_image_contents)
|
||||
self.assertEqual(expected_file_size, new_image_file_size)
|
||||
|
||||
def test_add_with_multiple_dirs(self):
|
||||
"""Test adding multiple filesystem directories."""
|
||||
store_map = [self.useFixture(fixtures.TempDir()).path,
|
||||
@@ -466,7 +516,7 @@ class TestStore(base.StoreBaseTest,
|
||||
|
||||
self.store.configure()
|
||||
|
||||
"""Test that we can add an image via the filesystem backend"""
|
||||
# Test that we can add an image via the filesystem backend
|
||||
ChunkedFile.CHUNKSIZE = units.Ki
|
||||
expected_image_id = str(uuid.uuid4())
|
||||
expected_file_size = 5 * units.Ki # 5K
|
||||
|
||||
Reference in New Issue
Block a user