Merge "relinker: allow clobber-hardlink-collision"
@@ -793,3 +793,12 @@ use = egg:swift#xprofile
#
# stats_interval = 300.0
# recon_cache_path = /var/cache/swift
#
# Because of the way old versions of swift on old kernels worked, you may end
# up with a file in the new part dir path that has the exact timestamp of a
# "newer" file in the current part dir. With this option enabled, during the
# relink phase we'll quarantine the colliding file in the new target part dir
# and retry the relink. During the cleanup phase we ignore the un-matched
# inode "collision" and allow the cleanup of the old file in the old part dir,
# the same as for tombstones.
# clobber_hardlink_collisions = false
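For context on the new option: the collision it tolerates is the kernel refusing to create a hard link where the destination name already exists with a different inode. The snippet below is a minimal, standalone illustration of that failure mode in plain Python; the paths are made up and this is not Swift code.

# Minimal standalone sketch (not Swift code) of the collision this option
# addresses: os.link() refuses to create a hard link where a file with a
# different inode already exists, even if the contents are identical.
import os
import tempfile

tmp = tempfile.mkdtemp()
old_file = os.path.join(tmp, 'old', '1700000000.00000.data')
new_file = os.path.join(tmp, 'new', '1700000000.00000.data')
os.makedirs(os.path.dirname(old_file))
os.makedirs(os.path.dirname(new_file))
open(old_file, 'w').close()
open(new_file, 'w').close()   # same name, but a separate inode

try:
    os.link(old_file, new_file)
except FileExistsError as exc:
    # this is the "hardlink collision" the relinker has to resolve
    print('collision: %s' % exc)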
@@ -395,11 +395,128 @@ class Relinker(object):
                hashes.remove(hsh)
        return hashes

    def process_location(self, hash_path, new_hash_path):
        # Compare the contents of each hash dir with contents of same hash
        # dir in its new partition to verify that the new location has the
        # most up to date set of files. The new location may have newer
        # files if it has been updated since relinked.
    def do_relink(self, device, hash_path, new_hash_path, filename,
                  already_quarantined=False):
        """
        Attempt to relink a file from old location to new location.

        :param device: device name
        :param hash_path: source hash directory path
        :param new_hash_path: destination hash directory path
        :param filename: filename to relink
        :param already_quarantined: whether quarantine has already been
            attempted
        :returns: tuple of (success, created) where success is True if the link
            is successfully verified, and created is True if a new link
            needed to be created for successful verification (if created
            is True for any new link in any hash_path some caller above
            us should ideally invalidate the whole suffix)
        """
        old_file = os.path.join(hash_path, filename)
        new_file = os.path.join(new_hash_path, filename)
        success = created = False
        try:
            created = diskfile.relink_paths(old_file, new_file)
            success = True
        except FileExistsError:
            # we've detected a hardlink collision, so we need to handle it
            # depending on what kind of file it is and our mode and
            # configuration
            if filename.endswith('.ts'):
                # special case for duplicate tombstones, see:
                # https://bugs.launchpad.net/swift/+bug/1921718
                # https://bugs.launchpad.net/swift/+bug/1934142
                self.logger.debug(
                    "Relinking%s: tolerating different inodes for "
                    "tombstone with same timestamp: %s to %s",
                    ' (cleanup)' if self.do_cleanup else '',
                    old_file, new_file)
                success = True
            elif self.conf['clobber_hardlink_collisions']:
                if self.do_cleanup:
                    # At this point your clients are already *in* the new part
                    # dir; if the "better" data was in the old part dir you're
                    # already hurting and maybe flipped back to retry the
                    # relink phase again? If you're moving forward with the
                    # cleanup, presumably you're ready for this circus to be
                    # over, and doing extra io to quarantine the data you're
                    # currently using and replace it with old data seems less
                    # attractive than letting the un-referenced data get
                    # cleaned up. But there might be a case to argue that
                    # clobber_hardlink_collisions should quarantine old_file
                    # here before returning success.
                    self.logger.debug(
                        "Relinking%s: tolerating hardlink collision: "
                        "%s to %s",
                        ' (cleanup)' if self.do_cleanup else '',
                        old_file, new_file)
                    success = True
                elif already_quarantined:
                    # Already attempted quarantine, this is a failure, but user
                    # can retry (or already_quarantined becomes a counter?)
                    # N.B. this can exit non-zero w/o logging at "error"
                    self.logger.warning(
                        "Relinking%s: hardlink collision persists after "
                        "quarantine: %s to %s",
                        ' (cleanup)' if self.do_cleanup else '',
                        old_file, new_file)
                else:
                    # During relink phase, quarantine and retry once
                    dev_path = os.path.join(self.diskfile_mgr.devices, device)
                    to_dir = diskfile.quarantine_renamer(dev_path, new_file)
                    self.logger.info(
                        "Relinking%s: clobbering hardlink collision: "
                        "%s moved to %s",
                        ' (cleanup)' if self.do_cleanup else '',
                        new_file, to_dir)
                    # retry with quarantine flag set
                    return self.do_relink(
                        device, hash_path, new_hash_path, filename,
                        already_quarantined=True)
            else:
                self.logger.error(
                    "Error relinking%s: hardlink collision: "
                    "%s to %s (consider enabling clobber_hardlink_collisions)",
                    ' (cleanup)' if self.do_cleanup else '',
                    old_file, new_file)
        except Exception as exc:
            # Depending on what kind of errors these are, it might be
            # reasonable to consider them "warnings" if we expect re-running
            # the relinker would be able to fix them (like if it's just a
            # general file-system corruption error and your auditor is still
            # running maybe it will quarantine bad paths to clear the way).
            # But AFAIK all currently known/observed error conditions are
            # enumerated above and any unknown error conditions may not be
            # fixable by simply re-running the relinker: so we log them as
            # error to match the expected non-zero return code.
            self.logger.error(
                "Error relinking%s: failed to relink %s to %s: %s",
                ' (cleanup)' if self.do_cleanup else '',
                old_file, new_file, exc)
        if created:
            self.logger.debug(
                "Relinking%s created link: %s to %s",
                ' (cleanup)' if self.do_cleanup else '',
                old_file, new_file)
        return success, created

    def process_location(self, device, hash_path, new_hash_path):
        """
        Handle relink of all files in a hash_dir path.

        Compare the contents of each hash dir with contents of same hash
        dir in its new partition to verify that the new location has the
        most up to date set of files. The new location may have newer
        files if it has been updated since relinked.

        If any new links are created the suffix will be invalidated.
        In cleanup mode, the unwanted files in the old hash_path will be
        removed as long as there are no errors.

        :param device: device name
        :param hash_path: old hash directory path
        :param new_hash_path: new hash directory path
        """
        self.stats['hash_dirs'] += 1

        # Get on disk data for new and old locations, cleaning up any
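As a reading aid for the FileExistsError branches in do_relink() above, the sketch below restates the collision handling as a plain function. It is illustrative only and not part of the patch, and the argument names are stand-ins for the real attributes and parameters.

# Illustrative summary of the FileExistsError branches in do_relink() above;
# not part of the patch. 'clobber', 'cleanup', 'is_tombstone' and
# 'already_quarantined' stand in for the real attributes and arguments.
def collision_outcome(is_tombstone, clobber, cleanup, already_quarantined):
    if is_tombstone:
        return 'tolerate (debug log, success)'
    if not clobber:
        return 'error log, failure'
    if cleanup:
        return 'tolerate (debug log, success)'
    if already_quarantined:
        return 'warning log, failure'
    return 'quarantine new_file, retry relink once'

# e.g. a .data collision during the relink phase with the option enabled:
print(collision_outcome(False, True, False, False))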
@@ -449,33 +566,15 @@ class Relinker(object):
            # no longer required. The new file will eventually be
            # cleaned up again.
            self.stats['files'] += 1
            old_file = os.path.join(hash_path, filename)
            new_file = os.path.join(new_hash_path, filename)
            try:
                if diskfile.relink_paths(old_file, new_file):
                    self.logger.debug(
                        "Relinking%s created link: %s to %s",
                        ' (cleanup)' if self.do_cleanup else '',
                        old_file, new_file)
            success, created = self.do_relink(
                device, hash_path, new_hash_path, filename)
            if success:
                if created:
                    created_links += 1
                    self.stats['linked'] += 1
            except OSError as exc:
                if exc.errno == errno.EEXIST and filename.endswith('.ts'):
                    # special case for duplicate tombstones, see:
                    # https://bugs.launchpad.net/swift/+bug/1921718
                    # https://bugs.launchpad.net/swift/+bug/1934142
                    self.logger.debug(
                        "Relinking%s: tolerating different inodes for "
                        "tombstone with same timestamp: %s to %s",
                        ' (cleanup)' if self.do_cleanup else '',
                        old_file, new_file)
                else:
                    self.logger.warning(
                        "Error relinking%s: failed to relink %s to %s: %s",
                        ' (cleanup)' if self.do_cleanup else '',
                        old_file, new_file, exc)
                    self.stats['errors'] += 1
                    missing_links += 1
            else:
                self.stats['errors'] += 1
                missing_links += 1
        if created_links:
            self.linked_into_partitions.add(get_partition_from_path(
                self.conf['devices'], new_hash_path))
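Note that the replaced block above trapped OSError and then checked exc.errno == errno.EEXIST, while the new do_relink() catches FileExistsError directly. Those are the same condition: FileExistsError is the OSError subclass Python raises for EEXIST, as this small standalone check shows.

# FileExistsError is the OSError subclass raised for errno EEXIST, so
# 'except FileExistsError' in do_relink() catches the same condition the
# old 'except OSError ... exc.errno == errno.EEXIST' branch checked for.
import errno

assert issubclass(FileExistsError, OSError)
exc = FileExistsError(errno.EEXIST, 'File exists')
assert isinstance(exc, OSError) and exc.errno == errno.EEXIST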
@@ -503,6 +602,8 @@ class Relinker(object):
            try:
                os.remove(old_file)
            except OSError as exc:
                # N.B. if we want to allow old_file to get quarantined this
                # should probably be robust to ENOENT
                self.logger.warning('Error cleaning up %s: %r', old_file, exc)
                self.stats['errors'] += 1
            else:
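The N.B. comment above hints that, if old_file were ever allowed to be quarantined first, the removal would need to tolerate a missing file. One possible shape for that, sketched in plain Python and not part of this patch:

# One possible shape for an ENOENT-tolerant removal, per the N.B. comment
# above; illustrative only, not part of this patch.
import contextlib
import os

def remove_if_present(path):
    # FileNotFoundError is the ENOENT subclass of OSError, so a missing
    # old_file would no longer count as a cleanup error.
    with contextlib.suppress(FileNotFoundError):
        os.remove(path)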
@@ -560,13 +661,13 @@ class Relinker(object):
        if self.conf['files_per_second'] > 0:
            locations = RateLimitedIterator(
                locations, self.conf['files_per_second'])
        for hash_path, device, partition in locations:
        for hash_path, device, _part_num in locations:
            # note, in cleanup step next_part_power == part_power
            new_hash_path = replace_partition_in_path(
                self.conf['devices'], hash_path, self.next_part_power)
            if new_hash_path == hash_path:
                continue
            self.process_location(hash_path, new_hash_path)
            self.process_location(device, hash_path, new_hash_path)

        # any unmounted devices don't trigger the pre_device trigger.
        # so we'll deal with them here.
@@ -793,6 +894,14 @@ def main(args=None):
                        help='Set log file name. Ignored if using conf_file.')
    parser.add_argument('--debug', default=False, action='store_true',
                        help='Enable debug mode')
    parser.add_argument('--clobber-hardlink-collisions', action='store_true',
                        help='Tolerate hard link collisions when relinking '
                             'object files. If the action is relink then the '
                             'file in the new target part dir is quarantined '
                             'and the relink is retried. If the action is '
                             'cleanup then the file in the new target dir is '
                             'retained and the file in the old target dir is '
                             'removed. (default: false)')

    args = parser.parse_args(args)
    hubs.use_hub(get_hub())
@@ -835,6 +944,10 @@ def main(args=None):
        'stats_interval': non_negative_float(
            args.stats_interval or conf.get('stats_interval',
                                            DEFAULT_STATS_INTERVAL)),
        'clobber_hardlink_collisions': (
            args.clobber_hardlink_collisions or
            config_true_value(conf.get('clobber_hardlink_collisions',
                                       'false'))),
    })
    return parallel_process(
        args.action == 'cleanup', conf, logger, args.device_list)
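The conf.update() above ORs the command-line flag with the config option, so --clobber-hardlink-collisions enables the behaviour even when the config file says false, and clobber_hardlink_collisions = true in the config enables it without the flag. A small sketch of that precedence follows; the config_true_value helper below is a simplified stand-in for swift.common.utils.config_true_value, not the real implementation.

# Sketch of the precedence implied by conf.update() above. The helper below
# is a simplified stand-in for swift.common.utils.config_true_value, which
# the real code uses; it is an assumption, not the actual implementation.
def config_true_value(value):
    return str(value).lower() in ('true', '1', 'yes', 'on', 't', 'y')

def effective(cli_flag, conf_value):
    return cli_flag or config_true_value(conf_value)

assert effective(cli_flag=True, conf_value='false') is True    # flag wins
assert effective(cli_flag=False, conf_value='true') is True    # config wins
assert effective(cli_flag=False, conf_value='false') is False  # default off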
@@ -520,6 +520,7 @@ class TestRelinker(unittest.TestCase):
            'partitions': set(),
            'recon_cache_path': '/var/cache/swift',
            'stats_interval': 300.0,
            'clobber_hardlink_collisions': False,
        }
        mock_relinker.assert_called_once_with(
            exp_conf, mock.ANY, ['sdx'], do_cleanup=False)
@@ -566,6 +567,7 @@ class TestRelinker(unittest.TestCase):
            'workers': 'auto',
            'recon_cache_path': '/var/cache/swift-foo',
            'stats_interval': 111.0,
            'clobber_hardlink_collisions': False,
        }, mock.ANY, ['sdx'], do_cleanup=False)
        logger = mock_relinker.call_args[0][1]
        self.assertEqual(logging.WARNING, logger.getEffectiveLevel())
@@ -598,6 +600,7 @@ class TestRelinker(unittest.TestCase):
            'workers': 2,
            'recon_cache_path': '/var/cache/swift-foo',
            'stats_interval': 222.0,
            'clobber_hardlink_collisions': False,
        }, mock.ANY, ['sdx'], do_cleanup=False)

        with mock.patch('swift.cli.relinker.Relinker') as mock_relinker, \
@@ -616,6 +619,7 @@ class TestRelinker(unittest.TestCase):
            'workers': 'auto',
            'recon_cache_path': '/var/cache/swift',
            'stats_interval': 300.0,
            'clobber_hardlink_collisions': False,
        }, mock.ANY, ['sdx'], do_cleanup=False)
        mock_logging_config.assert_called_once_with(
            format='%(message)s', level=logging.INFO, filename=None)
@@ -643,6 +647,7 @@ class TestRelinker(unittest.TestCase):
            'workers': 'auto',
            'recon_cache_path': '/var/cache/swift',
            'stats_interval': 300.0,
            'clobber_hardlink_collisions': False,
        }, mock.ANY, ['sdx'], do_cleanup=False)
        # --debug is now effective
        mock_logging_config.assert_called_once_with(
@@ -682,6 +687,7 @@ class TestRelinker(unittest.TestCase):
            'workers': 'auto',
            'recon_cache_path': '/var/cache/swift',
            'stats_interval': 300.0,
            'clobber_hardlink_collisions': False,
        }, mock.ANY, ['sdx'], do_cleanup=False)
        logger = mock_relinker.call_args[0][1]
        self.assertEqual(logging.WARNING, logger.getEffectiveLevel())
@@ -717,11 +723,102 @@ class TestRelinker(unittest.TestCase):
            'workers': 'auto',
            'recon_cache_path': '/var/cache/swift',
            'stats_interval': 300.0,
            'clobber_hardlink_collisions': False,
        }, mock.ANY, ['sdx'], do_cleanup=False)
        logger = mock_relinker.call_args[0][1]
        self.assertEqual(logging.WARNING, logger.getEffectiveLevel())
        self.assertEqual('test-relinker', logger.logger.name)

    def test_relinker_clobber_hardlink_collisions_default(self):
        config = """
        [DEFAULT]
        swift_dir = %(swift_dir)s
        devices = /test/node
        mount_check = false

        [object-relinker]
        log_name = test-relinker
        """ % {
            'swift_dir': self.testdir,
        }

        conf_file = os.path.join(self.testdir, 'relinker.conf')
        with open(conf_file, 'w') as f:
            f.write(dedent(config))

        captured_relinker_instance = None

        def capture_relinker(instance_self):
            nonlocal captured_relinker_instance
            captured_relinker_instance = instance_self

        with mock.patch('swift.cli.relinker.Relinker.run', capture_relinker):
            relinker.main(['relink', conf_file, '--device', 'sdx'])

        self.assertFalse(
            captured_relinker_instance.conf['clobber_hardlink_collisions'])

    def test_relinker_clobber_hardlink_collisions_config(self):
        config = """
        [DEFAULT]
        swift_dir = %(swift_dir)s
        devices = /test/node
        mount_check = false

        [object-relinker]
        log_name = test-relinker
        clobber_hardlink_collisions = true
        """ % {
            'swift_dir': self.testdir,
        }

        conf_file = os.path.join(self.testdir, 'relinker.conf')
        with open(conf_file, 'w') as f:
            f.write(dedent(config))

        captured_relinker_instance = None

        def capture_relinker(instance_self):
            nonlocal captured_relinker_instance
            captured_relinker_instance = instance_self

        with mock.patch('swift.cli.relinker.Relinker.run', capture_relinker):
            relinker.main(['relink', conf_file, '--device', 'sdx'])

        self.assertTrue(
            captured_relinker_instance.conf['clobber_hardlink_collisions'])

    def test_relinker_clobber_hardlink_collisions_arg(self):
        config = """
        [DEFAULT]
        swift_dir = %(swift_dir)s
        devices = /test/node
        mount_check = false

        [object-relinker]
        log_name = test-relinker
        clobber_hardlink_collisions = false
        """ % {
            'swift_dir': self.testdir,
        }

        conf_file = os.path.join(self.testdir, 'relinker.conf')
        with open(conf_file, 'w') as f:
            f.write(dedent(config))

        captured_relinker_instance = None

        def capture_relinker(instance_self):
            nonlocal captured_relinker_instance
            captured_relinker_instance = instance_self

        with mock.patch('swift.cli.relinker.Relinker.run', capture_relinker):
            relinker.main(['relink', conf_file, '--device', 'sdx',
                           '--clobber-hardlink-collisions'])

        self.assertTrue(
            captured_relinker_instance.conf['clobber_hardlink_collisions'])

    def test_relinker_utils_get_hub(self):
        cli_cmd = ['relink', '--device', 'sdx', '--workers', 'auto',
                   '--device', '/some/device']
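The three tests above patch Relinker.run with a plain function so that the Relinker instance, and therefore its parsed conf, can be captured without doing any actual relinking. The standalone sketch below shows the same unittest.mock pattern on a hypothetical class: patching an unbound method means the replacement receives the instance as its first argument.

# Standalone illustration of the capture pattern used in the tests above:
# patching an *unbound* method means the replacement receives the instance
# as its first argument. The Worker class here is hypothetical.
from unittest import mock


class Worker(object):
    def __init__(self, conf):
        self.conf = conf

    def run(self):
        raise RuntimeError('should not run in the test')


captured = None


def capture(instance_self):
    global captured
    captured = instance_self


with mock.patch('%s.Worker.run' % __name__, capture):
    Worker({'clobber_hardlink_collisions': True}).run()

assert captured.conf['clobber_hardlink_collisions'] is True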
@@ -880,7 +977,18 @@ class TestRelinker(unittest.TestCase):
            self.assertEqual(sorted(exp_filenames), sorted(actual_old))
        else:
            self.assertFalse(os.path.exists(self.objdir))
        self.assertEqual([], self.logger.get_lines_for_level('error'))
        if exp_ret_code == 0:
            # a successful relink should not have logged errors
            self.assertEqual([], self.logger.get_lines_for_level('error'))
        elif conflict_file_specs:
            # if a conflict was an error, we should have logged it
            error_lines = self.logger.get_lines_for_level('error')
            self.assertEqual(len(conflict_file_specs), len(error_lines))
            for err_msg in error_lines:
                self.assertTrue(err_msg.startswith("Error relinking"))
                self.assertIn("hardlink collision", err_msg)
                self.assertTrue(err_msg.endswith(
                    "(consider enabling clobber_hardlink_collisions)"))

    def _relink_test(self, old_file_specs, new_file_specs,
                     exp_old_specs, exp_new_specs):
@@ -1081,15 +1189,138 @@ class TestRelinker(unittest.TestCase):
                '--skip-mount',
            ]))

        error_lines = self.logger.get_lines_for_level('error')
        self.assertEqual([
            'Error relinking: hardlink collision: %s to %s '
            '(consider enabling clobber_hardlink_collisions)' % (
                self.objname,
                self.expected_file,
            ),
        ], error_lines)
        self.assertEqual([
            '1 hash dirs processed (cleanup=False) '
            '(1 files, 0 linked, 0 removed, 1 errors)',
        ], self.logger.get_lines_for_level('warning'))

    def test_relink_link_already_exists_clobber_hardlink_collisions(self):
        self.rb.prepare_increase_partition_power()
        self._save_ring()

        # make a file where we'd expect the link to be created
        os.makedirs(self.expected_dir)
        with open(self.expected_file, 'w'):
            pass

        # expect no error w/ --clobber-hardlink-collisions
        with self._mock_relinker():
            self.assertEqual(0, relinker.main([
                'relink',
                '--swift-dir', self.testdir,
                '--devices', self.devices,
                '--skip-mount',
                '--clobber-hardlink-collisions',
            ]))

        for level in ['error', 'warning']:
            self.assertEqual([], self.logger.get_lines_for_level(level))
        # ... just info about clobbering your hardlink collisions
        info_lines = self.logger.get_lines_for_level('info')
        clobbering_lines = [
            line for line in info_lines
            if line.startswith('Relinking: clobbering hardlink collision')]
        self.assertEqual(1, len(clobbering_lines), info_lines)
        quarantine_dir = os.path.join(
            self.devices, 'sda1', 'quarantined', 'objects')
        self.assertIn(quarantine_dir, clobbering_lines[0])
        self.assertEqual(
            '1 hash dirs processed (cleanup=False) '
            '(1 files, 1 linked, 0 removed, 0 errors)',
            info_lines[-2])

    def test_relink_clobber_hardlink_collisions_after_quarantine(self):
        self.rb.prepare_increase_partition_power()
        self._save_ring()

        # make a file where we'd expect the link to be created
        os.makedirs(self.expected_dir)
        with open(self.expected_file, 'w'):
            pass

        # expect error
        with mock.patch(
                'swift.obj.diskfile.quarantine_renamer') as mock_quarantine, \
                self._mock_relinker():
            mock_quarantine.return_value = '/the/quarantine/dir'
            self.assertEqual(1, relinker.main([
                'relink',
                '--swift-dir', self.testdir,
                '--devices', self.devices,
                '--skip-mount',
                '--clobber-hardlink-collisions',
            ]))

        dev_path = os.path.join(self.devices, 'sda1')
        self.assertEqual([
            mock.call(dev_path, self.expected_file)
        ], mock_quarantine.call_args_list)

        # we still log info about clobbering hardlink collisions
        info_lines = self.logger.get_lines_for_level('info')
        clobbering_lines = [
            line for line in info_lines
            if line.startswith('Relinking: clobbering hardlink collision')]
        self.assertEqual(1, len(clobbering_lines), info_lines)
        self.assertIn('/the/quarantine/dir', clobbering_lines[0])

        # but there was *another* hardlink collision (!?)
        warning_lines = self.logger.get_lines_for_level('warning')
        self.assertIn('Error relinking: failed to relink %s to %s: '
                      '[Errno 17] File exists'
                      % (self.objname, self.expected_file),
                      warning_lines[0])
        self.assertIn('1 hash dirs processed (cleanup=False) '
                      '(1 files, 0 linked, 0 removed, 1 errors)',
                      warning_lines)
        collision_lines = [
            line for line in warning_lines
            if line.startswith('Relinking: hardlink collision')]
        self.assertEqual(1, len(collision_lines), warning_lines)
        self.assertIn('persists after quarantine', collision_lines[0])

        # XXX this is kind of sketchy: we got exit code 1 but no *error*
        # lines; does the relinker just treat errors as warnings?
        self.assertEqual([], self.logger.get_lines_for_level('error'))
        self.assertEqual(
            '1 hash dirs processed (cleanup=False) '
            '(1 files, 0 linked, 0 removed, 1 errors)',
            warning_lines[-1])

    def test_cleanup_link_already_exists_clobber_hardlink_collisions(self):
        self.rb.prepare_increase_partition_power()
        self.rb.increase_partition_power()
        self._save_ring()

        # make a file where we'd expect the link to be created
        os.makedirs(self.expected_dir)
        with open(self.expected_file, 'w'):
            pass

        with self._mock_relinker():
            self.assertEqual(0, relinker.main([
                'cleanup',
                '--swift-dir', self.testdir,
                '--devices', self.devices,
                '--skip-mount',
                '--debug',
                '--clobber-hardlink-collisions',
            ]))

        for level in ['error', 'warning']:
            self.assertEqual([], self.logger.get_lines_for_level(level))
        debug_lines = self.logger.get_lines_for_level('debug')
        relinking_lines = [
            line for line in debug_lines
            if line.startswith('Relinking (cleanup):')]
        self.assertEqual(1, len(relinking_lines), debug_lines)
        self.assertIn('tolerating hardlink collision', relinking_lines[0])
        info_lines = self.logger.get_lines_for_level('info')
        self.assertEqual(
            '1 hash dirs processed (cleanup=True) '
            '(1 files, 0 linked, 1 removed, 0 errors)',
            info_lines[-2])

    def test_relink_link_already_exists(self):
        self.rb.prepare_increase_partition_power()
@@ -2026,6 +2257,11 @@ class TestRelinker(unittest.TestCase):
            relink_errors={'data': OSError(errno.EPERM, 'oops'),
                           'meta': OSError(errno.EPERM, 'oops')}
        )
        error_lines = self.logger.get_lines_for_level('error')
        self.assertEqual(2, len(error_lines))
        for err_msg in error_lines:
            self.assertTrue(err_msg.startswith("Error relinking"))
            self.assertIn("failed to relink", err_msg)
        warning_lines = self.logger.get_lines_for_level('warning')
        self.assertIn('1 hash dirs processed (cleanup=True) '
                      '(2 files, 0 linked, 0 removed, 2 errors)',
@@ -3095,17 +3331,20 @@ class TestRelinker(unittest.TestCase):
        self.assertTrue(os.path.isfile(self.expected_file))
        with open(self.expected_file, 'r') as fd:
            self.assertEqual('same but different', fd.read())
        warning_lines = self.logger.get_lines_for_level('warning')
        self.assertEqual(2, len(warning_lines), warning_lines)
        self.assertIn('Error relinking (cleanup): failed to relink %s to %s'
                      % (self.objname, self.expected_file), warning_lines[0])
        error_lines = self.logger.get_lines_for_level('error')
        self.assertEqual([
            'Error relinking (cleanup): hardlink collision: %s to %s'
            ' (consider enabling clobber_hardlink_collisions)'
            % (self.objname, self.expected_file),
        ], error_lines)
        # suffix should not be invalidated in new partition
        hashes_invalid = os.path.join(self.next_part_dir, 'hashes.invalid')
        self.assertFalse(os.path.exists(hashes_invalid))
        self.assertEqual('1 hash dirs processed (cleanup=True) '
                         '(1 files, 0 linked, 0 removed, 1 errors)',
                         warning_lines[1])
        self.assertEqual([], self.logger.get_lines_for_level('error'))
        warning_lines = self.logger.get_lines_for_level('warning')
        self.assertEqual([
            '1 hash dirs processed (cleanup=True) '
            '(1 files, 0 linked, 0 removed, 1 errors)',
        ], warning_lines)

    def test_cleanup_older_object_in_new_partition(self):
        # relink of the current object failed, but there is an older version of
@@ -3411,7 +3650,7 @@ class TestRelinker(unittest.TestCase):
                      '(1 files, 1 linked, 1 removed, 0 errors)', info_lines)
        self.assertEqual([], self.logger.get_lines_for_level('error'))

    def test_cleanup_new_does_not_exist_and_relink_fails(self):
    def test_cleanup_new_does_not_exist_and_relink_raises_os_error(self):
        # force rehash of new partition to not happen during cleanup
        self._setup_object(lambda part: part >= 2 ** (PART_POWER - 1))
        self._common_test_cleanup()
@@ -3422,7 +3661,8 @@ class TestRelinker(unittest.TestCase):
        # cleanup: cleanup attempts to re-create the link but fails
        os.remove(self.expected_file)

        with mock.patch('swift.obj.diskfile.os.link', side_effect=OSError):
        with mock.patch('swift.obj.diskfile.os.link',
                        side_effect=OSError('os-error!')):
            with self._mock_relinker():
                self.assertEqual(1, relinker.main([
                    'cleanup',
@@ -3432,9 +3672,49 @@ class TestRelinker(unittest.TestCase):
                ]))
        self.assertFalse(os.path.isfile(self.expected_file))
        self.assertTrue(os.path.isfile(self.objname))  # old file intact
        self.assertEqual(self.logger.get_lines_for_level('warning'), [
            'Error relinking (cleanup): failed to relink %s to %s: '
        self.assertEqual(self.logger.get_lines_for_level('error'), [
            'Error relinking (cleanup): failed to relink %s to %s: os-error!'
            % (self.objname, self.expected_file),
        ])
        self.assertEqual(self.logger.get_lines_for_level('warning'), [
            '1 hash dirs processed (cleanup=True) '
            '(1 files, 0 linked, 0 removed, 1 errors)',
        ])
        # suffix should not be invalidated in new partition
        self.assertTrue(os.path.exists(hashes_invalid))
        with open(hashes_invalid, 'r') as fd:
            self.assertEqual('', fd.read().strip())
        # nor in the old partition
        old_hashes_invalid = os.path.join(self.part_dir, 'hashes.invalid')
        self.assertFalse(os.path.exists(old_hashes_invalid))

    def test_cleanup_new_does_not_exist_and_relink_raises_other_error(self):
        # force rehash of new partition to not happen during cleanup
        self._setup_object(lambda part: part >= 2 ** (PART_POWER - 1))
        self._common_test_cleanup()
        # rehash during relink creates hashes.invalid...
        hashes_invalid = os.path.join(self.next_part_dir, 'hashes.invalid')
        self.assertTrue(os.path.exists(hashes_invalid))
        # Pretend the file in the new place got deleted in between relink and
        # cleanup: cleanup attempts to re-create the link but fails
        os.remove(self.expected_file)

        with mock.patch('swift.obj.diskfile.os.link',
                        side_effect=ValueError('kaboom!')):
            with self._mock_relinker():
                self.assertEqual(1, relinker.main([
                    'cleanup',
                    '--swift-dir', self.testdir,
                    '--devices', self.devices,
                    '--skip-mount',
                ]))
        self.assertFalse(os.path.isfile(self.expected_file))
        self.assertTrue(os.path.isfile(self.objname))  # old file intact
        self.assertEqual(self.logger.get_lines_for_level('error'), [
            'Error relinking (cleanup): failed to relink %s to %s: kaboom!'
            % (self.objname, self.expected_file),
        ])
        self.assertEqual(self.logger.get_lines_for_level('warning'), [
            '1 hash dirs processed (cleanup=True) '
            '(1 files, 0 linked, 0 removed, 1 errors)',
        ])
@@ -3445,7 +3725,6 @@ class TestRelinker(unittest.TestCase):
        # nor in the old partition
        old_hashes_invalid = os.path.join(self.part_dir, 'hashes.invalid')
        self.assertFalse(os.path.exists(old_hashes_invalid))
        self.assertEqual([], self.logger.get_lines_for_level('error'))

    def test_cleanup_remove_fails(self):
        meta_file = utils.Timestamp(int(self.obj_ts) + 1).internal + '.meta'
Block a user