
Stop syncing empty suffixes list

Change-Id: I918ab4ccbf4d081b26f4117937410cdad1caf8d3
Closes-Bug: #1862645
Closes-Bug: #1886782
(cherry picked from commit 907942eb47)
Authored by Tim Burke 3 weeks ago
Committed by Tim Burke
commit 69225ada60
2 changed files with 39 additions and 1 deletion:
  1. swift/obj/replicator.py (+3, -0)
  2. test/unit/obj/test_replicator.py (+36, -1)

swift/obj/replicator.py (+3, -0)

@@ -681,6 +681,9 @@ class ObjectReplicator(Daemon):
                     suffixes = [suffix for suffix in local_hash if
                                 local_hash[suffix] !=
                                 remote_hash.get(suffix, -1)]
+                    if not suffixes:
+                        stats.hashmatch += 1
+                        continue
                     stats.rsync += 1
                     success, _junk = self.sync(node, job, suffixes)
                     with Timeout(self.http_timeout):
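
For context, a minimal sketch of the behaviour the new guard changes (simplified standalone Python, not the full ObjectReplicator.update loop): when every local suffix hash already matches the remote node's, the computed suffixes list is empty, and the replicator now counts a hashmatch and skips the node instead of falling through to an empty sync call.

# Minimal illustrative sketch; the hash values mirror those used in the
# new unit test below, and suffixes_to_sync is a stand-in for the list
# comprehension shown in the hunk above.

def suffixes_to_sync(local_hash, remote_hash):
    # Keep only suffix dirs whose local hash differs from the remote one.
    return [suffix for suffix in local_hash
            if local_hash[suffix] != remote_hash.get(suffix, -1)]

local_hash = {'a83': 'c130a2c17ed45102aada0f4eee69494ff'}
remote_hash = {'a83': 'c130a2c17ed45102aada0f4eee69494ff'}

suffixes = suffixes_to_sync(local_hash, remote_hash)
if not suffixes:
    # With the change above, this case bumps stats.hashmatch and moves on
    # to the next node; previously the replicator still went on to call
    # self.sync(node, job, []) with an empty suffixes list.
    print('all suffix hashes match; nothing to rsync')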


test/unit/obj/test_replicator.py (+36, -1)

@@ -1918,7 +1918,7 @@ class TestObjectReplicator(unittest.TestCase):
         # Check successful http_connection and exception with
         # incorrect pickle.loads(resp.read())
         resp.status = 200
-        resp.read.return_value = 'garbage'
+        resp.read.return_value = b'garbage'
         expect = 'Error syncing with node: %r: '
         for job in jobs:
             set_default(self)
@@ -1968,6 +1968,7 @@ class TestObjectReplicator(unittest.TestCase):
         self.assertEqual(stats.suffix_sync, 2)
         self.assertEqual(stats.suffix_hash, 1)
         self.assertEqual(stats.suffix_count, 1)
+        self.assertEqual(stats.hashmatch, 0)
 
         # Efficient Replication Case
         set_default(self)
@@ -1988,6 +1989,7 @@ class TestObjectReplicator(unittest.TestCase):
         self.assertEqual(stats.suffix_sync, 1)
         self.assertEqual(stats.suffix_hash, 1)
         self.assertEqual(stats.suffix_count, 1)
+        self.assertEqual(stats.hashmatch, 0)
 
         mock_http.reset_mock()
         self.logger.clear()
@@ -2014,6 +2016,39 @@ class TestObjectReplicator(unittest.TestCase):
                                   '/a83', headers=self.headers))
         mock_http.assert_has_calls(reqs, any_order=True)
 
+    @mock.patch('swift.obj.replicator.tpool.execute')
+    @mock.patch('swift.obj.replicator.http_connect', autospec=True)
+    @mock.patch('swift.obj.replicator._do_listdir')
+    def test_update_local_hash_changes_during_replication(
+            self, mock_do_listdir, mock_http, mock_tpool_execute):
+        mock_http.return_value = answer = mock.MagicMock()
+        answer.getresponse.return_value = resp = mock.MagicMock()
+        resp.status = 200
+        resp.read.return_value = pickle.dumps({
+            'a83': 'c130a2c17ed45102aada0f4eee69494ff'})
+
+        self.replicator.sync = fake_sync = \
+            mock.MagicMock(return_value=(True, []))
+        local_job = [
+            job for job in self.replicator.collect_jobs()
+            if not job['delete']
+            and job['partition'] == '0' and int(job['policy']) == 0
+        ][0]
+
+        mock_tpool_execute.side_effect = [
+            (1, {'a83': 'ba47fd314242ec8c7efb91f5d57336e4'}),
+            (1, {'a83': 'c130a2c17ed45102aada0f4eee69494ff'}),
+        ]
+        self.replicator.update(local_job)
+        self.assertEqual(fake_sync.call_count, 0)
+        self.assertEqual(mock_http.call_count, 2)
+        stats = self.replicator.total_stats
+        self.assertEqual(stats.attempted, 1)
+        self.assertEqual(stats.suffix_sync, 0)
+        self.assertEqual(stats.suffix_hash, 1)
+        self.assertEqual(stats.suffix_count, 1)
+        self.assertEqual(stats.hashmatch, 2)
+
     def test_rsync_compress_different_region(self):
         self.assertEqual(self.replicator.sync_method, self.replicator.rsync)
         jobs = self.replicator.collect_jobs()

