From 4b2ac606b1a4bcbb949e6feef98e1b2600c83a14 Mon Sep 17 00:00:00 2001
From: Takashi Kajinami
Date: Tue, 22 Mar 2016 23:29:48 +0900
Subject: [PATCH] Pass logger instances to AccountBroker/ContainerBroker

There are some processes which don't pass their logger instances when
creating AccountBrokers/ContainerBrokers. As a result, error messages
emitted by those brokers are logged with settings different from the
other logs generated by the same processes.

This patch makes these processes pass their logger instances, so that
all of their logs are generated according to their log settings.
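
A minimal sketch of the pattern applied throughout this patch (the DB
path and logger configuration below are illustrative only; the point
is the optional logger keyword argument that the brokers accept):

    from swift.common.utils import get_logger
    from swift.container.backend import ContainerBroker

    logger = get_logger({}, log_route='container-auditor')

    # Without an explicit logger the broker falls back to a default
    # logger, so its messages may not follow this process's log
    # settings.
    broker = ContainerBroker('/path/to/container.db')

    # Passing the process's own logger keeps the broker's output
    # consistent with the rest of the process's logs.
    broker = ContainerBroker('/path/to/container.db', logger=logger)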

Change-Id: I914c3a2811e1a2b7f19ad2bc9b3d042fcba63820
---
 swift/account/auditor.py            |  2 +-
 swift/account/reaper.py             |  3 +-
 swift/common/db_replicator.py       |  9 ++--
 swift/container/auditor.py          |  2 +-
 swift/container/replicator.py       |  2 +-
 swift/container/sync.py             |  2 +-
 test/unit/account/test_auditor.py   |  3 +-
 test/unit/account/test_reaper.py    |  2 +-
 test/unit/container/test_auditor.py |  2 +-
 test/unit/container/test_sync.py    | 74 ++++++++++++++++-------------
 10 files changed, 57 insertions(+), 44 deletions(-)

diff --git a/swift/account/auditor.py b/swift/account/auditor.py
index 2d3588bb8e..66eda75111 100644
--- a/swift/account/auditor.py
+++ b/swift/account/auditor.py
@@ -135,7 +135,7 @@ class AccountAuditor(Daemon):
         """
         start_time = time.time()
         try:
-            broker = AccountBroker(path)
+            broker = AccountBroker(path, logger=self.logger)
             if not broker.is_deleted():
                 self.validate_per_policy_counts(broker)
                 self.logger.increment('passes')
diff --git a/swift/account/reaper.py b/swift/account/reaper.py
index 24b876073f..885b0099cf 100644
--- a/swift/account/reaper.py
+++ b/swift/account/reaper.py
@@ -193,7 +193,8 @@ class AccountReaper(Daemon):
                     elif fname.endswith('.db'):
                         self.start_time = time()
                         broker = \
-                            AccountBroker(os.path.join(hsh_path, fname))
+                            AccountBroker(os.path.join(hsh_path, fname),
+                                          logger=self.logger)
                         if broker.is_status_deleted() and \
                                 not broker.empty():
                             self.reap_account(
diff --git a/swift/common/db_replicator.py b/swift/common/db_replicator.py
index 639f570892..a5698a6d51 100644
--- a/swift/common/db_replicator.py
+++ b/swift/common/db_replicator.py
@@ -860,7 +860,8 @@ class ReplicatorRpc(object):
             mkdirs(os.path.join(self.root, drive, 'tmp'))
             if not self._db_file_exists(db_file):
                 return HTTPNotFound()
-            return getattr(self, op)(self.broker_class(db_file), args)
+            return getattr(self, op)(
+                self.broker_class(db_file, logger=self.logger), args)
 
     @contextmanager
     def debug_timing(self, name):
@@ -970,7 +971,7 @@ class ReplicatorRpc(object):
             return HTTPNotFound()
         if not os.path.exists(old_filename):
             return HTTPNotFound()
-        broker = self.broker_class(old_filename)
+        broker = self.broker_class(old_filename, logger=self.logger)
         broker.newid(args[0])
         renamer(old_filename, db_file)
         return HTTPNoContent()
@@ -987,8 +988,8 @@ class ReplicatorRpc(object):
         tmp_filename = os.path.join(self.root, drive, 'tmp', args[0])
         if self._abort_rsync_then_merge(db_file, tmp_filename):
             return HTTPNotFound()
-        new_broker = self.broker_class(tmp_filename)
-        existing_broker = self.broker_class(db_file)
+        new_broker = self.broker_class(tmp_filename, logger=self.logger)
+        existing_broker = self.broker_class(db_file, logger=self.logger)
         db_file = existing_broker.db_file
         point = -1
         objects = existing_broker.get_items_since(point, 1000)
diff --git a/swift/container/auditor.py b/swift/container/auditor.py
index c7d3aec893..ebb9005950 100644
--- a/swift/container/auditor.py
+++ b/swift/container/auditor.py
@@ -111,7 +111,7 @@ class ContainerAuditor(Daemon):
         """
         start_time = time.time()
         try:
-            broker = ContainerBroker(path)
+            broker = ContainerBroker(path, logger=self.logger)
             if not broker.is_deleted():
                 broker.get_info()
                 self.logger.increment('passes')
diff --git a/swift/container/replicator.py b/swift/container/replicator.py
index ea18fbd962..2ffa7c3b28 100644
--- a/swift/container/replicator.py
+++ b/swift/container/replicator.py
@@ -370,7 +370,7 @@ class ContainerReplicatorRpc(db_replicator.ReplicatorRpc):
         # if the local db has started sharding since the original 'sync'
         # request then abort object replication now; instantiate a fresh broker
         # each time this check if performed so to get latest state
-        broker = ContainerBroker(db_file)
+        broker = ContainerBroker(db_file, logger=self.logger)
         return broker.sharding_initiated()
 
     def _post_rsync_then_merge_hook(self, existing_broker, new_broker):
diff --git a/swift/container/sync.py b/swift/container/sync.py
index a17c28612c..f6d5fba8ed 100644
--- a/swift/container/sync.py
+++ b/swift/container/sync.py
@@ -334,7 +334,7 @@ class ContainerSync(Daemon):
         """
         broker = None
         try:
-            broker = ContainerBroker(path)
+            broker = ContainerBroker(path, logger=self.logger)
             # The path we pass to the ContainerBroker is a real path of
             # a container DB. If we get here, however, it means that this
             # path is linked from the sync_containers dir. In rare cases
diff --git a/test/unit/account/test_auditor.py b/test/unit/account/test_auditor.py
index 3ea361ff14..a46a34c2fc 100644
--- a/test/unit/account/test_auditor.py
+++ b/test/unit/account/test_auditor.py
@@ -33,10 +33,11 @@ from test.unit.account.test_backend import (
 
 
 class FakeAccountBroker(object):
-    def __init__(self, path):
+    def __init__(self, path, logger):
         self.path = path
         self.db_file = path
         self.file = os.path.basename(path)
+        self.logger = logger
 
     def is_deleted(self):
         return False
diff --git a/test/unit/account/test_reaper.py b/test/unit/account/test_reaper.py
index c801ef0949..fdd1165ae1 100644
--- a/test/unit/account/test_reaper.py
+++ b/test/unit/account/test_reaper.py
@@ -75,7 +75,7 @@ class FakeBroker(object):
 
 
 class FakeAccountBroker(object):
-    def __init__(self, containers):
+    def __init__(self, containers, logger=None):
         self.containers = containers
         self.containers_yielded = []
 
diff --git a/test/unit/container/test_auditor.py b/test/unit/container/test_auditor.py
index a4be7f8529..dab87d8c77 100644
--- a/test/unit/container/test_auditor.py
+++ b/test/unit/container/test_auditor.py
@@ -29,7 +29,7 @@ from test.unit.container import test_backend
 
 
 class FakeContainerBroker(object):
-    def __init__(self, path):
+    def __init__(self, path, logger):
         self.path = path
         self.db_file = path
         self.file = os.path.basename(path)
diff --git a/test/unit/container/test_sync.py b/test/unit/container/test_sync.py
index d3c7e468d5..88fc304ff7 100644
--- a/test/unit/container/test_sync.py
+++ b/test/unit/container/test_sync.py
@@ -48,7 +48,7 @@ class FakeRing(object):
 
 class FakeContainerBroker(object):
     def __init__(self, path, metadata=None, info=None, deleted=False,
-                 items_since=None):
+                 items_since=None, logger=None):
         self.db_file = path
         self.db_dir = os.path.dirname(path)
         self.metadata = metadata if metadata else {}
@@ -189,9 +189,9 @@ class TestContainerSync(unittest.TestCase):
                 mock.patch('swift.container.sync.sleep', fake_sleep), \
                 mock.patch(gen_func) as fake_generator, \
                 mock.patch('swift.container.sync.ContainerBroker',
-                           lambda p: FakeContainerBroker(p, info={
+                           lambda p, logger: FakeContainerBroker(p, info={
                                'account': 'a', 'container': 'c',
-                               'storage_policy_index': 0})):
+                               'storage_policy_index': 0}, logger=logger)):
             fake_generator.side_effect = [iter(['container.db']),
                                           iter(['container.db'])]
             cs = sync.ContainerSync({}, container_ring=FakeRing())
@@ -235,9 +235,9 @@ class TestContainerSync(unittest.TestCase):
                 mock.patch('swift.container.sync.time', fake_time), \
                 mock.patch(gen_func) as fake_generator, \
                 mock.patch('swift.container.sync.ContainerBroker',
-                           lambda p: FakeContainerBroker(p, info={
+                           lambda p, logger: FakeContainerBroker(p, info={
                                'account': 'a', 'container': 'c',
-                               'storage_policy_index': 0})):
+                               'storage_policy_index': 0}, logger=logger)):
             fake_generator.side_effect = [iter(['container.db']),
                                           iter(['container.db'])]
             cs = sync.ContainerSync({}, container_ring=FakeRing())
@@ -337,9 +337,10 @@ class TestContainerSync(unittest.TestCase):
         self.assertEqual(['10.0.0.0'], cs._myips)
         orig_ContainerBroker = sync.ContainerBroker
         try:
-            sync.ContainerBroker = lambda p: FakeContainerBroker(
+            sync.ContainerBroker = lambda p, logger: FakeContainerBroker(
                 p, info={'account': 'a', 'container': 'c',
-                         'storage_policy_index': 0})
+                         'storage_policy_index': 0},
+                logger=logger)
             cs._myips = ['127.0.0.1']  # No match
             cs._myport = 1  # No match
             cs.container_sync('isa.db')
@@ -370,9 +371,10 @@ class TestContainerSync(unittest.TestCase):
         cs = sync.ContainerSync({}, container_ring=cring)
         orig_ContainerBroker = sync.ContainerBroker
         try:
-            sync.ContainerBroker = lambda p: FakeContainerBroker(
+            sync.ContainerBroker = lambda p, logger: FakeContainerBroker(
                 p, info={'account': 'a', 'container': 'c',
-                         'storage_policy_index': 0}, deleted=False)
+                         'storage_policy_index': 0}, deleted=False,
+                logger=logger)
             cs._myips = ['10.0.0.0']  # Match
             cs._myport = 1000  # Match
             # This complete match will cause the 1 container failure since the
             cs.container_sync('isa.db')
             self.assertEqual(cs.container_failures, 1)
-            sync.ContainerBroker = lambda p: FakeContainerBroker(
+            sync.ContainerBroker = lambda p, logger: FakeContainerBroker(
                 p, info={'account': 'a', 'container': 'c',
-                         'storage_policy_index': 0}, deleted=True)
+                         'storage_policy_index': 0}, deleted=True,
+                logger=logger)
             # This complete match will not cause any more container failures
             # since the broker indicates deletion
             cs.container_sync('isa.db')
@@ -396,11 +399,12 @@ class TestContainerSync(unittest.TestCase):
         cs = sync.ContainerSync({}, container_ring=cring)
         orig_ContainerBroker = sync.ContainerBroker
         try:
-            sync.ContainerBroker = lambda p: FakeContainerBroker(
+            sync.ContainerBroker = lambda p, logger: FakeContainerBroker(
                 p, info={'account': 'a', 'container': 'c',
                          'storage_policy_index': 0,
                          'x_container_sync_point1': -1,
-                         'x_container_sync_point2': -1})
+                         'x_container_sync_point2': -1},
+                logger=logger)
             cs._myips = ['10.0.0.0']  # Match
             cs._myport = 1000  # Match
             # This complete match will be skipped since the broker's metadata
             cs.container_sync('isa.db')
             self.assertEqual(cs.container_failures, 0)
             self.assertEqual(cs.container_skips, 1)
-            sync.ContainerBroker = lambda p: FakeContainerBroker(
+            sync.ContainerBroker = lambda p, logger: FakeContainerBroker(
                 p, info={'account': 'a', 'container': 'c',
                          'storage_policy_index': 0,
                          'x_container_sync_point1': -1,
                          'x_container_sync_point2': -1},
-                metadata={'x-container-sync-to': ('http://127.0.0.1/a/c', 1)})
+                metadata={'x-container-sync-to': ('http://127.0.0.1/a/c', 1)},
+                logger=logger)
             cs._myips = ['10.0.0.0']  # Match
             cs._myport = 1000  # Match
             # This complete match will be skipped since the broker's metadata
@@ -423,12 +428,13 @@ class TestContainerSync(unittest.TestCase):
             self.assertEqual(cs.container_failures, 0)
             self.assertEqual(cs.container_skips, 2)
 
-            sync.ContainerBroker = lambda p: FakeContainerBroker(
+            sync.ContainerBroker = lambda p, logger: FakeContainerBroker(
                 p, info={'account': 'a', 'container': 'c',
                          'storage_policy_index': 0,
                          'x_container_sync_point1': -1,
                          'x_container_sync_point2': -1},
-                metadata={'x-container-sync-key': ('key', 1)})
+                metadata={'x-container-sync-key': ('key', 1)},
+                logger=logger)
             cs._myips = ['10.0.0.0']  # Match
             cs._myport = 1000  # Match
             # This complete match will be skipped since the broker's metadata
@@ -437,13 +443,14 @@ class TestContainerSync(unittest.TestCase):
             self.assertEqual(cs.container_failures, 0)
             self.assertEqual(cs.container_skips, 3)
 
-            sync.ContainerBroker = lambda p: FakeContainerBroker(
+            sync.ContainerBroker = lambda p, logger: FakeContainerBroker(
                 p, info={'account': 'a', 'container': 'c',
                          'storage_policy_index': 0,
                          'x_container_sync_point1': -1,
                          'x_container_sync_point2': -1},
                 metadata={'x-container-sync-to': ('http://127.0.0.1/a/c', 1),
-                          'x-container-sync-key': ('key', 1)})
+                          'x-container-sync-key': ('key', 1)},
+                logger=logger)
             cs._myips = ['10.0.0.0']  # Match
             cs._myport = 1000  # Match
             cs.allowed_sync_hosts = []
@@ -453,13 +460,14 @@ class TestContainerSync(unittest.TestCase):
             self.assertEqual(cs.container_failures, 1)
             self.assertEqual(cs.container_skips, 3)
 
-            sync.ContainerBroker = lambda p: FakeContainerBroker(
+            sync.ContainerBroker = lambda p, logger: FakeContainerBroker(
                 p, info={'account': 'a', 'container': 'c',
                          'storage_policy_index': 0,
                          'x_container_sync_point1': -1,
                          'x_container_sync_point2': -1},
                 metadata={'x-container-sync-to': ('http://127.0.0.1/a/c', 1),
-                          'x-container-sync-key': ('key', 1)})
+                          'x-container-sync-key': ('key', 1)},
+                logger=logger)
             cs._myips = ['10.0.0.0']  # Match
             cs._myport = 1000  # Match
             cs.allowed_sync_hosts = ['127.0.0.1']
@@ -478,14 +486,15 @@ class TestContainerSync(unittest.TestCase):
         orig_ContainerBroker = sync.ContainerBroker
         orig_time = sync.time
         try:
-            sync.ContainerBroker = lambda p: FakeContainerBroker(
+            sync.ContainerBroker = lambda p, logger: FakeContainerBroker(
                 p, info={'account': 'a', 'container': 'c',
                          'storage_policy_index': 0,
                          'x_container_sync_point1': -1,
                          'x_container_sync_point2': -1},
                 metadata={'x-container-sync-to': ('http://127.0.0.1/a/c', 1),
                           'x-container-sync-key': ('key', 1)},
-                items_since=['erroneous data'])
+                items_since=['erroneous data'],
+                logger=logger)
             cs._myips = ['10.0.0.0']  # Match
             cs._myport = 1000  # Match
             cs.allowed_sync_hosts = ['127.0.0.1']
@@ -534,7 +543,7 @@ class TestContainerSync(unittest.TestCase):
                       'x-container-sync-key': ('key', 1)},
             items_since=[{'ROWID': 1, 'name': 'o'}])
         with mock.patch('swift.container.sync.ContainerBroker',
-                        lambda p: fcb), \
+                        lambda p, logger: fcb), \
                 mock.patch('swift.container.sync.hash_path', fake_hash_path):
             cs._myips = ['10.0.0.0']  # Match
             cs._myport = 1000  # Match
@@ -561,7 +570,7 @@ class TestContainerSync(unittest.TestCase):
                       ('key', 1)},
             items_since=[{'ROWID': 1, 'name': 'o'}])
         with mock.patch('swift.container.sync.ContainerBroker',
-                        lambda p: fcb), \
+                        lambda p, logger: fcb), \
                 mock.patch('swift.container.sync.hash_path', fake_hash_path):
             cs._myips = ['10.0.0.0']  # Match
             cs._myport = 1000  # Match
@@ -582,7 +591,8 @@ class TestContainerSync(unittest.TestCase):
             metadata={'x-container-sync-to': ('http://127.0.0.1/a/c', 1),
                       'x-container-sync-key': ('key', 1)},
             items_since=[{'ROWID': 1, 'name': 'o'}])
-        with mock.patch('swift.container.sync.ContainerBroker', lambda p: fcb):
+        with mock.patch('swift.container.sync.ContainerBroker',
+                        lambda p, logger: fcb):
             cs._myips = ['10.0.0.0']  # Match
             cs._myport = 1000  # Match
             cs.allowed_sync_hosts = ['127.0.0.1']
@@ -607,7 +617,7 @@ class TestContainerSync(unittest.TestCase):
             items_since=[{'ROWID': 1, 'name': 'o', 'created_at': '1.2',
                           'deleted': True}])
         with mock.patch('swift.container.sync.ContainerBroker',
-                        lambda p: fcb), \
+                        lambda p, logger: fcb), \
                 mock.patch('swift.container.sync.delete_object',
                            fake_delete_object):
             cs._myips = ['10.0.0.0']  # Match
@@ -631,7 +641,7 @@ class TestContainerSync(unittest.TestCase):
             items_since=[{'ROWID': 1, 'name': 'o', 'created_at': '1.2',
                           'deleted': True}])
         with mock.patch('swift.container.sync.ContainerBroker',
-                        lambda p: fcb), \
+                        lambda p, logger: fcb), \
                 mock.patch('swift.container.sync.delete_object',
                            lambda *x, **y: None):
             cs._myips = ['10.0.0.0']  # Match
@@ -671,7 +681,7 @@ class TestContainerSync(unittest.TestCase):
             metadata={'x-container-sync-to': ('http://127.0.0.1/a/c', 1),
                       'x-container-sync-key': ('key', 1)},
             items_since=[{'ROWID': 1, 'name': 'o'}])
-        sync.ContainerBroker = lambda p: fcb
+        sync.ContainerBroker = lambda p, logger: fcb
         cs._myips = ['10.0.0.0']  # Match
         cs._myport = 1000  # Match
         cs.allowed_sync_hosts = ['127.0.0.1']
@@ -701,7 +711,7 @@ class TestContainerSync(unittest.TestCase):
             metadata={'x-container-sync-to': ('http://127.0.0.1/a/c', 1),
                       'x-container-sync-key': ('key', 1)},
             items_since=[{'ROWID': 1, 'name': 'o'}])
-        sync.ContainerBroker = lambda p: fcb
+        sync.ContainerBroker = lambda p, logger: fcb
         cs._myips = ['10.0.0.0']  # Match
         cs._myport = 1000  # Match
         cs.allowed_sync_hosts = ['127.0.0.1']
@@ -723,7 +733,7 @@ class TestContainerSync(unittest.TestCase):
                       'x-container-sync-key': ('key', 1)},
             items_since=[{'ROWID': 1, 'name': 'o', 'created_at': '1.2',
                           'deleted': True}])
-        sync.ContainerBroker = lambda p: fcb
+        sync.ContainerBroker = lambda p, logger: fcb
         cs._myips = ['10.0.0.0']  # Match
         cs._myport = 1000  # Match
         cs.allowed_sync_hosts = ['127.0.0.1']
@@ -779,7 +789,7 @@ class TestContainerSync(unittest.TestCase):
                 mock.patch('swift.container.sync.hash_path',
                            fake_hash_path), \
                 mock.patch('swift.container.sync.ContainerBroker',
-                           lambda p: fcb):
+                           lambda p, logger: fcb):
             cring = FakeRing()
             cs = sync.ContainerSync({}, container_ring=cring,
                                     logger=self.logger)