Merge "pep8 fix: assertEquals -> assertEqual"

This commit is contained in:
Jenkins 2015-08-11 08:38:03 +00:00 committed by Gerrit Code Review
commit 1894914577
9 changed files with 766 additions and 766 deletions

View File

@ -84,8 +84,8 @@ class TestAuditor(unittest.TestCase):
def test_worker_conf_parms(self):
def check_common_defaults():
self.assertEquals(auditor_worker.max_bytes_per_second, 10000000)
self.assertEquals(auditor_worker.log_time, 3600)
self.assertEqual(auditor_worker.max_bytes_per_second, 10000000)
self.assertEqual(auditor_worker.log_time, 3600)
# test default values
conf = dict(
@ -95,9 +95,9 @@ class TestAuditor(unittest.TestCase):
auditor_worker = auditor.AuditorWorker(conf, self.logger,
self.rcache, self.devices)
check_common_defaults()
self.assertEquals(auditor_worker.diskfile_mgr.disk_chunk_size, 65536)
self.assertEquals(auditor_worker.max_files_per_second, 20)
self.assertEquals(auditor_worker.zero_byte_only_at_fps, 0)
self.assertEqual(auditor_worker.diskfile_mgr.disk_chunk_size, 65536)
self.assertEqual(auditor_worker.max_files_per_second, 20)
self.assertEqual(auditor_worker.zero_byte_only_at_fps, 0)
# test specified audit value overrides
conf.update({'disk_chunk_size': 4096})
@ -105,9 +105,9 @@ class TestAuditor(unittest.TestCase):
self.rcache, self.devices,
zero_byte_only_at_fps=50)
check_common_defaults()
self.assertEquals(auditor_worker.diskfile_mgr.disk_chunk_size, 4096)
self.assertEquals(auditor_worker.max_files_per_second, 50)
self.assertEquals(auditor_worker.zero_byte_only_at_fps, 50)
self.assertEqual(auditor_worker.diskfile_mgr.disk_chunk_size, 4096)
self.assertEqual(auditor_worker.max_files_per_second, 50)
self.assertEqual(auditor_worker.zero_byte_only_at_fps, 50)
def test_object_audit_extra_data(self):
def run_tests(disk_file):
@ -131,15 +131,15 @@ class TestAuditor(unittest.TestCase):
auditor_worker.object_audit(
AuditLocation(disk_file._datadir, 'sda', '0',
policy=POLICIES.legacy))
self.assertEquals(auditor_worker.quarantines, pre_quarantines)
self.assertEqual(auditor_worker.quarantines, pre_quarantines)
os.write(writer._fd, 'extra_data')
auditor_worker.object_audit(
AuditLocation(disk_file._datadir, 'sda', '0',
policy=POLICIES.legacy))
self.assertEquals(auditor_worker.quarantines,
pre_quarantines + 1)
self.assertEqual(auditor_worker.quarantines,
pre_quarantines + 1)
run_tests(self.disk_file)
run_tests(self.disk_file_p1)
@ -168,7 +168,7 @@ class TestAuditor(unittest.TestCase):
auditor_worker.object_audit(
AuditLocation(self.disk_file._datadir, 'sda', '0',
policy=POLICIES.legacy))
self.assertEquals(auditor_worker.quarantines, pre_quarantines)
self.assertEqual(auditor_worker.quarantines, pre_quarantines)
etag = md5()
etag.update('1' + '0' * 1023)
etag = etag.hexdigest()
@ -181,7 +181,7 @@ class TestAuditor(unittest.TestCase):
auditor_worker.object_audit(
AuditLocation(self.disk_file._datadir, 'sda', '0',
policy=POLICIES.legacy))
self.assertEquals(auditor_worker.quarantines, pre_quarantines + 1)
self.assertEqual(auditor_worker.quarantines, pre_quarantines + 1)
def test_object_audit_no_meta(self):
timestamp = str(normalize_timestamp(time.time()))
@ -197,7 +197,7 @@ class TestAuditor(unittest.TestCase):
auditor_worker.object_audit(
AuditLocation(self.disk_file._datadir, 'sda', '0',
policy=POLICIES.legacy))
self.assertEquals(auditor_worker.quarantines, pre_quarantines + 1)
self.assertEqual(auditor_worker.quarantines, pre_quarantines + 1)
def test_object_audit_will_not_swallow_errors_in_tests(self):
timestamp = str(normalize_timestamp(time.time()))
@ -232,7 +232,7 @@ class TestAuditor(unittest.TestCase):
auditor_worker.failsafe_object_audit(
AuditLocation(os.path.dirname(path), 'sda', '0',
policy=POLICIES.legacy))
self.assertEquals(auditor_worker.errors, 1)
self.assertEqual(auditor_worker.errors, 1)
def test_generic_exception_handling(self):
auditor_worker = auditor.AuditorWorker(self.conf, self.logger,
@ -256,7 +256,7 @@ class TestAuditor(unittest.TestCase):
with mock.patch('swift.obj.diskfile.DiskFileManager.diskfile_cls',
lambda *_: 1 / 0):
auditor_worker.audit_all_objects()
self.assertEquals(auditor_worker.errors, pre_errors + 1)
self.assertEqual(auditor_worker.errors, pre_errors + 1)
def test_object_run_once_pass(self):
auditor_worker = auditor.AuditorWorker(self.conf, self.logger,
@ -285,10 +285,10 @@ class TestAuditor(unittest.TestCase):
write_file(self.disk_file_p1)
auditor_worker.audit_all_objects()
self.assertEquals(auditor_worker.quarantines, pre_quarantines)
self.assertEqual(auditor_worker.quarantines, pre_quarantines)
# 1 object per policy falls into 1024 bucket
self.assertEquals(auditor_worker.stats_buckets[1024], 2)
self.assertEquals(auditor_worker.stats_buckets[10240], 0)
self.assertEqual(auditor_worker.stats_buckets[1024], 2)
self.assertEqual(auditor_worker.stats_buckets[10240], 0)
# pick up some additional code coverage, large file
data = '0' * 1024 * 1024
@ -304,22 +304,22 @@ class TestAuditor(unittest.TestCase):
}
writer.put(metadata)
auditor_worker.audit_all_objects(device_dirs=['sda', 'sdb'])
self.assertEquals(auditor_worker.quarantines, pre_quarantines)
self.assertEqual(auditor_worker.quarantines, pre_quarantines)
# still have the 1024 byte object left in policy-1 (plus the
# stats from the original 2)
self.assertEquals(auditor_worker.stats_buckets[1024], 3)
self.assertEquals(auditor_worker.stats_buckets[10240], 0)
self.assertEqual(auditor_worker.stats_buckets[1024], 3)
self.assertEqual(auditor_worker.stats_buckets[10240], 0)
# and then policy-0 disk_file was re-written as a larger object
self.assertEquals(auditor_worker.stats_buckets['OVER'], 1)
self.assertEqual(auditor_worker.stats_buckets['OVER'], 1)
# pick up even more additional code coverage, misc paths
auditor_worker.log_time = -1
auditor_worker.stats_sizes = []
auditor_worker.audit_all_objects(device_dirs=['sda', 'sdb'])
self.assertEquals(auditor_worker.quarantines, pre_quarantines)
self.assertEquals(auditor_worker.stats_buckets[1024], 3)
self.assertEquals(auditor_worker.stats_buckets[10240], 0)
self.assertEquals(auditor_worker.stats_buckets['OVER'], 1)
self.assertEqual(auditor_worker.quarantines, pre_quarantines)
self.assertEqual(auditor_worker.stats_buckets[1024], 3)
self.assertEqual(auditor_worker.stats_buckets[10240], 0)
self.assertEqual(auditor_worker.stats_buckets['OVER'], 1)
def test_object_run_logging(self):
logger = FakeLogger()
@ -360,7 +360,7 @@ class TestAuditor(unittest.TestCase):
writer.put(metadata)
os.write(writer._fd, 'extra_data')
auditor_worker.audit_all_objects()
self.assertEquals(auditor_worker.quarantines, pre_quarantines + 1)
self.assertEqual(auditor_worker.quarantines, pre_quarantines + 1)
def test_object_run_once_multi_devices(self):
auditor_worker = auditor.AuditorWorker(self.conf, self.logger,
@ -398,7 +398,7 @@ class TestAuditor(unittest.TestCase):
writer.put(metadata)
os.write(writer._fd, 'extra_data')
auditor_worker.audit_all_objects()
self.assertEquals(auditor_worker.quarantines, pre_quarantines + 1)
self.assertEqual(auditor_worker.quarantines, pre_quarantines + 1)
def test_object_run_fast_track_non_zero(self):
self.auditor = auditor.ObjectAuditor(self.conf)
@ -611,14 +611,14 @@ class TestAuditor(unittest.TestCase):
self.assertRaises(StopForever,
my_auditor.run_forever, zero_byte_fps=50)
self.assertEquals(mocker.check_kwargs['zero_byte_fps'], 50)
self.assertEquals(mocker.fork_called, 0)
self.assertEqual(mocker.check_kwargs['zero_byte_fps'], 50)
self.assertEqual(mocker.fork_called, 0)
self.assertRaises(SystemExit, my_auditor.run_once)
self.assertEquals(mocker.fork_called, 1)
self.assertEquals(mocker.check_kwargs['zero_byte_fps'], 89)
self.assertEquals(mocker.check_device_dir, [])
self.assertEquals(mocker.check_args, ())
self.assertEqual(mocker.fork_called, 1)
self.assertEqual(mocker.check_kwargs['zero_byte_fps'], 89)
self.assertEqual(mocker.check_device_dir, [])
self.assertEqual(mocker.check_args, ())
device_list = ['sd%s' % i for i in string.ascii_letters[2:10]]
device_string = ','.join(device_list)
@ -627,9 +627,9 @@ class TestAuditor(unittest.TestCase):
mocker.fork_called = 0
self.assertRaises(SystemExit, my_auditor.run_once,
devices=device_string_bogus)
self.assertEquals(mocker.fork_called, 1)
self.assertEquals(mocker.check_kwargs['zero_byte_fps'], 89)
self.assertEquals(sorted(mocker.check_device_dir), device_list)
self.assertEqual(mocker.fork_called, 1)
self.assertEqual(mocker.check_kwargs['zero_byte_fps'], 89)
self.assertEqual(sorted(mocker.check_device_dir), device_list)
mocker.master = 1
@ -638,8 +638,8 @@ class TestAuditor(unittest.TestCase):
# Fork is called 2 times since the zbf process is forked just
# once before self._sleep() is called and StopForever is raised
# Also wait is called just once before StopForever is raised
self.assertEquals(mocker.fork_called, 2)
self.assertEquals(mocker.wait_called, 1)
self.assertEqual(mocker.fork_called, 2)
self.assertEqual(mocker.wait_called, 1)
my_auditor._sleep = mocker.mock_sleep_continue
@ -650,10 +650,10 @@ class TestAuditor(unittest.TestCase):
# Fork is called no. of devices + (no. of devices)/2 + 1 times
# since zbf process is forked (no.of devices)/2 + 1 times
no_devices = len(os.listdir(self.devices))
self.assertEquals(mocker.fork_called, no_devices + no_devices / 2
+ 1)
self.assertEquals(mocker.wait_called, no_devices + no_devices / 2
+ 1)
self.assertEqual(mocker.fork_called, no_devices + no_devices / 2
+ 1)
self.assertEqual(mocker.wait_called, no_devices + no_devices / 2
+ 1)
finally:
os.fork = was_fork

View File

@ -206,28 +206,28 @@ class TestDiskFileModuleMethods(unittest.TestCase):
self.devices, qbit)
def test_get_data_dir(self):
self.assertEquals(diskfile.get_data_dir(POLICIES[0]),
diskfile.DATADIR_BASE)
self.assertEquals(diskfile.get_data_dir(POLICIES[1]),
diskfile.DATADIR_BASE + "-1")
self.assertEqual(diskfile.get_data_dir(POLICIES[0]),
diskfile.DATADIR_BASE)
self.assertEqual(diskfile.get_data_dir(POLICIES[1]),
diskfile.DATADIR_BASE + "-1")
self.assertRaises(ValueError, diskfile.get_data_dir, 'junk')
self.assertRaises(ValueError, diskfile.get_data_dir, 99)
def test_get_async_dir(self):
self.assertEquals(diskfile.get_async_dir(POLICIES[0]),
diskfile.ASYNCDIR_BASE)
self.assertEquals(diskfile.get_async_dir(POLICIES[1]),
diskfile.ASYNCDIR_BASE + "-1")
self.assertEqual(diskfile.get_async_dir(POLICIES[0]),
diskfile.ASYNCDIR_BASE)
self.assertEqual(diskfile.get_async_dir(POLICIES[1]),
diskfile.ASYNCDIR_BASE + "-1")
self.assertRaises(ValueError, diskfile.get_async_dir, 'junk')
self.assertRaises(ValueError, diskfile.get_async_dir, 99)
def test_get_tmp_dir(self):
self.assertEquals(diskfile.get_tmp_dir(POLICIES[0]),
diskfile.TMP_BASE)
self.assertEquals(diskfile.get_tmp_dir(POLICIES[1]),
diskfile.TMP_BASE + "-1")
self.assertEqual(diskfile.get_tmp_dir(POLICIES[0]),
diskfile.TMP_BASE)
self.assertEqual(diskfile.get_tmp_dir(POLICIES[1]),
diskfile.TMP_BASE + "-1")
self.assertRaises(ValueError, diskfile.get_tmp_dir, 'junk')
self.assertRaises(ValueError, diskfile.get_tmp_dir, 99)
@ -1982,7 +1982,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df = self._get_open_disk_file(ts=self.ts().internal,
extra_metadata=orig_metadata)
with df.open():
self.assertEquals('1024', df._metadata['Content-Length'])
self.assertEqual('1024', df._metadata['Content-Length'])
# write some new metadata (fast POST, don't send orig meta, at t0+1)
df = self._simple_get_diskfile()
df.write_metadata({'X-Timestamp': self.ts().internal,
@ -1990,11 +1990,11 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df = self._simple_get_diskfile()
with df.open():
# non-fast-post updateable keys are preserved
self.assertEquals('text/garbage', df._metadata['Content-Type'])
self.assertEqual('text/garbage', df._metadata['Content-Type'])
# original fast-post updateable keys are removed
self.assertTrue('X-Object-Meta-Key1' not in df._metadata)
# new fast-post updateable keys are added
self.assertEquals('Value2', df._metadata['X-Object-Meta-Key2'])
self.assertEqual('Value2', df._metadata['X-Object-Meta-Key2'])
def test_disk_file_preserves_sysmeta(self):
# build an object with some meta (at t0)
@ -2003,7 +2003,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df = self._get_open_disk_file(ts=self.ts().internal,
extra_metadata=orig_metadata)
with df.open():
self.assertEquals('1024', df._metadata['Content-Length'])
self.assertEqual('1024', df._metadata['Content-Length'])
# write some new metadata (fast POST, don't send orig meta, at t0+1s)
df = self._simple_get_diskfile()
df.write_metadata({'X-Timestamp': self.ts().internal,
@ -2012,9 +2012,9 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df = self._simple_get_diskfile()
with df.open():
# non-fast-post updateable keys are preserved
self.assertEquals('text/garbage', df._metadata['Content-Type'])
self.assertEqual('text/garbage', df._metadata['Content-Type'])
# original sysmeta keys are preserved
self.assertEquals('Value1', df._metadata['X-Object-Sysmeta-Key1'])
self.assertEqual('Value1', df._metadata['X-Object-Sysmeta-Key1'])
def test_disk_file_reader_iter(self):
df = self._create_test_file('1234567890')
@ -2037,9 +2037,9 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df = self._create_test_file('1234567890')
quarantine_msgs = []
reader = df.reader(_quarantine_hook=quarantine_msgs.append)
self.assertEquals(''.join(reader.app_iter_range(0, None)),
'1234567890')
self.assertEquals(quarantine_msgs, [])
self.assertEqual(''.join(reader.app_iter_range(0, None)),
'1234567890')
self.assertEqual(quarantine_msgs, [])
df = self._simple_get_diskfile()
with df.open():
reader = df.reader()
@ -2132,7 +2132,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
'5e816ff8b8b8e9a5d355497e5d9e0301\r\n'])
value = header + ''.join(it)
self.assertEquals(quarantine_msgs, [])
self.assertEqual(quarantine_msgs, [])
parts = map(lambda p: p.get_payload(decode=True),
email.message_from_string(value).walk())[1:3]
@ -2571,7 +2571,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
metadata = {'X-Timestamp': timestamp, 'X-Object-Meta-test': 'data'}
df.write_metadata(metadata)
dl = os.listdir(df._datadir)
self.assertEquals(len(dl), file_count + 1)
self.assertEqual(len(dl), file_count + 1)
exp_name = '%s.meta' % timestamp
self.assertTrue(exp_name in set(dl))
@ -2638,8 +2638,8 @@ class DiskFileMixin(BaseDiskFileTestMixin):
if policy.policy_type == EC_POLICY:
expected = ['%s#2.data' % timestamp,
'%s.durable' % timestamp]
self.assertEquals(len(dl), len(expected),
'Unexpected dir listing %s' % dl)
self.assertEqual(len(dl), len(expected),
'Unexpected dir listing %s' % dl)
self.assertEqual(sorted(expected), sorted(dl))
def test_write_cleanup(self):
@ -2657,8 +2657,8 @@ class DiskFileMixin(BaseDiskFileTestMixin):
if policy.policy_type == EC_POLICY:
expected = ['%s#2.data' % timestamp_2,
'%s.durable' % timestamp_2]
self.assertEquals(len(dl), len(expected),
'Unexpected dir listing %s' % dl)
self.assertEqual(len(dl), len(expected),
'Unexpected dir listing %s' % dl)
self.assertEqual(sorted(expected), sorted(dl))
def test_commit_fsync(self):
@ -2713,8 +2713,8 @@ class DiskFileMixin(BaseDiskFileTestMixin):
expected = ['%s#2.data' % timestamp.internal,
'%s.durable' % timestamp.internal]
dl = os.listdir(df._datadir)
self.assertEquals(len(dl), len(expected),
'Unexpected dir listing %s' % dl)
self.assertEqual(len(dl), len(expected),
'Unexpected dir listing %s' % dl)
self.assertEqual(sorted(expected), sorted(dl))
def test_number_calls_to_hash_cleanup_listdir_during_create(self):
@ -2789,7 +2789,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df.delete(ts)
exp_name = '%s.ts' % ts.internal
dl = os.listdir(df._datadir)
self.assertEquals(len(dl), 1)
self.assertEqual(len(dl), 1)
self.assertTrue(exp_name in set(dl),
'Expected file %s missing in %s' % (exp_name, dl))
# cleanup before next policy
@ -2801,7 +2801,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df.delete(ts)
exp_name = '%s.ts' % str(Timestamp(ts).internal)
dl = os.listdir(df._datadir)
self.assertEquals(len(dl), 1)
self.assertEqual(len(dl), 1)
self.assertTrue(exp_name in set(dl))
df = self._simple_get_diskfile()
self.assertRaises(DiskFileDeleted, df.open)
@ -2812,7 +2812,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df.delete(ts)
exp_name = '%s.ts' % str(Timestamp(ts).internal)
dl = os.listdir(df._datadir)
self.assertEquals(len(dl), 1)
self.assertEqual(len(dl), 1)
self.assertTrue(exp_name in set(dl))
# it's pickle-format, so removing the last byte is sufficient to
# corrupt it
@ -2862,7 +2862,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
for chunk in reader:
pass
# close is called at the end of the iterator
self.assertEquals(reader._fp, None)
self.assertEqual(reader._fp, None)
error_lines = df._logger.get_lines_for_level('error')
self.assertEqual(len(error_lines), 1)
self.assertTrue('close failure' in error_lines[0])
@ -2891,7 +2891,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
try:
df.open()
except DiskFileDeleted as d:
self.assertEquals(d.timestamp, Timestamp(10).internal)
self.assertEqual(d.timestamp, Timestamp(10).internal)
else:
self.fail("Expected DiskFileDeleted exception")
@ -2907,7 +2907,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
try:
df.open()
except DiskFileDeleted as d:
self.assertEquals(d.timestamp, Timestamp(8).internal)
self.assertEqual(d.timestamp, Timestamp(8).internal)
else:
self.fail("Expected DiskFileDeleted exception")
@ -2925,8 +2925,8 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df = self._simple_get_diskfile()
with df.open():
self.assertTrue('X-Timestamp' in df._metadata)
self.assertEquals(df._metadata['X-Timestamp'],
Timestamp(10).internal)
self.assertEqual(df._metadata['X-Timestamp'],
Timestamp(10).internal)
self.assertTrue('deleted' not in df._metadata)
def test_ondisk_search_loop_data_meta_ts(self):
@ -2943,8 +2943,8 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df = self._simple_get_diskfile()
with df.open():
self.assertTrue('X-Timestamp' in df._metadata)
self.assertEquals(df._metadata['X-Timestamp'],
Timestamp(10).internal)
self.assertEqual(df._metadata['X-Timestamp'],
Timestamp(10).internal)
self.assertTrue('deleted' not in df._metadata)
def test_ondisk_search_loop_wayward_files_ignored(self):
@ -2962,8 +2962,8 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df = self._simple_get_diskfile()
with df.open():
self.assertTrue('X-Timestamp' in df._metadata)
self.assertEquals(df._metadata['X-Timestamp'],
Timestamp(10).internal)
self.assertEqual(df._metadata['X-Timestamp'],
Timestamp(10).internal)
self.assertTrue('deleted' not in df._metadata)
def test_ondisk_search_loop_listdir_error(self):
@ -3021,7 +3021,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df.delete(ts)
exp_name = '%s.ts' % str(Timestamp(ts).internal)
dl = os.listdir(df._datadir)
self.assertEquals(len(dl), 1)
self.assertEqual(len(dl), 1)
self.assertTrue(exp_name in set(dl))
df = self._simple_get_diskfile()
exc = None
@ -3053,7 +3053,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
df.delete(ts)
exp_name = '%s.ts' % str(Timestamp(ts).internal)
dl = os.listdir(df._datadir)
self.assertEquals(len(dl), 1)
self.assertEqual(len(dl), 1)
self.assertTrue(exp_name in set(dl))
df = self._simple_get_diskfile()
exc = None
@ -3086,7 +3086,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
self.fail("OSError raised when it should have been swallowed")
exp_name = '%s.ts' % str(Timestamp(ts).internal)
dl = os.listdir(df._datadir)
self.assertEquals(len(dl), file_count + 1)
self.assertEqual(len(dl), file_count + 1)
self.assertTrue(exp_name in set(dl))
def _system_can_zero_copy(self):
@ -3908,7 +3908,7 @@ class TestSuffixHashes(unittest.TestCase):
df_mgr.hash_cleanup_listdir, path)
return
files = df_mgr.hash_cleanup_listdir('/whatever')
self.assertEquals(files, output_files)
self.assertEqual(files, output_files)
# hash_cleanup_listdir tests - behaviors

View File

@ -784,7 +784,7 @@ class TestGlobalSetupObjectReconstructor(unittest.TestCase):
error_lines = self.logger.get_lines_for_level('error')
self.assertEqual(len(error_lines), 1)
log_args, log_kwargs = self.logger.log_dict['error'][0]
self.assertEquals(str(log_kwargs['exc_info'][1]), 'Ow!')
self.assertEqual(str(log_kwargs['exc_info'][1]), 'Ow!')
def test_removes_zbf(self):
# After running xfs_repair, a partition directory could become a

View File

@ -314,36 +314,36 @@ class TestObjectReplicator(unittest.TestCase):
jobs_by_pol_part = {}
for job in jobs:
jobs_by_pol_part[str(int(job['policy'])) + job['partition']] = job
self.assertEquals(len(jobs_to_delete), 2)
self.assertEqual(len(jobs_to_delete), 2)
self.assertTrue('1', jobs_to_delete[0]['partition'])
self.assertEquals(
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['00']['nodes']], [1, 2])
self.assertEquals(
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['01']['nodes']],
[1, 2, 3])
self.assertEquals(
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['02']['nodes']], [2, 3])
self.assertEquals(
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['03']['nodes']], [3, 1])
self.assertEquals(
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['10']['nodes']], [1, 2])
self.assertEquals(
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['11']['nodes']],
[1, 2, 3])
self.assertEquals(
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['12']['nodes']], [2, 3])
self.assertEquals(
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['13']['nodes']], [3, 1])
for part in ['00', '01', '02', '03']:
for node in jobs_by_pol_part[part]['nodes']:
self.assertEquals(node['device'], 'sda')
self.assertEquals(jobs_by_pol_part[part]['path'],
os.path.join(self.objects, part[1:]))
self.assertEqual(node['device'], 'sda')
self.assertEqual(jobs_by_pol_part[part]['path'],
os.path.join(self.objects, part[1:]))
for part in ['10', '11', '12', '13']:
for node in jobs_by_pol_part[part]['nodes']:
self.assertEquals(node['device'], 'sda')
self.assertEquals(jobs_by_pol_part[part]['path'],
os.path.join(self.objects_1, part[1:]))
self.assertEqual(node['device'], 'sda')
self.assertEqual(jobs_by_pol_part[part]['path'],
os.path.join(self.objects_1, part[1:]))
@mock.patch('swift.obj.replicator.random.shuffle', side_effect=lambda l: l)
def test_collect_jobs_multi_disk(self, mock_shuffle):
@ -373,7 +373,7 @@ class TestObjectReplicator(unittest.TestCase):
self.assertEqual([mock.call(jobs)], mock_shuffle.mock_calls)
jobs_to_delete = [j for j in jobs if j['delete']]
self.assertEquals(len(jobs_to_delete), 4)
self.assertEqual(len(jobs_to_delete), 4)
self.assertEqual([
'1', '2', # policy 0; 1 not on sda, 2 not on sdb
'1', '2', # policy 1; 1 not on sda, 2 not on sdb
@ -387,64 +387,64 @@ class TestObjectReplicator(unittest.TestCase):
str(int(job['policy'])) + job['partition'] + job['device']
] = job
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['00sda']['nodes']],
[1, 2])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['00sdb']['nodes']],
[0, 2])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['01sda']['nodes']],
[1, 2, 3])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['01sdb']['nodes']],
[2, 3])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['02sda']['nodes']],
[2, 3])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['02sdb']['nodes']],
[2, 3, 0])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['03sda']['nodes']],
[3, 1])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['03sdb']['nodes']],
[3, 0])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['10sda']['nodes']],
[1, 2])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['10sdb']['nodes']],
[0, 2])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['11sda']['nodes']],
[1, 2, 3])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['11sdb']['nodes']],
[2, 3])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['12sda']['nodes']],
[2, 3])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['12sdb']['nodes']],
[2, 3, 0])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['13sda']['nodes']],
[3, 1])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['13sdb']['nodes']],
[3, 0])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['00sda']['nodes']],
[1, 2])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['00sdb']['nodes']],
[0, 2])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['01sda']['nodes']],
[1, 2, 3])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['01sdb']['nodes']],
[2, 3])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['02sda']['nodes']],
[2, 3])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['02sdb']['nodes']],
[2, 3, 0])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['03sda']['nodes']],
[3, 1])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['03sdb']['nodes']],
[3, 0])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['10sda']['nodes']],
[1, 2])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['10sdb']['nodes']],
[0, 2])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['11sda']['nodes']],
[1, 2, 3])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['11sdb']['nodes']],
[2, 3])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['12sda']['nodes']],
[2, 3])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['12sdb']['nodes']],
[2, 3, 0])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['13sda']['nodes']],
[3, 1])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['13sdb']['nodes']],
[3, 0])
for part in ['00', '01', '02', '03']:
self.assertEquals(jobs_by_pol_part_dev[part + 'sda']['path'],
os.path.join(self.objects, part[1:]))
self.assertEquals(jobs_by_pol_part_dev[part + 'sdb']['path'],
os.path.join(objects_sdb, part[1:]))
self.assertEqual(jobs_by_pol_part_dev[part + 'sda']['path'],
os.path.join(self.objects, part[1:]))
self.assertEqual(jobs_by_pol_part_dev[part + 'sdb']['path'],
os.path.join(objects_sdb, part[1:]))
for part in ['10', '11', '12', '13']:
self.assertEquals(jobs_by_pol_part_dev[part + 'sda']['path'],
os.path.join(self.objects_1, part[1:]))
self.assertEquals(jobs_by_pol_part_dev[part + 'sdb']['path'],
os.path.join(objects_1_sdb, part[1:]))
self.assertEqual(jobs_by_pol_part_dev[part + 'sda']['path'],
os.path.join(self.objects_1, part[1:]))
self.assertEqual(jobs_by_pol_part_dev[part + 'sdb']['path'],
os.path.join(objects_1_sdb, part[1:]))
@mock.patch('swift.obj.replicator.random.shuffle', side_effect=lambda l: l)
def test_collect_jobs_multi_disk_diff_ports_normal(self, mock_shuffle):
@ -480,7 +480,7 @@ class TestObjectReplicator(unittest.TestCase):
self.assertEqual([mock.call(jobs)], mock_shuffle.mock_calls)
jobs_to_delete = [j for j in jobs if j['delete']]
self.assertEquals(len(jobs_to_delete), 2)
self.assertEqual(len(jobs_to_delete), 2)
self.assertEqual([
'3', # policy 0; 3 not on sdc
'3', # policy 1; 3 not on sdc
@ -494,36 +494,36 @@ class TestObjectReplicator(unittest.TestCase):
str(int(job['policy'])) + job['partition'] + job['device']
] = job
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['00sdc']['nodes']],
[0, 1])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['01sdc']['nodes']],
[1, 3])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['02sdc']['nodes']],
[3, 0])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['03sdc']['nodes']],
[3, 0, 1])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['10sdc']['nodes']],
[0, 1])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['11sdc']['nodes']],
[1, 3])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['12sdc']['nodes']],
[3, 0])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['13sdc']['nodes']],
[3, 0, 1])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['00sdc']['nodes']],
[0, 1])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['01sdc']['nodes']],
[1, 3])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['02sdc']['nodes']],
[3, 0])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['03sdc']['nodes']],
[3, 0, 1])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['10sdc']['nodes']],
[0, 1])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['11sdc']['nodes']],
[1, 3])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['12sdc']['nodes']],
[3, 0])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['13sdc']['nodes']],
[3, 0, 1])
for part in ['00', '01', '02', '03']:
self.assertEquals(jobs_by_pol_part_dev[part + 'sdc']['path'],
os.path.join(objects_sdc, part[1:]))
self.assertEqual(jobs_by_pol_part_dev[part + 'sdc']['path'],
os.path.join(objects_sdc, part[1:]))
for part in ['10', '11', '12', '13']:
self.assertEquals(jobs_by_pol_part_dev[part + 'sdc']['path'],
os.path.join(objects_1_sdc, part[1:]))
self.assertEqual(jobs_by_pol_part_dev[part + 'sdc']['path'],
os.path.join(objects_1_sdc, part[1:]))
@mock.patch('swift.obj.replicator.random.shuffle', side_effect=lambda l: l)
def test_collect_jobs_multi_disk_servers_per_port(self, mock_shuffle):
@ -561,7 +561,7 @@ class TestObjectReplicator(unittest.TestCase):
self.assertEqual([mock.call(jobs)], mock_shuffle.mock_calls)
jobs_to_delete = [j for j in jobs if j['delete']]
self.assertEquals(len(jobs_to_delete), 4)
self.assertEqual(len(jobs_to_delete), 4)
self.assertEqual([
'3', '0', # policy 0; 3 not on sdc, 0 not on sdd
'3', '0', # policy 1; 3 not on sdc, 0 not on sdd
@ -575,70 +575,70 @@ class TestObjectReplicator(unittest.TestCase):
str(int(job['policy'])) + job['partition'] + job['device']
] = job
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['00sdc']['nodes']],
[0, 1])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['00sdd']['nodes']],
[0, 1, 2])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['01sdc']['nodes']],
[1, 3])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['01sdd']['nodes']],
[1, 2])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['02sdc']['nodes']],
[3, 0])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['02sdd']['nodes']],
[2, 0])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['03sdc']['nodes']],
[3, 0, 1])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['03sdd']['nodes']],
[0, 1])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['10sdc']['nodes']],
[0, 1])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['10sdd']['nodes']],
[0, 1, 2])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['11sdc']['nodes']],
[1, 3])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['11sdd']['nodes']],
[1, 2])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['12sdc']['nodes']],
[3, 0])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['12sdd']['nodes']],
[2, 0])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['13sdc']['nodes']],
[3, 0, 1])
self.assertEquals([node['id']
for node in jobs_by_pol_part_dev['13sdd']['nodes']],
[0, 1])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['00sdc']['nodes']],
[0, 1])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['00sdd']['nodes']],
[0, 1, 2])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['01sdc']['nodes']],
[1, 3])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['01sdd']['nodes']],
[1, 2])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['02sdc']['nodes']],
[3, 0])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['02sdd']['nodes']],
[2, 0])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['03sdc']['nodes']],
[3, 0, 1])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['03sdd']['nodes']],
[0, 1])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['10sdc']['nodes']],
[0, 1])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['10sdd']['nodes']],
[0, 1, 2])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['11sdc']['nodes']],
[1, 3])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['11sdd']['nodes']],
[1, 2])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['12sdc']['nodes']],
[3, 0])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['12sdd']['nodes']],
[2, 0])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['13sdc']['nodes']],
[3, 0, 1])
self.assertEqual([node['id']
for node in jobs_by_pol_part_dev['13sdd']['nodes']],
[0, 1])
for part in ['00', '01', '02', '03']:
self.assertEquals(jobs_by_pol_part_dev[part + 'sdc']['path'],
os.path.join(objects_sdc, part[1:]))
self.assertEquals(jobs_by_pol_part_dev[part + 'sdd']['path'],
os.path.join(objects_sdd, part[1:]))
self.assertEqual(jobs_by_pol_part_dev[part + 'sdc']['path'],
os.path.join(objects_sdc, part[1:]))
self.assertEqual(jobs_by_pol_part_dev[part + 'sdd']['path'],
os.path.join(objects_sdd, part[1:]))
for part in ['10', '11', '12', '13']:
self.assertEquals(jobs_by_pol_part_dev[part + 'sdc']['path'],
os.path.join(objects_1_sdc, part[1:]))
self.assertEquals(jobs_by_pol_part_dev[part + 'sdd']['path'],
os.path.join(objects_1_sdd, part[1:]))
self.assertEqual(jobs_by_pol_part_dev[part + 'sdc']['path'],
os.path.join(objects_1_sdc, part[1:]))
self.assertEqual(jobs_by_pol_part_dev[part + 'sdd']['path'],
os.path.join(objects_1_sdd, part[1:]))
def test_collect_jobs_handoffs_first(self):
    """With handoffs_first set, handoff partitions are ordered first.

    A handoff job is one the local node does not belong to, so it is
    flagged for deletion after a successful sync; it must appear at the
    head of the job list when ``handoffs_first`` is enabled.
    """
    self.replicator.handoffs_first = True
    jobs = self.replicator.collect_jobs()
    # First job is the handoff: marked delete, and it is partition '1'.
    self.assertTrue(jobs[0]['delete'])
    # Diff residue fixed: the deprecated duplicate assertEquals line
    # was removed; assertEqual is the supported unittest spelling.
    self.assertEqual('1', jobs[0]['partition'])
def test_replicator_skips_bogus_partition_dirs(self):
# A directory in the wrong place shouldn't crash the replicator
@ -1269,7 +1269,7 @@ class TestObjectReplicator(unittest.TestCase):
self.assertFalse(process_errors)
for i, result in [('0', True), ('1', False),
('2', True), ('3', True)]:
self.assertEquals(os.access(
self.assertEqual(os.access(
os.path.join(self.objects,
i, diskfile.HASH_FILE),
os.F_OK), result)
@ -1391,15 +1391,15 @@ class TestObjectReplicator(unittest.TestCase):
self.replicator.update(job)
self.assertTrue(error in mock_logger.error.call_args[0][0])
self.assertTrue(expect in mock_logger.exception.call_args[0][0])
self.assertEquals(len(self.replicator.partition_times), 1)
self.assertEquals(mock_http.call_count, len(ring._devs) - 1)
self.assertEqual(len(self.replicator.partition_times), 1)
self.assertEqual(mock_http.call_count, len(ring._devs) - 1)
reqs = []
for node in job['nodes']:
reqs.append(mock.call(node['ip'], node['port'], node['device'],
job['partition'], 'REPLICATE', '',
headers=self.headers))
if job['partition'] == '0':
self.assertEquals(self.replicator.suffix_hash, 0)
self.assertEqual(self.replicator.suffix_hash, 0)
mock_http.assert_has_calls(reqs, any_order=True)
mock_http.reset_mock()
mock_logger.reset_mock()
@ -1411,7 +1411,7 @@ class TestObjectReplicator(unittest.TestCase):
set_default(self)
self.replicator.update(job)
self.assertTrue(error in mock_logger.error.call_args[0][0])
self.assertEquals(len(self.replicator.partition_times), 1)
self.assertEqual(len(self.replicator.partition_times), 1)
mock_logger.reset_mock()
# Check successful http_connection and exception with
@ -1422,7 +1422,7 @@ class TestObjectReplicator(unittest.TestCase):
set_default(self)
self.replicator.update(job)
self.assertTrue(expect in mock_logger.exception.call_args[0][0])
self.assertEquals(len(self.replicator.partition_times), 1)
self.assertEqual(len(self.replicator.partition_times), 1)
mock_logger.reset_mock()
# Check successful http_connection and correct
@ -1437,12 +1437,12 @@ class TestObjectReplicator(unittest.TestCase):
local_job = job.copy()
continue
self.replicator.update(job)
self.assertEquals(mock_logger.exception.call_count, 0)
self.assertEquals(mock_logger.error.call_count, 0)
self.assertEquals(len(self.replicator.partition_times), 1)
self.assertEquals(self.replicator.suffix_hash, 0)
self.assertEquals(self.replicator.suffix_sync, 0)
self.assertEquals(self.replicator.suffix_count, 0)
self.assertEqual(mock_logger.exception.call_count, 0)
self.assertEqual(mock_logger.error.call_count, 0)
self.assertEqual(len(self.replicator.partition_times), 1)
self.assertEqual(self.replicator.suffix_hash, 0)
self.assertEqual(self.replicator.suffix_sync, 0)
self.assertEqual(self.replicator.suffix_count, 0)
mock_logger.reset_mock()
# Check successful http_connect and sync for local node
@ -1458,11 +1458,11 @@ class TestObjectReplicator(unittest.TestCase):
for node in local_job['nodes']:
reqs.append(mock.call(node, local_job, ['a83']))
fake_func.assert_has_calls(reqs, any_order=True)
self.assertEquals(fake_func.call_count, 2)
self.assertEquals(self.replicator.replication_count, 1)
self.assertEquals(self.replicator.suffix_sync, 2)
self.assertEquals(self.replicator.suffix_hash, 1)
self.assertEquals(self.replicator.suffix_count, 1)
self.assertEqual(fake_func.call_count, 2)
self.assertEqual(self.replicator.replication_count, 1)
self.assertEqual(self.replicator.suffix_sync, 2)
self.assertEqual(self.replicator.suffix_hash, 1)
self.assertEqual(self.replicator.suffix_count, 1)
# Efficient Replication Case
set_default(self)
@ -1477,11 +1477,11 @@ class TestObjectReplicator(unittest.TestCase):
# The candidate nodes to replicate (i.e. dev1 and dev3)
# belong to another region
self.replicator.update(job)
self.assertEquals(fake_func.call_count, 1)
self.assertEquals(self.replicator.replication_count, 1)
self.assertEquals(self.replicator.suffix_sync, 1)
self.assertEquals(self.replicator.suffix_hash, 1)
self.assertEquals(self.replicator.suffix_count, 1)
self.assertEqual(fake_func.call_count, 1)
self.assertEqual(self.replicator.replication_count, 1)
self.assertEqual(self.replicator.suffix_sync, 1)
self.assertEqual(self.replicator.suffix_hash, 1)
self.assertEqual(self.replicator.suffix_count, 1)
mock_http.reset_mock()
mock_logger.reset_mock()

File diff suppressed because it is too large Load Diff

View File

@ -1205,7 +1205,7 @@ class TestReceiver(unittest.TestCase):
self.assertEqual(resp.status_int, 200)
self.assertFalse(self.controller.logger.exception.called)
self.assertFalse(self.controller.logger.error.called)
self.assertEquals(len(_PUT_request), 1) # sanity
self.assertEqual(len(_PUT_request), 1) # sanity
req = _PUT_request[0]
self.assertEqual(req.path, '/device/partition/a/c/o')
self.assertEqual(req.content_length, 1)
@ -1321,7 +1321,7 @@ class TestReceiver(unittest.TestCase):
self.assertEqual(resp.status_int, 200)
self.assertFalse(self.controller.logger.exception.called)
self.assertFalse(self.controller.logger.error.called)
self.assertEquals(len(_PUT_request), 1) # sanity
self.assertEqual(len(_PUT_request), 1) # sanity
req = _PUT_request[0]
self.assertEqual(req.path, '/device/partition/a/c/o')
self.assertEqual(req.content_length, 1)
@ -1378,7 +1378,7 @@ class TestReceiver(unittest.TestCase):
self.assertEqual(resp.status_int, 200)
self.assertFalse(self.controller.logger.exception.called)
self.assertFalse(self.controller.logger.error.called)
self.assertEquals(len(_PUT_request), 1) # sanity
self.assertEqual(len(_PUT_request), 1) # sanity
req = _PUT_request[0]
self.assertEqual(req.path, '/device/partition/a/c/o')
self.assertEqual(req.content_length, 1)
@ -1423,7 +1423,7 @@ class TestReceiver(unittest.TestCase):
self.assertEqual(resp.status_int, 200)
self.assertFalse(self.controller.logger.exception.called)
self.assertFalse(self.controller.logger.error.called)
self.assertEquals(len(_DELETE_request), 1) # sanity
self.assertEqual(len(_DELETE_request), 1) # sanity
req = _DELETE_request[0]
self.assertEqual(req.path, '/device/partition/a/c/o')
self.assertEqual(req.headers, {
@ -1459,7 +1459,7 @@ class TestReceiver(unittest.TestCase):
self.assertEqual(resp.status_int, 200)
self.controller.logger.exception.assert_called_once_with(
'None/device/partition EXCEPTION in replication.Receiver')
self.assertEquals(len(_BONK_request), 1) # sanity
self.assertEqual(len(_BONK_request), 1) # sanity
self.assertEqual(_BONK_request[0], None)
def test_UPDATES_multiple(self):
@ -1520,7 +1520,7 @@ class TestReceiver(unittest.TestCase):
self.assertEqual(resp.status_int, 200)
self.assertFalse(self.controller.logger.exception.called)
self.assertFalse(self.controller.logger.error.called)
self.assertEquals(len(_requests), 6) # sanity
self.assertEqual(len(_requests), 6) # sanity
req = _requests.pop(0)
self.assertEqual(req.method, 'PUT')
self.assertEqual(req.path, '/device/partition/a/c/o1')
@ -1645,7 +1645,7 @@ class TestReceiver(unittest.TestCase):
self.assertEqual(resp.status_int, 200)
self.assertFalse(self.controller.logger.exception.called)
self.assertFalse(self.controller.logger.error.called)
self.assertEquals(len(_requests), 2) # sanity
self.assertEqual(len(_requests), 2) # sanity
req = _requests.pop(0)
self.assertEqual(req.path, '/device/partition/a/c/o1')
self.assertEqual(req.content_length, 3)

View File

@ -159,7 +159,7 @@ class TestSender(BaseTestSender):
self.sender.suffixes = ['abc']
success, candidates = self.sender()
self.assertFalse(success)
self.assertEquals(candidates, {})
self.assertEqual(candidates, {})
error_lines = self.daemon.logger.get_lines_for_level('error')
self.assertEqual(1, len(error_lines))
self.assertEqual('1.2.3.4:5678/sda1/9 1 second: test connect',
@ -178,7 +178,7 @@ class TestSender(BaseTestSender):
self.sender.suffixes = ['abc']
success, candidates = self.sender()
self.assertFalse(success)
self.assertEquals(candidates, {})
self.assertEqual(candidates, {})
error_lines = self.daemon.logger.get_lines_for_level('error')
self.assertEqual(1, len(error_lines))
self.assertEqual('1.2.3.4:5678/sda1/9 test connect',
@ -193,7 +193,7 @@ class TestSender(BaseTestSender):
self.sender.connect = 'cause exception'
success, candidates = self.sender()
self.assertFalse(success)
self.assertEquals(candidates, {})
self.assertEqual(candidates, {})
error_lines = self.daemon.logger.get_lines_for_level('error')
for line in error_lines:
self.assertTrue(line.startswith(
@ -206,7 +206,7 @@ class TestSender(BaseTestSender):
self.sender.connect = 'cause exception'
success, candidates = self.sender()
self.assertFalse(success)
self.assertEquals(candidates, {})
self.assertEqual(candidates, {})
error_lines = self.daemon.logger.get_lines_for_level('error')
for line in error_lines:
self.assertTrue(line.startswith(
@ -220,7 +220,7 @@ class TestSender(BaseTestSender):
self.sender.disconnect = mock.MagicMock()
success, candidates = self.sender()
self.assertTrue(success)
self.assertEquals(candidates, {})
self.assertEqual(candidates, {})
self.sender.connect.assert_called_once_with()
self.sender.missing_check.assert_called_once_with()
self.sender.updates.assert_called_once_with()
@ -235,7 +235,7 @@ class TestSender(BaseTestSender):
self.sender.failures = 1
success, candidates = self.sender()
self.assertFalse(success)
self.assertEquals(candidates, {})
self.assertEqual(candidates, {})
self.sender.connect.assert_called_once_with()
self.sender.missing_check.assert_called_once_with()
self.sender.updates.assert_called_once_with()
@ -270,10 +270,10 @@ class TestSender(BaseTestSender):
}
for method_name, expected_calls in expectations.items():
mock_method = getattr(mock_conn, method_name)
self.assertEquals(expected_calls, mock_method.mock_calls,
'connection method "%s" got %r not %r' % (
method_name, mock_method.mock_calls,
expected_calls))
self.assertEqual(expected_calls, mock_method.mock_calls,
'connection method "%s" got %r not %r' % (
method_name, mock_method.mock_calls,
expected_calls))
def test_connect_handoff(self):
node = dict(replication_ip='1.2.3.4', replication_port=5678,
@ -304,10 +304,10 @@ class TestSender(BaseTestSender):
}
for method_name, expected_calls in expectations.items():
mock_method = getattr(mock_conn, method_name)
self.assertEquals(expected_calls, mock_method.mock_calls,
'connection method "%s" got %r not %r' % (
method_name, mock_method.mock_calls,
expected_calls))
self.assertEqual(expected_calls, mock_method.mock_calls,
'connection method "%s" got %r not %r' % (
method_name, mock_method.mock_calls,
expected_calls))
def test_connect_handoff_replicated(self):
node = dict(replication_ip='1.2.3.4', replication_port=5678,
@ -339,10 +339,10 @@ class TestSender(BaseTestSender):
}
for method_name, expected_calls in expectations.items():
mock_method = getattr(mock_conn, method_name)
self.assertEquals(expected_calls, mock_method.mock_calls,
'connection method "%s" got %r not %r' % (
method_name, mock_method.mock_calls,
expected_calls))
self.assertEqual(expected_calls, mock_method.mock_calls,
'connection method "%s" got %r not %r' % (
method_name, mock_method.mock_calls,
expected_calls))
def test_call(self):
def patch_sender(sender):
@ -535,7 +535,7 @@ class TestSender(BaseTestSender):
'putrequest', putrequest):
success, candidates = self.sender()
self.assertFalse(success)
self.assertEquals(candidates, {})
self.assertEqual(candidates, {})
error_lines = self.daemon.logger.get_lines_for_level('error')
for line in error_lines:
self.assertTrue(line.startswith(
@ -559,7 +559,7 @@ class TestSender(BaseTestSender):
FakeBufferedHTTPConnection):
success, candidates = self.sender()
self.assertFalse(success)
self.assertEquals(candidates, {})
self.assertEqual(candidates, {})
error_lines = self.daemon.logger.get_lines_for_level('error')
for line in error_lines:
self.assertTrue(line.startswith(
@ -586,7 +586,7 @@ class TestSender(BaseTestSender):
self.daemon, node, job, ['abc'])
success, candidates = self.sender()
self.assertFalse(success)
self.assertEquals(candidates, {})
self.assertEqual(candidates, {})
error_lines = self.daemon.logger.get_lines_for_level('error')
for line in error_lines:
self.assertTrue(line.startswith(

View File

@ -87,10 +87,10 @@ class TestObjectUpdater(unittest.TestCase):
'node_timeout': '5'})
self.assertTrue(hasattr(cu, 'logger'))
self.assertTrue(cu.logger is not None)
self.assertEquals(cu.devices, self.devices_dir)
self.assertEquals(cu.interval, 1)
self.assertEquals(cu.concurrency, 2)
self.assertEquals(cu.node_timeout, 5)
self.assertEqual(cu.devices, self.devices_dir)
self.assertEqual(cu.interval, 1)
self.assertEqual(cu.concurrency, 2)
self.assertEqual(cu.node_timeout, 5)
self.assertTrue(cu.get_container_ring() is not None)
@mock.patch('os.listdir')
@ -183,7 +183,7 @@ class TestObjectUpdater(unittest.TestCase):
'node_timeout': '5'})
cu.logger = mock_logger = mock.MagicMock()
cu.object_sweep(self.sda1)
self.assertEquals(mock_logger.warn.call_count, warn)
self.assertEqual(mock_logger.warn.call_count, warn)
self.assertTrue(
os.path.exists(os.path.join(self.sda1, 'not_a_dir')))
if should_skip:
@ -315,8 +315,8 @@ class TestObjectUpdater(unittest.TestCase):
out.write('HTTP/1.1 %d OK\r\nContent-Length: 0\r\n\r\n' %
return_code)
out.flush()
self.assertEquals(inc.readline(),
'PUT /sda1/0/a/c/o HTTP/1.1\r\n')
self.assertEqual(inc.readline(),
'PUT /sda1/0/a/c/o HTTP/1.1\r\n')
headers = swob.HeaderKeyDict()
line = inc.readline()
while line and line != '\r\n':

View File

@ -66,7 +66,7 @@ class TestTranslations(unittest.TestCase):
def test_translations(self):
    """Re-running this module as a script must print the translated text.

    The child process receives the current ``sys.path`` so it can import
    the same packages; its stdout must be the Esperanto test message.
    """
    path = ':'.join(sys.path)
    translated_message = check_output(['python', __file__, path])
    # Diff residue fixed: the deprecated duplicate assertEquals line
    # was removed; assertEqual is the supported unittest spelling.
    self.assertEqual(translated_message, 'prova mesaĝo\n')
if __name__ == "__main__":