Merge "Clean up a bunch of deprecation warnings"
commit 477423f60a
@@ -250,7 +250,7 @@ def validate_hash_conf():
     if six.PY3:
         # Use Latin1 to accept arbitrary bytes in the hash prefix/suffix
         with open(SWIFT_CONF_FILE, encoding='latin1') as swift_conf_file:
-            hash_conf.readfp(swift_conf_file)
+            hash_conf.read_file(swift_conf_file)
     else:
         with open(SWIFT_CONF_FILE) as swift_conf_file:
             hash_conf.readfp(swift_conf_file)
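Note: ConfigParser.readfp() has been deprecated since Python 3.2 in favor of
read_file(); on Python 2 only readfp() exists, which is why the py3 branch
changes while the py2 branch keeps the old spelling. A minimal sketch of the
pattern this commit applies throughout (the ``load_conf`` name is hypothetical;
the six usage mirrors the surrounding code):

    import six
    from six.moves.configparser import ConfigParser

    def load_conf(path):
        parser = ConfigParser()
        with open(path) as fp:
            if six.PY2:
                parser.readfp(fp)      # the only spelling available on py2
            else:
                parser.read_file(fp)   # readfp() warns on py3
        return parser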
@@ -1950,7 +1950,7 @@ class StatsdClient(object):
         warnings.warn(
             'set_prefix() is deprecated; use the ``tail_prefix`` argument of '
             'the constructor when instantiating the class instead.',
-            DeprecationWarning
+            DeprecationWarning, stacklevel=2
         )
         self._set_prefix(tail_prefix)
 
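Note: the ``stacklevel=2`` additions make each DeprecationWarning point at the
caller of the deprecated method rather than at the warnings.warn() call
itself. A self-contained illustration (``old_api``/``new_api`` are
hypothetical names):

    import warnings

    def old_api():
        # stacklevel=1 (the default) would attribute the warning to the
        # warn() line below; stacklevel=2 attributes it to old_api()'s caller
        warnings.warn('old_api() is deprecated; use new_api() instead.',
                      DeprecationWarning, stacklevel=2)

    def caller():
        old_api()  # the warning is now reported against this line

    warnings.simplefilter('always')
    caller()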
@@ -2282,8 +2282,13 @@ class LogAdapter(logging.LoggerAdapter, object):
         in the proxy-server to differentiate the Account, Container, and Object
         controllers.
         """
+        warnings.warn(
+            'set_statsd_prefix() is deprecated; use the '
+            '``statsd_tail_prefix`` argument to ``get_logger`` instead.',
+            DeprecationWarning, stacklevel=2
+        )
         if self.logger.statsd_client:
-            self.logger.statsd_client.set_prefix(prefix)
+            self.logger.statsd_client._set_prefix(prefix)
 
 def statsd_delegate(statsd_func_name):
     """
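Note: the replacement the new warning recommends is to hand the prefix to
get_logger() up front instead of mutating it afterwards. A sketch under that
assumption (the conf dict is illustrative):

    from swift.common.utils import get_logger

    conf = {'log_statsd_host': 'localhost'}
    # instead of get_logger(...) followed by the now-deprecated
    # logger.set_statsd_prefix('object-server'):
    logger = get_logger(conf, log_route='object-server',
                        statsd_tail_prefix='object-server')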
@@ -3159,7 +3164,10 @@ def readconf(conf_path, section_name=None, log_name=None, defaults=None,
     if hasattr(conf_path, 'readline'):
         if hasattr(conf_path, 'seek'):
             conf_path.seek(0)
-        c.readfp(conf_path)
+        if six.PY2:
+            c.readfp(conf_path)
+        else:
+            c.read_file(conf_path)
     else:
         if os.path.isdir(conf_path):
             # read all configs in directory
@@ -3600,7 +3608,7 @@ def ratelimit_sleep(running_time, max_rate, incr_by=1, rate_buffer=5):
     """
     warnings.warn(
         'ratelimit_sleep() is deprecated; use the ``EventletRateLimiter`` '
-        'class instead.', DeprecationWarning
+        'class instead.', DeprecationWarning, stacklevel=2
     )
     rate_limit = EventletRateLimiter(max_rate, rate_buffer=rate_buffer,
                                      running_time=running_time)
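Note: ratelimit_sleep() now just wraps EventletRateLimiter, as the context
lines show. A sketch of using the class directly, which is what the warning
recommends (the wait() call is our reading of the intended replacement;
``work_items`` and ``process()`` are placeholders):

    from swift.common.utils import EventletRateLimiter

    limiter = EventletRateLimiter(max_rate=100)  # operations per second
    for item in work_items:
        limiter.wait()   # sleeps via eventlet just enough to hold the rate
        process(item)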
@@ -25,6 +25,7 @@ from swift import gettext_ as _
 import sys
 from textwrap import dedent
 import time
+import warnings
 
 import eventlet
 import eventlet.debug
@@ -128,7 +129,10 @@ class ConfigString(NamedConfigLoader):
         self.parser.optionxform = str  # Don't lower-case keys
         # Defaults don't need interpolation (crazy PasteDeploy...)
         self.parser.defaults = lambda: dict(self.parser._defaults, **defaults)
-        self.parser.readfp(self.contents)
+        if six.PY2:
+            self.parser.readfp(self.contents)
+        else:
+            self.parser.read_file(self.contents)
 
     def readline(self, *args, **kwargs):
         return self.contents.readline(*args, **kwargs)
@@ -428,6 +432,9 @@ def run_server(conf, logger, sock, global_conf=None, ready_callback=None,
     }
     if ready_callback:
         ready_callback()
+    # Yes, eventlet, we know -- we have to support bad clients, though
+    warnings.filterwarnings(
+        'ignore', message='capitalize_response_headers is disabled')
     try:
         wsgi.server(sock, app, wsgi_logger, **server_kwargs)
     except socket.error as err:
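Note: warnings.filterwarnings('ignore', message=...) matches the given regex
against the start of the warning message, so the filter added to run_server()
silences only eventlet's capitalize_response_headers nag, process-wide. A
compact demonstration:

    import warnings

    warnings.filterwarnings(
        'ignore', message='capitalize_response_headers is disabled')

    # suppressed: the message starts with the filtered text
    warnings.warn('capitalize_response_headers is disabled by this server')
    # still emitted: different message
    warnings.warn('some other warning')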
@@ -51,8 +51,9 @@ warnings.filterwarnings('ignore', message=(
     'Therefore, support for it is deprecated in cryptography '
     'and will be removed in a future release.'))
 
+import unittest
+
 if sys.version_info < (3, 2):
-    import unittest
     unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
     unittest.TestCase.assertRegex = unittest.TestCase.assertRegexpMatches
 
@@ -132,3 +133,34 @@ def annotate_failure(msg):
             err_val = '%s Failed with %s' % (msg, err)
             err_typ = AssertionError
         reraise(err_typ, err_val, err_tb)
+
+
+class BaseTestCase(unittest.TestCase):
+    def _assertDictContainsSubset(self, subset, dictionary, msg=None):
+        """Checks whether dictionary is a superset of subset."""
+        # This is almost identical to the method in the python3.4 version of
+        # unittest.case.TestCase.assertDictContainsSubset, reproduced here to
+        # avoid the deprecation warning in the original when using python3.
+        missing = []
+        mismatched = []
+        for key, value in subset.items():
+            if key not in dictionary:
+                missing.append(key)
+            elif value != dictionary[key]:
+                mismatched.append('%s, expected: %s, actual: %s' %
+                                  (safe_repr(key), safe_repr(value),
+                                   safe_repr(dictionary[key])))
+
+        if not (missing or mismatched):
+            return
+
+        standardMsg = ''
+        if missing:
+            standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
+                                                   missing)
+        if mismatched:
+            if standardMsg:
+                standardMsg += '; '
+            standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
+
+        self.fail(self._formatMessage(msg, standardMsg))
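Note: assertDictContainsSubset was deprecated in Python 3.2, which is why the
new shared BaseTestCase re-implements it as _assertDictContainsSubset. Usage
is unchanged apart from the leading underscore; a minimal sketch (the
``TestHeaders`` class is hypothetical):

    from test import BaseTestCase

    class TestHeaders(BaseTestCase):
        def test_subset(self):
            headers = {'X-Object-Meta-Flavor': 'banana',
                       'Content-Length': '7'}
            # passes: every key/value pair in the subset appears in headers
            self._assertDictContainsSubset(
                {'X-Object-Meta-Flavor': 'banana'}, headers)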
@@ -72,7 +72,7 @@ GOOD_RESPONSE_V3 = {'token': {
 }}
 
 
-class TestResponse(requests.Response):
+class FakeResponse(requests.Response):
     """Utility class to wrap requests.Response.
 
     Class used to wrap requests.Response and provide some convenience to
@@ -81,7 +81,7 @@ class TestResponse(requests.Response):
 
     def __init__(self, data):
         self._text = None
-        super(TestResponse, self).__init__()
+        super(FakeResponse, self).__init__()
         if isinstance(data, dict):
             self.status_code = data.get('status_code', 200)
             headers = data.get('headers')
@@ -328,7 +328,7 @@ class S3TokenMiddlewareTestGood(S3TokenMiddlewareTestBase):
             'insecure': 'True', 'auth_uri': 'http://example.com'})
 
         text_return_value = json.dumps(GOOD_RESPONSE_V2)
-        MOCK_REQUEST.return_value = TestResponse({
+        MOCK_REQUEST.return_value = FakeResponse({
             'status_code': 201,
             'text': text_return_value})
 
@@ -413,7 +413,7 @@ class S3TokenMiddlewareTestGood(S3TokenMiddlewareTestBase):
             'auth_uri': 'http://example.com',
         })
 
-        MOCK_REQUEST.return_value = TestResponse({
+        MOCK_REQUEST.return_value = FakeResponse({
            'status_code': 201,
            'text': json.dumps(GOOD_RESPONSE_V2)})
 
@@ -536,7 +536,7 @@ class S3TokenMiddlewareTestGood(S3TokenMiddlewareTestBase):
         fake_cache_response = ({}, {'id': 'tenant_id'}, 'secret')
         cache.get.return_value = fake_cache_response
 
-        MOCK_REQUEST.return_value = TestResponse({
+        MOCK_REQUEST.return_value = FakeResponse({
             'status_code': 201,
             'text': json.dumps(GOOD_RESPONSE_V2)})
 
@@ -578,7 +578,7 @@ class S3TokenMiddlewareTestGood(S3TokenMiddlewareTestBase):
         keystone_client = MOCK_KEYSTONE.return_value
         keystone_client.ec2.get.return_value = mock.Mock(secret='secret')
 
-        MOCK_REQUEST.return_value = TestResponse({
+        MOCK_REQUEST.return_value = FakeResponse({
             'status_code': 201,
             'text': json.dumps(GOOD_RESPONSE_V2).encode('ascii')})
 
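Note: the TestResponse to FakeResponse rename here (and TestReplicator to
ConcreteReplicator below) most likely exists because test collectors treat
Test*-named classes in test modules as test cases, and a helper with a
required constructor argument then provokes collection warnings.
Schematically:

    # collected by pytest/nose-style discovery, and warned about because it
    # cannot be instantiated without arguments:
    class TestResponse(object):
        def __init__(self, data):
            self.data = data

    # the same helper under a non-Test name is ignored by discovery:
    class FakeResponse(object):
        def __init__(self, data):
            self.data = data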
@@ -287,7 +287,7 @@ class FakeAccountBroker(FakeBroker):
     info = {'account': TEST_ACCOUNT_NAME}
 
 
-class TestReplicator(db_replicator.Replicator):
+class ConcreteReplicator(db_replicator.Replicator):
     server_type = 'container'
     ring_file = 'container.ring.gz'
     brokerclass = FakeBroker
@@ -323,11 +323,11 @@ class TestDBReplicator(unittest.TestCase):
 
     def test_creation(self):
         # later config should be extended to assert more config options
-        replicator = TestReplicator({'node_timeout': '3.5'})
+        replicator = ConcreteReplicator({'node_timeout': '3.5'})
         self.assertEqual(replicator.node_timeout, 3.5)
         self.assertEqual(replicator.databases_per_second, 50.0)
 
-        replicator = TestReplicator({'databases_per_second': '0.1'})
+        replicator = ConcreteReplicator({'databases_per_second': '0.1'})
         self.assertEqual(replicator.node_timeout, 10)
         self.assertEqual(replicator.databases_per_second, 0.1)
 
@@ -365,7 +365,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertEqual(None, conn.sock)
 
     def test_rsync_file(self):
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         with _mock_process(-1):
             self.assertEqual(
                 False,
@@ -376,7 +376,7 @@ class TestDBReplicator(unittest.TestCase):
             replicator._rsync_file('/some/file', 'remote:/some/file'))
 
     def test_rsync_file_popen_args(self):
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         with _mock_process(0) as process:
             replicator._rsync_file('/some/file', 'remote:/some_file')
             exp_args = ([
@@ -387,7 +387,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertEqual(exp_args, process.args)
 
     def test_rsync_file_popen_args_whole_file_false(self):
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         with _mock_process(0) as process:
             replicator._rsync_file('/some/file', 'remote:/some_file', False)
             exp_args = ([
@@ -398,7 +398,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertEqual(exp_args, process.args)
 
     def test_rsync_file_popen_args_different_region_and_rsync_compress(self):
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         for rsync_compress in (False, True):
             replicator.rsync_compress = rsync_compress
             for different_region in (False, True):
@@ -415,7 +415,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertFalse('--compress' in process.args[0])
 
     def test_rsync_db(self):
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         replicator._rsync_file = lambda *args, **kwargs: True
         fake_device = {'replication_ip': '127.0.0.1', 'device': 'sda1'}
         replicator._rsync_db(FakeBroker(), fake_device, ReplHttp(), 'abcd')
@@ -425,7 +425,7 @@ class TestDBReplicator(unittest.TestCase):
             'replication_ip': '127.0.0.1', 'replication_port': '0',
             'device': 'sda1'}
 
-        class MyTestReplicator(TestReplicator):
+        class MyTestReplicator(ConcreteReplicator):
             def __init__(self, db_file, remote_file):
                 super(MyTestReplicator, self).__init__({})
                 self.db_file = db_file
@@ -445,7 +445,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertTrue(replicator._rsync_file_called)
 
     def test_rsync_db_rsync_file_failure(self):
-        class MyTestReplicator(TestReplicator):
+        class MyTestReplicator(ConcreteReplicator):
             def __init__(self):
                 super(MyTestReplicator, self).__init__({})
                 self._rsync_file_called = False
@@ -465,7 +465,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertEqual(True, replicator._rsync_file_called)
 
     def test_rsync_db_change_after_sync(self):
-        class MyTestReplicator(TestReplicator):
+        class MyTestReplicator(ConcreteReplicator):
             def __init__(self, broker):
                 super(MyTestReplicator, self).__init__({})
                 self.broker = broker
@@ -505,7 +505,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertEqual(2, replicator._rsync_file_call_count)
 
     def test_in_sync(self):
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         self.assertEqual(replicator._in_sync(
             {'id': 'a', 'point': 0, 'max_row': 0, 'hash': 'b'},
             {'id': 'a', 'point': -1, 'max_row': 0, 'hash': 'b'},
@@ -520,8 +520,8 @@ class TestDBReplicator(unittest.TestCase):
             FakeBroker(), -1)), False)
 
     def test_run_once_no_local_device_in_ring(self):
-        replicator = TestReplicator({'recon_cache_path': self.recon_cache},
-                                    logger=self.logger)
+        replicator = ConcreteReplicator({'recon_cache_path': self.recon_cache},
+                                        logger=self.logger)
         with patch('swift.common.db_replicator.whataremyips',
                    return_value=['127.0.0.1']):
             replicator.run_once()
@@ -535,14 +535,15 @@ class TestDBReplicator(unittest.TestCase):
         base = 'swift.common.db_replicator.'
         with patch(base + 'whataremyips', return_value=['1.1.1.1']), \
                 patch(base + 'ring', FakeRingWithNodes()):
-            replicator = TestReplicator({'bind_port': 6200,
-                                         'recon_cache_path': self.recon_cache},
-                                        logger=self.logger)
+            replicator = ConcreteReplicator({
+                'bind_port': 6200,
+                'recon_cache_path': self.recon_cache
+            }, logger=self.logger)
             replicator.run_once()
         self.assertFalse(self.logger.get_lines_for_level('error'))
 
     def test_run_once_no_ips(self):
-        replicator = TestReplicator({}, logger=self.logger)
+        replicator = ConcreteReplicator({}, logger=self.logger)
         self._patch(patch.object, db_replicator, 'whataremyips',
                     lambda *a, **kw: [])
 
@@ -558,7 +559,7 @@ class TestDBReplicator(unittest.TestCase):
         # returned by itself.
         conf = {'mount_check': 'true', 'bind_ip': '1.1.1.1',
                 'bind_port': 6200}
-        replicator = TestReplicator(conf, logger=self.logger)
+        replicator = ConcreteReplicator(conf, logger=self.logger)
         self.assertEqual(replicator.mount_check, True)
         self.assertEqual(replicator.port, 6200)
 
@@ -581,7 +582,7 @@ class TestDBReplicator(unittest.TestCase):
     def test_run_once_node_is_mounted(self):
         db_replicator.ring = FakeRingWithSingleNode()
         conf = {'mount_check': 'true', 'bind_port': 6200}
-        replicator = TestReplicator(conf, logger=self.logger)
+        replicator = ConcreteReplicator(conf, logger=self.logger)
         self.assertEqual(replicator.mount_check, True)
         self.assertEqual(replicator.port, 6200)
 
@@ -622,25 +623,25 @@ class TestDBReplicator(unittest.TestCase):
 
     def test_usync(self):
         fake_http = ReplHttp()
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         replicator._usync_db(0, FakeBroker(), fake_http, '12345', '67890')
 
     def test_usync_http_error_above_300(self):
         fake_http = ReplHttp(set_status=301)
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         self.assertFalse(
             replicator._usync_db(0, FakeBroker(), fake_http, '12345', '67890'))
 
     def test_usync_http_error_below_200(self):
         fake_http = ReplHttp(set_status=101)
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         self.assertFalse(
             replicator._usync_db(0, FakeBroker(), fake_http, '12345', '67890'))
 
     @mock.patch('swift.common.db_replicator.dump_recon_cache')
     @mock.patch('swift.common.db_replicator.time.time', return_value=1234.5678)
     def test_stats(self, mock_time, mock_recon_cache):
-        replicator = TestReplicator({}, logger=self.logger)
+        replicator = ConcreteReplicator({}, logger=self.logger)
         replicator._zero_stats()
         self.assertEqual(replicator.stats['start'], mock_time.return_value)
         replicator._report_stats()
@@ -696,7 +697,7 @@ class TestDBReplicator(unittest.TestCase):
         # verify return values from replicate_object
         db_replicator.ring = FakeRingWithNodes()
         db_path = '/path/to/file'
-        replicator = TestReplicator({}, logger=self.logger)
+        replicator = ConcreteReplicator({}, logger=self.logger)
         info = FakeBroker().get_replication_info()
         # make remote appear to be in sync
         rinfo = {'point': info['max_row'], 'id': 'remote_id'}
@@ -796,7 +797,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertFalse(replicator.logger.get_lines_for_level('warning'))
 
     def test_replicate_object_quarantine(self):
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         self._patch(patch.object, replicator.brokerclass, 'db_file',
                     '/a/b/c/d/e/hey')
         self._patch(patch.object, replicator.brokerclass,
@@ -821,7 +822,7 @@ class TestDBReplicator(unittest.TestCase):
             replicator._replicate_object('0', 'file', 'node_id')
 
     def test_replicate_object_delete_because_deleted(self):
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         try:
             replicator.delete_db = self.stub_delete_db
             replicator.brokerclass.stub_replication_info = {
@@ -832,7 +833,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertEqual(['/path/to/file'], self.delete_db_calls)
 
     def test_replicate_object_delete_because_not_shouldbehere(self):
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         replicator.ring = FakeRingWithNodes().Ring('path')
         replicator.brokerclass = FakeAccountBroker
         replicator._repl_to_node = lambda *args: True
@@ -850,7 +851,7 @@ class TestDBReplicator(unittest.TestCase):
     def test_handoff_delete(self):
         def do_test(config, repl_to_node_results, expect_delete):
             self.delete_db_calls = []
-            replicator = TestReplicator(config)
+            replicator = ConcreteReplicator(config)
             replicator.ring = FakeRingWithNodes().Ring('path')
             replicator.brokerclass = FakeAccountBroker
             mock_repl_to_node = mock.Mock()
@@ -893,7 +894,7 @@ class TestDBReplicator(unittest.TestCase):
             do_test(cfg, repl_results, expected_delete)
 
     def test_replicate_object_delete_delegated_to_cleanup_post_replicate(self):
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         replicator.ring = FakeRingWithNodes().Ring('path')
         replicator.brokerclass = FakeAccountBroker
         replicator._repl_to_node = lambda *args: True
@@ -934,7 +935,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertEqual(2, replicator.stats['success'])
 
     def test_cleanup_post_replicate(self):
-        replicator = TestReplicator({}, logger=self.logger)
+        replicator = ConcreteReplicator({}, logger=self.logger)
         replicator.ring = FakeRingWithNodes().Ring('path')
         broker = FakeBroker()
         replicator._repl_to_node = lambda *args: True
@@ -1000,7 +1001,7 @@ class TestDBReplicator(unittest.TestCase):
         replicator.logger.clear()
 
     def test_replicate_object_with_exception(self):
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         replicator.ring = FakeRingWithNodes().Ring('path')
         replicator.brokerclass = FakeAccountBroker
         replicator.delete_db = self.stub_delete_db
@@ -1033,7 +1034,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertEqual(4, replicator._repl_to_node.call_count)
 
    def test_replicate_object_with_exception_run_out_of_nodes(self):
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         replicator.ring = FakeRingWithNodes().Ring('path')
         replicator.brokerclass = FakeAccountBroker
         replicator.delete_db = self.stub_delete_db
@@ -1044,7 +1045,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertEqual(5, replicator._repl_to_node.call_count)
 
     def test_replicate_account_out_of_place(self):
-        replicator = TestReplicator({}, logger=self.logger)
+        replicator = ConcreteReplicator({}, logger=self.logger)
         replicator.ring = FakeRingWithNodes().Ring('path')
         replicator.brokerclass = FakeAccountBroker
         replicator._repl_to_node = lambda *args: True
@@ -1060,7 +1061,7 @@ class TestDBReplicator(unittest.TestCase):
         self.assertEqual(error_msgs, [expected])
 
     def test_replicate_container_out_of_place(self):
-        replicator = TestReplicator({}, logger=self.logger)
+        replicator = ConcreteReplicator({}, logger=self.logger)
         replicator.ring = FakeRingWithNodes().Ring('path')
         replicator._repl_to_node = lambda *args: True
         replicator.delete_db = self.stub_delete_db
@@ -1076,7 +1077,7 @@ class TestDBReplicator(unittest.TestCase):
             'be on partition 0; will replicate out and remove.'])
 
     def test_replicate_container_out_of_place_no_node(self):
-        replicator = TestReplicator({}, logger=self.logger)
+        replicator = ConcreteReplicator({}, logger=self.logger)
         replicator.ring = FakeRingWithSingleNode().Ring('path')
         replicator._repl_to_node = lambda *args: True
 
@@ -1101,7 +1102,7 @@ class TestDBReplicator(unittest.TestCase):
 
     def test_replicate_object_different_region(self):
         db_replicator.ring = FakeRingWithNodes()
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         replicator._repl_to_node = mock.Mock()
         # For node_id = 1, one replica in same region(1) and other is in a
         # different region(2). Refer: FakeRingWithNodes
@@ -1115,7 +1116,7 @@ class TestDBReplicator(unittest.TestCase):
 
     def test_delete_db(self):
         db_replicator.lock_parent_directory = lock_parent_directory
-        replicator = TestReplicator({}, logger=self.logger)
+        replicator = ConcreteReplicator({}, logger=self.logger)
         replicator._zero_stats()
         replicator.extract_device = lambda _: 'some_device'
 
@@ -1175,7 +1176,7 @@ class TestDBReplicator(unittest.TestCase):
         rmtree(temp_dir)
 
     def test_extract_device(self):
-        replicator = TestReplicator({'devices': '/some/root'})
+        replicator = ConcreteReplicator({'devices': '/some/root'})
         self.assertEqual('some_device', replicator.extract_device(
             '/some/root/some_device/deeper/and/deeper'))
         self.assertEqual('UNKNOWN', replicator.extract_device(
@@ -1784,7 +1785,7 @@ class TestDBReplicator(unittest.TestCase):
         node = "node"
         partition = "partition"
         db_file = __file__
-        replicator = TestReplicator({})
+        replicator = ConcreteReplicator({})
         replicator._http_connect(node, partition, db_file)
         expected_hsh = os.path.basename(db_file).split('.', 1)[0]
         expected_hsh = expected_hsh.split('_', 1)[0]
@@ -1890,7 +1891,7 @@ class TestHandoffsOnly(unittest.TestCase):
         rmtree(self.root, ignore_errors=True)
 
     def test_scary_warnings(self):
-        replicator = TestReplicator({
+        replicator = ConcreteReplicator({
            'handoffs_only': 'yes',
            'devices': self.root,
            'bind_port': 6201,
@@ -1914,7 +1915,7 @@ class TestHandoffsOnly(unittest.TestCase):
                 'disable them.')])
 
     def test_skips_primary_partitions(self):
-        replicator = TestReplicator({
+        replicator = ConcreteReplicator({
            'handoffs_only': 'yes',
            'devices': self.root,
            'bind_port': 6201,
@@ -1938,7 +1939,7 @@ class TestHandoffsOnly(unittest.TestCase):
                 'bcbcbcbc15d3835053d568c57e2c83b5.db'), 1)])
 
     def test_override_partitions(self):
-        replicator = TestReplicator({
+        replicator = ConcreteReplicator({
            'devices': self.root,
            'bind_port': 6201,
            'mount_check': 'no',
@@ -1961,7 +1962,7 @@ class TestHandoffsOnly(unittest.TestCase):
                 'bcbcbcbc15d3835053d568c57e2c83b5.db'), 1)])
 
     def test_override_devices(self):
-        replicator = TestReplicator({
+        replicator = ConcreteReplicator({
            'devices': self.root,
            'bind_port': 6201,
            'mount_check': 'no',
@@ -1984,7 +1985,7 @@ class TestHandoffsOnly(unittest.TestCase):
                 'abababab2b5368158355e799323b498d.db'), 0)])
 
     def test_override_devices_and_partitions(self):
-        replicator = TestReplicator({
+        replicator = ConcreteReplicator({
            'devices': self.root,
            'bind_port': 6201,
            'mount_check': 'no',
@@ -2008,7 +2009,7 @@ class TestReplToNode(unittest.TestCase):
         db_replicator.ring = FakeRing()
         self.delete_db_calls = []
         self.broker = FakeBroker()
-        self.replicator = TestReplicator({'per_diff': 10})
+        self.replicator = ConcreteReplicator({'per_diff': 10})
         self.fake_node = {'ip': '127.0.0.1', 'device': 'sda1', 'port': 1000}
         self.fake_info = {'id': 'a', 'point': -1, 'max_row': 20, 'hash': 'b',
                           'created_at': 100, 'put_timestamp': 0,
@@ -74,9 +74,10 @@ class TestStoragePolicies(unittest.TestCase):
         conf_str = "\n".join(line.strip() for line in conf_str.split("\n"))
         if six.PY2:
             conf = ConfigParser()
+            conf.readfp(six.StringIO(conf_str))
         else:
             conf = ConfigParser(strict=False)
-        conf.readfp(six.StringIO(conf_str))
+            conf.read_file(six.StringIO(conf_str))
         return conf
 
     def assertRaisesWithMessage(self, exc_class, message, f, *args, **kwargs):
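Note: ``strict=False`` restores Python 2's tolerance for duplicate sections
and options, presumably because some of the test configurations here repeat
keys; the py3 default (``strict=True``) raises instead. A short demonstration
of the difference:

    from configparser import ConfigParser, DuplicateOptionError
    from io import StringIO

    conf_str = u'[storage-policy:0]\nname = gold\nname = gold\n'
    # tolerated, last value wins:
    ConfigParser(strict=False).read_file(StringIO(conf_str))
    try:
        ConfigParser().read_file(StringIO(conf_str))
    except DuplicateOptionError:
        pass  # the strict default rejects the duplicate 'name'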
@@ -2429,9 +2429,11 @@ class TestUtils(unittest.TestCase):
     def _test_validate_hash_conf(self, sections, options, should_raise_error):
 
         class FakeConfigParser(object):
-            def readfp(self, fp):
+            def read_file(self, fp):
                 pass
 
+            readfp = read_file
+
             def get(self, section, option):
                 if section not in sections:
                     raise NoSectionError('section error')
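Note: the ``readfp = read_file`` class attribute keeps the fake answering both
spellings, since the code under test calls readfp() on py2 and read_file() on
py3. Both names bind to the same function object; in isolation
(``FakeParser`` is an illustrative stand-in):

    class FakeParser(object):
        def read_file(self, fp):
            pass

        readfp = read_file  # alias: old and new call sites hit the same code

    fake = FakeParser()
    fake.readfp(None)      # identical behavior
    fake.read_file(None)   # either way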
@@ -2770,54 +2772,63 @@ log_name = %(yarr)s'''
                              "Expected %d < 100" % diff_from_target_ms)
 
     def test_ratelimit_sleep(self):
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                'ignore', r'ratelimit_sleep\(\) is deprecated')
 
-        def testfunc():
-            running_time = 0
-            for i in range(100):
-                running_time = utils.ratelimit_sleep(running_time, -5)
+            def testfunc():
+                running_time = 0
+                for i in range(100):
+                    running_time = utils.ratelimit_sleep(running_time, -5)
 
-        self.verify_under_pseudo_time(testfunc, target_runtime_ms=1)
+            self.verify_under_pseudo_time(testfunc, target_runtime_ms=1)
 
-        def testfunc():
-            running_time = 0
-            for i in range(100):
-                running_time = utils.ratelimit_sleep(running_time, 0)
+            def testfunc():
+                running_time = 0
+                for i in range(100):
+                    running_time = utils.ratelimit_sleep(running_time, 0)
 
-        self.verify_under_pseudo_time(testfunc, target_runtime_ms=1)
+            self.verify_under_pseudo_time(testfunc, target_runtime_ms=1)
 
-        def testfunc():
-            running_time = 0
-            for i in range(50):
-                running_time = utils.ratelimit_sleep(running_time, 200)
+            def testfunc():
+                running_time = 0
+                for i in range(50):
+                    running_time = utils.ratelimit_sleep(running_time, 200)
 
-        self.verify_under_pseudo_time(testfunc, target_runtime_ms=250)
+            self.verify_under_pseudo_time(testfunc, target_runtime_ms=250)
 
     def test_ratelimit_sleep_with_incr(self):
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                'ignore', r'ratelimit_sleep\(\) is deprecated')
 
-        def testfunc():
-            running_time = 0
-            vals = [5, 17, 0, 3, 11, 30,
-                    40, 4, 13, 2, -1] * 2  # adds up to 248
-            total = 0
-            for i in vals:
-                running_time = utils.ratelimit_sleep(running_time,
-                                                     500, incr_by=i)
-                total += i
-            self.assertEqual(248, total)
+            def testfunc():
+                running_time = 0
+                vals = [5, 17, 0, 3, 11, 30,
+                        40, 4, 13, 2, -1] * 2  # adds up to 248
+                total = 0
+                for i in vals:
+                    running_time = utils.ratelimit_sleep(running_time,
+                                                         500, incr_by=i)
+                    total += i
+                self.assertEqual(248, total)
 
-        self.verify_under_pseudo_time(testfunc, target_runtime_ms=500)
+            self.verify_under_pseudo_time(testfunc, target_runtime_ms=500)
 
     def test_ratelimit_sleep_with_sleep(self):
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                'ignore', r'ratelimit_sleep\(\) is deprecated')
 
-        def testfunc():
-            running_time = 0
-            sleeps = [0] * 7 + [.2] * 3 + [0] * 30
-            for i in sleeps:
-                running_time = utils.ratelimit_sleep(running_time, 40,
-                                                     rate_buffer=1)
-                time.sleep(i)
+            def testfunc():
+                running_time = 0
+                sleeps = [0] * 7 + [.2] * 3 + [0] * 30
+                for i in sleeps:
+                    running_time = utils.ratelimit_sleep(running_time, 40,
+                                                         rate_buffer=1)
+                    time.sleep(i)
 
-        self.verify_under_pseudo_time(testfunc, target_runtime_ms=900)
+            self.verify_under_pseudo_time(testfunc, target_runtime_ms=900)
 
     def test_search_tree(self):
         # file match & ext miss
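Note: wrapping each deprecated call in warnings.catch_warnings() scopes the
ignore filter to the block, so these tests stay quiet without hiding the
deprecation from the rest of the run. The pattern in isolation (``legacy()``
is a hypothetical stand-in):

    import warnings

    def legacy():
        warnings.warn('legacy() is deprecated', DeprecationWarning,
                      stacklevel=2)

    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', r'legacy\(\) is deprecated')
        legacy()  # silenced here

    legacy()  # outside the block, normal filters apply again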
@@ -5425,12 +5436,18 @@ class TestStatsdLogging(unittest.TestCase):
         # note: set_statsd_prefix is deprecated
         logger2 = utils.get_logger({'log_statsd_host': 'some.host.com'},
                                    'other-name', log_route='some-route')
-        logger.set_statsd_prefix('some-name.more-specific')
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                'ignore', r'set_statsd_prefix\(\) is deprecated')
+            logger.set_statsd_prefix('some-name.more-specific')
         self.assertEqual(logger.logger.statsd_client._prefix,
                          'some-name.more-specific.')
         self.assertEqual(logger2.logger.statsd_client._prefix,
                          'some-name.more-specific.')
-        logger.set_statsd_prefix('')
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                'ignore', r'set_statsd_prefix\(\) is deprecated')
+            logger.set_statsd_prefix('')
         self.assertEqual(logger.logger.statsd_client._prefix, '')
         self.assertEqual(logger2.logger.statsd_client._prefix, '')
 
@@ -5452,10 +5469,16 @@ class TestStatsdLogging(unittest.TestCase):
                          'tomato.sauce.some-name.more-specific.')
 
         # note: set_statsd_prefix is deprecated
-        logger.set_statsd_prefix('some-name.more-specific')
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                'ignore', r'set_statsd_prefix\(\) is deprecated')
+            logger.set_statsd_prefix('some-name.more-specific')
         self.assertEqual(logger.logger.statsd_client._prefix,
                          'tomato.sauce.some-name.more-specific.')
-        logger.set_statsd_prefix('')
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                'ignore', r'set_statsd_prefix\(\) is deprecated')
+            logger.set_statsd_prefix('')
         self.assertEqual(logger.logger.statsd_client._prefix, 'tomato.sauce.')
         self.assertEqual(logger.logger.statsd_client._host, 'another.host.com')
         self.assertEqual(logger.logger.statsd_client._port, 9876)
@@ -5491,10 +5514,10 @@ class TestStatsdLogging(unittest.TestCase):
             logger.set_statsd_prefix('some-name.more-specific')
         msgs = [str(warning.message)
                 for warning in cm
-                if str(warning.message).startswith('set_prefix')]
+                if str(warning.message).startswith('set_statsd_prefix')]
         self.assertEqual(
-            ['set_prefix() is deprecated; use the ``tail_prefix`` argument of '
-             'the constructor when instantiating the class instead.'],
+            ['set_statsd_prefix() is deprecated; use the '
+             '``statsd_tail_prefix`` argument to ``get_logger`` instead.'],
             msgs)
 
     def test_ipv4_or_ipv6_hostname_defaults_to_ipv4(self):
@@ -6274,7 +6297,10 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
                           self.logger.update_stats, 'another.counter', 42)
 
         # Each call can override the sample_rate (also, bonus prefix test)
-        self.logger.set_statsd_prefix('pfx')
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                'ignore', r'set_statsd_prefix\(\) is deprecated')
+            self.logger.set_statsd_prefix('pfx')
         self.assertStat('pfx.some.counter:1|c|@0.972', self.logger.increment,
                         'some.counter', sample_rate=0.972)
         self.assertStat('pfx.some.counter:-1|c|@0.972', self.logger.decrement,
@@ -6290,7 +6316,10 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
                         sample_rate=0.972)
 
         # Can override sample_rate with non-keyword arg
-        self.logger.set_statsd_prefix('')
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                'ignore', r'set_statsd_prefix\(\) is deprecated')
+            self.logger.set_statsd_prefix('')
         self.assertStat('some.counter:1|c|@0.939', self.logger.increment,
                         'some.counter', 0.939)
         self.assertStat('some.counter:-1|c|@0.939', self.logger.decrement,
@@ -6338,7 +6367,10 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
                         sample_rate=0.9912)
 
         # Can override sample_rate with non-keyword arg
-        self.logger.set_statsd_prefix('')
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                'ignore', r'set_statsd_prefix\(\) is deprecated')
+            self.logger.set_statsd_prefix('')
         self.assertStat('some.counter:1|c|@0.987654', self.logger.increment,
                         'some.counter', 0.987654)
         self.assertStat('some.counter:-1|c|@0.987654', self.logger.decrement,
@@ -6371,7 +6403,10 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
         self.assertStat('alpha.beta.pfx.another.counter:3|c',
                         self.logger.update_stats, 'another.counter', 3)
 
-        self.logger.set_statsd_prefix('')
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                'ignore', r'set_statsd_prefix\(\) is deprecated')
+            self.logger.set_statsd_prefix('')
         self.assertStat('alpha.beta.some.counter:1|c|@0.9912',
                         self.logger.increment, 'some.counter',
                         sample_rate=0.9912)
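Note: the ``cm`` in the -5491 hunk is the usual recorded-warnings context
manager; asserting on the captured messages is what forced the expected text
to change along with the warning itself. The capture pattern, schematically:

    import warnings

    with warnings.catch_warnings(record=True) as cm:
        warnings.simplefilter('always')
        warnings.warn('set_statsd_prefix() is deprecated; ...',
                      DeprecationWarning)

    msgs = [str(w.message) for w in cm
            if str(w.message).startswith('set_statsd_prefix')]
    assert len(msgs) == 1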
@@ -2139,8 +2139,8 @@ class TestContainerBroker(unittest.TestCase):
 
         iters = 100
         for i in range(iters):
-            policy_index = random.randint(0, iters * 0.1)
-            name = 'object-%s' % random.randint(0, iters * 0.1)
+            policy_index = random.randint(0, iters // 10)
+            name = 'object-%s' % random.randint(0, iters // 10)
             size = random.randint(0, iters)
             broker.put_object(name, next(ts).internal, size, 'text/plain',
                               '5af83e3196bf99f440f31f2e1a6c9afe',
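Note: ``iters * 0.1`` is a float, and random.randint() with non-integer bounds
emits a DeprecationWarning on newer Pythons (and is slated to become an
error); ``iters // 10`` expresses the same bound exactly as an int:

    import random

    iters = 100
    bad_bound = iters * 0.1    # 10.0, a float bound (deprecated)
    good_bound = iters // 10   # 10, exact integer division
    value = random.randint(0, good_bound)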
@@ -25,7 +25,7 @@ from contextlib import closing
 from gzip import GzipFile
 from tempfile import mkdtemp
 import time
-
+import warnings
 
 from eventlet import spawn, wsgi
 import mock
@@ -215,6 +215,10 @@ def setup_servers(the_object_server=object_server, extra_conf=None):
     logging_prosv = proxy_logging.ProxyLoggingMiddleware(
         listing_formats.ListingFilter(prosrv, {}, logger=prosrv.logger),
         conf, logger=prosrv.logger)
+    # Yes, eventlet, we know -- we have to support bad clients, though
+    warnings.filterwarnings(
+        'ignore', module='eventlet',
+        message='capitalize_response_headers is disabled')
     prospa = spawn(wsgi.server, prolis, logging_prosv, nl,
                    protocol=SwiftHttpProtocol,
                    capitalize_response_headers=False)
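Note: unlike the message-only filter added in the server code above, this one
also passes ``module='eventlet'``, narrowing the suppression to warnings
attributed to eventlet's own modules. Schematically:

    import warnings

    # message-only: matches regardless of where the warning originates
    warnings.filterwarnings(
        'ignore', message='capitalize_response_headers is disabled')

    # message + module: only matches when the warning is attributed to a
    # module whose name matches the 'eventlet' regex
    warnings.filterwarnings(
        'ignore', module='eventlet',
        message='capitalize_response_headers is disabled')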
@@ -27,6 +27,8 @@ from shutil import rmtree
 from tempfile import mkdtemp
 import textwrap
 from os.path import dirname, basename
+
+from test import BaseTestCase
 from test.debug_logger import debug_logger
 from test.unit import (
     DEFAULT_TEST_EC_TYPE, make_timestamp_iter, patch_policies,
@@ -111,7 +113,7 @@ class FakeRing2(object):
         return (1, nodes)
 
 
-class TestAuditorBase(unittest.TestCase):
+class TestAuditorBase(BaseTestCase):
 
     def setUp(self):
         skip_if_no_xattrs()
@@ -1715,23 +1717,23 @@ class TestAuditWatchers(TestAuditorBase):
         # irrelevant; what matters is that it finds all the things.
         calls[2:5] = sorted(calls[2:5], key=lambda item: item[1]['name'])
 
-        self.assertDictContainsSubset({'name': '/a/c/o0',
-                                       'X-Object-Meta-Flavor': 'banana'},
-                                      calls[2][1])
+        self._assertDictContainsSubset({'name': '/a/c/o0',
+                                        'X-Object-Meta-Flavor': 'banana'},
+                                       calls[2][1])
         self.assertIn('node/sda/objects/0/', calls[2][2])  # data_file_path
         self.assertTrue(calls[2][2].endswith('.data'))  # data_file_path
         self.assertEqual({}, calls[2][3])
 
-        self.assertDictContainsSubset({'name': '/a/c/o1',
-                                       'X-Object-Meta-Flavor': 'orange'},
-                                      calls[3][1])
+        self._assertDictContainsSubset({'name': '/a/c/o1',
+                                        'X-Object-Meta-Flavor': 'orange'},
+                                       calls[3][1])
         self.assertIn('node/sda/objects/0/', calls[3][2])  # data_file_path
         self.assertTrue(calls[3][2].endswith('.data'))  # data_file_path
         self.assertEqual({}, calls[3][3])
 
-        self.assertDictContainsSubset({'name': '/a/c_ec/o',
-                                       'X-Object-Meta-Flavor': 'peach'},
-                                      calls[4][1])
+        self._assertDictContainsSubset({'name': '/a/c_ec/o',
+                                        'X-Object-Meta-Flavor': 'peach'},
+                                       calls[4][1])
         self.assertIn('node/sda/objects-2/0/', calls[4][2])  # data_file_path
         self.assertTrue(calls[4][2].endswith('.data'))  # data_file_path
         self.assertEqual({}, calls[4][3])
@@ -19,7 +19,6 @@ import six.moves.cPickle as pickle
 import os
 import errno
 import itertools
-from unittest.util import safe_repr
 import mock
 import unittest
 import email
@@ -40,6 +39,7 @@ import pyeclib.ec_iface
 
 from eventlet import hubs, timeout, tpool
 from swift.obj.diskfile import MD5_OF_EMPTY_STRING, update_auditor_status
+from test import BaseTestCase
 from test.debug_logger import debug_logger
 from test.unit import (mock as unit_mock, temptree, mock_check_drive,
                        patch_policies, EMPTY_ETAG, make_timestamp_iter,
@@ -1012,35 +1012,6 @@ class BaseDiskFileTestMixin(object):
         return '.'.join([
             mgr_cls.__module__, mgr_cls.__name__, manager_attribute_name])
 
-    def _assertDictContainsSubset(self, subset, dictionary, msg=None):
-        """Checks whether dictionary is a superset of subset."""
-        # This is almost identical to the method in python3.4 version of
-        # unitest.case.TestCase.assertDictContainsSubset, reproduced here to
-        # avoid the deprecation warning in the original when using python3.
-        missing = []
-        mismatched = []
-        for key, value in subset.items():
-            if key not in dictionary:
-                missing.append(key)
-            elif value != dictionary[key]:
-                mismatched.append('%s, expected: %s, actual: %s' %
-                                  (safe_repr(key), safe_repr(value),
-                                   safe_repr(dictionary[key])))
-
-        if not (missing or mismatched):
-            return
-
-        standardMsg = ''
-        if missing:
-            standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
-                                                   missing)
-        if mismatched:
-            if standardMsg:
-                standardMsg += '; '
-            standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
-
-        self.fail(self._formatMessage(msg, standardMsg))
-
 
 class DiskFileManagerMixin(BaseDiskFileTestMixin):
     """
@@ -2008,7 +1979,7 @@ class DiskFileManagerMixin(BaseDiskFileTestMixin):
 
 
 @patch_policies
-class TestDiskFileManager(DiskFileManagerMixin, unittest.TestCase):
+class TestDiskFileManager(DiskFileManagerMixin, BaseTestCase):
 
     mgr_cls = diskfile.DiskFileManager
 
@@ -2319,7 +2290,7 @@ class TestDiskFileManager(DiskFileManagerMixin, unittest.TestCase):
 
 
 @patch_policies(with_ec_default=True)
-class TestECDiskFileManager(DiskFileManagerMixin, unittest.TestCase):
+class TestECDiskFileManager(DiskFileManagerMixin, BaseTestCase):
 
     mgr_cls = diskfile.ECDiskFileManager
 
@@ -40,7 +40,7 @@ from eventlet.green import httplib
 
 from swift import __version__ as swift_version
 from swift.common.http import is_success
-from test import listen_zero
+from test import listen_zero, BaseTestCase
 from test.debug_logger import debug_logger
 from test.unit import mocked_http_conn, \
     make_timestamp_iter, DEFAULT_TEST_EC_TYPE, skip_if_no_xattrs, \
@@ -134,7 +134,7 @@ class TestTpoolSize(unittest.TestCase):
 
 
 @patch_policies(test_policies)
-class TestObjectController(unittest.TestCase):
+class TestObjectController(BaseTestCase):
     """Test swift.obj.server.ObjectController"""
 
     def setUp(self):
@@ -3996,7 +3996,7 @@ class TestObjectController(unittest.TestCase):
                   'X-Backend-Durable-Timestamp': ts_0.internal,
                   'X-Object-Sysmeta-Ec-Frag-Index': '0',
                   'X-Object-Meta-Test': 'abc'}
-        self.assertDictContainsSubset(expect, resp.headers)
+        self._assertDictContainsSubset(expect, resp.headers)
         self.assertEqual(backend_frags, json.loads(
             resp.headers['X-Backend-Fragments']))
 
@@ -4007,7 +4007,7 @@ class TestObjectController(unittest.TestCase):
                   'X-Backend-Timestamp': ts_2.internal,
                   'X-Backend-Data-Timestamp': ts_2.internal,
                   'X-Backend-Durable-Timestamp': ts_2.internal}
-        self.assertDictContainsSubset(expect, resp.headers)
+        self._assertDictContainsSubset(expect, resp.headers)
         self.assertNotIn('X-Object-Meta-Test', resp.headers)
 
         # Sanity check: Request with no preferences should default to the
@@ -4067,7 +4067,7 @@ class TestObjectController(unittest.TestCase):
                   'X-Backend-Data-Timestamp': ts_2.internal,
                   'X-Backend-Durable-Timestamp': ts_0.internal,
                   'X-Object-Sysmeta-Ec-Frag-Index': '2'}
-        self.assertDictContainsSubset(expect, resp.headers)
+        self._assertDictContainsSubset(expect, resp.headers)
         self.assertEqual(backend_frags, json.loads(
             resp.headers['X-Backend-Fragments']))
         self.assertNotIn('X-Object-Meta-Test', resp.headers)