From c8131c6abb17b00b043a9b95e65be819e1fd3c9d Mon Sep 17 00:00:00 2001 From: janonymous Date: Sun, 12 Jul 2015 14:48:35 +0530 Subject: [PATCH 01/70] sys.exc_type/exc_value/exc_traceback are Deprecated sys.exc_info() contains a tuple of these three. Change-Id: I530cbeb37c43da98b4924db41f6604871077bd47 --- test/unit/common/test_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index be98ed8cfb..3a927140ea 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -4228,7 +4228,7 @@ class TestThreadPool(unittest.TestCase): except ZeroDivisionError: # NB: format is (filename, line number, function name, text) tb_func = [elem[2] for elem - in traceback.extract_tb(sys.exc_traceback)] + in traceback.extract_tb(sys.exc_info()[2])] else: self.fail("Expected ZeroDivisionError") From 2289137164231d7872731c2cf3d81b86f34f01a4 Mon Sep 17 00:00:00 2001 From: John Dickinson Date: Sat, 23 May 2015 15:40:03 -0700 Subject: [PATCH 02/70] do container listing updates in another (green)thread The actual server-side changes are simple. The tests are a different matter. Many changes were needed to the object server tests to handle the now-async calls to the container server. In an effort to test this properly, some drive-by changes were made to improve tests. I tested this patch by doing zero-byte object writes to one container as fast as possible. Then I did it again while also saturating 2 of the container replica's disks. The results are linked below. https://gist.github.com/notmyname/2bb85acfd8fbc7fc312a DocImpact Change-Id: I737bd0af3f124a4ce3e0862a155e97c1f0ac3e52 --- doc/manpages/object-server.conf.5 | 2 + doc/source/deployment_guide.rst | 150 +++++++------ etc/object-server.conf-sample | 2 + swift/obj/server.py | 37 +++- test/unit/obj/test_server.py | 348 +++++++++++++++++++++--------- 5 files changed, 360 insertions(+), 179 deletions(-) diff --git a/doc/manpages/object-server.conf.5 b/doc/manpages/object-server.conf.5 index fb2297421a..518e72586e 100644 --- a/doc/manpages/object-server.conf.5 +++ b/doc/manpages/object-server.conf.5 @@ -129,6 +129,8 @@ Logging address. The default is /dev/log. Request timeout to external services. The default is 3 seconds. .IP \fBconn_timeout\fR Connection timeout to external services. The default is 0.5 seconds. +.IP \fBcontainer_update_timeout\fR +Request timeout to do a container update on an object update. The default is 1 second. .RE .PD diff --git a/doc/source/deployment_guide.rst b/doc/source/deployment_guide.rst index b26f3ceff1..bec3f55ecd 100644 --- a/doc/source/deployment_guide.rst +++ b/doc/source/deployment_guide.rst @@ -405,76 +405,86 @@ The following configuration options are available: [DEFAULT] -=================== ========== ============================================= -Option Default Description -------------------- ---------- --------------------------------------------- -swift_dir /etc/swift Swift configuration directory -devices /srv/node Parent directory of where devices are mounted -mount_check true Whether or not check if the devices are - mounted to prevent accidentally writing - to the root device -bind_ip 0.0.0.0 IP Address for server to bind to -bind_port 6000 Port for server to bind to -bind_timeout 30 Seconds to attempt bind before giving up -workers auto Override the number of pre-forked workers - that will accept connections. If set it - should be an integer, zero means no fork. 
If - unset, it will try to default to the number - of effective cpu cores and fallback to one. - Increasing the number of workers helps slow - filesystem operations in one request from - negatively impacting other requests, but only - the :ref:`servers_per_port - ` - option provides complete I/O isolation with - no measurable overhead. -servers_per_port 0 If each disk in each storage policy ring has - unique port numbers for its "ip" value, you - can use this setting to have each - object-server worker only service requests - for the single disk matching the port in the - ring. The value of this setting determines - how many worker processes run for each port - (disk) in the ring. If you have 24 disks - per server, and this setting is 4, then - each storage node will have 1 + (24 * 4) = - 97 total object-server processes running. - This gives complete I/O isolation, drastically - reducing the impact of slow disks on storage - node performance. The object-replicator and - object-reconstructor need to see this setting - too, so it must be in the [DEFAULT] section. - See :ref:`server-per-port-configuration`. -max_clients 1024 Maximum number of clients one worker can - process simultaneously (it will actually - accept(2) N + 1). Setting this to one (1) - will only handle one request at a time, - without accepting another request - concurrently. -disable_fallocate false Disable "fast fail" fallocate checks if the - underlying filesystem does not support it. -log_max_line_length 0 Caps the length of log lines to the - value given; no limit if set to 0, the - default. -log_custom_handlers None Comma-separated list of functions to call - to setup custom log handlers. -eventlet_debug false If true, turn on debug logging for eventlet -fallocate_reserve 0 You can set fallocate_reserve to the number of - bytes you'd like fallocate to reserve, whether - there is space for the given file size or not. - This is useful for systems that behave badly - when they completely run out of space; you can - make the services pretend they're out of space - early. -conn_timeout 0.5 Time to wait while attempting to connect to - another backend node. -node_timeout 3 Time to wait while sending each chunk of data - to another backend node. -client_timeout 60 Time to wait while receiving each chunk of - data from a client or another backend node. -network_chunk_size 65536 Size of chunks to read/write over the network -disk_chunk_size 65536 Size of chunks to read/write to disk -=================== ========== ============================================= +======================== ========== ========================================== +Option Default Description +------------------------ ---------- ------------------------------------------ +swift_dir /etc/swift Swift configuration directory +devices /srv/node Parent directory of where devices are + mounted +mount_check true Whether or not check if the devices are + mounted to prevent accidentally writing + to the root device +bind_ip 0.0.0.0 IP Address for server to bind to +bind_port 6000 Port for server to bind to +bind_timeout 30 Seconds to attempt bind before giving up +workers auto Override the number of pre-forked workers + that will accept connections. If set it + should be an integer, zero means no fork. + If unset, it will try to default to the + number of effective cpu cores and fallback + to one. 
Increasing the number of workers + helps slow filesystem operations in one + request from negatively impacting other + requests, but only the + :ref:`servers_per_port + ` option + provides complete I/O isolation with no + measurable overhead. +servers_per_port 0 If each disk in each storage policy ring + has unique port numbers for its "ip" + value, you can use this setting to have + each object-server worker only service + requests for the single disk matching the + port in the ring. The value of this + setting determines how many worker + processes run for each port (disk) in the + ring. If you have 24 disks per server, and + this setting is 4, then each storage node + will have 1 + (24 * 4) = 97 total + object-server processes running. This + gives complete I/O isolation, drastically + reducing the impact of slow disks on + storage node performance. The + object-replicator and object-reconstructor + need to see this setting too, so it must + be in the [DEFAULT] section. + See :ref:`server-per-port-configuration`. +max_clients 1024 Maximum number of clients one worker can + process simultaneously (it will actually + accept(2) N + 1). Setting this to one (1) + will only handle one request at a time, + without accepting another request + concurrently. +disable_fallocate false Disable "fast fail" fallocate checks if + the underlying filesystem does not support + it. +log_max_line_length 0 Caps the length of log lines to the + value given; no limit if set to 0, the + default. +log_custom_handlers None Comma-separated list of functions to call + to setup custom log handlers. +eventlet_debug false If true, turn on debug logging for + eventlet +fallocate_reserve 0 You can set fallocate_reserve to the + number of bytes you'd like fallocate to + reserve, whether there is space for the + given file size or not. This is useful for + systems that behave badly when they + completely run out of space; you can + make the services pretend they're out of + space early. +conn_timeout 0.5 Time to wait while attempting to connect + to another backend node. +node_timeout 3 Time to wait while sending each chunk of + data to another backend node. +client_timeout 60 Time to wait while receiving each chunk of + data from a client or another backend node +network_chunk_size 65536 Size of chunks to read/write over the + network +disk_chunk_size 65536 Size of chunks to read/write to disk +container_update_timeout 1 Time to wait while sending a container + update on object update. +======================== ========== ========================================== .. _object-server-options: diff --git a/etc/object-server.conf-sample b/etc/object-server.conf-sample index b36ec29aa6..31bd160a3e 100644 --- a/etc/object-server.conf-sample +++ b/etc/object-server.conf-sample @@ -60,6 +60,8 @@ bind_port = 6000 # conn_timeout = 0.5 # Time to wait while sending each chunk of data to another backend node. # node_timeout = 3 +# Time to wait while sending a container update on object update. +# container_update_timeout = 1.0 # Time to wait while receiving each chunk of data from a client or another # backend node. 
# client_timeout = 60 diff --git a/swift/obj/server.py b/swift/obj/server.py index 85c85544e4..fbe534ac60 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -28,6 +28,7 @@ from swift import gettext_ as _ from hashlib import md5 from eventlet import sleep, wsgi, Timeout +from eventlet.greenthread import spawn from swift.common.utils import public, get_logger, \ config_true_value, timing_stats, replication, \ @@ -108,7 +109,9 @@ class ObjectController(BaseStorageServer): """ super(ObjectController, self).__init__(conf) self.logger = logger or get_logger(conf, log_route='object-server') - self.node_timeout = int(conf.get('node_timeout', 3)) + self.node_timeout = float(conf.get('node_timeout', 3)) + self.container_update_timeout = float( + conf.get('container_update_timeout', 1)) self.conn_timeout = float(conf.get('conn_timeout', 0.5)) self.client_timeout = int(conf.get('client_timeout', 60)) self.disk_chunk_size = int(conf.get('disk_chunk_size', 65536)) @@ -198,7 +201,8 @@ class ObjectController(BaseStorageServer): device, partition, account, container, obj, policy, **kwargs) def async_update(self, op, account, container, obj, host, partition, - contdevice, headers_out, objdevice, policy): + contdevice, headers_out, objdevice, policy, + logger_thread_locals=None): """ Sends or saves an async update. @@ -213,7 +217,12 @@ class ObjectController(BaseStorageServer): request :param objdevice: device name that the object is in :param policy: the associated BaseStoragePolicy instance + :param logger_thread_locals: The thread local values to be set on the + self.logger to retain transaction + logging information. """ + if logger_thread_locals: + self.logger.thread_locals = logger_thread_locals headers_out['user-agent'] = 'object-server %s' % os.getpid() full_path = '/%s/%s/%s' % (account, container, obj) if all([host, partition, contdevice]): @@ -285,10 +294,28 @@ class ObjectController(BaseStorageServer): headers_out['x-trans-id'] = headers_in.get('x-trans-id', '-') headers_out['referer'] = request.as_referer() headers_out['X-Backend-Storage-Policy-Index'] = int(policy) + update_greenthreads = [] for conthost, contdevice in updates: - self.async_update(op, account, container, obj, conthost, - contpartition, contdevice, headers_out, - objdevice, policy) + gt = spawn(self.async_update, op, account, container, obj, + conthost, contpartition, contdevice, headers_out, + objdevice, policy, + logger_thread_locals=self.logger.thread_locals) + update_greenthreads.append(gt) + # Wait a little bit to see if the container updates are successful. + # If we immediately return after firing off the greenthread above, then + # we're more likely to confuse the end-user who does a listing right + # after getting a successful response to the object create. The + # `container_update_timeout` bounds the length of time we wait so that + # one slow container server doesn't make the entire request lag. 
+ try: + with Timeout(self.container_update_timeout): + for gt in update_greenthreads: + gt.wait() + except Timeout: + # updates didn't go through, log it and return + self.logger.debug( + 'Container update timeout (%.4fs) waiting for %s', + self.container_update_timeout, updates) def delete_at_update(self, op, delete_at, account, container, obj, request, objdevice, policy): diff --git a/test/unit/obj/test_server.py b/test/unit/obj/test_server.py index bff913cb57..1e7a303ea4 100755 --- a/test/unit/obj/test_server.py +++ b/test/unit/obj/test_server.py @@ -33,8 +33,9 @@ from tempfile import mkdtemp from hashlib import md5 import itertools import tempfile +from contextlib import contextmanager -from eventlet import sleep, spawn, wsgi, listen, Timeout, tpool +from eventlet import sleep, spawn, wsgi, listen, Timeout, tpool, greenthread from eventlet.green import httplib from nose import SkipTest @@ -67,6 +68,35 @@ test_policies = [ ] +@contextmanager +def fake_spawn(): + """ + Spawn and capture the result so we can later wait on it. This means we can + test code executing in a greenthread but still wait() on the result to + ensure that the method has completed. + """ + + orig = object_server.spawn + greenlets = [] + + def _inner_fake_spawn(func, *a, **kw): + gt = greenthread.spawn(func, *a, **kw) + greenlets.append(gt) + return gt + + object_server.spawn = _inner_fake_spawn + + try: + yield + finally: + for gt in greenlets: + try: + gt.wait() + except: # noqa + pass # real spawn won't do anything but pollute logs + object_server.spawn = orig + + @patch_policies(test_policies) class TestObjectController(unittest.TestCase): """Test swift.obj.server.ObjectController""" @@ -371,55 +401,54 @@ class TestObjectController(unittest.TestCase): return lambda *args, **kwargs: FakeConn(response, with_exc) - old_http_connect = object_server.http_connect - try: - ts = time() - timestamp = normalize_timestamp(ts) - req = Request.blank( - '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, - headers={'X-Timestamp': timestamp, - 'Content-Type': 'text/plain', - 'Content-Length': '0'}) + ts = time() + timestamp = normalize_timestamp(ts) + req = Request.blank( + '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Timestamp': timestamp, + 'Content-Type': 'text/plain', + 'Content-Length': '0'}) + resp = req.get_response(self.object_controller) + self.assertEquals(resp.status_int, 201) + req = Request.blank( + '/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Timestamp': normalize_timestamp(ts + 1), + 'X-Container-Host': '1.2.3.4:0', + 'X-Container-Partition': '3', + 'X-Container-Device': 'sda1', + 'X-Container-Timestamp': '1', + 'Content-Type': 'application/new1'}) + with mock.patch.object(object_server, 'http_connect', + mock_http_connect(202)): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) - req = Request.blank( - '/sda1/p/a/c/o', - environ={'REQUEST_METHOD': 'POST'}, - headers={'X-Timestamp': normalize_timestamp(ts + 1), - 'X-Container-Host': '1.2.3.4:0', - 'X-Container-Partition': '3', - 'X-Container-Device': 'sda1', - 'X-Container-Timestamp': '1', - 'Content-Type': 'application/new1'}) - object_server.http_connect = mock_http_connect(202) + self.assertEquals(resp.status_int, 202) + req = Request.blank( + '/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Timestamp': normalize_timestamp(ts + 2), + 'X-Container-Host': '1.2.3.4:0', + 'X-Container-Partition': '3', + 'X-Container-Device': 'sda1', + 'X-Container-Timestamp': 
'1', + 'Content-Type': 'application/new1'}) + with mock.patch.object(object_server, 'http_connect', + mock_http_connect(202, with_exc=True)): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) - req = Request.blank( - '/sda1/p/a/c/o', - environ={'REQUEST_METHOD': 'POST'}, - headers={'X-Timestamp': normalize_timestamp(ts + 2), - 'X-Container-Host': '1.2.3.4:0', - 'X-Container-Partition': '3', - 'X-Container-Device': 'sda1', - 'X-Container-Timestamp': '1', - 'Content-Type': 'application/new1'}) - object_server.http_connect = mock_http_connect(202, with_exc=True) + self.assertEquals(resp.status_int, 202) + req = Request.blank( + '/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'POST'}, + headers={'X-Timestamp': normalize_timestamp(ts + 3), + 'X-Container-Host': '1.2.3.4:0', + 'X-Container-Partition': '3', + 'X-Container-Device': 'sda1', + 'X-Container-Timestamp': '1', + 'Content-Type': 'application/new2'}) + with mock.patch.object(object_server, 'http_connect', + mock_http_connect(500)): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) - req = Request.blank( - '/sda1/p/a/c/o', - environ={'REQUEST_METHOD': 'POST'}, - headers={'X-Timestamp': normalize_timestamp(ts + 3), - 'X-Container-Host': '1.2.3.4:0', - 'X-Container-Partition': '3', - 'X-Container-Device': 'sda1', - 'X-Container-Timestamp': '1', - 'Content-Type': 'application/new2'}) - object_server.http_connect = mock_http_connect(500) - resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) - finally: - object_server.http_connect = old_http_connect + self.assertEquals(resp.status_int, 202) def test_POST_quarantine_zbyte(self): timestamp = normalize_timestamp(time()) @@ -1219,52 +1248,54 @@ class TestObjectController(unittest.TestCase): return lambda *args, **kwargs: FakeConn(response, with_exc) - old_http_connect = object_server.http_connect - try: - timestamp = normalize_timestamp(time()) - req = Request.blank( - '/sda1/p/a/c/o', - environ={'REQUEST_METHOD': 'PUT'}, - headers={'X-Timestamp': timestamp, - 'X-Container-Host': '1.2.3.4:0', - 'X-Container-Partition': '3', - 'X-Container-Device': 'sda1', - 'X-Container-Timestamp': '1', - 'Content-Type': 'application/new1', - 'Content-Length': '0'}) - object_server.http_connect = mock_http_connect(201) - resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) - timestamp = normalize_timestamp(time()) - req = Request.blank( - '/sda1/p/a/c/o', - environ={'REQUEST_METHOD': 'PUT'}, - headers={'X-Timestamp': timestamp, - 'X-Container-Host': '1.2.3.4:0', - 'X-Container-Partition': '3', - 'X-Container-Device': 'sda1', - 'X-Container-Timestamp': '1', - 'Content-Type': 'application/new1', - 'Content-Length': '0'}) - object_server.http_connect = mock_http_connect(500) - resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) - timestamp = normalize_timestamp(time()) - req = Request.blank( - '/sda1/p/a/c/o', - environ={'REQUEST_METHOD': 'PUT'}, - headers={'X-Timestamp': timestamp, - 'X-Container-Host': '1.2.3.4:0', - 'X-Container-Partition': '3', - 'X-Container-Device': 'sda1', - 'X-Container-Timestamp': '1', - 'Content-Type': 'application/new1', - 'Content-Length': '0'}) - object_server.http_connect = mock_http_connect(500, with_exc=True) - resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) - finally: - object_server.http_connect = old_http_connect + timestamp = normalize_timestamp(time()) 
+ req = Request.blank( + '/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Timestamp': timestamp, + 'X-Container-Host': '1.2.3.4:0', + 'X-Container-Partition': '3', + 'X-Container-Device': 'sda1', + 'X-Container-Timestamp': '1', + 'Content-Type': 'application/new1', + 'Content-Length': '0'}) + with mock.patch.object(object_server, 'http_connect', + mock_http_connect(201)): + with fake_spawn(): + resp = req.get_response(self.object_controller) + self.assertEquals(resp.status_int, 201) + timestamp = normalize_timestamp(time()) + req = Request.blank( + '/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Timestamp': timestamp, + 'X-Container-Host': '1.2.3.4:0', + 'X-Container-Partition': '3', + 'X-Container-Device': 'sda1', + 'X-Container-Timestamp': '1', + 'Content-Type': 'application/new1', + 'Content-Length': '0'}) + with mock.patch.object(object_server, 'http_connect', + mock_http_connect(500)): + with fake_spawn(): + resp = req.get_response(self.object_controller) + self.assertEquals(resp.status_int, 201) + timestamp = normalize_timestamp(time()) + req = Request.blank( + '/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Timestamp': timestamp, + 'X-Container-Host': '1.2.3.4:0', + 'X-Container-Partition': '3', + 'X-Container-Device': 'sda1', + 'X-Container-Timestamp': '1', + 'Content-Type': 'application/new1', + 'Content-Length': '0'}) + with mock.patch.object(object_server, 'http_connect', + mock_http_connect(500, with_exc=True)): + with fake_spawn(): + resp = req.get_response(self.object_controller) + self.assertEquals(resp.status_int, 201) def test_PUT_ssync_multi_frag(self): timestamp = utils.Timestamp(time()).internal @@ -2407,7 +2438,8 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'text/plain'}) with mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - resp = req.get_response(self.object_controller) + with fake_spawn(): + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEquals(1, len(container_updates)) @@ -2446,7 +2478,8 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'text/html'}) with mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - resp = req.get_response(self.object_controller) + with fake_spawn(): + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEquals(1, len(container_updates)) @@ -2484,7 +2517,8 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'text/enriched'}) with mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - resp = req.get_response(self.object_controller) + with fake_spawn(): + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEquals(1, len(container_updates)) @@ -2522,7 +2556,8 @@ class TestObjectController(unittest.TestCase): 'X-Container-Partition': 'p'}) with mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - resp = req.get_response(self.object_controller) + with fake_spawn(): + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 204) self.assertEquals(1, len(container_updates)) @@ -2553,7 +2588,8 @@ class TestObjectController(unittest.TestCase): 'X-Container-Partition': 'p'}) with 
mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - resp = req.get_response(self.object_controller) + with fake_spawn(): + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 404) self.assertEquals(1, len(container_updates)) @@ -3022,7 +3058,8 @@ class TestObjectController(unittest.TestCase): with mock.patch.object(object_server, 'http_connect', fake_http_connect): - resp = req.get_response(self.object_controller) + with fake_spawn(): + resp = req.get_response(self.object_controller) self.assertEqual(resp.status_int, 201) @@ -3135,7 +3172,8 @@ class TestObjectController(unittest.TestCase): with mock.patch.object(object_server, 'http_connect', fake_http_connect): - req.get_response(self.object_controller) + with fake_spawn(): + req.get_response(self.object_controller) http_connect_args.sort(key=operator.itemgetter('ipaddr')) @@ -3212,7 +3250,8 @@ class TestObjectController(unittest.TestCase): '/sda1/p/a/c/o', method='PUT', body='', headers=headers) with mocked_http_conn( 500, 500, give_connect=capture_updates) as fake_conn: - resp = req.get_response(self.object_controller) + with fake_spawn(): + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEquals(2, len(container_updates)) @@ -3448,7 +3487,8 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'text/plain'}, body='') with mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - resp = req.get_response(self.object_controller) + with fake_spawn(): + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEqual(len(container_updates), 1) @@ -3489,7 +3529,8 @@ class TestObjectController(unittest.TestCase): headers=headers, body='') with mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - resp = req.get_response(self.object_controller) + with fake_spawn(): + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEqual(len(container_updates), 1) @@ -3529,7 +3570,8 @@ class TestObjectController(unittest.TestCase): diskfile_mgr = self.object_controller._diskfile_router[policy] diskfile_mgr.pickle_async_update = fake_pickle_async_update with mocked_http_conn(500) as fake_conn: - resp = req.get_response(self.object_controller) + with fake_spawn(): + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEqual(len(given_args), 7) @@ -3556,6 +3598,104 @@ class TestObjectController(unittest.TestCase): 'container': 'c', 'op': 'PUT'}) + def test_container_update_as_greenthread(self): + greenthreads = [] + saved_spawn_calls = [] + called_async_update_args = [] + + def local_fake_spawn(func, *a, **kw): + saved_spawn_calls.append((func, a, kw)) + return mock.MagicMock() + + def local_fake_async_update(*a, **kw): + # just capture the args to see that we would have called + called_async_update_args.append([a, kw]) + + req = Request.blank( + '/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Timestamp': '12345', + 'Content-Type': 'application/burrito', + 'Content-Length': '0', + 'X-Backend-Storage-Policy-Index': 0, + 'X-Container-Partition': '20', + 'X-Container-Host': '1.2.3.4:5', + 
'X-Container-Device': 'sdb1'}) + with mock.patch.object(object_server, 'spawn', + local_fake_spawn): + with mock.patch.object(self.object_controller, + 'async_update', + local_fake_async_update): + resp = req.get_response(self.object_controller) + # check the response is completed and successful + self.assertEqual(resp.status_int, 201) + # check that async_update hasn't been called + self.assertFalse(len(called_async_update_args)) + # now do the work in greenthreads + for func, a, kw in saved_spawn_calls: + gt = spawn(func, *a, **kw) + greenthreads.append(gt) + # wait for the greenthreads to finish + for gt in greenthreads: + gt.wait() + # check that the calls to async_update have happened + headers_out = {'X-Size': '0', + 'X-Content-Type': 'application/burrito', + 'X-Timestamp': '0000012345.00000', + 'X-Trans-Id': '-', + 'Referer': 'PUT http://localhost/sda1/p/a/c/o', + 'X-Backend-Storage-Policy-Index': '0', + 'X-Etag': 'd41d8cd98f00b204e9800998ecf8427e'} + expected = [('PUT', 'a', 'c', 'o', '1.2.3.4:5', '20', 'sdb1', + headers_out, 'sda1', POLICIES[0]), + {'logger_thread_locals': (None, None)}] + self.assertEqual(called_async_update_args, [expected]) + + def test_container_update_as_greenthread_with_timeout(self): + ''' + give it one container to update (for only one greenthred) + fake the greenthred so it will raise a timeout + test that the right message is logged and the method returns None + ''' + called_async_update_args = [] + + def local_fake_spawn(func, *a, **kw): + m = mock.MagicMock() + + def wait_with_error(): + raise Timeout() + m.wait = wait_with_error # because raise can't be in a lambda + return m + + def local_fake_async_update(*a, **kw): + # just capture the args to see that we would have called + called_async_update_args.append([a, kw]) + + req = Request.blank( + '/sda1/p/a/c/o', + environ={'REQUEST_METHOD': 'PUT'}, + headers={'X-Timestamp': '12345', + 'Content-Type': 'application/burrito', + 'Content-Length': '0', + 'X-Backend-Storage-Policy-Index': 0, + 'X-Container-Partition': '20', + 'X-Container-Host': '1.2.3.4:5', + 'X-Container-Device': 'sdb1'}) + with mock.patch.object(object_server, 'spawn', + local_fake_spawn): + with mock.patch.object(self.object_controller, + 'container_update_timeout', + 1.414213562): + resp = req.get_response(self.object_controller) + # check the response is completed and successful + self.assertEqual(resp.status_int, 201) + # check that the timeout was logged + expected_logged_error = "Container update timeout (1.4142s) " \ + "waiting for [('1.2.3.4:5', 'sdb1')]" + self.assertTrue( + expected_logged_error in + self.object_controller.logger.get_lines_for_level('debug')) + def test_container_update_bad_args(self): policy = random.choice(list(POLICIES)) given_args = [] From cbcfb74132aba4d5834dc9cf33dc4f664b82d39a Mon Sep 17 00:00:00 2001 From: Clay Gerrard Date: Wed, 22 Jul 2015 19:32:46 -0700 Subject: [PATCH 03/70] Cleanup nits from container_update_timeout Change-Id: Ib17fc9a027db267350abc5a7db4e3172dfff7913 --- doc/manpages/object-server.conf.5 | 2 +- test/unit/obj/test_server.py | 101 +++++++++++++----------------- 2 files changed, 44 insertions(+), 59 deletions(-) diff --git a/doc/manpages/object-server.conf.5 b/doc/manpages/object-server.conf.5 index 518e72586e..e82a56ffb5 100644 --- a/doc/manpages/object-server.conf.5 +++ b/doc/manpages/object-server.conf.5 @@ -130,7 +130,7 @@ Request timeout to external services. The default is 3 seconds. .IP \fBconn_timeout\fR Connection timeout to external services. The default is 0.5 seconds. 
.IP \fBcontainer_update_timeout\fR -Request timeout to do a container update on an object update. The default is 1 second. +Time to wait while sending a container update on object update. The default is 1 second. .RE .PD diff --git a/test/unit/obj/test_server.py b/test/unit/obj/test_server.py index 1e7a303ea4..8eec9adac6 100755 --- a/test/unit/obj/test_server.py +++ b/test/unit/obj/test_server.py @@ -76,7 +76,6 @@ def fake_spawn(): ensure that the method has completed. """ - orig = object_server.spawn greenlets = [] def _inner_fake_spawn(func, *a, **kw): @@ -85,16 +84,12 @@ def fake_spawn(): return gt object_server.spawn = _inner_fake_spawn - - try: - yield - finally: - for gt in greenlets: - try: + with mock.patch('swift.obj.server.spawn', _inner_fake_spawn): + try: + yield + finally: + for gt in greenlets: gt.wait() - except: # noqa - pass # real spawn won't do anything but pollute logs - object_server.spawn = orig @patch_policies(test_policies) @@ -109,7 +104,8 @@ class TestObjectController(unittest.TestCase): self.testdir = os.path.join(self.tmpdir, 'tmp_test_object_server_ObjectController') mkdirs(os.path.join(self.testdir, 'sda1')) - self.conf = {'devices': self.testdir, 'mount_check': 'false'} + self.conf = {'devices': self.testdir, 'mount_check': 'false', + 'container_update_timeout': 0.0} self.object_controller = object_server.ObjectController( self.conf, logger=debug_logger()) self.object_controller.bytes_per_sync = 1 @@ -1259,10 +1255,10 @@ class TestObjectController(unittest.TestCase): 'X-Container-Timestamp': '1', 'Content-Type': 'application/new1', 'Content-Length': '0'}) - with mock.patch.object(object_server, 'http_connect', - mock_http_connect(201)): - with fake_spawn(): - resp = req.get_response(self.object_controller) + with fake_spawn(), mock.patch.object( + object_server, 'http_connect', + mock_http_connect(201)): + resp = req.get_response(self.object_controller) self.assertEquals(resp.status_int, 201) timestamp = normalize_timestamp(time()) req = Request.blank( @@ -1275,10 +1271,10 @@ class TestObjectController(unittest.TestCase): 'X-Container-Timestamp': '1', 'Content-Type': 'application/new1', 'Content-Length': '0'}) - with mock.patch.object(object_server, 'http_connect', - mock_http_connect(500)): - with fake_spawn(): - resp = req.get_response(self.object_controller) + with fake_spawn(), mock.patch.object( + object_server, 'http_connect', + mock_http_connect(500)): + resp = req.get_response(self.object_controller) self.assertEquals(resp.status_int, 201) timestamp = normalize_timestamp(time()) req = Request.blank( @@ -1291,10 +1287,10 @@ class TestObjectController(unittest.TestCase): 'X-Container-Timestamp': '1', 'Content-Type': 'application/new1', 'Content-Length': '0'}) - with mock.patch.object(object_server, 'http_connect', - mock_http_connect(500, with_exc=True)): - with fake_spawn(): - resp = req.get_response(self.object_controller) + with fake_spawn(), mock.patch.object( + object_server, 'http_connect', + mock_http_connect(500, with_exc=True)): + resp = req.get_response(self.object_controller) self.assertEquals(resp.status_int, 201) def test_PUT_ssync_multi_frag(self): @@ -2436,10 +2432,9 @@ class TestObjectController(unittest.TestCase): 'X-Container-Device': 'sda1', 'X-Container-Partition': 'p', 'Content-Type': 'text/plain'}) - with mocked_http_conn( + with fake_spawn(), mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - with fake_spawn(): - resp = req.get_response(self.object_controller) + resp = req.get_response(self.object_controller) 
self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEquals(1, len(container_updates)) @@ -2476,10 +2471,9 @@ class TestObjectController(unittest.TestCase): 'X-Container-Device': 'sda1', 'X-Container-Partition': 'p', 'Content-Type': 'text/html'}) - with mocked_http_conn( + with fake_spawn(), mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - with fake_spawn(): - resp = req.get_response(self.object_controller) + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEquals(1, len(container_updates)) @@ -2515,10 +2509,9 @@ class TestObjectController(unittest.TestCase): 'X-Container-Device': 'sda1', 'X-Container-Partition': 'p', 'Content-Type': 'text/enriched'}) - with mocked_http_conn( + with fake_spawn(), mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - with fake_spawn(): - resp = req.get_response(self.object_controller) + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEquals(1, len(container_updates)) @@ -2554,10 +2547,9 @@ class TestObjectController(unittest.TestCase): 'X-Container-Host': '10.0.0.1:8080', 'X-Container-Device': 'sda1', 'X-Container-Partition': 'p'}) - with mocked_http_conn( + with fake_spawn(), mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - with fake_spawn(): - resp = req.get_response(self.object_controller) + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 204) self.assertEquals(1, len(container_updates)) @@ -2586,10 +2578,9 @@ class TestObjectController(unittest.TestCase): 'X-Container-Host': '10.0.0.1:8080', 'X-Container-Device': 'sda1', 'X-Container-Partition': 'p'}) - with mocked_http_conn( + with fake_spawn(), mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - with fake_spawn(): - resp = req.get_response(self.object_controller) + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 404) self.assertEquals(1, len(container_updates)) @@ -3056,10 +3047,9 @@ class TestObjectController(unittest.TestCase): 'X-Delete-At-Partition': '6237', 'X-Delete-At-Device': 'sdp,sdq'}) - with mock.patch.object(object_server, 'http_connect', - fake_http_connect): - with fake_spawn(): - resp = req.get_response(self.object_controller) + with fake_spawn(), mock.patch.object( + object_server, 'http_connect', fake_http_connect): + resp = req.get_response(self.object_controller) self.assertEqual(resp.status_int, 201) @@ -3170,10 +3160,9 @@ class TestObjectController(unittest.TestCase): 'X-Container-Host': '1.2.3.4:5, 6.7.8.9:10', 'X-Container-Device': 'sdb1, sdf1'}) - with mock.patch.object(object_server, 'http_connect', - fake_http_connect): - with fake_spawn(): - req.get_response(self.object_controller) + with fake_spawn(), mock.patch.object( + object_server, 'http_connect', fake_http_connect): + req.get_response(self.object_controller) http_connect_args.sort(key=operator.itemgetter('ipaddr')) @@ -3248,10 +3237,9 @@ class TestObjectController(unittest.TestCase): headers['X-Object-Sysmeta-Ec-Frag-Index'] = '2' req = Request.blank( '/sda1/p/a/c/o', method='PUT', body='', headers=headers) - with mocked_http_conn( + with fake_spawn(), mocked_http_conn( 500, 500, 
give_connect=capture_updates) as fake_conn: - with fake_spawn(): - resp = req.get_response(self.object_controller) + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEquals(2, len(container_updates)) @@ -3485,10 +3473,9 @@ class TestObjectController(unittest.TestCase): 'X-Container-Partition': 'cpartition', 'X-Container-Device': 'cdevice', 'Content-Type': 'text/plain'}, body='') - with mocked_http_conn( + with fake_spawn(), mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - with fake_spawn(): - resp = req.get_response(self.object_controller) + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEqual(len(container_updates), 1) @@ -3527,10 +3514,9 @@ class TestObjectController(unittest.TestCase): } req = Request.blank('/sda1/0/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers=headers, body='') - with mocked_http_conn( + with fake_spawn(), mocked_http_conn( 200, give_connect=capture_updates) as fake_conn: - with fake_spawn(): - resp = req.get_response(self.object_controller) + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEqual(len(container_updates), 1) @@ -3569,9 +3555,8 @@ class TestObjectController(unittest.TestCase): given_args[:] = args diskfile_mgr = self.object_controller._diskfile_router[policy] diskfile_mgr.pickle_async_update = fake_pickle_async_update - with mocked_http_conn(500) as fake_conn: - with fake_spawn(): - resp = req.get_response(self.object_controller) + with fake_spawn(), mocked_http_conn(500) as fake_conn: + resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) self.assertEqual(len(given_args), 7) From dd2f1be3b124d3901ebbc176a7adc462b6449667 Mon Sep 17 00:00:00 2001 From: Ondrej Novy Date: Tue, 16 Jun 2015 17:42:58 +0200 Subject: [PATCH 04/70] Time synchronization check in recon. This change add call time to recon middleware and param --time to recon CLI. This is usefull for checking if time in cluster is synchronized. 
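
For illustration only (not part of the change itself), this is a minimal
sketch of the comparison that swift-recon --time performs: a remote clock
is treated as in sync when the value reported by the new /recon/time
endpoint falls inside the local [request-start, request-end] window, so
ordinary request latency does not produce a false mismatch. The host and
port below are placeholders; the real CLI goes through
eventlet.green.urllib2 and the Scout helper rather than plain urllib2.

    import json
    import time
    import urllib2

    def remote_clock_in_sync(host, port, timeout=5):
        # record the local window around the request
        ts_start = time.time()
        body = urllib2.urlopen(
            "http://%s:%s/recon/time" % (host, port), timeout=timeout).read()
        ts_end = time.time()
        # the recon middleware returns time.time() JSON-encoded
        ts_remote = json.loads(body)
        return ts_start <= ts_remote <= ts_end
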
Change-Id: I62373e681f64d0bd71f4aeb287953dd3b2ea5662 --- AUTHORS | 1 + doc/manpages/swift-recon.1 | 6 +- doc/source/admin_guide.rst | 5 +- swift/cli/recon.py | 102 +++++++++++++++---- swift/common/middleware/recon.py | 8 ++ test/unit/cli/test_recon.py | 114 +++++++++++++++++++--- test/unit/common/middleware/test_recon.py | 20 ++++ 7 files changed, 219 insertions(+), 37 deletions(-) diff --git a/AUTHORS b/AUTHORS index ebde31a39a..1f628d9c3b 100644 --- a/AUTHORS +++ b/AUTHORS @@ -156,6 +156,7 @@ Maru Newby (mnewby@internap.com) Newptone (xingchao@unitedstack.com) Colin Nicholson (colin.nicholson@iomart.com) Zhenguo Niu (zhenguo@unitedstack.com) +Ondrej Novy (ondrej.novy@firma.seznam.cz) Timothy Okwii (tokwii@cisco.com) Matthew Oliver (matt@oliver.net.au) Hisashi Osanai (osanai.hisashi@jp.fujitsu.com) diff --git a/doc/manpages/swift-recon.1 b/doc/manpages/swift-recon.1 index c635861aca..3120405005 100644 --- a/doc/manpages/swift-recon.1 +++ b/doc/manpages/swift-recon.1 @@ -25,7 +25,7 @@ .SH SYNOPSIS .LP .B swift-recon -\ [-v] [--suppress] [-a] [-r] [-u] [-d] [-l] [--md5] [--auditor] [--updater] [--expirer] [--sockstat] +\ [-v] [--suppress] [-a] [-r] [-u] [-d] [-l] [-T] [--md5] [--auditor] [--updater] [--expirer] [--sockstat] .SH DESCRIPTION .PP @@ -80,8 +80,10 @@ Get md5sum of servers ring and compare to local copy Get cluster socket usage stats .IP "\fB--driveaudit\fR" Get drive audit error stats +.IP "\fB-T, --time\fR" +Check time synchronization .IP "\fB--all\fR" -Perform all checks. Equivalent to \-arudlq \-\-md5 +Perform all checks. Equivalent to \-arudlqT \-\-md5 .IP "\fB--region=REGION\fR" Only query servers in specified region .IP "\fB-z ZONE, --zone=ZONE\fR" diff --git a/doc/source/admin_guide.rst b/doc/source/admin_guide.rst index f27c20741e..5c151aabeb 100644 --- a/doc/source/admin_guide.rst +++ b/doc/source/admin_guide.rst @@ -555,7 +555,7 @@ This information can also be queried via the swift-recon command line utility:: fhines@ubuntu:~$ swift-recon -h Usage: usage: swift-recon [-v] [--suppress] [-a] [-r] [-u] [-d] - [-l] [--md5] [--auditor] [--updater] [--expirer] [--sockstat] + [-l] [-T] [--md5] [--auditor] [--updater] [--expirer] [--sockstat] account|container|object Defaults to object server. @@ -578,7 +578,8 @@ This information can also be queried via the swift-recon command line utility:: -q, --quarantined Get cluster quarantine stats --md5 Get md5sum of servers ring and compare to local copy --sockstat Get cluster socket usage stats - --all Perform all checks. Equal to -arudlq --md5 --sockstat + -T, --time Check time synchronization + --all Perform all checks. Equal to -arudlqT --md5 --sockstat -z ZONE, --zone=ZONE Only query servers in specified zone -t SECONDS, --timeout=SECONDS Time to wait for a response from a server diff --git a/swift/cli/recon.py b/swift/cli/recon.py index f57f75c22a..79e0721c04 100755 --- a/swift/cli/recon.py +++ b/swift/cli/recon.py @@ -100,11 +100,14 @@ class Scout(object): Obtain telemetry from a host running the swift recon middleware. 
:param host: host to check - :returns: tuple of (recon url used, response body, and status) + :returns: tuple of (recon url used, response body, status, time start + and time end) """ base_url = "http://%s:%s/recon/" % (host[0], host[1]) + ts_start = time.time() url, content, status = self.scout_host(base_url, self.recon_type) - return url, content, status + ts_end = time.time() + return url, content, status, ts_start, ts_end def scout_server_type(self, host): """ @@ -253,7 +256,8 @@ class SwiftRecon(object): if self.verbose: for ring_file, ring_sum in rings.items(): print("-> On disk %s md5sum: %s" % (ring_file, ring_sum)) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status != 200: errors = errors + 1 continue @@ -291,7 +295,8 @@ class SwiftRecon(object): printfn("[%s] Checking swift.conf md5sum" % self._ptime()) if self.verbose: printfn("-> On disk swift.conf md5sum: %s" % (conf_sum,)) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: if response[SWIFT_CONF_FILE] != conf_sum: printfn("!! %s (%s) doesn't match on disk md5sum" % @@ -317,7 +322,8 @@ class SwiftRecon(object): recon = Scout("async", self.verbose, self.suppress_errors, self.timeout) print("[%s] Checking async pendings" % self._ptime()) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: scan[url] = response['async_pending'] stats = self._gen_stats(scan.values(), 'async_pending') @@ -338,7 +344,8 @@ class SwiftRecon(object): recon = Scout("driveaudit", self.verbose, self.suppress_errors, self.timeout) print("[%s] Checking drive-audit errors" % self._ptime()) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: scan[url] = response['drive_audit_errors'] stats = self._gen_stats(scan.values(), 'drive_audit_errors') @@ -361,7 +368,8 @@ class SwiftRecon(object): self.timeout) print("[%s] Getting unmounted drives from %s hosts..." 
% (self._ptime(), len(hosts))) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: unmounted[url] = [] errors[url] = [] @@ -414,7 +422,8 @@ class SwiftRecon(object): recon = Scout("expirer/%s" % self.server_type, self.verbose, self.suppress_errors, self.timeout) print("[%s] Checking on expirers" % self._ptime()) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: stats['object_expiration_pass'].append( response.get('object_expiration_pass')) @@ -447,7 +456,8 @@ class SwiftRecon(object): least_recent_url = None most_recent_time = 0 most_recent_url = None - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: stats['replication_time'].append( response.get('replication_time')) @@ -511,7 +521,8 @@ class SwiftRecon(object): least_recent_url = None most_recent_time = 0 most_recent_url = None - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: stats[url] = response['object_replication_time'] last = response.get('object_replication_last', 0) @@ -562,7 +573,8 @@ class SwiftRecon(object): recon = Scout("updater/%s" % self.server_type, self.verbose, self.suppress_errors, self.timeout) print("[%s] Checking updater times" % self._ptime()) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: if response['%s_updater_sweep' % self.server_type]: stats.append(response['%s_updater_sweep' % @@ -592,7 +604,8 @@ class SwiftRecon(object): recon = Scout("auditor/%s" % self.server_type, self.verbose, self.suppress_errors, self.timeout) print("[%s] Checking auditor stats" % self._ptime()) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: scan[url] = response if len(scan) < 1: @@ -665,7 +678,8 @@ class SwiftRecon(object): recon = Scout("auditor/object", self.verbose, self.suppress_errors, self.timeout) print("[%s] Checking auditor stats " % self._ptime()) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: if response['object_auditor_stats_ALL']: all_scan[url] = response['object_auditor_stats_ALL'] @@ -736,7 +750,8 @@ class SwiftRecon(object): recon = Scout("load", self.verbose, self.suppress_errors, self.timeout) print("[%s] Checking load averages" % self._ptime()) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: load1[url] = response['1m'] load5[url] = response['5m'] @@ -765,7 +780,8 @@ class SwiftRecon(object): recon = Scout("quarantined", self.verbose, self.suppress_errors, self.timeout) print("[%s] Checking quarantine" % self._ptime()) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: objq[url] = response['objects'] conq[url] = 
response['containers'] @@ -799,7 +815,8 @@ class SwiftRecon(object): recon = Scout("sockstat", self.verbose, self.suppress_errors, self.timeout) print("[%s] Checking socket usage" % self._ptime()) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: inuse4[url] = response['tcp_in_use'] mem[url] = response['tcp_mem_allocated_bytes'] @@ -835,7 +852,8 @@ class SwiftRecon(object): recon = Scout("diskusage", self.verbose, self.suppress_errors, self.timeout) print("[%s] Checking disk usage now" % self._ptime()) - for url, response, status in self.pool.imap(recon.scout, hosts): + for url, response, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): if status == 200: hostusage = [] for entry in response: @@ -915,6 +933,47 @@ class SwiftRecon(object): host = urlparse(url).netloc.split(':')[0] print('%.02f%% %s' % (used, '%-15s %s' % (host, device))) + def time_check(self, hosts): + """ + Check a time synchronization of hosts with current time + + :param hosts: set of hosts to check. in the format of: + set([('127.0.0.1', 6020), ('127.0.0.2', 6030)]) + """ + + matches = 0 + errors = 0 + recon = Scout("time", self.verbose, self.suppress_errors, + self.timeout) + print("[%s] Checking time-sync" % self._ptime()) + for url, ts_remote, status, ts_start, ts_end in self.pool.imap( + recon.scout, hosts): + if status != 200: + errors = errors + 1 + continue + if (ts_remote < ts_start or ts_remote > ts_end): + diff = abs(ts_end - ts_remote) + ts_end_f = time.strftime( + "%Y-%m-%d %H:%M:%S", + time.localtime(ts_end)) + ts_remote_f = time.strftime( + "%Y-%m-%d %H:%M:%S", + time.localtime(ts_remote)) + + print("!! %s current time is %s, but remote is %s, " + "differs by %.2f sec" % ( + url, + ts_end_f, + ts_remote_f, + diff)) + continue + matches += 1 + if self.verbose: + print("-> %s matches." % url) + print("%s/%s hosts matched, %s error[s] while checking hosts." % ( + matches, len(hosts), errors)) + print("=" * 79) + def main(self): """ Retrieve and report cluster info from hosts running recon middleware. @@ -922,7 +981,7 @@ class SwiftRecon(object): print("=" * 79) usage = ''' usage: %prog [-v] [--suppress] [-a] [-r] [-u] [-d] - [-l] [--md5] [--auditor] [--updater] [--expirer] [--sockstat] + [-l] [-T] [--md5] [--auditor] [--updater] [--expirer] [--sockstat] [--human-readable] \taccount|container|object @@ -964,13 +1023,15 @@ class SwiftRecon(object): help="Get cluster socket usage stats") args.add_option('--driveaudit', action="store_true", help="Get drive audit error stats") + args.add_option('--time', '-T', action="store_true", + help="Check time synchronization") args.add_option('--top', type='int', metavar='COUNT', default=0, help='Also show the top COUNT entries in rank order.') args.add_option('--lowest', type='int', metavar='COUNT', default=0, help='Also show the lowest COUNT entries in rank \ order.') args.add_option('--all', action="store_true", - help="Perform all checks. Equal to \t\t\t-arudlq " + help="Perform all checks. 
Equal to \t\t\t-arudlqT " "--md5 --sockstat --auditor --updater --expirer") args.add_option('--region', type="int", help="Only query servers in specified region") @@ -1031,6 +1092,7 @@ class SwiftRecon(object): self.socket_usage(hosts) self.server_type_check(hosts) self.driveaudit_check(hosts) + self.time_check(hosts) else: if options.async: if self.server_type == 'object': @@ -1075,6 +1137,8 @@ class SwiftRecon(object): self.socket_usage(hosts) if options.driveaudit: self.driveaudit_check(hosts) + if options.time: + self.time_check(hosts) def main(): diff --git a/swift/common/middleware/recon.py b/swift/common/middleware/recon.py index 88d5243a4d..a20e197cfd 100644 --- a/swift/common/middleware/recon.py +++ b/swift/common/middleware/recon.py @@ -15,6 +15,7 @@ import errno import os +import time from swift import gettext_ as _ from swift import __version__ as swiftver @@ -328,6 +329,11 @@ class ReconMiddleware(object): raise return sockstat + def get_time(self): + """get current time""" + + return time.time() + def GET(self, req): root, rcheck, rtype = req.split_path(1, 3, True) all_rtypes = ['account', 'container', 'object'] @@ -368,6 +374,8 @@ class ReconMiddleware(object): content = self.get_version() elif rcheck == "driveaudit": content = self.get_driveaudit_error() + elif rcheck == "time": + content = self.get_time() else: content = "Invalid path: %s" % req.path return Response(request=req, status="404 Not Found", diff --git a/test/unit/cli/test_recon.py b/test/unit/cli/test_recon.py index dd53ae9d54..26dd6fb2c1 100644 --- a/test/unit/cli/test_recon.py +++ b/test/unit/cli/test_recon.py @@ -61,7 +61,7 @@ class TestScout(unittest.TestCase): @mock.patch('eventlet.green.urllib2.urlopen') def test_scout_ok(self, mock_urlopen): mock_urlopen.return_value.read = lambda: json.dumps([]) - url, content, status = self.scout_instance.scout( + url, content, status, ts_start, ts_end = self.scout_instance.scout( ("127.0.0.1", "8080")) self.assertEqual(url, self.url) self.assertEqual(content, []) @@ -70,7 +70,7 @@ class TestScout(unittest.TestCase): @mock.patch('eventlet.green.urllib2.urlopen') def test_scout_url_error(self, mock_urlopen): mock_urlopen.side_effect = urllib2.URLError("") - url, content, status = self.scout_instance.scout( + url, content, status, ts_start, ts_end = self.scout_instance.scout( ("127.0.0.1", "8080")) self.assertTrue(isinstance(content, urllib2.URLError)) self.assertEqual(url, self.url) @@ -80,7 +80,7 @@ class TestScout(unittest.TestCase): def test_scout_http_error(self, mock_urlopen): mock_urlopen.side_effect = urllib2.HTTPError( self.url, 404, "Internal error", None, None) - url, content, status = self.scout_instance.scout( + url, content, status, ts_start, ts_end = self.scout_instance.scout( ("127.0.0.1", "8080")) self.assertEqual(url, self.url) self.assertTrue(isinstance(content, urllib2.HTTPError)) @@ -218,7 +218,7 @@ class TestRecon(unittest.TestCase): '/etc/swift/object-1.ring.gz': empty_file_hash, } status = 200 - scout_instance.scout.return_value = (url, response, status) + scout_instance.scout.return_value = (url, response, status, 0, 0) mock_scout.return_value = scout_instance stdout = StringIO() mock_hash = mock.MagicMock() @@ -274,7 +274,7 @@ class TestRecon(unittest.TestCase): url = 'http://%s:%s/recon/quarantined' % host response = responses[host[1]] status = 200 - return url, response, status + return url, response, status, 0, 0 stdout = StringIO() patches = [ @@ -311,7 +311,7 @@ class TestRecon(unittest.TestCase): url = 'http://%s:%s/recon/driveaudit' % 
host response = responses[host[1]] status = 200 - return url, response, status + return url, response, status, 0, 0 stdout = StringIO() patches = [ @@ -491,7 +491,7 @@ class TestReconCommands(unittest.TestCase): return [('http://127.0.0.1:6010/recon/auditor/object', { 'object_auditor_stats_ALL': values, 'object_auditor_stats_ZBF': values, - }, 200)] + }, 200, 0, 0)] response = {} @@ -535,7 +535,9 @@ class TestReconCommands(unittest.TestCase): "avail": 15, "used": 85, "size": 100}, {"device": "sdd1", "mounted": True, "avail": 15, "used": 85, "size": 100}], - 200)] + 200, + 0, + 0)] cli = recon.SwiftRecon() cli.pool.imap = dummy_request @@ -586,11 +588,15 @@ class TestReconCommands(unittest.TestCase): ('http://127.0.0.1:6010/recon/replication/object', {"object_replication_time": 61, "object_replication_last": now}, - 200), + 200, + 0, + 0), ('http://127.0.0.1:6020/recon/replication/object', {"object_replication_time": 23, "object_replication_last": now}, - 200), + 200, + 0, + 0), ] cli = recon.SwiftRecon() @@ -625,7 +631,9 @@ class TestReconCommands(unittest.TestCase): "remote_merge": 0, "diff_capped": 0, "start": now, "hashmatch": 0, "diff": 0, "empty": 0}, "replication_time": 42}, - 200), + 200, + 0, + 0), ('http://127.0.0.1:6021/recon/replication/container', {"replication_last": now, "replication_stats": { @@ -634,7 +642,9 @@ class TestReconCommands(unittest.TestCase): "remote_merge": 0, "diff_capped": 0, "start": now, "hashmatch": 0, "diff": 0, "empty": 0}, "replication_time": 23}, - 200), + 200, + 0, + 0), ] cli = recon.SwiftRecon() @@ -671,11 +681,15 @@ class TestReconCommands(unittest.TestCase): ('http://127.0.0.1:6010/recon/load', {"1m": 0.2, "5m": 0.4, "15m": 0.25, "processes": 10000, "tasks": "1/128"}, - 200), + 200, + 0, + 0), ('http://127.0.0.1:6020/recon/load', {"1m": 0.4, "5m": 0.8, "15m": 0.75, "processes": 9000, "tasks": "1/200"}, - 200), + 200, + 0, + 0), ] cli = recon.SwiftRecon() @@ -695,3 +709,75 @@ class TestReconCommands(unittest.TestCase): # We need any_order=True because the order of calls depends on the dict # that is returned from the recon middleware, thus can't rely on it mock_print.assert_has_calls(default_calls, any_order=True) + + @mock.patch('__builtin__.print') + @mock.patch('time.time') + def test_time_check(self, mock_now, mock_print): + now = 1430000000.0 + mock_now.return_value = now + + def dummy_request(*args, **kwargs): + return [ + ('http://127.0.0.1:6010/recon/load', + now, + 200, + now - 0.5, + now + 0.5), + ('http://127.0.0.1:6020/recon/load', + now, + 200, + now, + now), + ] + + cli = recon.SwiftRecon() + cli.pool.imap = dummy_request + + default_calls = [ + mock.call('2/2 hosts matched, 0 error[s] while checking hosts.') + ] + + cli.time_check([('127.0.0.1', 6010), ('127.0.0.1', 6020)]) + # We need any_order=True because the order of calls depends on the dict + # that is returned from the recon middleware, thus can't rely on it + mock_print.assert_has_calls(default_calls, any_order=True) + + @mock.patch('__builtin__.print') + @mock.patch('time.time') + def test_time_check_mismatch(self, mock_now, mock_print): + now = 1430000000.0 + mock_now.return_value = now + + def dummy_request(*args, **kwargs): + return [ + ('http://127.0.0.1:6010/recon/time', + now, + 200, + now + 0.5, + now + 1.3), + ('http://127.0.0.1:6020/recon/time', + now, + 200, + now, + now), + ] + + cli = recon.SwiftRecon() + cli.pool.imap = dummy_request + + default_calls = [ + mock.call("!! 
http://127.0.0.1:6010/recon/time current time is " + "2015-04-25 22:13:21, but remote is " + "2015-04-25 22:13:20, differs by 1.30 sec"), + mock.call('1/2 hosts matched, 0 error[s] while checking hosts.'), + ] + + def mock_localtime(*args, **kwargs): + return time.gmtime(*args, **kwargs) + + with mock.patch("time.localtime", mock_localtime): + cli.time_check([('127.0.0.1', 6010), ('127.0.0.1', 6020)]) + + # We need any_order=True because the order of calls depends on the dict + # that is returned from the recon middleware, thus can't rely on it + mock_print.assert_has_calls(default_calls, any_order=True) diff --git a/test/unit/common/middleware/test_recon.py b/test/unit/common/middleware/test_recon.py index 05a11ce859..c383e43563 100644 --- a/test/unit/common/middleware/test_recon.py +++ b/test/unit/common/middleware/test_recon.py @@ -175,6 +175,9 @@ class FakeRecon(object): def fake_driveaudit(self): return {'driveaudittest': "1"} + def fake_time(self): + return {'timetest': "1"} + def nocontent(self): return None @@ -855,6 +858,15 @@ class TestReconSuccess(TestCase): '/var/cache/swift/drive.recon'), {})]) self.assertEquals(rv, {'drive_audit_errors': 7}) + def test_get_time(self): + def fake_time(): + return 1430000000.0 + + with mock.patch("time.time", fake_time): + now = fake_time() + rv = self.app.get_time() + self.assertEquals(rv, now) + class TestReconMiddleware(unittest.TestCase): @@ -884,6 +896,7 @@ class TestReconMiddleware(unittest.TestCase): self.app.get_quarantine_count = self.frecon.fake_quarantined self.app.get_socket_info = self.frecon.fake_sockstat self.app.get_driveaudit_error = self.frecon.fake_driveaudit + self.app.get_time = self.frecon.fake_time def test_recon_get_mem(self): get_mem_resp = ['{"memtest": "1"}'] @@ -1118,5 +1131,12 @@ class TestReconMiddleware(unittest.TestCase): resp = self.app(req.environ, start_response) self.assertEquals(resp, get_driveaudit_resp) + def test_recon_get_time(self): + get_time_resp = ['{"timetest": "1"}'] + req = Request.blank('/recon/time', + environ={'REQUEST_METHOD': 'GET'}) + resp = self.app(req.environ, start_response) + self.assertEquals(resp, get_time_resp) + if __name__ == '__main__': unittest.main() From c5b5cf91a984f80cc6cbe42735b242083d700542 Mon Sep 17 00:00:00 2001 From: janonymous Date: Tue, 28 Jul 2015 21:03:05 +0530 Subject: [PATCH 05/70] test/unit: Replace python print operator with print function (pep H233, py33) 'print' function is compatible with 2.x and 3.x python versions Link : https://www.python.org/dev/peps/pep-3105/ Python 2.6 has a __future__ import that removes print as language syntax, letting you use the functional form instead Change-Id: I94e1bc6bd83ad6b05695c7ebdf7cbfd8f6d9f9af --- test/unit/__init__.py | 7 ++++--- test/unit/common/test_db_replicator.py | 3 ++- test/unit/common/test_manager.py | 12 ++++++------ test/unit/common/test_utils.py | 14 +++++++------- test/unit/proxy/controllers/test_container.py | 3 ++- test/unit/proxy/test_server.py | 3 ++- test/unit/test_locale/test_locale.py | 3 ++- 7 files changed, 25 insertions(+), 20 deletions(-) diff --git a/test/unit/__init__.py b/test/unit/__init__.py index b67f44342c..16057e91ce 100644 --- a/test/unit/__init__.py +++ b/test/unit/__init__.py @@ -15,6 +15,7 @@ """ Swift tests """ +from __future__ import print_function import os import copy import logging @@ -572,8 +573,8 @@ class FakeLogger(logging.Logger, object): try: line = record.getMessage() except TypeError: - print 'WARNING: unable to format log message %r %% %r' % ( - record.msg, record.args) + 
print('WARNING: unable to format log message %r %% %r' % ( + record.msg, record.args)) raise self.lines_dict[record.levelname.lower()].append(line) @@ -597,7 +598,7 @@ class DebugLogger(FakeLogger): def handle(self, record): self._handle(record) - print self.formatter.format(record) + print(self.formatter.format(record)) class DebugLogAdapter(utils.LogAdapter): diff --git a/test/unit/common/test_db_replicator.py b/test/unit/common/test_db_replicator.py index f15a895e9f..91a5adfbfd 100644 --- a/test/unit/common/test_db_replicator.py +++ b/test/unit/common/test_db_replicator.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import print_function import unittest from contextlib import contextmanager import os @@ -1304,7 +1305,7 @@ def attach_fake_replication_rpc(rpc, replicate_hook=None): self.host = node['replication_ip'] def replicate(self, op, *sync_args): - print 'REPLICATE: %s, %s, %r' % (self.path, op, sync_args) + print('REPLICATE: %s, %s, %r' % (self.path, op, sync_args)) replicate_args = self.path.lstrip('/').split('/') args = [op] + list(sync_args) swob_response = rpc.dispatch(replicate_args, args) diff --git a/test/unit/common/test_manager.py b/test/unit/common/test_manager.py index d759a9ba97..ebdab67d26 100644 --- a/test/unit/common/test_manager.py +++ b/test/unit/common/test_manager.py @@ -12,7 +12,7 @@ # implied. # See the License for the specific language governing permissions and # limitations under the License. - +from __future__ import print_function import unittest from test.unit import temptree @@ -1188,9 +1188,9 @@ class TestServer(unittest.TestCase): pass def fail(self): - print >>self._stdout, 'mock process started' + print('mock process started', file=self._stdout) sleep(self.delay) # perform setup processing - print >>self._stdout, 'mock process failed to start' + print('mock process failed to start', file=self._stdout) self.close_stdout() def poll(self): @@ -1198,12 +1198,12 @@ class TestServer(unittest.TestCase): return self.returncode or None def run(self): - print >>self._stdout, 'mock process started' + print('mock process started', file=self._stdout) sleep(self.delay) # perform setup processing - print >>self._stdout, 'setup complete!' + print('setup complete!', file=self._stdout) self.close_stdout() sleep(self.delay) # do some more processing - print >>self._stdout, 'mock process finished' + print('mock process finished', file=self._stdout) self.finished = True class MockTime(object): diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index b5dc6edf32..1d2c105dd2 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -14,7 +14,7 @@ # limitations under the License. 
"""Tests for swift.common.utils""" - +from __future__ import print_function from test.unit import temptree import ctypes @@ -1047,22 +1047,22 @@ class TestUtils(unittest.TestCase): lfo_stdout = utils.LoggerFileObject(logger) lfo_stderr = utils.LoggerFileObject(logger) lfo_stderr = utils.LoggerFileObject(logger, 'STDERR') - print 'test1' + print('test1') self.assertEquals(sio.getvalue(), '') sys.stdout = lfo_stdout - print 'test2' + print('test2') self.assertEquals(sio.getvalue(), 'STDOUT: test2\n') sys.stderr = lfo_stderr - print >> sys.stderr, 'test4' + print('test4', file=sys.stderr) self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDERR: test4\n') sys.stdout = orig_stdout - print 'test5' + print('test5') self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDERR: test4\n') - print >> sys.stderr, 'test6' + print('test6', file=sys.stderr) self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDERR: test4\n' 'STDERR: test6\n') sys.stderr = orig_stderr - print 'test8' + print('test8') self.assertEquals(sio.getvalue(), 'STDOUT: test2\nSTDERR: test4\n' 'STDERR: test6\n') lfo_stdout.writelines(['a', 'b', 'c']) diff --git a/test/unit/proxy/controllers/test_container.py b/test/unit/proxy/controllers/test_container.py index d2b7ce450e..69e8fedbc9 100644 --- a/test/unit/proxy/controllers/test_container.py +++ b/test/unit/proxy/controllers/test_container.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import print_function import mock import unittest @@ -164,7 +165,7 @@ class TestContainerController(TestRingBase): self.app._error_limiting = {} req = Request.blank('/v1/a/c', method=method) with mocked_http_conn(*statuses) as fake_conn: - print 'a' * 50 + print('a' * 50) resp = req.get_response(self.app) self.assertEqual(resp.status_int, expected) for req in fake_conn.requests: diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index dde59e3597..b2b87dbc0b 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import print_function import email.parser import logging import math @@ -9128,7 +9129,7 @@ class TestProxyObjectPerformance(unittest.TestCase): self.assertEqual(total, self.obj_len) end = time.time() - print "Run %02d took %07.03f" % (i, end - start) + print("Run %02d took %07.03f" % (i, end - start)) @patch_policies([StoragePolicy(0, 'migrated', object_ring=FakeRing()), diff --git a/test/unit/test_locale/test_locale.py b/test/unit/test_locale/test_locale.py index a0804ed0eb..a5973cd83f 100644 --- a/test/unit/test_locale/test_locale.py +++ b/test/unit/test_locale/test_locale.py @@ -15,6 +15,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import print_function import os import unittest import string @@ -75,4 +76,4 @@ if __name__ == "__main__": os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__) sys.path = string.split(sys.argv[1], ':') from swift import gettext_ as _ - print _('test message') + print(_('test message')) From 768d7ab074807175471d589de96d091e3239052e Mon Sep 17 00:00:00 2001 From: Clay Gerrard Date: Thu, 30 Jul 2015 15:30:35 -0700 Subject: [PATCH 06/70] Add a probetest for HUP/reload This would have been enough to catch the regression, and we can extend them as we work on any future ehancements to our process management. 
Change-Id: I9a1b57aa15663380c45cf783afc8212ab4ffbace --- test/probe/common.py | 12 ++-- test/probe/test_wsgi_servers.py | 103 ++++++++++++++++++++++++++++++++ 2 files changed, 111 insertions(+), 4 deletions(-) create mode 100644 test/probe/test_wsgi_servers.py diff --git a/test/probe/common.py b/test/probe/common.py index bf0e61d298..07977f5cd7 100644 --- a/test/probe/common.py +++ b/test/probe/common.py @@ -255,16 +255,20 @@ def get_policy(**kwargs): raise SkipTest('No policy matching %s' % kwargs) +def resetswift(): + p = Popen("resetswift 2>&1", shell=True, stdout=PIPE) + stdout, _stderr = p.communicate() + print stdout + Manager(['all']).stop() + + class ProbeTest(unittest.TestCase): """ Don't instantiate this directly, use a child class instead. """ def setUp(self): - p = Popen("resetswift 2>&1", shell=True, stdout=PIPE) - stdout, _stderr = p.communicate() - print stdout - Manager(['all']).stop() + resetswift() self.pids = {} try: self.ipport2server = {} diff --git a/test/probe/test_wsgi_servers.py b/test/probe/test_wsgi_servers.py new file mode 100644 index 0000000000..437912dcf8 --- /dev/null +++ b/test/probe/test_wsgi_servers.py @@ -0,0 +1,103 @@ +#!/usr/bin/python -u +# Copyright (c) 2010-2012 OpenStack Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import httplib +import random + +from swift.common.storage_policy import POLICIES +from swift.common.ring import Ring +from swift.common.manager import Manager + +from test.probe.common import resetswift + + +def putrequest(conn, method, path, headers): + + conn.putrequest(method, path, skip_host=(headers and 'Host' in headers)) + if headers: + for header, value in headers.items(): + conn.putheader(header, str(value)) + conn.endheaders() + + +class TestWSGIServerProcessHandling(unittest.TestCase): + + def setUp(self): + resetswift() + + def _check_reload(self, server_name, ip, port): + manager = Manager([server_name]) + manager.start() + + starting_pids = set(pid for server in manager.servers + for (_, pid) in server.iter_pid_files()) + + body = 'test' * 10 + conn = httplib.HTTPConnection('%s:%s' % (ip, port)) + + # sanity request + putrequest(conn, 'PUT', 'blah', + headers={'Content-Length': len(body)}) + conn.send(body) + resp = conn.getresponse() + self.assertEqual(resp.status // 100, 4) + resp.read() + + manager.reload() + + post_reload_pids = set(pid for server in manager.servers + for (_, pid) in server.iter_pid_files()) + + # none of the pids we started with are being tracked after reload + msg = 'expected all pids from %r to have died, but found %r' % ( + starting_pids, post_reload_pids) + self.assertFalse(starting_pids & post_reload_pids, msg) + + # ... and yet we can keep using the same connection! 
+ putrequest(conn, 'PUT', 'blah', + headers={'Content-Length': len(body)}) + conn.send(body) + resp = conn.getresponse() + self.assertEqual(resp.status // 100, 4) + resp.read() + + # close our connection + conn.close() + + # sanity + post_close_pids = set(pid for server in manager.servers + for (_, pid) in server.iter_pid_files()) + self.assertEqual(post_reload_pids, post_close_pids) + + def test_proxy_reload(self): + self._check_reload('proxy-server', 'localhost', 8080) + + def test_object_reload(self): + policy = random.choice(list(POLICIES)) + policy.load_ring('/etc/swift') + node = random.choice(policy.object_ring.get_part_nodes(1)) + self._check_reload('object', node['ip'], node['port']) + + def test_account_container_reload(self): + for server in ('account', 'container'): + ring = Ring('/etc/swift', ring_name=server) + node = random.choice(ring.get_part_nodes(1)) + self._check_reload(server, node['ip'], node['port']) + + +if __name__ == '__main__': + unittest.main() From 67513fc17c23066f6547b5e3c4ab2dc852e4f7f0 Mon Sep 17 00:00:00 2001 From: janonymous Date: Sat, 27 Jun 2015 23:26:47 +0530 Subject: [PATCH 07/70] Adding bandit for security static analysis testing in swift This change adds a basic bandit config for Swift. It can be invoked by running the tox environment for bandit; tox -e bandit This is an initial step for using bandit with Swift and it should be revisited to improve the testing as more is learned about the specific needs of the Swift code base.As per now some tests are excluded as they were used on purpose. https://wiki.openstack.org/wiki/Security/Projects/Bandit Implements: blueprint swift-bandit Change-Id: I621be9a68ae9311f3a6eadd1636b05e646260cf2 --- bandit.yaml | 149 ++++++++++++++++++++++++++++++++++++++++++ test-requirements.txt | 3 + tox.ini | 4 ++ 3 files changed, 156 insertions(+) create mode 100644 bandit.yaml diff --git a/bandit.yaml b/bandit.yaml new file mode 100644 index 0000000000..6599ee50b8 --- /dev/null +++ b/bandit.yaml @@ -0,0 +1,149 @@ +# optional: after how many files to update progress +#show_progress_every: 100 + +# optional: plugins directory name +#plugins_dir: 'plugins' + +# optional: plugins discovery name pattern +plugin_name_pattern: '*.py' + +# optional: terminal escape sequences to display colors +#output_colors: +# DEFAULT: '\033[0m' +# HEADER: '\033[95m' +# LOW: '\033[94m' +# MEDIUM: '\033[93m' +# HIGH: '\033[91m' + +# optional: log format string +#log_format: "[%(module)s]\t%(levelname)s\t%(message)s" + +# globs of files which should be analyzed +include: + - '*.py' + +# a list of strings, which if found in the path will cause files to be +# excluded +# for example /tests/ - to remove all all files in tests directory +#exclude_dirs: +# - '/tests/' + +#configured for swift +profiles: + gate: + include: + - blacklist_calls + - blacklist_imports + - exec_used + - linux_commands_wildcard_injection + - request_with_no_cert_validation + - set_bad_file_permissions + - subprocess_popen_with_shell_equals_true + - ssl_with_bad_version + - password_config_option_not_marked_secret + +# - any_other_function_with_shell_equals_true +# - ssl_with_bad_defaults +# - jinja2_autoescape_false +# - use_of_mako_templates +# - subprocess_without_shell_equals_true +# - any_other_function_with_shell_equals_true +# - start_process_with_a_shell +# - start_process_with_no_shell +# - hardcoded_sql_expressions +# - hardcoded_tmp_director +# - linux_commands_wildcard_injection +#For now some items are commented which could be included as per use later. 
+blacklist_calls: + bad_name_sets: +# - pickle: +# qualnames: [pickle.loads, pickle.load, pickle.Unpickler, +# cPickle.loads, cPickle.load, cPickle.Unpickler] +# level: LOW +# message: "Pickle library appears to be in use, possible security +#issue." + + - marshal: + qualnames: [marshal.load, marshal.loads] + message: "Deserialization with the marshal module is possibly +dangerous." +# - md5: +# qualnames: [hashlib.md5] +# level: LOW +# message: "Use of insecure MD5 hash function." + - mktemp_q: + qualnames: [tempfile.mktemp] + message: "Use of insecure and deprecated function (mktemp)." +# - eval: +# qualnames: [eval] +# level: LOW +# message: "Use of possibly insecure function - consider using safer +#ast.literal_eval." + - mark_safe: + names: [mark_safe] + message: "Use of mark_safe() may expose cross-site scripting +vulnerabilities and should be reviewed." + - httpsconnection: + qualnames: [httplib.HTTPSConnection] + message: "Use of HTTPSConnection does not provide security, see +https://wiki.openstack.org/wiki/OSSN/OSSN-0033" + - yaml_load: + qualnames: [yaml.load] + message: "Use of unsafe yaml load. Allows instantiation of +arbitrary objects. Consider yaml.safe_load()." + - urllib_urlopen: + qualnames: [urllib.urlopen, urllib.urlretrieve, urllib.URLopener, +urllib.FancyURLopener, urllib2.urlopen, urllib2.Request] + message: "Audit url open for permitted schemes. Allowing use of +file:/ or custom schemes is often unexpected." + - paramiko_injection: + qualnames: [paramiko.exec_command, paramiko.invoke_shell] + message: "Paramiko exec_command() and invoke_shell() usage may +expose command injection vulnerabilities and should be reviewed." + +shell_injection: + # Start a process using the subprocess module, or one of its wrappers. + subprocess: [subprocess.Popen, subprocess.call, subprocess.check_call, + subprocess.check_output, utils.execute, +utils.execute_with_timeout] + # Start a process with a function vulnerable to shell injection. + shell: [os.system, os.popen, os.popen2, os.popen3, os.popen4, + popen2.popen2, popen2.popen3, popen2.popen4, popen2.Popen3, + popen2.Popen4, commands.getoutput, commands.getstatusoutput] + # Start a process with a function that is not vulnerable to shell + # injection. + no_shell: [os.execl, os.execle, os.execlp, os.execlpe, os.execv,os.execve, + os.execvp, os.execvpe, os.spawnl, os.spawnle, os.spawnlp, + os.spawnlpe, os.spawnv, os.spawnve, os.spawnvp, os.spawnvpe, + os.startfile] + +blacklist_imports: + bad_import_sets: + - telnet: + imports: [telnetlib] + level: HIGH + message: "Telnet is considered insecure. Use SSH or some other +encrypted protocol." + - info_libs: + imports: [Crypto] + level: LOW + message: "Consider possible security implications associated with +#{module} module." 
+ +hardcoded_password: + word_list: "wordlist/default-passwords" + +ssl_with_bad_version: + bad_protocol_versions: + - 'PROTOCOL_SSLv2' + - 'SSLv2_METHOD' + - 'SSLv23_METHOD' + - 'PROTOCOL_SSLv3' # strict option + - 'PROTOCOL_TLSv1' # strict option + - 'SSLv3_METHOD' # strict option + - 'TLSv1_METHOD' # strict option + +password_config_option_not_marked_secret: + function_names: + - oslo.config.cfg.StrOpt + - oslo_config.cfg.StrOpt diff --git a/test-requirements.txt b/test-requirements.txt index b3f7eed5be..e8b3b42dfe 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -13,3 +13,6 @@ sphinx>=1.1.2,<1.2 mock>=1.0 python-swiftclient python-keystoneclient>=1.3.0 + +# Security checks +bandit>=0.10.1 diff --git a/tox.ini b/tox.ini index 8b7061a026..4ed27daed6 100644 --- a/tox.ini +++ b/tox.ini @@ -49,6 +49,10 @@ commands = {posargs} [testenv:docs] commands = python setup.py build_sphinx +[testenv:bandit] +deps = -r{toxinidir}/test-requirements.txt +commands = bandit -c bandit.yaml -r swift bin -n 5 -p gate + [flake8] # it's not a bug that we aren't using all of hacking # H102 -> apache2 license exists From cf4d50bd68b658d35759fb56446557858dcd9572 Mon Sep 17 00:00:00 2001 From: Alistair Coles Date: Fri, 5 Jun 2015 13:06:29 +0100 Subject: [PATCH 08/70] Make test_proxy work independent of env vars test.unit.common.test_internal_client.TestSimpleClient.test_proxy will fail if the environment has http_proxy set and does not have no_proxy=127.0.0.1 set. This seems to be because urllib overrides any proxy arg passed to the Request with the env http_proxy var, unless the Request host is in no_proxy. We don't need to test urllib, so this patch changes the test to simply ensure that swift code does pass the correct proxy arg to urllib2.urlopen. That avoids testers having to make sure their env vars are compliant to successfully run unit tests. While it is reasonable to think that 127.0.0.1 is in the no_proxy list, it shouldn't be a requirement pass swift tests. 
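As an illustration of the approach, essentially a condensed form of the new test below, the check boils down to patching urlopen and inspecting the urllib2.Request that swift builds; the host and port values here are arbitrary:

    import mock

    from eventlet.green import urllib2
    from swift.common import internal_client

    proxy = 'http://127.0.0.1:80'

    class FakeConn(object):
        def read(self):
            return 'irrelevant'

    with mock.patch('swift.common.internal_client.urllib2.urlopen') as fake:
        fake.return_value = FakeConn()
        internal_client.put_object('https://127.0.0.1:1/a', container='c',
                                   name='o', contents='', proxy=proxy,
                                   timeout=0.1, retries=0)
        request = fake.call_args[0][0]          # the urllib2.Request swift built
        assert isinstance(request, urllib2.Request)
        assert request.host == '127.0.0.1:80'   # proxy host, not the object URL
        assert request.type == 'http'           # proxy scheme

Because urlopen itself is mocked, the environment's http_proxy/no_proxy settings never come into play.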
Change-Id: Iff91acdb76fabe7056b3e90e2756e27fe5105817 --- test/unit/common/test_internal_client.py | 85 +++++++++++++----------- 1 file changed, 45 insertions(+), 40 deletions(-) diff --git a/test/unit/common/test_internal_client.py b/test/unit/common/test_internal_client.py index d2fbd61226..9fed678faf 100644 --- a/test/unit/common/test_internal_client.py +++ b/test/unit/common/test_internal_client.py @@ -25,7 +25,6 @@ import six from six import StringIO from six.moves import range from test.unit import FakeLogger -import eventlet from eventlet.green import urllib2 from swift.common import internal_client from swift.common import swob @@ -1266,47 +1265,53 @@ class TestSimpleClient(unittest.TestCase): self.assertEqual([None, None], retval) def test_proxy(self): - running = True - - def handle(sock): - while running: - try: - with eventlet.Timeout(0.1): - (conn, addr) = sock.accept() - except eventlet.Timeout: - continue - else: - conn.send('HTTP/1.1 503 Server Error') - conn.close() - sock.close() - - sock = eventlet.listen(('', 0)) - port = sock.getsockname()[1] - proxy = 'http://127.0.0.1:%s' % port + # check that proxy arg is passed through to the urllib Request + scheme = 'http' + proxy_host = '127.0.0.1:80' + proxy = '%s://%s' % (scheme, proxy_host) url = 'https://127.0.0.1:1/a' - server = eventlet.spawn(handle, sock) - try: - headers = {'Content-Length': '0'} - with mock.patch('swift.common.internal_client.sleep'): - try: - internal_client.put_object( - url, container='c', name='o1', headers=headers, - contents='', proxy=proxy, timeout=0.1, retries=0) - except urllib2.HTTPError as e: - self.assertEqual(e.code, 503) - except urllib2.URLError as e: - if 'ECONNREFUSED' in str(e): - self.fail( - "Got %s which probably means the http proxy " - "settings were not used" % e) - else: - raise e - else: - self.fail('Unexpected successful response') - finally: - running = False - server.wait() + class FakeConn(object): + def read(self): + return 'irrelevant' + + mocked = 'swift.common.internal_client.urllib2.urlopen' + + # module level methods + for func in (internal_client.put_object, + internal_client.delete_object): + with mock.patch(mocked) as mock_urlopen: + mock_urlopen.return_value = FakeConn() + func(url, container='c', name='o1', contents='', proxy=proxy, + timeout=0.1, retries=0) + self.assertEqual(1, mock_urlopen.call_count) + args, kwargs = mock_urlopen.call_args + self.assertEqual(1, len(args)) + self.assertEqual(1, len(kwargs)) + self.assertEqual(0.1, kwargs['timeout']) + self.assertTrue(isinstance(args[0], urllib2.Request)) + self.assertEqual(proxy_host, args[0].host) + self.assertEqual(scheme, args[0].type) + + # class methods + content = mock.MagicMock() + cl = internal_client.SimpleClient(url) + scenarios = ((cl.get_account, []), + (cl.get_container, ['c']), + (cl.put_container, ['c']), + (cl.put_object, ['c', 'o', content])) + for scenario in scenarios: + with mock.patch(mocked) as mock_urlopen: + mock_urlopen.return_value = FakeConn() + scenario[0](*scenario[1], proxy=proxy, timeout=0.1) + self.assertEqual(1, mock_urlopen.call_count) + args, kwargs = mock_urlopen.call_args + self.assertEqual(1, len(args)) + self.assertEqual(1, len(kwargs)) + self.assertEqual(0.1, kwargs['timeout']) + self.assertTrue(isinstance(args[0], urllib2.Request)) + self.assertEqual(proxy_host, args[0].host) + self.assertEqual(scheme, args[0].type) if __name__ == '__main__': unittest.main() From 44917c8a90e79a85cd6c6821d8b6d043f83b322d Mon Sep 17 00:00:00 2001 From: Clay Gerrard Date: Mon, 20 Jul 2015 
13:06:48 -0700 Subject: [PATCH 09/70] Handle removed suffix dirs the same as empty suffix dirs When hashes suffix dirs, the directory might have gotten cleaned up while still appearing in hashes.pkl. It would even get cleaned up the next time. For example, given this really old tombstone: objects/846/3d0/d3a20154d0a828a032aba6860397c3d0/1432267961.41808.ts Prior to this commit, a call to get_hashes() would reap the old tombstone and any empty containing dirs, but the resulting hashes.pkl would still contain {'3d0': 'd41d8cd98f00b204e9800998ecf8427e'} even though there's no such suffix dir any more. ("d41d8cd98f00b204e9800998ecf8427e" is the MD5 of the empty string.) Then, a *subsequent* get_hashes() call would omit 3d0 from the resulting hash, so then hashes.pkl would no longer contain 3d0. This difference would result in a little useless replication traffic while nodes without a particular part/suffix pair, but who disagreed on how that showed up in hashes.pkl, tried to push their version of nothing to one another. Now, an empty suffix dir doesn't appear in hashes.pkl at all, whether it's for replication or EC, or whether it's for the get_hashes() call that reaped the suffix dirs or not. Co-Author: Samuel Merritt Change-Id: Ie1bfb1cc56d0fc030c6db42f97b55d140695cf1f --- swift/obj/diskfile.py | 12 +++++----- test/unit/obj/test_diskfile.py | 40 ++++++++++------------------------ 2 files changed, 18 insertions(+), 34 deletions(-) diff --git a/swift/obj/diskfile.py b/swift/obj/diskfile.py index c11b16dcaa..51158a0fe4 100644 --- a/swift/obj/diskfile.py +++ b/swift/obj/diskfile.py @@ -626,16 +626,18 @@ class BaseDiskFileManager(object): os.rmdir(hsh_path) except OSError: pass - # we just deleted this hsh_path, why are we waiting - # until the next suffix hash to raise PathNotDir so that - # this suffix will get del'd from the suffix hashes? 
for filename in files: key, value = mapper(filename) hashes[key].update(value) try: os.rmdir(path) - except OSError: - pass + except OSError as e: + if e.errno == errno.ENOENT: + raise PathNotDir() + else: + # if we remove it, pretend like it wasn't there to begin with so + # that the suffix key gets removed + raise PathNotDir() return hashes def _hash_suffix(self, path, reclaim_age): diff --git a/test/unit/obj/test_diskfile.py b/test/unit/obj/test_diskfile.py index 8a6ae0bee6..4f64eb8aba 100644 --- a/test/unit/obj/test_diskfile.py +++ b/test/unit/obj/test_diskfile.py @@ -4244,19 +4244,14 @@ class TestSuffixHashes(unittest.TestCase): df_mgr = self.df_router[policy] df = df_mgr.get_diskfile( 'sda1', '0', 'a', 'c', 'o', policy=policy) - suffix = os.path.basename(os.path.dirname(df._datadir)) # scale back this tests manager's reclaim age a bit df_mgr.reclaim_age = 1000 # write a tombstone that's just a *little* older old_time = time() - 1001 timestamp = Timestamp(old_time) df.delete(timestamp.internal) - expected = { - REPL_POLICY: {suffix: EMPTY_ETAG}, - EC_POLICY: {suffix: {}}, - }[policy.policy_type] hashes = df_mgr.get_hashes('sda1', '0', [], policy) - self.assertEqual(hashes, expected) + self.assertEqual(hashes, {}) def test_hash_suffix_one_datafile(self): for policy in self.iter_policies(): @@ -4432,20 +4427,17 @@ class TestSuffixHashes(unittest.TestCase): hashes = df_mgr.get_hashes('sda1', '0', [suffix], policy) # suffix dir cleaned up by get_hashes self.assertFalse(os.path.exists(suffix_path)) - expected = { - EC_POLICY: {'123': {}}, - REPL_POLICY: {'123': EMPTY_ETAG}, - }[policy.policy_type] - msg = 'expected %r != %r for policy %r' % (expected, hashes, - policy) + expected = {} + msg = 'expected %r != %r for policy %r' % ( + expected, hashes, policy) self.assertEqual(hashes, expected, msg) # now make the suffix path a file open(suffix_path, 'w').close() hashes = df_mgr.get_hashes('sda1', '0', [suffix], policy) expected = {} - msg = 'expected %r != %r for policy %r' % (expected, hashes, - policy) + msg = 'expected %r != %r for policy %r' % ( + expected, hashes, policy) self.assertEqual(hashes, expected, msg) def test_hash_suffix_listdir_enoent(self): @@ -4493,11 +4485,7 @@ class TestSuffixHashes(unittest.TestCase): df_mgr = self.df_router[policy] hashes = df_mgr.get_hashes(self.existing_device, '0', [suffix], policy) - expected = { - REPL_POLICY: {suffix: EMPTY_ETAG}, - EC_POLICY: {suffix: {}}, - }[policy.policy_type] - self.assertEqual(hashes, expected) + self.assertEqual(hashes, {}) # and hash path is quarantined self.assertFalse(os.path.exists(df._datadir)) # each device a quarantined directory @@ -4705,12 +4693,9 @@ class TestSuffixHashes(unittest.TestCase): self.assertNotEqual(new_hashes, hashes) # and the empty suffix path is removed self.assertFalse(os.path.exists(suffix_path)) - # ... but is hashed as "empty" - expected = { - EC_POLICY: {}, - REPL_POLICY: md5().hexdigest(), - }[policy.policy_type] - self.assertEqual({suffix: expected}, hashes) + # ... 
and the suffix key is removed + expected = {} + self.assertEqual(expected, hashes) def test_get_hashes_multi_file_multi_suffix(self): paths, suffix = find_paths_with_matching_suffixes(needed_matches=2, @@ -4887,10 +4872,7 @@ class TestSuffixHashes(unittest.TestCase): self.assertTrue(os.path.exists(suffix_path)) # sanity hashes = df_mgr.get_hashes(self.existing_device, '0', [suffix], policy) - expected = { - EC_POLICY: {'123': {}}, - REPL_POLICY: {'123': EMPTY_ETAG}, - }[policy.policy_type] + expected = {} msg = 'expected %r != %r for policy %r' % (expected, hashes, policy) self.assertEqual(hashes, expected, msg) From 21a7b4aaa6f991c0eeb8f74876b14fe22713586b Mon Sep 17 00:00:00 2001 From: Alistair Coles Date: Mon, 3 Aug 2015 14:58:24 +0100 Subject: [PATCH 10/70] Test that get_hashes ignores only removed hash dir Add test for case not yet covered by unit tests: suffix dir has two hash dirs, one with expired tombstone. That hash dir gets removed and its hash is not included in the suffix hash, but the remaining hash dir's hash is reported. Change-Id: I031a022daed6b8a66dfd04bea1b4d5eebcb882b3 --- test/unit/obj/test_diskfile.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/test/unit/obj/test_diskfile.py b/test/unit/obj/test_diskfile.py index 4f64eb8aba..2ab98307fd 100644 --- a/test/unit/obj/test_diskfile.py +++ b/test/unit/obj/test_diskfile.py @@ -4253,6 +4253,37 @@ class TestSuffixHashes(unittest.TestCase): hashes = df_mgr.get_hashes('sda1', '0', [], policy) self.assertEqual(hashes, {}) + def test_hash_suffix_one_reclaim_and_one_valid_tombstone(self): + for policy in self.iter_policies(): + paths, suffix = find_paths_with_matching_suffixes(2, 1) + df_mgr = self.df_router[policy] + a, c, o = paths[suffix][0] + df1 = df_mgr.get_diskfile( + 'sda1', '0', a, c, o, policy=policy) + # scale back this tests manager's reclaim age a bit + df_mgr.reclaim_age = 1000 + # write one tombstone that's just a *little* older + df1.delete(Timestamp(time() - 1001)) + # create another tombstone in same suffix dir that's newer + a, c, o = paths[suffix][1] + df2 = df_mgr.get_diskfile( + 'sda1', '0', a, c, o, policy=policy) + t_df2 = Timestamp(time() - 900) + df2.delete(t_df2) + + hashes = df_mgr.get_hashes('sda1', '0', [], policy) + + suffix = os.path.basename(os.path.dirname(df1._datadir)) + df2_tombstone_hash = md5(t_df2.internal + '.ts').hexdigest() + expected = { + REPL_POLICY: {suffix: df2_tombstone_hash}, + EC_POLICY: {suffix: { + # fi is None here because we have a tombstone + None: df2_tombstone_hash}}, + }[policy.policy_type] + + self.assertEqual(hashes, expected) + def test_hash_suffix_one_datafile(self): for policy in self.iter_policies(): df_mgr = self.df_router[policy] From 310ac18d61b001bdf297505243d8aa62e8fde8fe Mon Sep 17 00:00:00 2001 From: Brian Reitz Date: Mon, 3 Aug 2015 16:28:31 -0500 Subject: [PATCH 11/70] Convert docstring to a comment in test_denied_DELETE_of_versioned_object unit test. Docstrings cause issues for nose and created non standard test output for this test. 
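For illustration only, a toy example of the behavior being avoided: nose substitutes the first line of a test's docstring for the test name in its output, while a comment leaves the reported name untouched.

    import unittest

    class TestExample(unittest.TestCase):

        def test_with_docstring(self):
            """Verify read access cannot cause writes."""
            # nose reports the docstring line above instead of
            # "test_with_docstring" in its output
            self.assertTrue(True)

        def test_with_comment(self):
            # Verify read access cannot cause writes.
            # nose reports the actual test name, "test_with_comment"
            self.assertTrue(True)

    if __name__ == '__main__':
        unittest.main()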
Change-Id: I91e6450480567de0ee0b18d3efae01a4b94f0a86 --- test/unit/proxy/test_server.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 3171330ab7..17c88ac8f6 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -2811,10 +2811,9 @@ class TestObjectController(unittest.TestCase): StoragePolicy(1, 'one', True, object_ring=FakeRing()) ]) def test_denied_DELETE_of_versioned_object(self): - """ - Verify that a request with read access to a versions container - is unable to cause any write operations on the versioned container. - """ + # Verify that a request with read access to a versions container + # is unable to cause any write operations on the versioned container. + # reset the router post patch_policies self.app.obj_controller_router = proxy_server.ObjectControllerRouter() methods = set() From 8d06f56c1fac23bca463d3a93353da6b50ad5bed Mon Sep 17 00:00:00 2001 From: Takashi NATSUME Date: Tue, 4 Aug 2015 10:14:25 +0900 Subject: [PATCH 12/70] Fix typo of 'receive' Change-Id: Ic01defa8d48ba4b7f4bd6cbe4d59b9bab468a36c --- swift/obj/ssync_receiver.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/obj/ssync_receiver.py b/swift/obj/ssync_receiver.py index 8bff25c8ba..394d2b0158 100644 --- a/swift/obj/ssync_receiver.py +++ b/swift/obj/ssync_receiver.py @@ -164,7 +164,7 @@ class Receiver(object): self.node_index = int( self.request.headers['X-Backend-Ssync-Node-Index']) if self.node_index != self.frag_index: - # a primary node should only recieve it's own fragments + # a primary node should only receive it's own fragments raise swob.HTTPBadRequest( 'Frag-Index (%s) != Node-Index (%s)' % ( self.frag_index, self.node_index)) From 5b24b2249878e25ee702ee4237227c984036c6ec Mon Sep 17 00:00:00 2001 From: OpenStack Proposal Bot Date: Tue, 4 Aug 2015 06:29:11 +0000 Subject: [PATCH 13/70] Imported Translations from Transifex For more information about this automatic import see: https://wiki.openstack.org/wiki/Translations/Infrastructure Change-Id: I9f0e5fd25ec143fb87de1895794764dea4587f93 --- swift/locale/swift.pot | 110 ++++++++++++------------ swift/locale/zh_CN/LC_MESSAGES/swift.po | 4 +- 2 files changed, 57 insertions(+), 57 deletions(-) diff --git a/swift/locale/swift.pot b/swift/locale/swift.pot index 250a6fda47..0947dfcd39 100644 --- a/swift/locale/swift.pot +++ b/swift/locale/swift.pot @@ -6,9 +6,9 @@ #, fuzzy msgid "" msgstr "" -"Project-Id-Version: swift 2.3.1.dev213\n" +"Project-Id-Version: swift 2.3.1.dev243\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2015-07-29 06:35+0000\n" +"POT-Creation-Date: 2015-08-04 06:29+0000\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -53,18 +53,18 @@ msgid "" " %(key)s across policies (%(sum)s)" msgstr "" -#: swift/account/auditor.py:149 +#: swift/account/auditor.py:148 #, python-format msgid "Audit Failed for %s: %s" msgstr "" -#: swift/account/auditor.py:153 +#: swift/account/auditor.py:152 #, python-format msgid "ERROR Could not get account info %s" msgstr "" #: swift/account/reaper.py:134 swift/common/utils.py:2147 -#: swift/obj/diskfile.py:480 swift/obj/updater.py:88 swift/obj/updater.py:131 +#: swift/obj/diskfile.py:296 swift/obj/updater.py:88 swift/obj/updater.py:131 #, python-format msgid "Skipping %s as it is not mounted" msgstr "" @@ -154,7 +154,7 @@ msgid "Exception with objects for container %(container)s for account 
%(account) msgstr "" #: swift/account/server.py:275 swift/container/server.py:586 -#: swift/obj/server.py:914 +#: swift/obj/server.py:911 #, python-format msgid "ERROR __call__ error with %(method)s %(path)s " msgstr "" @@ -732,8 +732,8 @@ msgstr "" msgid "ERROR: Failed to get paths to drive partitions: %s" msgstr "" -#: swift/container/updater.py:92 swift/obj/reconstructor.py:815 -#: swift/obj/replicator.py:498 swift/obj/replicator.py:586 +#: swift/container/updater.py:92 swift/obj/reconstructor.py:812 +#: swift/obj/replicator.py:497 swift/obj/replicator.py:585 #, python-format msgid "%s is not mounted" msgstr "" @@ -845,53 +845,53 @@ msgstr "" msgid "ERROR auditing: %s" msgstr "" -#: swift/obj/diskfile.py:327 swift/obj/diskfile.py:2320 -#, python-format -msgid "Quarantined %(hsh_path)s to %(quar_path)s because it is not a directory" -msgstr "" - -#: swift/obj/diskfile.py:418 swift/obj/diskfile.py:2388 -msgid "Error hashing suffix" -msgstr "" - -#: swift/obj/diskfile.py:490 swift/obj/updater.py:162 +#: swift/obj/diskfile.py:306 swift/obj/updater.py:162 #, python-format msgid "Directory %r does not map to a valid policy (%s)" msgstr "" -#: swift/obj/diskfile.py:741 +#: swift/obj/diskfile.py:619 +#, python-format +msgid "Quarantined %(hsh_path)s to %(quar_path)s because it is not a directory" +msgstr "" + +#: swift/obj/diskfile.py:700 +msgid "Error hashing suffix" +msgstr "" + +#: swift/obj/diskfile.py:821 #, python-format msgid "Quarantined %(object_path)s to %(quar_path)s because it is not a directory" msgstr "" -#: swift/obj/diskfile.py:941 +#: swift/obj/diskfile.py:1035 #, python-format msgid "Problem cleaning up %s" msgstr "" -#: swift/obj/diskfile.py:1259 +#: swift/obj/diskfile.py:1342 #, python-format msgid "ERROR DiskFile %(data_file)s close failure: %(exc)s : %(stack)s" msgstr "" -#: swift/obj/diskfile.py:1549 +#: swift/obj/diskfile.py:1612 #, python-format msgid "" "Client path %(client)s does not match path stored in object metadata " "%(meta)s" msgstr "" -#: swift/obj/diskfile.py:1805 +#: swift/obj/diskfile.py:2048 #, python-format msgid "No space left on device for %s (%s)" msgstr "" -#: swift/obj/diskfile.py:1814 +#: swift/obj/diskfile.py:2057 #, python-format msgid "Problem cleaning up %s (%s)" msgstr "" -#: swift/obj/diskfile.py:1817 +#: swift/obj/diskfile.py:2060 #, python-format msgid "Problem writing durable state file %s (%s)" msgstr "" @@ -925,7 +925,7 @@ msgstr "" msgid "Exception while deleting object %s %s %s" msgstr "" -#: swift/obj/reconstructor.py:208 swift/obj/reconstructor.py:490 +#: swift/obj/reconstructor.py:208 swift/obj/reconstructor.py:492 #, python-format msgid "Invalid response %(resp)s from %(full_path)s" msgstr "" @@ -948,14 +948,14 @@ msgid "" "%(time).2fs (%(rate).2f/sec, %(remaining)s remaining)" msgstr "" -#: swift/obj/reconstructor.py:369 swift/obj/replicator.py:430 +#: swift/obj/reconstructor.py:369 swift/obj/replicator.py:429 #, python-format msgid "" "%(checked)d suffixes checked - %(hashed).2f%% hashed, %(synced).2f%% " "synced" msgstr "" -#: swift/obj/reconstructor.py:376 swift/obj/replicator.py:437 +#: swift/obj/reconstructor.py:376 swift/obj/replicator.py:436 #, python-format msgid "Partition times: max %(max).4fs, min %(min).4fs, med %(med).4fs" msgstr "" @@ -965,7 +965,7 @@ msgstr "" msgid "Nothing reconstructed for %s seconds." msgstr "" -#: swift/obj/reconstructor.py:413 swift/obj/replicator.py:474 +#: swift/obj/reconstructor.py:413 swift/obj/replicator.py:473 msgid "Lockup detected.. killing live coros." 
msgstr "" @@ -979,122 +979,122 @@ msgstr "" msgid "%s responded as unmounted" msgstr "" -#: swift/obj/reconstructor.py:886 swift/obj/replicator.py:306 +#: swift/obj/reconstructor.py:883 swift/obj/replicator.py:305 #, python-format msgid "Removing partition: %s" msgstr "" -#: swift/obj/reconstructor.py:902 +#: swift/obj/reconstructor.py:899 msgid "Ring change detected. Aborting current reconstruction pass." msgstr "" -#: swift/obj/reconstructor.py:921 +#: swift/obj/reconstructor.py:918 msgid "Exception in top-levelreconstruction loop" msgstr "" -#: swift/obj/reconstructor.py:931 +#: swift/obj/reconstructor.py:928 msgid "Running object reconstructor in script mode." msgstr "" -#: swift/obj/reconstructor.py:940 +#: swift/obj/reconstructor.py:937 #, python-format msgid "Object reconstruction complete (once). (%.02f minutes)" msgstr "" -#: swift/obj/reconstructor.py:947 +#: swift/obj/reconstructor.py:944 msgid "Starting object reconstructor in daemon mode." msgstr "" -#: swift/obj/reconstructor.py:951 +#: swift/obj/reconstructor.py:948 msgid "Starting object reconstruction pass." msgstr "" -#: swift/obj/reconstructor.py:956 +#: swift/obj/reconstructor.py:953 #, python-format msgid "Object reconstruction complete. (%.02f minutes)" msgstr "" -#: swift/obj/replicator.py:145 +#: swift/obj/replicator.py:144 #, python-format msgid "Killing long-running rsync: %s" msgstr "" -#: swift/obj/replicator.py:159 +#: swift/obj/replicator.py:158 #, python-format msgid "Bad rsync return code: %(ret)d <- %(args)s" msgstr "" -#: swift/obj/replicator.py:166 swift/obj/replicator.py:170 +#: swift/obj/replicator.py:165 swift/obj/replicator.py:169 #, python-format msgid "Successful rsync of %(src)s at %(dst)s (%(time).03f)" msgstr "" -#: swift/obj/replicator.py:292 +#: swift/obj/replicator.py:291 #, python-format msgid "Removing %s objects" msgstr "" -#: swift/obj/replicator.py:300 +#: swift/obj/replicator.py:299 msgid "Error syncing handoff partition" msgstr "" -#: swift/obj/replicator.py:362 +#: swift/obj/replicator.py:361 #, python-format msgid "%(ip)s/%(device)s responded as unmounted" msgstr "" -#: swift/obj/replicator.py:367 +#: swift/obj/replicator.py:366 #, python-format msgid "Invalid response %(resp)s from %(ip)s" msgstr "" -#: swift/obj/replicator.py:402 +#: swift/obj/replicator.py:401 #, python-format msgid "Error syncing with node: %s" msgstr "" -#: swift/obj/replicator.py:406 +#: swift/obj/replicator.py:405 msgid "Error syncing partition" msgstr "" -#: swift/obj/replicator.py:419 +#: swift/obj/replicator.py:418 #, python-format msgid "" "%(replicated)d/%(total)d (%(percentage).2f%%) partitions replicated in " "%(time).2fs (%(rate).2f/sec, %(remaining)s remaining)" msgstr "" -#: swift/obj/replicator.py:445 +#: swift/obj/replicator.py:444 #, python-format msgid "Nothing replicated for %s seconds." msgstr "" -#: swift/obj/replicator.py:589 +#: swift/obj/replicator.py:588 msgid "Ring change detected. Aborting current replication pass." msgstr "" -#: swift/obj/replicator.py:610 +#: swift/obj/replicator.py:609 msgid "Exception in top-level replication loop" msgstr "" -#: swift/obj/replicator.py:619 +#: swift/obj/replicator.py:618 msgid "Running object replicator in script mode." msgstr "" -#: swift/obj/replicator.py:637 +#: swift/obj/replicator.py:636 #, python-format msgid "Object replication complete (once). (%.02f minutes)" msgstr "" -#: swift/obj/replicator.py:644 +#: swift/obj/replicator.py:643 msgid "Starting object replicator in daemon mode." 
msgstr "" -#: swift/obj/replicator.py:648 +#: swift/obj/replicator.py:647 msgid "Starting object replication pass." msgstr "" -#: swift/obj/replicator.py:653 +#: swift/obj/replicator.py:652 #, python-format msgid "Object replication complete. (%.02f minutes)" msgstr "" diff --git a/swift/locale/zh_CN/LC_MESSAGES/swift.po b/swift/locale/zh_CN/LC_MESSAGES/swift.po index f375ecd87f..823a9a1a8c 100644 --- a/swift/locale/zh_CN/LC_MESSAGES/swift.po +++ b/swift/locale/zh_CN/LC_MESSAGES/swift.po @@ -8,10 +8,10 @@ msgid "" msgstr "" "Project-Id-Version: Swift\n" "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n" -"POT-Creation-Date: 2015-07-29 06:35+0000\n" +"POT-Creation-Date: 2015-08-04 06:29+0000\n" "PO-Revision-Date: 2015-07-28 00:33+0000\n" "Last-Translator: openstackjenkins \n" -"Language-Team: Chinese (China) (http://www.transifex.com/projects/p/swift/" +"Language-Team: Chinese (China) (http://www.transifex.com/openstack/swift/" "language/zh_CN/)\n" "Plural-Forms: nplurals=1; plural=0\n" "MIME-Version: 1.0\n" From 01848c565e5821d2500cb234261c70871e562ebe Mon Sep 17 00:00:00 2001 From: David Goetz Date: Fri, 26 Jun 2015 17:27:31 -0700 Subject: [PATCH 14/70] Speed up reaper for a big account delete and some better error handling In run_forever mode, run the reaper on all three copies of the account DB instead of just the first one and shard the containers between them. Change-Id: Id182e89dad70df9d6d9988523861b8085d74c765 --- swift/account/reaper.py | 51 +++++++++++++++-- test/unit/account/test_reaper.py | 98 ++++++++++++++++++++++++++------ 2 files changed, 126 insertions(+), 23 deletions(-) diff --git a/swift/account/reaper.py b/swift/account/reaper.py index c121bf0ea5..e11fea6a47 100644 --- a/swift/account/reaper.py +++ b/swift/account/reaper.py @@ -15,10 +15,12 @@ import os import random +import socket from swift import gettext_ as _ from logging import DEBUG from math import sqrt from time import time +from hashlib import md5 import itertools from eventlet import GreenPool, sleep, Timeout @@ -70,6 +72,7 @@ class AccountReaper(Daemon): self.node_timeout = int(conf.get('node_timeout', 10)) self.conn_timeout = float(conf.get('conn_timeout', 0.5)) self.myips = whataremyips(conf.get('bind_ip', '0.0.0.0')) + self.bind_port = int(conf.get('bind_port', 0)) self.concurrency = int(conf.get('concurrency', 25)) self.container_concurrency = self.object_concurrency = \ sqrt(self.concurrency) @@ -79,6 +82,7 @@ class AccountReaper(Daemon): self.delay_reaping = int(conf.get('delay_reaping') or 0) reap_warn_after = float(conf.get('reap_warn_after') or 86400 * 30) self.reap_not_done_after = reap_warn_after + self.delay_reaping + self.start_time = time() def get_account_ring(self): """The account :class:`swift.common.ring.Ring` for the cluster.""" @@ -161,9 +165,16 @@ class AccountReaper(Daemon): if not partition.isdigit(): continue nodes = self.get_account_ring().get_part_nodes(int(partition)) - if (not is_local_device(self.myips, None, nodes[0]['ip'], None) - or not os.path.isdir(partition_path)): + if not os.path.isdir(partition_path): continue + container_shard = None + for container_shard, node in enumerate(nodes): + if is_local_device(self.myips, None, node['ip'], None) and \ + (not self.bind_port or self.bind_port == node['port']): + break + else: + continue + for suffix in os.listdir(partition_path): suffix_path = os.path.join(partition_path, suffix) if not os.path.isdir(suffix_path): @@ -181,7 +192,9 @@ class AccountReaper(Daemon): AccountBroker(os.path.join(hsh_path, fname)) if broker.is_status_deleted() and \ 
not broker.empty(): - self.reap_account(broker, partition, nodes) + self.reap_account( + broker, partition, nodes, + container_shard=container_shard) def reset_stats(self): self.stats_return_codes = {} @@ -192,7 +205,7 @@ class AccountReaper(Daemon): self.stats_containers_possibly_remaining = 0 self.stats_objects_possibly_remaining = 0 - def reap_account(self, broker, partition, nodes): + def reap_account(self, broker, partition, nodes, container_shard=None): """ Called once per pass for each account this server is the primary for and attempts to delete the data for the given account. The reaper will @@ -219,6 +232,8 @@ class AccountReaper(Daemon): :param broker: The AccountBroker for the account to delete. :param partition: The partition in the account ring the account is on. :param nodes: The primary node dicts for the account to delete. + :param container_shard: int used to shard containers reaped. If None, + will reap all containers. .. seealso:: @@ -237,16 +252,24 @@ class AccountReaper(Daemon): account = info['account'] self.logger.info(_('Beginning pass on account %s'), account) self.reset_stats() + container_limit = 1000 + if container_shard is not None: + container_limit *= len(nodes) try: marker = '' while True: containers = \ - list(broker.list_containers_iter(1000, marker, None, None, - None)) + list(broker.list_containers_iter(container_limit, marker, + None, None, None)) if not containers: break try: for (container, _junk, _junk, _junk) in containers: + this_shard = int(md5(container).hexdigest(), 16) % \ + len(nodes) + if container_shard not in (this_shard, None): + continue + self.container_pool.spawn(self.reap_container, account, partition, nodes, container) self.container_pool.waitall() @@ -351,6 +374,10 @@ class AccountReaper(Daemon): self.stats_return_codes.get(err.http_status / 100, 0) + 1 self.logger.increment( 'return_codes.%d' % (err.http_status / 100,)) + except (Timeout, socket.error) as err: + self.logger.error( + _('Timeout Exception with %(ip)s:%(port)s/%(device)s'), + node) if not objects: break try: @@ -403,6 +430,12 @@ class AccountReaper(Daemon): self.stats_return_codes.get(err.http_status / 100, 0) + 1 self.logger.increment( 'return_codes.%d' % (err.http_status / 100,)) + except (Timeout, socket.error) as err: + self.logger.error( + _('Timeout Exception with %(ip)s:%(port)s/%(device)s'), + node) + failures += 1 + self.logger.increment('containers_failures') if successes > failures: self.stats_containers_deleted += 1 self.logger.increment('containers_deleted') @@ -473,6 +506,12 @@ class AccountReaper(Daemon): self.stats_return_codes.get(err.http_status / 100, 0) + 1 self.logger.increment( 'return_codes.%d' % (err.http_status / 100,)) + except (Timeout, socket.error) as err: + failures += 1 + self.logger.increment('objects_failures') + self.logger.error( + _('Timeout Exception with %(ip)s:%(port)s/%(device)s'), + node) if successes > failures: self.stats_objects_deleted += 1 self.logger.increment('objects_deleted') diff --git a/test/unit/account/test_reaper.py b/test/unit/account/test_reaper.py index b413a646a1..828fda0cd7 100644 --- a/test/unit/account/test_reaper.py +++ b/test/unit/account/test_reaper.py @@ -77,6 +77,7 @@ class FakeBroker(object): class FakeAccountBroker(object): def __init__(self, containers): self.containers = containers + self.containers_yielded = [] def get_info(self): info = {'account': 'a', @@ -101,11 +102,11 @@ class FakeRing(object): 'port': 6002, 'device': None}, {'id': '2', - 'ip': '10.10.10.1', + 'ip': '10.10.10.2', 'port': 
6002, 'device': None}, {'id': '3', - 'ip': '10.10.10.1', + 'ip': '10.10.10.3', 'port': 6002, 'device': None}, ] @@ -504,24 +505,26 @@ class TestReaper(unittest.TestCase): self.called_amount = 0 self.r = r = self.init_reaper({}, fakelogger=True) r.start_time = time.time() - ctx = [patch('swift.account.reaper.AccountReaper.reap_container', - self.fake_reap_container), - patch('swift.account.reaper.AccountReaper.get_account_ring', - self.fake_account_ring)] - with nested(*ctx): + with patch('swift.account.reaper.AccountReaper.reap_container', + self.fake_reap_container), \ + patch('swift.account.reaper.AccountReaper.get_account_ring', + self.fake_account_ring): nodes = r.get_account_ring().get_part_nodes() - self.assertTrue(r.reap_account(broker, 'partition', nodes)) + for container_shard, node in enumerate(nodes): + self.assertTrue( + r.reap_account(broker, 'partition', nodes, + container_shard=container_shard)) self.assertEqual(self.called_amount, 4) info_lines = r.logger.get_lines_for_level('info') - self.assertEqual(len(info_lines), 2) - start_line, stat_line = info_lines - self.assertEqual(start_line, 'Beginning pass on account a') - self.assertTrue(stat_line.find('1 containers deleted')) - self.assertTrue(stat_line.find('1 objects deleted')) - self.assertTrue(stat_line.find('1 containers remaining')) - self.assertTrue(stat_line.find('1 objects remaining')) - self.assertTrue(stat_line.find('1 containers possibly remaining')) - self.assertTrue(stat_line.find('1 objects possibly remaining')) + self.assertEqual(len(info_lines), 6) + for start_line, stat_line in zip(*[iter(info_lines)] * 2): + self.assertEqual(start_line, 'Beginning pass on account a') + self.assertTrue(stat_line.find('1 containers deleted')) + self.assertTrue(stat_line.find('1 objects deleted')) + self.assertTrue(stat_line.find('1 containers remaining')) + self.assertTrue(stat_line.find('1 objects remaining')) + self.assertTrue(stat_line.find('1 containers possibly remaining')) + self.assertTrue(stat_line.find('1 objects possibly remaining')) def test_reap_account_no_container(self): broker = FakeAccountBroker(tuple()) @@ -584,6 +587,67 @@ class TestReaper(unittest.TestCase): r.reap_device('sda1') self.assertEqual(self.called_amount, 0) + def test_reap_device_with_sharding(self): + devices = self.prepare_data_dir() + conf = {'devices': devices} + r = self.init_reaper(conf, myips=['10.10.10.2']) + container_shard_used = [-1] + + def fake_reap_account(*args, **kwargs): + container_shard_used[0] = kwargs.get('container_shard') + + with patch('swift.account.reaper.AccountBroker', + FakeAccountBroker), \ + patch('swift.account.reaper.AccountReaper.get_account_ring', + self.fake_account_ring), \ + patch('swift.account.reaper.AccountReaper.reap_account', + fake_reap_account): + r.reap_device('sda1') + # 10.10.10.2 is second node from ring + self.assertEqual(container_shard_used[0], 1) + + def test_reap_account_with_sharding(self): + devices = self.prepare_data_dir() + self.called_amount = 0 + conf = {'devices': devices} + r = self.init_reaper(conf, myips=['10.10.10.2']) + + container_reaped = [0] + + def fake_list_containers_iter(self, *args): + for container in self.containers: + if container in self.containers_yielded: + continue + + yield container, None, None, None + self.containers_yielded.append(container) + + def fake_reap_container(self, account, account_partition, + account_nodes, container): + container_reaped[0] += 1 + + ctx = [patch('swift.account.reaper.AccountBroker', + FakeAccountBroker), + 
patch('swift.account.reaper.AccountBroker.list_containers_iter', + fake_list_containers_iter), + patch('swift.account.reaper.AccountReaper.reap_container', + fake_reap_container), ] + fake_ring = FakeRing() + with nested(*ctx): + fake_broker = FakeAccountBroker(['c', 'd', 'e']) + r.reap_account(fake_broker, 10, fake_ring.nodes, 0) + self.assertEqual(container_reaped[0], 1) + + fake_broker = FakeAccountBroker(['c', 'd', 'e']) + container_reaped[0] = 0 + r.reap_account(fake_broker, 10, fake_ring.nodes, 1) + self.assertEqual(container_reaped[0], 2) + + container_reaped[0] = 0 + fake_broker = FakeAccountBroker(['c', 'd', 'e']) + r.reap_account(fake_broker, 10, fake_ring.nodes, 2) + self.assertEqual(container_reaped[0], 0) + def test_run_once(self): def prepare_data_dir(): devices_path = tempfile.mkdtemp() From 7071762d3698c59fabdee76890e300f81417be74 Mon Sep 17 00:00:00 2001 From: Clay Gerrard Date: Tue, 4 Aug 2015 23:15:37 -0700 Subject: [PATCH 15/70] Fix TypeError if backend response doesn't have expected headers There was some debug logging mixed in with some error handling on PUTs that was relying on a very specific edge would only encounter a set of backend responses that included the expected set of headers to diagnoise the failure. But the backend responses may not always have the expected headers. The proxy debug logging should be more robust to missing headers. It's a little hard to follow, but if you look `_connect_put_node` in swift.proxy.controller.obj - you'll see that only a few connections can make their way out of the initial put connection handling with a "resp" attribute that is not None. In the happy path (e.g. 100-Continue) it's explictly set to None, and in most errors (Timeout, 503, 413, etc) a new connection will be established to the next node in the node iter. Some status code will however allow a conn to be returned for validation in `_check_failure_put_connections`, i.e. * 2XX (e.g. 0-byte PUT would not send Expect 100-Continue) * 409 - Conflict with another timestamp * 412 - If-None-Match that encounters another object ... so I added tests for those - fixing a TypeError along the way. 
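As an aside, the failure mode and the fix can be reduced to a few lines of plain Python, using ordinary dicts here in place of real backend response headers:

    # one backend response is missing the expected header
    resp_headers = [{'X-Backend-Timestamp': '1438700000.00000'}, {}]

    # old pattern: the join blows up as soon as a header is absent,
    # because the list then contains None
    timestamps = [h.get('X-Backend-Timestamp') for h in resp_headers]
    try:
        ', '.join(timestamps)
    except TypeError:
        pass  # this is the crash the patch fixes

    # new pattern: always format a string, falling back to 'unknown'
    status_times = ['%s (%s)' % (409, h.get('X-Backend-Timestamp', 'unknown'))
                    for h in resp_headers]
    print(', '.join(status_times))   # 409 (1438700000.00000), 409 (unknown)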
Change-Id: Ibdad5a90fa14ce62d081e6aaf40aacfca31b94d2 --- swift/proxy/controllers/obj.py | 10 +++++-- test/unit/__init__.py | 4 ++- test/unit/proxy/controllers/test_obj.py | 37 +++++++++++++++++++++++++ 3 files changed, 47 insertions(+), 4 deletions(-) diff --git a/swift/proxy/controllers/obj.py b/swift/proxy/controllers/obj.py index a4bd0733c8..e86b35debe 100644 --- a/swift/proxy/controllers/obj.py +++ b/swift/proxy/controllers/obj.py @@ -657,13 +657,17 @@ class BaseObjectController(Controller): if any(conn for conn in conns if conn.resp and conn.resp.status == HTTP_CONFLICT): - timestamps = [HeaderKeyDict(conn.resp.getheaders()).get( - 'X-Backend-Timestamp') for conn in conns if conn.resp] + status_times = ['%(status)s (%(timestamp)s)' % { + 'status': conn.resp.status, + 'timestamp': HeaderKeyDict( + conn.resp.getheaders()).get( + 'X-Backend-Timestamp', 'unknown') + } for conn in conns if conn.resp] self.app.logger.debug( _('Object PUT returning 202 for 409: ' '%(req_timestamp)s <= %(timestamps)r'), {'req_timestamp': req.timestamp.internal, - 'timestamps': ', '.join(timestamps)}) + 'timestamps': ', '.join(status_times)}) raise HTTPAccepted(request=req) self._check_min_conn(req, conns, min_conns) diff --git a/test/unit/__init__.py b/test/unit/__init__.py index b67f44342c..fd9411b942 100644 --- a/test/unit/__init__.py +++ b/test/unit/__init__.py @@ -860,7 +860,9 @@ def fake_http_connect(*code_iter, **kwargs): headers = dict(self.expect_headers) if expect_status == 409: headers['X-Backend-Timestamp'] = self.timestamp - response = FakeConn(expect_status, headers=headers) + response = FakeConn(expect_status, + timestamp=self.timestamp, + headers=headers) response.status = expect_status return response diff --git a/test/unit/proxy/controllers/test_obj.py b/test/unit/proxy/controllers/test_obj.py index 2684927d02..22685ad178 100755 --- a/test/unit/proxy/controllers/test_obj.py +++ b/test/unit/proxy/controllers/test_obj.py @@ -771,6 +771,43 @@ class TestReplicatedObjController(BaseObjectControllerMixin, resp = req.get_response(self.app) self.assertEqual(resp.status_int, 202) + def test_put_x_timestamp_conflict_with_missing_backend_timestamp(self): + ts = (utils.Timestamp(t) for t in itertools.count(int(time.time()))) + req = swob.Request.blank( + '/v1/a/c/o', method='PUT', headers={ + 'Content-Length': 0, + 'X-Timestamp': ts.next().internal}) + ts_iter = iter([None, None, None]) + codes = [409] * self.obj_ring.replicas + with set_http_connect(*codes, timestamps=ts_iter): + resp = req.get_response(self.app) + self.assertEqual(resp.status_int, 202) + + def test_put_x_timestamp_conflict_with_other_weird_success_response(self): + ts = (utils.Timestamp(t) for t in itertools.count(int(time.time()))) + req = swob.Request.blank( + '/v1/a/c/o', method='PUT', headers={ + 'Content-Length': 0, + 'X-Timestamp': ts.next().internal}) + ts_iter = iter([ts.next().internal, None, None]) + codes = [409] + [(201, 'notused')] * (self.obj_ring.replicas - 1) + with set_http_connect(*codes, timestamps=ts_iter): + resp = req.get_response(self.app) + self.assertEqual(resp.status_int, 202) + + def test_put_x_timestamp_conflict_with_if_none_match(self): + ts = (utils.Timestamp(t) for t in itertools.count(int(time.time()))) + req = swob.Request.blank( + '/v1/a/c/o', method='PUT', headers={ + 'Content-Length': 0, + 'If-None-Match': '*', + 'X-Timestamp': ts.next().internal}) + ts_iter = iter([ts.next().internal, None, None]) + codes = [409] + [(412, 'notused')] * (self.obj_ring.replicas - 1) + with set_http_connect(*codes, 
timestamps=ts_iter): + resp = req.get_response(self.app) + self.assertEqual(resp.status_int, 412) + def test_container_sync_put_x_timestamp_race(self): ts = (utils.Timestamp(t) for t in itertools.count(int(time.time()))) test_indexes = [None] + [int(p) for p in POLICIES] From f0d51882b921075f5309ed6ff07385dc6103d4dc Mon Sep 17 00:00:00 2001 From: Charles Hsu Date: Wed, 5 Aug 2015 22:09:40 +0800 Subject: [PATCH 16/70] Add extra_header_count to document and config. Change-Id: Iec86b488d71553c295afe7098822ce2046df9546 --- doc/source/deployment_guide.rst | 7 +++++++ etc/swift.conf-sample | 2 +- test/sample.conf | 1 + 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/doc/source/deployment_guide.rst b/doc/source/deployment_guide.rst index 6969a331c1..5813f5d9bb 100644 --- a/doc/source/deployment_guide.rst +++ b/doc/source/deployment_guide.rst @@ -390,6 +390,13 @@ max_header_size 8192 max_header_size is the max number of bytes in See also include_service_catalog in proxy-server.conf-sample (documented in overview_auth.rst). +extra_header_count 0 By default the maximum number of allowed + headers depends on the number of max + allowed metadata settings plus a default + value of 32 for regular http headers. + If for some reason this is not enough (custom + middleware for example) it can be increased + with the extra_header_count constraint. =================== ========== ============================================= --------------------------- diff --git a/etc/swift.conf-sample b/etc/swift.conf-sample index 76d1e876ae..18cb047cf5 100644 --- a/etc/swift.conf-sample +++ b/etc/swift.conf-sample @@ -134,7 +134,7 @@ default = yes # headers. If for some reason this is not enough (custom middleware for # example) it can be increased with the extra_header_count constraint. -#extra_header_count = 32 +#extra_header_count = 0 # max_object_name_length is the max number of bytes in the utf8 encoding diff --git a/test/sample.conf b/test/sample.conf index d1aa030401..3cf3609e59 100644 --- a/test/sample.conf +++ b/test/sample.conf @@ -93,6 +93,7 @@ fake_syslog = False #max_meta_count = 90 #max_meta_overall_size = 4096 #max_header_size = 8192 +#extra_header_count = 0 #max_object_name_length = 1024 #container_listing_limit = 10000 #account_listing_limit = 10000 From 89f59062864e5cbfc839a6084c323ce35438aa57 Mon Sep 17 00:00:00 2001 From: Ben Martin Date: Mon, 27 Jul 2015 14:19:09 -0500 Subject: [PATCH 17/70] +Document method to avoid rsync filling root drive When rsync pushes to a remote node with an unmounted drive and if certain steps are not taken, rsync may attempt to write files to the local drive at the location where the drive was mounted. There are two suggested solutions for this issue: 1) Set the permissions for all mount points in /srv/node/ to root:root 755 2) Mount the drives elsewhere and symlink the drives to /srv/.../ The first method ensures that only root and not the swift user can write in the /srv/.../ directories. The second method will prompt a broken link issue if rsync attempts to write to an unmounted drive. Change-Id: I60ce4ed9ef8401768d5f78b6806cbb2e2a65303e Closes-Bug: #1470576 --- doc/source/admin_guide.rst | 4 ++++ doc/source/deployment_guide.rst | 4 ++++ doc/source/development_saio.rst | 19 +++++++++++++++++++ 3 files changed, 27 insertions(+) diff --git a/doc/source/admin_guide.rst b/doc/source/admin_guide.rst index f27c20741e..aca4bbca05 100644 --- a/doc/source/admin_guide.rst +++ b/doc/source/admin_guide.rst @@ -154,6 +154,10 @@ until it has been resolved. 
If the drive is going to be replaced immediately, then it is just best to replace the drive, format it, remount it, and let replication fill it up. +After the drive is unmounted, make sure the mount point is owned by root +(root:root 755). This ensures that rsync will not try to replicate into the +root drive once the failed drive is unmounted. + If the drive can't be replaced immediately, then it is best to leave it unmounted, and set the device weight to 0. This will allow all the replicas that were on that drive to be replicated elsewhere until the drive diff --git a/doc/source/deployment_guide.rst b/doc/source/deployment_guide.rst index 6969a331c1..3d86746260 100644 --- a/doc/source/deployment_guide.rst +++ b/doc/source/deployment_guide.rst @@ -1229,6 +1229,10 @@ For a standard swift install, all data drives are mounted directly under be sure to set the `devices` config option in all of the server configs to point to the correct directory. +The mount points for each drive in /srv/node/ should be owned by the root user +almost exclusively (root:root 755). This is required to prevent rsync from +syncing files into the root drive in the event a drive is unmounted. + Swift uses system calls to reserve space for new objects being written into the system. If your filesystem does not support `fallocate()` or `posix_fallocate()`, be sure to set the `disable_fallocate = true` config diff --git a/doc/source/development_saio.rst b/doc/source/development_saio.rst index 1e6825f661..e406fb02ef 100644 --- a/doc/source/development_saio.rst +++ b/doc/source/development_saio.rst @@ -95,6 +95,16 @@ another device when creating the VM, and follow these instructions: # **Make sure to include the trailing slash after /srv/$x/** for x in {1..4}; do sudo chown -R ${USER}:${USER} /srv/$x/; done + Note: We create the mount points and mount the storage disk under + /mnt/sdb1. This disk will contain one directory per simulated swift node, + each owned by the current swift user. + + We then create symlinks to these directories under /srv. + If the disk sdb is unmounted, files will not be written under + /srv/\*, because the symbolic link destination /mnt/sdb1/* will not + exist. This prevents disk sync operations from writing to the root + partition in the event a drive is unmounted. + #. Next, skip to :ref:`common-dev-section`. @@ -135,6 +145,15 @@ these instructions: # **Make sure to include the trailing slash after /srv/$x/** for x in {1..4}; do sudo chown -R ${USER}:${USER} /srv/$x/; done + Note: We create the mount points and mount the loopback file under + /mnt/sdb1. This file will contain one directory per simulated swift node, + each owned by the current swift user. + + We then create symlinks to these directories under /srv. + If the loopback file is unmounted, files will not be written under + /srv/\*, because the symbolic link destination /mnt/sdb1/* will not + exist. This prevents disk sync operations from writing to the root + partition in the event a drive is unmounted. .. 
_common-dev-section: From 6594bbebb582801c069c4966352fee19eadef2f8 Mon Sep 17 00:00:00 2001 From: janonymous Date: Wed, 5 Aug 2015 22:25:46 +0530 Subject: [PATCH 18/70] pep8 fix: assertEquals -> assertEqual assertEquals is deprecated in py3 in dir : test/unit/account/* Change-Id: I70415197df1b0d58d4d6e219733c13a9c9cdfff7 --- test/unit/account/test_backend.py | 8 +++---- test/unit/account/test_reaper.py | 2 +- test/unit/account/test_server.py | 40 +++++++++++++++---------------- 3 files changed, 25 insertions(+), 25 deletions(-) diff --git a/test/unit/account/test_backend.py b/test/unit/account/test_backend.py index e1306d5be6..5571b50e15 100644 --- a/test/unit/account/test_backend.py +++ b/test/unit/account/test_backend.py @@ -52,9 +52,9 @@ class TestAccountBroker(unittest.TestCase): pass except DatabaseConnectionError as e: self.assertTrue(hasattr(e, 'path')) - self.assertEquals(e.path, ':memory:') + self.assertEqual(e.path, ':memory:') self.assertTrue(hasattr(e, 'msg')) - self.assertEquals(e.msg, "DB doesn't exist") + self.assertEqual(e.msg, "DB doesn't exist") except Exception as e: self.fail("Unexpected exception raised: %r" % e) else: @@ -584,8 +584,8 @@ class TestAccountBroker(unittest.TestCase): broker2.merge_items(json.loads(json.dumps(broker1.get_items_since( broker2.get_sync(id1), 1000))), id1) items = broker2.get_items_since(-1, 1000) - self.assertEquals(['b', snowman], - sorted([rec['name'] for rec in items])) + self.assertEqual(['b', snowman], + sorted([rec['name'] for rec in items])) items_by_name = dict((rec['name'], rec) for rec in items) self.assertEqual(items_by_name[snowman]['object_count'], 2) diff --git a/test/unit/account/test_reaper.py b/test/unit/account/test_reaper.py index b413a646a1..e9776ecc3c 100644 --- a/test/unit/account/test_reaper.py +++ b/test/unit/account/test_reaper.py @@ -387,7 +387,7 @@ class TestReaper(unittest.TestCase): '1429117638.86767') # verify calls to direct_delete_container - self.assertEquals(mocks['direct_delete_container'].call_count, 3) + self.assertEqual(mocks['direct_delete_container'].call_count, 3) for i, call_args in enumerate( mocks['direct_delete_container'].call_args_list): anode = acc_nodes[i % len(acc_nodes)] diff --git a/test/unit/account/test_server.py b/test/unit/account/test_server.py index 00acb2598d..5b7a9f0853 100644 --- a/test/unit/account/test_server.py +++ b/test/unit/account/test_server.py @@ -63,13 +63,13 @@ class TestAccountController(unittest.TestCase): req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'}) req.content_length = 0 resp = server_handler.OPTIONS(req) - self.assertEquals(200, resp.status_int) + self.assertEqual(200, resp.status_int) for verb in 'OPTIONS GET POST PUT DELETE HEAD REPLICATE'.split(): self.assertTrue( verb in resp.headers['Allow'].split(', ')) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 7) - self.assertEquals(resp.headers['Server'], - (server_handler.server_type + '/' + swift_version)) + self.assertEqual(len(resp.headers['Allow'].split(', ')), 7) + self.assertEqual(resp.headers['Server'], + (server_handler.server_type + '/' + swift_version)) def test_DELETE_not_found(self): req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'DELETE', @@ -599,11 +599,11 @@ class TestAccountController(unittest.TestCase): req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a', 
environ={'REQUEST_METHOD': 'DELETE', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'POST', 'HTTP_X_TIMESTAMP': '2'}) resp = req.get_response(self.controller) @@ -1095,7 +1095,7 @@ class TestAccountController(unittest.TestCase): req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Put-Timestamp': '1', 'X-Delete-Timestamp': '0', @@ -1103,7 +1103,7 @@ class TestAccountController(unittest.TestCase): 'X-Bytes-Used': '0', 'X-Timestamp': normalize_timestamp(0)}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'GET'}) req.accept = 'application/*' resp = req.get_response(self.controller) @@ -1687,8 +1687,8 @@ class TestAccountController(unittest.TestCase): 'wsgi.multiprocess': False, 'wsgi.run_once': False} self.controller(env, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '405 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '405 ') def test_GET_log_requests_true(self): self.controller.logger = FakeLogger() @@ -1748,15 +1748,15 @@ class TestAccountController(unittest.TestCase): req = Request.blank('/sda1/p/a', method=method) resp = req.get_response(self.controller) self.assertEqual(resp.status_int // 100, 2) - self.assertEquals(resp.headers['X-Account-Object-Count'], '2') - self.assertEquals(resp.headers['X-Account-Bytes-Used'], '4') - self.assertEquals( + self.assertEqual(resp.headers['X-Account-Object-Count'], '2') + self.assertEqual(resp.headers['X-Account-Bytes-Used'], '4') + self.assertEqual( resp.headers['X-Account-Storage-Policy-%s-Object-Count' % POLICIES[0].name], '2') - self.assertEquals( + self.assertEqual( resp.headers['X-Account-Storage-Policy-%s-Bytes-Used' % POLICIES[0].name], '4') - self.assertEquals( + self.assertEqual( resp.headers['X-Account-Storage-Policy-%s-Container-Count' % POLICIES[0].name], '1') @@ -1786,15 +1786,15 @@ class TestAccountController(unittest.TestCase): req = Request.blank('/sda1/p/a', method=method) resp = req.get_response(self.controller) self.assertEqual(resp.status_int // 100, 2) - self.assertEquals(resp.headers['X-Account-Object-Count'], '2') - self.assertEquals(resp.headers['X-Account-Bytes-Used'], '4') - self.assertEquals( + self.assertEqual(resp.headers['X-Account-Object-Count'], '2') + self.assertEqual(resp.headers['X-Account-Bytes-Used'], '4') + self.assertEqual( resp.headers['X-Account-Storage-Policy-%s-Object-Count' % policy.name], '2') - self.assertEquals( + self.assertEqual( resp.headers['X-Account-Storage-Policy-%s-Bytes-Used' % policy.name], '4') - self.assertEquals( + self.assertEqual( resp.headers['X-Account-Storage-Policy-%s-Container-Count' % policy.name], '1') From f449e914728fdcaf0870c15b9fc5ebd16a9df916 Mon Sep 17 00:00:00 2001 From: janonymous Date: Wed, 5 Aug 2015 22:32:02 +0530 Subject: [PATCH 19/70] pep8 fix: assertEquals -> assertEqual assertEquals is deprecated in py3, fixing in: dir: test/unit/cli/* Change-Id: I9a2fc1f717beafd5fa8408942046e310e8de0318 --- test/unit/cli/test_info.py | 28 ++++++------ 
test/unit/cli/test_recon.py | 16 +++---- test/unit/cli/test_ringbuilder.py | 76 +++++++++++++++---------------- 3 files changed, 60 insertions(+), 60 deletions(-) diff --git a/test/unit/cli/test_info.py b/test/unit/cli/test_info.py index 0a573150e6..677f275d98 100644 --- a/test/unit/cli/test_info.py +++ b/test/unit/cli/test_info.py @@ -128,8 +128,8 @@ Metadata: No system metadata found in db file User Metadata: {'mydata': 'swift'}''' - self.assertEquals(sorted(out.getvalue().strip().split('\n')), - sorted(exp_out.split('\n'))) + self.assertEqual(sorted(out.getvalue().strip().split('\n')), + sorted(exp_out.split('\n'))) info = dict( account='acct', @@ -175,8 +175,8 @@ Metadata: X-Container-Foo: bar System Metadata: {'mydata': 'swift'} No user metadata found in db file''' % POLICIES[0].name - self.assertEquals(sorted(out.getvalue().strip().split('\n')), - sorted(exp_out.split('\n'))) + self.assertEqual(sorted(out.getvalue().strip().split('\n')), + sorted(exp_out.split('\n'))) def test_print_ring_locations_invalid_args(self): self.assertRaises(ValueError, print_ring_locations, @@ -306,7 +306,7 @@ No user metadata found in db file''' % POLICIES[0].name if exp_raised: exp_out = 'Does not appear to be a DB of type "account":' \ ' ./d49d0ecbb53be1fcc49624f2f7c7ccae.db' - self.assertEquals(out.getvalue().strip(), exp_out) + self.assertEqual(out.getvalue().strip(), exp_out) else: self.fail("Expected an InfoSystemExit exception to be raised") @@ -334,8 +334,8 @@ class TestPrintObj(TestCliInfoBase): out = StringIO() with mock.patch('sys.stdout', out): self.assertRaises(InfoSystemExit, print_obj, datafile) - self.assertEquals(out.getvalue().strip(), - 'Invalid metadata') + self.assertEqual(out.getvalue().strip(), + 'Invalid metadata') def test_print_obj_valid(self): out = StringIO() @@ -489,7 +489,7 @@ Other Metadata: No metadata found''' % ( utils.Timestamp(106.3).internal) - self.assertEquals(out.getvalue().strip(), exp_out) + self.assertEqual(out.getvalue().strip(), exp_out) metadata = get_metadata({ 'X-Object-Sysmeta-Mtime': '107.3', @@ -514,7 +514,7 @@ Other Metadata: No metadata found''' % ( utils.Timestamp(106.3).internal) - self.assertEquals(out.getvalue().strip(), exp_out) + self.assertEqual(out.getvalue().strip(), exp_out) metadata = get_metadata({ 'X-Object-Meta-Mtime': '107.3', @@ -539,7 +539,7 @@ Other Metadata: X-Object-Mtime: 107.3''' % ( utils.Timestamp(106.3).internal) - self.assertEquals(out.getvalue().strip(), exp_out) + self.assertEqual(out.getvalue().strip(), exp_out) metadata = get_metadata({}) out = StringIO() @@ -560,7 +560,7 @@ Other Metadata: No metadata found''' % ( utils.Timestamp(106.3).internal) - self.assertEquals(out.getvalue().strip(), exp_out) + self.assertEqual(out.getvalue().strip(), exp_out) metadata = get_metadata({'X-Object-Meta-Mtime': '107.3'}) metadata['name'] = '/a-s' @@ -583,7 +583,7 @@ Other Metadata: No metadata found''' % ( utils.Timestamp(106.3).internal) - self.assertEquals(out.getvalue().strip(), exp_out) + self.assertEqual(out.getvalue().strip(), exp_out) metadata = get_metadata({'X-Object-Meta-Mtime': '107.3'}) del metadata['Content-Type'] @@ -605,7 +605,7 @@ Other Metadata: No metadata found''' % ( utils.Timestamp(106.3).internal) - self.assertEquals(out.getvalue().strip(), exp_out) + self.assertEqual(out.getvalue().strip(), exp_out) metadata = get_metadata({'X-Object-Meta-Mtime': '107.3'}) del metadata['X-Timestamp'] @@ -626,7 +626,7 @@ User Metadata: Other Metadata: No metadata found''' - self.assertEquals(out.getvalue().strip(), exp_out) + 
self.assertEqual(out.getvalue().strip(), exp_out) class TestPrintObjWeirdPath(TestPrintObjFullMeta): diff --git a/test/unit/cli/test_recon.py b/test/unit/cli/test_recon.py index f48d44cac6..734b0d325e 100644 --- a/test/unit/cli/test_recon.py +++ b/test/unit/cli/test_recon.py @@ -290,10 +290,10 @@ class TestRecon(unittest.TestCase): m = r.match(line) if m: ex = expected.pop(m.group(1)) - self.assertEquals(m.group(2), - " low: %s, high: %s, avg: %s, total: %s," - " Failed: %s%%, no_result: %s, reported: %s" - % ex) + self.assertEqual(m.group(2), + " low: %s, high: %s, avg: %s, total: %s," + " Failed: %s%%, no_result: %s, reported: %s" + % ex) self.assertFalse(expected) def test_drive_audit_check(self): @@ -328,10 +328,10 @@ class TestRecon(unittest.TestCase): for line in lines: m = r.match(line) if m: - self.assertEquals(m.group(2), - " low: %s, high: %s, avg: %s, total: %s," - " Failed: %s%%, no_result: %s, reported: %s" - % expected) + self.assertEqual(m.group(2), + " low: %s, high: %s, avg: %s, total: %s," + " Failed: %s%%, no_result: %s, reported: %s" + % expected) class TestReconCommands(unittest.TestCase): diff --git a/test/unit/cli/test_ringbuilder.py b/test/unit/cli/test_ringbuilder.py index aa0957f1ea..29ed88d7e7 100644 --- a/test/unit/cli/test_ringbuilder.py +++ b/test/unit/cli/test_ringbuilder.py @@ -147,7 +147,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder._parse_search_values(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_find_parts(self): rb = RingBuilder(8, 3, 0) @@ -185,7 +185,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder._parse_list_parts_values(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_parse_add_values_number_of_arguments(self): # Test Number of arguments abnormal @@ -195,7 +195,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder._parse_add_values(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_set_weight_values_no_devices(self): # Test no devices @@ -204,7 +204,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder._set_weight_values([], 100) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_parse_set_weight_values_number_of_arguments(self): # Test Number of arguments abnormal @@ -214,7 +214,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder._parse_set_weight_values(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) argv = ["--region", "2"] err = None @@ -222,7 +222,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder._parse_set_weight_values(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_set_info_values_no_devices(self): # Test no devices @@ -231,7 +231,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder._set_info_values([], 100) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_parse_set_info_values_number_of_arguments(self): # Test Number of arguments abnormal @@ -241,7 +241,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder._parse_set_info_values(argv) except SystemExit as e: err = e - 
self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_parse_remove_values_number_of_arguments(self): # Test Number of arguments abnormal @@ -251,7 +251,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder._parse_remove_values(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_create_ring(self): argv = ["", self.tmpfile, "create", "6", "3.14159265359", "1"] @@ -405,7 +405,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_add_device_already_exists(self): # Test Add a device that already exists @@ -416,7 +416,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_remove_device(self): for search_value in self.search_values: @@ -692,7 +692,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_remove_device_no_matching(self): self.create_sample_ring() @@ -704,7 +704,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_set_weight(self): for search_value in self.search_values: @@ -903,7 +903,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_set_weight_no_matching(self): self.create_sample_ring() @@ -915,7 +915,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_set_info(self): for search_value in self.search_values: @@ -1195,7 +1195,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_set_info_no_matching(self): self.create_sample_ring() @@ -1207,7 +1207,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_set_info_already_exists(self): self.create_sample_ring() @@ -1230,7 +1230,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_set_min_part_hours(self): self.create_sample_ring() @@ -1247,7 +1247,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_set_replicas(self): self.create_sample_ring() @@ -1321,7 +1321,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_set_replicas_invalid_value(self): # Test not a valid number @@ -1331,7 +1331,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): 
ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) # Test new replicas is 0 argv = ["", self.tmpfile, "set_replicas", "0"] @@ -1340,7 +1340,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_validate(self): self.create_sample_ring() @@ -1358,7 +1358,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_validate_corrupted_file(self): self.create_sample_ring() @@ -1376,7 +1376,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_validate_non_existent_file(self): rand_file = '%s/%s' % ('/tmp', str(uuid.uuid4())) @@ -1386,7 +1386,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_validate_non_accessible_file(self): with mock.patch.object( @@ -1398,7 +1398,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_validate_generic_error(self): with mock.patch.object( @@ -1410,7 +1410,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_search_device_ipv4_old_format(self): self.create_sample_ring() @@ -1510,7 +1510,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_search_device_no_matching(self): self.create_sample_ring() @@ -1522,7 +1522,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_list_parts_ipv4_old_format(self): self.create_sample_ring() @@ -1622,7 +1622,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_list_parts_no_matching(self): self.create_sample_ring() @@ -1634,7 +1634,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_unknown(self): argv = ["", self.tmpfile, "unknown"] @@ -1643,7 +1643,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_default(self): self.create_sample_ring() @@ -1669,7 +1669,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 1) + self.assertEqual(err.code, 1) def test_rebalance_no_devices(self): # Test no devices @@ -1681,7 +1681,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): 
ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_write_ring(self): self.create_sample_ring() @@ -1702,7 +1702,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_warn_at_risk(self): self.create_sample_ring() @@ -1715,7 +1715,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as e: err = e - self.assertEquals(err.code, 1) + self.assertEqual(err.code, 1) def test_invalid_device_name(self): self.create_sample_ring() @@ -1731,7 +1731,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as exc: err = exc - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) argv = ["", self.tmpfile, @@ -1746,7 +1746,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): ringbuilder.main(argv) except SystemExit as exc: err = exc - self.assertEquals(err.code, 2) + self.assertEqual(err.code, 2) def test_dispersion_command(self): self.create_sample_ring() From ed3aec2146b114c85bb3b196c999bf3f44fe159e Mon Sep 17 00:00:00 2001 From: janonymous Date: Thu, 6 Aug 2015 00:18:52 +0530 Subject: [PATCH 20/70] pep8 fix: assertEquals -> assertEqual assertEquals is deprecated in py3 in dir: test/unit/container/* Change-Id: I3333022ed63ce03198bc73147246d91d2442a440 --- test/unit/container/test_auditor.py | 12 +- test/unit/container/test_backend.py | 574 +++++++++++++------------- test/unit/container/test_server.py | 610 ++++++++++++++-------------- test/unit/container/test_sync.py | 236 +++++------ test/unit/container/test_updater.py | 44 +- 5 files changed, 738 insertions(+), 738 deletions(-) diff --git a/test/unit/container/test_auditor.py b/test/unit/container/test_auditor.py index 4e1826bdce..6d9b28d640 100644 --- a/test/unit/container/test_auditor.py +++ b/test/unit/container/test_auditor.py @@ -90,8 +90,8 @@ class TestAuditor(unittest.TestCase): with mock.patch('swift.container.auditor.audit_location_generator', fake_audit_location_generator): self.assertRaises(ValueError, test_auditor.run_forever) - self.assertEquals(test_auditor.container_failures, 2 * call_times) - self.assertEquals(test_auditor.container_passes, 3 * call_times) + self.assertEqual(test_auditor.container_failures, 2 * call_times) + self.assertEqual(test_auditor.container_passes, 3 * call_times) @mock.patch('swift.container.auditor.ContainerBroker', FakeContainerBroker) def test_run_once(self): @@ -105,8 +105,8 @@ class TestAuditor(unittest.TestCase): with mock.patch('swift.container.auditor.audit_location_generator', fake_audit_location_generator): test_auditor.run_once() - self.assertEquals(test_auditor.container_failures, 2) - self.assertEquals(test_auditor.container_passes, 3) + self.assertEqual(test_auditor.container_failures, 2) + self.assertEqual(test_auditor.container_passes, 3) @mock.patch('swift.container.auditor.ContainerBroker', FakeContainerBroker) def test_container_auditor(self): @@ -116,8 +116,8 @@ class TestAuditor(unittest.TestCase): for f in files: path = os.path.join(self.testdir, f) test_auditor.container_audit(path) - self.assertEquals(test_auditor.container_failures, 2) - self.assertEquals(test_auditor.container_passes, 3) + self.assertEqual(test_auditor.container_failures, 2) + self.assertEqual(test_auditor.container_passes, 3) class 
TestAuditorMigrations(unittest.TestCase): diff --git a/test/unit/container/test_backend.py b/test/unit/container/test_backend.py index 1572265c2d..2a7aafc10e 100644 --- a/test/unit/container/test_backend.py +++ b/test/unit/container/test_backend.py @@ -84,7 +84,7 @@ class TestContainerBroker(unittest.TestCase): first_conn = conn try: with broker.get() as conn: - self.assertEquals(first_conn, conn) + self.assertEqual(first_conn, conn) raise Exception('OMG') except Exception: pass @@ -109,44 +109,44 @@ class TestContainerBroker(unittest.TestCase): broker.put_object('o', Timestamp(time()).internal, 0, 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 0").fetchone()[0], 1) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 1").fetchone()[0], 0) broker.reclaim(Timestamp(time() - 999).internal, time()) with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 0").fetchone()[0], 1) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 1").fetchone()[0], 0) sleep(.00001) broker.delete_object('o', Timestamp(time()).internal) with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 0").fetchone()[0], 0) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 1").fetchone()[0], 1) broker.reclaim(Timestamp(time() - 999).internal, time()) with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 0").fetchone()[0], 0) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 1").fetchone()[0], 1) sleep(.00001) broker.reclaim(Timestamp(time()).internal, time()) with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 0").fetchone()[0], 0) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 1").fetchone()[0], 0) # Test the return values of reclaim() @@ -215,19 +215,19 @@ class TestContainerBroker(unittest.TestCase): broker.put_object('o', Timestamp(time()).internal, 0, 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 0").fetchone()[0], 1) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 1").fetchone()[0], 0) sleep(.00001) broker.delete_object('o', Timestamp(time()).internal) with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 0").fetchone()[0], 0) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT count(*) FROM object " "WHERE deleted = 1").fetchone()[0], 1) @@ -242,20 +242,20 @@ class TestContainerBroker(unittest.TestCase): 'application/x-test', '5af83e3196bf99f440f31f2e1a6c9afe') with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT name FROM object").fetchone()[0], 
'"{}"') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT created_at FROM object").fetchone()[0], timestamp) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT size FROM object").fetchone()[0], 123) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT content_type FROM object").fetchone()[0], 'application/x-test') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT etag FROM object").fetchone()[0], '5af83e3196bf99f440f31f2e1a6c9afe') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT deleted FROM object").fetchone()[0], 0) # Reput same event @@ -263,20 +263,20 @@ class TestContainerBroker(unittest.TestCase): 'application/x-test', '5af83e3196bf99f440f31f2e1a6c9afe') with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT name FROM object").fetchone()[0], '"{}"') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT created_at FROM object").fetchone()[0], timestamp) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT size FROM object").fetchone()[0], 123) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT content_type FROM object").fetchone()[0], 'application/x-test') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT etag FROM object").fetchone()[0], '5af83e3196bf99f440f31f2e1a6c9afe') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT deleted FROM object").fetchone()[0], 0) # Put new event @@ -286,20 +286,20 @@ class TestContainerBroker(unittest.TestCase): 'application/x-test', 'aa0749bacbc79ec65fe206943d8fe449') with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT name FROM object").fetchone()[0], '"{}"') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT created_at FROM object").fetchone()[0], timestamp) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT size FROM object").fetchone()[0], 124) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT content_type FROM object").fetchone()[0], 'application/x-test') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT etag FROM object").fetchone()[0], 'aa0749bacbc79ec65fe206943d8fe449') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT deleted FROM object").fetchone()[0], 0) # Put old event @@ -308,20 +308,20 @@ class TestContainerBroker(unittest.TestCase): 'application/x-test', 'aa0749bacbc79ec65fe206943d8fe449') with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT name FROM object").fetchone()[0], '"{}"') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT created_at FROM object").fetchone()[0], timestamp) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT size FROM object").fetchone()[0], 124) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT content_type FROM object").fetchone()[0], 'application/x-test') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT etag FROM object").fetchone()[0], 'aa0749bacbc79ec65fe206943d8fe449') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT deleted FROM object").fetchone()[0], 0) # Put old delete event @@ -329,20 +329,20 @@ class TestContainerBroker(unittest.TestCase): broker.put_object('"{}"', dtimestamp, 
0, '', '', deleted=1) with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT name FROM object").fetchone()[0], '"{}"') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT created_at FROM object").fetchone()[0], timestamp) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT size FROM object").fetchone()[0], 124) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT content_type FROM object").fetchone()[0], 'application/x-test') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT etag FROM object").fetchone()[0], 'aa0749bacbc79ec65fe206943d8fe449') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT deleted FROM object").fetchone()[0], 0) # Put new delete event @@ -351,12 +351,12 @@ class TestContainerBroker(unittest.TestCase): broker.put_object('"{}"', timestamp, 0, '', '', deleted=1) with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT name FROM object").fetchone()[0], '"{}"') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT created_at FROM object").fetchone()[0], timestamp) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT deleted FROM object").fetchone()[0], 1) # Put new event @@ -366,20 +366,20 @@ class TestContainerBroker(unittest.TestCase): 'application/x-test', '5af83e3196bf99f440f31f2e1a6c9afe') with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT name FROM object").fetchone()[0], '"{}"') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT created_at FROM object").fetchone()[0], timestamp) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT size FROM object").fetchone()[0], 123) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT content_type FROM object").fetchone()[0], 'application/x-test') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT etag FROM object").fetchone()[0], '5af83e3196bf99f440f31f2e1a6c9afe') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT deleted FROM object").fetchone()[0], 0) # We'll use this later @@ -391,21 +391,21 @@ class TestContainerBroker(unittest.TestCase): previous_timestamp = timestamp timestamp = Timestamp(time()).internal with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT name FROM object").fetchone()[0], '"{}"') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT created_at FROM object").fetchone()[0], previous_timestamp) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT size FROM object").fetchone()[0], 123) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT content_type FROM object").fetchone()[0], 'application/x-test') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT etag FROM object").fetchone()[0], '5af83e3196bf99f440f31f2e1a6c9afe') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT deleted FROM object").fetchone()[0], 0) # Put event from after last put but before last post @@ -414,20 +414,20 @@ class TestContainerBroker(unittest.TestCase): 'application/x-test3', '6af83e3196bf99f440f31f2e1a6c9afe') with broker.get() as conn: - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT name FROM object").fetchone()[0], '"{}"') - 
self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT created_at FROM object").fetchone()[0], timestamp) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT size FROM object").fetchone()[0], 456) - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT content_type FROM object").fetchone()[0], 'application/x-test3') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT etag FROM object").fetchone()[0], '6af83e3196bf99f440f31f2e1a6c9afe') - self.assertEquals(conn.execute( + self.assertEqual(conn.execute( "SELECT deleted FROM object").fetchone()[0], 0) @patch_policies @@ -563,35 +563,35 @@ class TestContainerBroker(unittest.TestCase): broker.initialize(Timestamp(1).internal) info = broker.get_info() - self.assertEquals(info['account'], 'test1') - self.assertEquals(info['container'], 'test2') - self.assertEquals(info['hash'], '00000000000000000000000000000000') + self.assertEqual(info['account'], 'test1') + self.assertEqual(info['container'], 'test2') + self.assertEqual(info['hash'], '00000000000000000000000000000000') self.assertEqual(info['put_timestamp'], Timestamp(1).internal) self.assertEqual(info['delete_timestamp'], '0') info = broker.get_info() - self.assertEquals(info['object_count'], 0) - self.assertEquals(info['bytes_used'], 0) + self.assertEqual(info['object_count'], 0) + self.assertEqual(info['bytes_used'], 0) policy_stats = broker.get_policy_stats() # Act as policy-0 self.assertTrue(0 in policy_stats) - self.assertEquals(policy_stats[0]['bytes_used'], 0) - self.assertEquals(policy_stats[0]['object_count'], 0) + self.assertEqual(policy_stats[0]['bytes_used'], 0) + self.assertEqual(policy_stats[0]['object_count'], 0) broker.put_object('o1', Timestamp(time()).internal, 123, 'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe') info = broker.get_info() - self.assertEquals(info['object_count'], 1) - self.assertEquals(info['bytes_used'], 123) + self.assertEqual(info['object_count'], 1) + self.assertEqual(info['bytes_used'], 123) policy_stats = broker.get_policy_stats() self.assertTrue(0 in policy_stats) - self.assertEquals(policy_stats[0]['object_count'], 1) - self.assertEquals(policy_stats[0]['bytes_used'], 123) + self.assertEqual(policy_stats[0]['object_count'], 1) + self.assertEqual(policy_stats[0]['bytes_used'], 123) def test_get_info(self): # Test ContainerBroker.get_info @@ -600,9 +600,9 @@ class TestContainerBroker(unittest.TestCase): broker.initialize(Timestamp('1').internal, 0) info = broker.get_info() - self.assertEquals(info['account'], 'test1') - self.assertEquals(info['container'], 'test2') - self.assertEquals(info['hash'], '00000000000000000000000000000000') + self.assertEqual(info['account'], 'test1') + self.assertEqual(info['container'], 'test2') + self.assertEqual(info['hash'], '00000000000000000000000000000000') self.assertEqual(info['put_timestamp'], Timestamp(1).internal) self.assertEqual(info['delete_timestamp'], '0') if self.__class__ in (TestContainerBrokerBeforeMetadata, @@ -614,44 +614,44 @@ class TestContainerBroker(unittest.TestCase): Timestamp(1).internal) info = broker.get_info() - self.assertEquals(info['object_count'], 0) - self.assertEquals(info['bytes_used'], 0) + self.assertEqual(info['object_count'], 0) + self.assertEqual(info['bytes_used'], 0) broker.put_object('o1', Timestamp(time()).internal, 123, 'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe') info = broker.get_info() - self.assertEquals(info['object_count'], 1) - self.assertEquals(info['bytes_used'], 123) + 
self.assertEqual(info['object_count'], 1) + self.assertEqual(info['bytes_used'], 123) sleep(.00001) broker.put_object('o2', Timestamp(time()).internal, 123, 'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe') info = broker.get_info() - self.assertEquals(info['object_count'], 2) - self.assertEquals(info['bytes_used'], 246) + self.assertEqual(info['object_count'], 2) + self.assertEqual(info['bytes_used'], 246) sleep(.00001) broker.put_object('o2', Timestamp(time()).internal, 1000, 'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe') info = broker.get_info() - self.assertEquals(info['object_count'], 2) - self.assertEquals(info['bytes_used'], 1123) + self.assertEqual(info['object_count'], 2) + self.assertEqual(info['bytes_used'], 1123) sleep(.00001) broker.delete_object('o1', Timestamp(time()).internal) info = broker.get_info() - self.assertEquals(info['object_count'], 1) - self.assertEquals(info['bytes_used'], 1000) + self.assertEqual(info['object_count'], 1) + self.assertEqual(info['bytes_used'], 1000) sleep(.00001) broker.delete_object('o2', Timestamp(time()).internal) info = broker.get_info() - self.assertEquals(info['object_count'], 0) - self.assertEquals(info['bytes_used'], 0) + self.assertEqual(info['object_count'], 0) + self.assertEqual(info['bytes_used'], 0) info = broker.get_info() - self.assertEquals(info['x_container_sync_point1'], -1) - self.assertEquals(info['x_container_sync_point2'], -1) + self.assertEqual(info['x_container_sync_point1'], -1) + self.assertEqual(info['x_container_sync_point2'], -1) def test_set_x_syncs(self): broker = ContainerBroker(':memory:', account='test1', @@ -659,13 +659,13 @@ class TestContainerBroker(unittest.TestCase): broker.initialize(Timestamp('1').internal, 0) info = broker.get_info() - self.assertEquals(info['x_container_sync_point1'], -1) - self.assertEquals(info['x_container_sync_point2'], -1) + self.assertEqual(info['x_container_sync_point1'], -1) + self.assertEqual(info['x_container_sync_point2'], -1) broker.set_x_container_sync_points(1, 2) info = broker.get_info() - self.assertEquals(info['x_container_sync_point1'], 1) - self.assertEquals(info['x_container_sync_point2'], 2) + self.assertEqual(info['x_container_sync_point1'], 1) + self.assertEqual(info['x_container_sync_point2'], 2) def test_get_report_info(self): broker = ContainerBroker(':memory:', account='test1', @@ -673,66 +673,66 @@ class TestContainerBroker(unittest.TestCase): broker.initialize(Timestamp('1').internal, 0) info = broker.get_info() - self.assertEquals(info['account'], 'test1') - self.assertEquals(info['container'], 'test2') - self.assertEquals(info['object_count'], 0) - self.assertEquals(info['bytes_used'], 0) - self.assertEquals(info['reported_object_count'], 0) - self.assertEquals(info['reported_bytes_used'], 0) + self.assertEqual(info['account'], 'test1') + self.assertEqual(info['container'], 'test2') + self.assertEqual(info['object_count'], 0) + self.assertEqual(info['bytes_used'], 0) + self.assertEqual(info['reported_object_count'], 0) + self.assertEqual(info['reported_bytes_used'], 0) broker.put_object('o1', Timestamp(time()).internal, 123, 'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe') info = broker.get_info() - self.assertEquals(info['object_count'], 1) - self.assertEquals(info['bytes_used'], 123) - self.assertEquals(info['reported_object_count'], 0) - self.assertEquals(info['reported_bytes_used'], 0) + self.assertEqual(info['object_count'], 1) + self.assertEqual(info['bytes_used'], 123) + self.assertEqual(info['reported_object_count'], 0) + 
self.assertEqual(info['reported_bytes_used'], 0) sleep(.00001) broker.put_object('o2', Timestamp(time()).internal, 123, 'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe') info = broker.get_info() - self.assertEquals(info['object_count'], 2) - self.assertEquals(info['bytes_used'], 246) - self.assertEquals(info['reported_object_count'], 0) - self.assertEquals(info['reported_bytes_used'], 0) + self.assertEqual(info['object_count'], 2) + self.assertEqual(info['bytes_used'], 246) + self.assertEqual(info['reported_object_count'], 0) + self.assertEqual(info['reported_bytes_used'], 0) sleep(.00001) broker.put_object('o2', Timestamp(time()).internal, 1000, 'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe') info = broker.get_info() - self.assertEquals(info['object_count'], 2) - self.assertEquals(info['bytes_used'], 1123) - self.assertEquals(info['reported_object_count'], 0) - self.assertEquals(info['reported_bytes_used'], 0) + self.assertEqual(info['object_count'], 2) + self.assertEqual(info['bytes_used'], 1123) + self.assertEqual(info['reported_object_count'], 0) + self.assertEqual(info['reported_bytes_used'], 0) put_timestamp = Timestamp(time()).internal sleep(.001) delete_timestamp = Timestamp(time()).internal broker.reported(put_timestamp, delete_timestamp, 2, 1123) info = broker.get_info() - self.assertEquals(info['object_count'], 2) - self.assertEquals(info['bytes_used'], 1123) - self.assertEquals(info['reported_put_timestamp'], put_timestamp) - self.assertEquals(info['reported_delete_timestamp'], delete_timestamp) - self.assertEquals(info['reported_object_count'], 2) - self.assertEquals(info['reported_bytes_used'], 1123) + self.assertEqual(info['object_count'], 2) + self.assertEqual(info['bytes_used'], 1123) + self.assertEqual(info['reported_put_timestamp'], put_timestamp) + self.assertEqual(info['reported_delete_timestamp'], delete_timestamp) + self.assertEqual(info['reported_object_count'], 2) + self.assertEqual(info['reported_bytes_used'], 1123) sleep(.00001) broker.delete_object('o1', Timestamp(time()).internal) info = broker.get_info() - self.assertEquals(info['object_count'], 1) - self.assertEquals(info['bytes_used'], 1000) - self.assertEquals(info['reported_object_count'], 2) - self.assertEquals(info['reported_bytes_used'], 1123) + self.assertEqual(info['object_count'], 1) + self.assertEqual(info['bytes_used'], 1000) + self.assertEqual(info['reported_object_count'], 2) + self.assertEqual(info['reported_bytes_used'], 1123) sleep(.00001) broker.delete_object('o2', Timestamp(time()).internal) info = broker.get_info() - self.assertEquals(info['object_count'], 0) - self.assertEquals(info['bytes_used'], 0) - self.assertEquals(info['reported_object_count'], 2) - self.assertEquals(info['reported_bytes_used'], 1123) + self.assertEqual(info['object_count'], 0) + self.assertEqual(info['bytes_used'], 0) + self.assertEqual(info['reported_object_count'], 2) + self.assertEqual(info['reported_bytes_used'], 1123) def test_list_objects_iter(self): # Test ContainerBroker.list_objects_iter @@ -754,103 +754,103 @@ class TestContainerBroker(unittest.TestCase): 'd41d8cd98f00b204e9800998ecf8427e') listing = broker.list_objects_iter(100, '', None, None, '') - self.assertEquals(len(listing), 100) - self.assertEquals(listing[0][0], '0/0000') - self.assertEquals(listing[-1][0], '0/0099') + self.assertEqual(len(listing), 100) + self.assertEqual(listing[0][0], '0/0000') + self.assertEqual(listing[-1][0], '0/0099') listing = broker.list_objects_iter(100, '', '0/0050', None, '') - self.assertEquals(len(listing), 50) - 
self.assertEquals(listing[0][0], '0/0000') - self.assertEquals(listing[-1][0], '0/0049') + self.assertEqual(len(listing), 50) + self.assertEqual(listing[0][0], '0/0000') + self.assertEqual(listing[-1][0], '0/0049') listing = broker.list_objects_iter(100, '0/0099', None, None, '') - self.assertEquals(len(listing), 100) - self.assertEquals(listing[0][0], '0/0100') - self.assertEquals(listing[-1][0], '1/0074') + self.assertEqual(len(listing), 100) + self.assertEqual(listing[0][0], '0/0100') + self.assertEqual(listing[-1][0], '1/0074') listing = broker.list_objects_iter(55, '1/0074', None, None, '') - self.assertEquals(len(listing), 55) - self.assertEquals(listing[0][0], '1/0075') - self.assertEquals(listing[-1][0], '2/0004') + self.assertEqual(len(listing), 55) + self.assertEqual(listing[0][0], '1/0075') + self.assertEqual(listing[-1][0], '2/0004') listing = broker.list_objects_iter(10, '', None, '0/01', '') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0/0100') - self.assertEquals(listing[-1][0], '0/0109') + self.assertEqual(len(listing), 10) + self.assertEqual(listing[0][0], '0/0100') + self.assertEqual(listing[-1][0], '0/0109') listing = broker.list_objects_iter(10, '', None, '0/', '/') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0/0000') - self.assertEquals(listing[-1][0], '0/0009') + self.assertEqual(len(listing), 10) + self.assertEqual(listing[0][0], '0/0000') + self.assertEqual(listing[-1][0], '0/0009') # Same as above, but using the path argument. listing = broker.list_objects_iter(10, '', None, None, '', '0') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0/0000') - self.assertEquals(listing[-1][0], '0/0009') + self.assertEqual(len(listing), 10) + self.assertEqual(listing[0][0], '0/0000') + self.assertEqual(listing[-1][0], '0/0009') listing = broker.list_objects_iter(10, '', None, '', '/') - self.assertEquals(len(listing), 4) - self.assertEquals([row[0] for row in listing], - ['0/', '1/', '2/', '3/']) + self.assertEqual(len(listing), 4) + self.assertEqual([row[0] for row in listing], + ['0/', '1/', '2/', '3/']) listing = broker.list_objects_iter(10, '2', None, None, '/') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['2/', '3/']) + self.assertEqual(len(listing), 2) + self.assertEqual([row[0] for row in listing], ['2/', '3/']) listing = broker.list_objects_iter(10, '2/', None, None, '/') - self.assertEquals(len(listing), 1) - self.assertEquals([row[0] for row in listing], ['3/']) + self.assertEqual(len(listing), 1) + self.assertEqual([row[0] for row in listing], ['3/']) listing = broker.list_objects_iter(10, '2/0050', None, '2/', '/') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '2/0051') - self.assertEquals(listing[1][0], '2/0051/') - self.assertEquals(listing[2][0], '2/0052') - self.assertEquals(listing[-1][0], '2/0059') + self.assertEqual(len(listing), 10) + self.assertEqual(listing[0][0], '2/0051') + self.assertEqual(listing[1][0], '2/0051/') + self.assertEqual(listing[2][0], '2/0052') + self.assertEqual(listing[-1][0], '2/0059') listing = broker.list_objects_iter(10, '3/0045', None, '3/', '/') - self.assertEquals(len(listing), 10) - self.assertEquals([row[0] for row in listing], - ['3/0045/', '3/0046', '3/0046/', '3/0047', - '3/0047/', '3/0048', '3/0048/', '3/0049', - '3/0049/', '3/0050']) + self.assertEqual(len(listing), 10) + self.assertEqual([row[0] for row in listing], + ['3/0045/', '3/0046', '3/0046/', '3/0047', + '3/0047/', 
'3/0048', '3/0048/', '3/0049', + '3/0049/', '3/0050']) broker.put_object('3/0049/', Timestamp(time()).internal, 0, 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') listing = broker.list_objects_iter(10, '3/0048', None, None, None) - self.assertEquals(len(listing), 10) - self.assertEquals( + self.assertEqual(len(listing), 10) + self.assertEqual( [row[0] for row in listing], ['3/0048/0049', '3/0049', '3/0049/', '3/0049/0049', '3/0050', '3/0050/0049', '3/0051', '3/0051/0049', '3/0052', '3/0052/0049']) listing = broker.list_objects_iter(10, '3/0048', None, '3/', '/') - self.assertEquals(len(listing), 10) - self.assertEquals( + self.assertEqual(len(listing), 10) + self.assertEqual( [row[0] for row in listing], ['3/0048/', '3/0049', '3/0049/', '3/0050', '3/0050/', '3/0051', '3/0051/', '3/0052', '3/0052/', '3/0053']) listing = broker.list_objects_iter(10, None, None, '3/0049/', '/') - self.assertEquals(len(listing), 2) - self.assertEquals( + self.assertEqual(len(listing), 2) + self.assertEqual( [row[0] for row in listing], ['3/0049/', '3/0049/0049']) listing = broker.list_objects_iter(10, None, None, None, None, '3/0049') - self.assertEquals(len(listing), 1) - self.assertEquals([row[0] for row in listing], ['3/0049/0049']) + self.assertEqual(len(listing), 1) + self.assertEqual([row[0] for row in listing], ['3/0049/0049']) listing = broker.list_objects_iter(2, None, None, '3/', '/') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['3/0000', '3/0000/']) + self.assertEqual(len(listing), 2) + self.assertEqual([row[0] for row in listing], ['3/0000', '3/0000/']) listing = broker.list_objects_iter(2, None, None, None, None, '3') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['3/0000', '3/0001']) + self.assertEqual(len(listing), 2) + self.assertEqual([row[0] for row in listing], ['3/0000', '3/0001']) def test_list_objects_iter_non_slash(self): # Test ContainerBroker.list_objects_iter using a @@ -873,87 +873,87 @@ class TestContainerBroker(unittest.TestCase): 'd41d8cd98f00b204e9800998ecf8427e') listing = broker.list_objects_iter(100, '', None, None, '') - self.assertEquals(len(listing), 100) - self.assertEquals(listing[0][0], '0:0000') - self.assertEquals(listing[-1][0], '0:0099') + self.assertEqual(len(listing), 100) + self.assertEqual(listing[0][0], '0:0000') + self.assertEqual(listing[-1][0], '0:0099') listing = broker.list_objects_iter(100, '', '0:0050', None, '') - self.assertEquals(len(listing), 50) - self.assertEquals(listing[0][0], '0:0000') - self.assertEquals(listing[-1][0], '0:0049') + self.assertEqual(len(listing), 50) + self.assertEqual(listing[0][0], '0:0000') + self.assertEqual(listing[-1][0], '0:0049') listing = broker.list_objects_iter(100, '0:0099', None, None, '') - self.assertEquals(len(listing), 100) - self.assertEquals(listing[0][0], '0:0100') - self.assertEquals(listing[-1][0], '1:0074') + self.assertEqual(len(listing), 100) + self.assertEqual(listing[0][0], '0:0100') + self.assertEqual(listing[-1][0], '1:0074') listing = broker.list_objects_iter(55, '1:0074', None, None, '') - self.assertEquals(len(listing), 55) - self.assertEquals(listing[0][0], '1:0075') - self.assertEquals(listing[-1][0], '2:0004') + self.assertEqual(len(listing), 55) + self.assertEqual(listing[0][0], '1:0075') + self.assertEqual(listing[-1][0], '2:0004') listing = broker.list_objects_iter(10, '', None, '0:01', '') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0:0100') - self.assertEquals(listing[-1][0], 
'0:0109') + self.assertEqual(len(listing), 10) + self.assertEqual(listing[0][0], '0:0100') + self.assertEqual(listing[-1][0], '0:0109') listing = broker.list_objects_iter(10, '', None, '0:', ':') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '0:0000') - self.assertEquals(listing[-1][0], '0:0009') + self.assertEqual(len(listing), 10) + self.assertEqual(listing[0][0], '0:0000') + self.assertEqual(listing[-1][0], '0:0009') # Same as above, but using the path argument, so nothing should be # returned since path uses a '/' as a delimiter. listing = broker.list_objects_iter(10, '', None, None, '', '0') - self.assertEquals(len(listing), 0) + self.assertEqual(len(listing), 0) listing = broker.list_objects_iter(10, '', None, '', ':') - self.assertEquals(len(listing), 4) - self.assertEquals([row[0] for row in listing], - ['0:', '1:', '2:', '3:']) + self.assertEqual(len(listing), 4) + self.assertEqual([row[0] for row in listing], + ['0:', '1:', '2:', '3:']) listing = broker.list_objects_iter(10, '2', None, None, ':') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['2:', '3:']) + self.assertEqual(len(listing), 2) + self.assertEqual([row[0] for row in listing], ['2:', '3:']) listing = broker.list_objects_iter(10, '2:', None, None, ':') - self.assertEquals(len(listing), 1) - self.assertEquals([row[0] for row in listing], ['3:']) + self.assertEqual(len(listing), 1) + self.assertEqual([row[0] for row in listing], ['3:']) listing = broker.list_objects_iter(10, '2:0050', None, '2:', ':') - self.assertEquals(len(listing), 10) - self.assertEquals(listing[0][0], '2:0051') - self.assertEquals(listing[1][0], '2:0051:') - self.assertEquals(listing[2][0], '2:0052') - self.assertEquals(listing[-1][0], '2:0059') + self.assertEqual(len(listing), 10) + self.assertEqual(listing[0][0], '2:0051') + self.assertEqual(listing[1][0], '2:0051:') + self.assertEqual(listing[2][0], '2:0052') + self.assertEqual(listing[-1][0], '2:0059') listing = broker.list_objects_iter(10, '3:0045', None, '3:', ':') - self.assertEquals(len(listing), 10) - self.assertEquals([row[0] for row in listing], - ['3:0045:', '3:0046', '3:0046:', '3:0047', - '3:0047:', '3:0048', '3:0048:', '3:0049', - '3:0049:', '3:0050']) + self.assertEqual(len(listing), 10) + self.assertEqual([row[0] for row in listing], + ['3:0045:', '3:0046', '3:0046:', '3:0047', + '3:0047:', '3:0048', '3:0048:', '3:0049', + '3:0049:', '3:0050']) broker.put_object('3:0049:', Timestamp(time()).internal, 0, 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') listing = broker.list_objects_iter(10, '3:0048', None, None, None) - self.assertEquals(len(listing), 10) - self.assertEquals( + self.assertEqual(len(listing), 10) + self.assertEqual( [row[0] for row in listing], ['3:0048:0049', '3:0049', '3:0049:', '3:0049:0049', '3:0050', '3:0050:0049', '3:0051', '3:0051:0049', '3:0052', '3:0052:0049']) listing = broker.list_objects_iter(10, '3:0048', None, '3:', ':') - self.assertEquals(len(listing), 10) - self.assertEquals( + self.assertEqual(len(listing), 10) + self.assertEqual( [row[0] for row in listing], ['3:0048:', '3:0049', '3:0049:', '3:0050', '3:0050:', '3:0051', '3:0051:', '3:0052', '3:0052:', '3:0053']) listing = broker.list_objects_iter(10, None, None, '3:0049:', ':') - self.assertEquals(len(listing), 2) - self.assertEquals( + self.assertEqual(len(listing), 2) + self.assertEqual( [row[0] for row in listing], ['3:0049:', '3:0049:0049']) @@ -961,14 +961,14 @@ class TestContainerBroker(unittest.TestCase): # returned since 
path uses a '/' as a delimiter. listing = broker.list_objects_iter(10, None, None, None, None, '3:0049') - self.assertEquals(len(listing), 0) + self.assertEqual(len(listing), 0) listing = broker.list_objects_iter(2, None, None, '3:', ':') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['3:0000', '3:0000:']) + self.assertEqual(len(listing), 2) + self.assertEqual([row[0] for row in listing], ['3:0000', '3:0000:']) listing = broker.list_objects_iter(2, None, None, None, None, '3') - self.assertEquals(len(listing), 0) + self.assertEqual(len(listing), 0) def test_list_objects_iter_prefix_delim(self): # Test ContainerBroker.list_objects_iter @@ -997,14 +997,14 @@ class TestContainerBroker(unittest.TestCase): # def list_objects_iter(self, limit, marker, prefix, delimiter, # path=None, format=None): listing = broker.list_objects_iter(100, None, None, '/pets/f', '/') - self.assertEquals([row[0] for row in listing], - ['/pets/fish/', '/pets/fish_info.txt']) + self.assertEqual([row[0] for row in listing], + ['/pets/fish/', '/pets/fish_info.txt']) listing = broker.list_objects_iter(100, None, None, '/pets/fish', '/') - self.assertEquals([row[0] for row in listing], - ['/pets/fish/', '/pets/fish_info.txt']) + self.assertEqual([row[0] for row in listing], + ['/pets/fish/', '/pets/fish_info.txt']) listing = broker.list_objects_iter(100, None, None, '/pets/fish/', '/') - self.assertEquals([row[0] for row in listing], - ['/pets/fish/a', '/pets/fish/b']) + self.assertEqual([row[0] for row in listing], + ['/pets/fish/a', '/pets/fish/b']) def test_double_check_trailing_delimiter(self): # Test ContainerBroker.list_objects_iter for a @@ -1056,35 +1056,35 @@ class TestContainerBroker(unittest.TestCase): broker.put_object('1/0', Timestamp(time()).internal, 0, 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') listing = broker.list_objects_iter(25, None, None, None, None) - self.assertEquals(len(listing), 22) - self.assertEquals( + self.assertEqual(len(listing), 22) + self.assertEqual( [row[0] for row in listing], ['0', '0/', '0/0', '0/00', '0/1', '0/1/', '0/1/0', '00', '1', '1/', '1/0', 'a', 'a/', 'a/0', 'a/a', 'a/a/a', 'a/a/b', 'a/b', 'b', 'b/a', 'b/b', 'c']) listing = broker.list_objects_iter(25, None, None, '', '/') - self.assertEquals(len(listing), 10) - self.assertEquals( + self.assertEqual(len(listing), 10) + self.assertEqual( [row[0] for row in listing], ['0', '0/', '00', '1', '1/', 'a', 'a/', 'b', 'b/', 'c']) listing = broker.list_objects_iter(25, None, None, 'a/', '/') - self.assertEquals(len(listing), 5) - self.assertEquals( + self.assertEqual(len(listing), 5) + self.assertEqual( [row[0] for row in listing], ['a/', 'a/0', 'a/a', 'a/a/', 'a/b']) listing = broker.list_objects_iter(25, None, None, '0/', '/') - self.assertEquals(len(listing), 5) - self.assertEquals( + self.assertEqual(len(listing), 5) + self.assertEqual( [row[0] for row in listing], ['0/', '0/0', '0/00', '0/1', '0/1/']) listing = broker.list_objects_iter(25, None, None, '0/1/', '/') - self.assertEquals(len(listing), 2) - self.assertEquals( + self.assertEqual(len(listing), 2) + self.assertEqual( [row[0] for row in listing], ['0/1/', '0/1/0']) listing = broker.list_objects_iter(25, None, None, 'b/', '/') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['b/a', 'b/b']) + self.assertEqual(len(listing), 2) + self.assertEqual([row[0] for row in listing], ['b/a', 'b/b']) def test_double_check_trailing_delimiter_non_slash(self): # Test ContainerBroker.list_objects_iter for a 
@@ -1136,35 +1136,35 @@ class TestContainerBroker(unittest.TestCase): broker.put_object('1:0', Timestamp(time()).internal, 0, 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') listing = broker.list_objects_iter(25, None, None, None, None) - self.assertEquals(len(listing), 22) - self.assertEquals( + self.assertEqual(len(listing), 22) + self.assertEqual( [row[0] for row in listing], ['0', '00', '0:', '0:0', '0:00', '0:1', '0:1:', '0:1:0', '1', '1:', '1:0', 'a', 'a:', 'a:0', 'a:a', 'a:a:a', 'a:a:b', 'a:b', 'b', 'b:a', 'b:b', 'c']) listing = broker.list_objects_iter(25, None, None, '', ':') - self.assertEquals(len(listing), 10) - self.assertEquals( + self.assertEqual(len(listing), 10) + self.assertEqual( [row[0] for row in listing], ['0', '00', '0:', '1', '1:', 'a', 'a:', 'b', 'b:', 'c']) listing = broker.list_objects_iter(25, None, None, 'a:', ':') - self.assertEquals(len(listing), 5) - self.assertEquals( + self.assertEqual(len(listing), 5) + self.assertEqual( [row[0] for row in listing], ['a:', 'a:0', 'a:a', 'a:a:', 'a:b']) listing = broker.list_objects_iter(25, None, None, '0:', ':') - self.assertEquals(len(listing), 5) - self.assertEquals( + self.assertEqual(len(listing), 5) + self.assertEqual( [row[0] for row in listing], ['0:', '0:0', '0:00', '0:1', '0:1:']) listing = broker.list_objects_iter(25, None, None, '0:1:', ':') - self.assertEquals(len(listing), 2) - self.assertEquals( + self.assertEqual(len(listing), 2) + self.assertEqual( [row[0] for row in listing], ['0:1:', '0:1:0']) listing = broker.list_objects_iter(25, None, None, 'b:', ':') - self.assertEquals(len(listing), 2) - self.assertEquals([row[0] for row in listing], ['b:a', 'b:b']) + self.assertEqual(len(listing), 2) + self.assertEqual([row[0] for row in listing], ['b:a', 'b:b']) def test_chexor(self): broker = ContainerBroker(':memory:', account='a', container='c') @@ -1177,13 +1177,13 @@ class TestContainerBroker(unittest.TestCase): hashb = hashlib.md5('%s-%s' % ('b', Timestamp(2).internal)).digest() hashc = ''.join( ('%02x' % (ord(a) ^ ord(b)) for a, b in zip(hasha, hashb))) - self.assertEquals(broker.get_info()['hash'], hashc) + self.assertEqual(broker.get_info()['hash'], hashc) broker.put_object('b', Timestamp(3).internal, 0, 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') hashb = hashlib.md5('%s-%s' % ('b', Timestamp(3).internal)).digest() hashc = ''.join( ('%02x' % (ord(a) ^ ord(b)) for a, b in zip(hasha, hashb))) - self.assertEquals(broker.get_info()['hash'], hashc) + self.assertEqual(broker.get_info()['hash'], hashc) def test_newid(self): # test DatabaseBroker.newid @@ -1203,8 +1203,8 @@ class TestContainerBroker(unittest.TestCase): broker.put_object('b', Timestamp(2).internal, 0, 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') items = broker.get_items_since(max_row, 1000) - self.assertEquals(len(items), 1) - self.assertEquals(items[0]['name'], 'b') + self.assertEqual(len(items), 1) + self.assertEqual(items[0]['name'], 'b') def test_sync_merging(self): # exercise the DatabaseBroker sync functions a bit @@ -1212,10 +1212,10 @@ class TestContainerBroker(unittest.TestCase): broker1.initialize(Timestamp('1').internal, 0) broker2 = ContainerBroker(':memory:', account='a', container='c') broker2.initialize(Timestamp('1').internal, 0) - self.assertEquals(broker2.get_sync('12345'), -1) + self.assertEqual(broker2.get_sync('12345'), -1) broker1.merge_syncs([{'sync_point': 3, 'remote_id': '12345'}]) broker2.merge_syncs(broker1.get_syncs()) - self.assertEquals(broker2.get_sync('12345'), 3) + 
self.assertEqual(broker2.get_sync('12345'), 3) def test_merge_items(self): broker1 = ContainerBroker(':memory:', account='a', container='c') @@ -1230,16 +1230,16 @@ class TestContainerBroker(unittest.TestCase): broker2.merge_items(broker1.get_items_since( broker2.get_sync(id), 1000), id) items = broker2.get_items_since(-1, 1000) - self.assertEquals(len(items), 2) - self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items])) + self.assertEqual(len(items), 2) + self.assertEqual(['a', 'b'], sorted([rec['name'] for rec in items])) broker1.put_object('c', Timestamp(3).internal, 0, 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') broker2.merge_items(broker1.get_items_since( broker2.get_sync(id), 1000), id) items = broker2.get_items_since(-1, 1000) - self.assertEquals(len(items), 3) - self.assertEquals(['a', 'b', 'c'], - sorted([rec['name'] for rec in items])) + self.assertEqual(len(items), 3) + self.assertEqual(['a', 'b', 'c'], + sorted([rec['name'] for rec in items])) def test_merge_items_overwrite_unicode(self): # test DatabaseBroker.merge_items @@ -1260,13 +1260,13 @@ class TestContainerBroker(unittest.TestCase): broker2.merge_items(json.loads(json.dumps(broker1.get_items_since( broker2.get_sync(id), 1000))), id) items = broker2.get_items_since(-1, 1000) - self.assertEquals(['b', snowman], - sorted([rec['name'] for rec in items])) + self.assertEqual(['b', snowman], + sorted([rec['name'] for rec in items])) for rec in items: if rec['name'] == snowman: - self.assertEquals(rec['created_at'], Timestamp(4).internal) + self.assertEqual(rec['created_at'], Timestamp(4).internal) if rec['name'] == 'b': - self.assertEquals(rec['created_at'], Timestamp(3).internal) + self.assertEqual(rec['created_at'], Timestamp(3).internal) def test_merge_items_overwrite(self): # test DatabaseBroker.merge_items @@ -1286,12 +1286,12 @@ class TestContainerBroker(unittest.TestCase): broker2.merge_items(broker1.get_items_since( broker2.get_sync(id), 1000), id) items = broker2.get_items_since(-1, 1000) - self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items])) + self.assertEqual(['a', 'b'], sorted([rec['name'] for rec in items])) for rec in items: if rec['name'] == 'a': - self.assertEquals(rec['created_at'], Timestamp(4).internal) + self.assertEqual(rec['created_at'], Timestamp(4).internal) if rec['name'] == 'b': - self.assertEquals(rec['created_at'], Timestamp(3).internal) + self.assertEqual(rec['created_at'], Timestamp(3).internal) def test_merge_items_post_overwrite_out_of_order(self): # test DatabaseBroker.merge_items @@ -1311,32 +1311,32 @@ class TestContainerBroker(unittest.TestCase): broker2.merge_items(broker1.get_items_since( broker2.get_sync(id), 1000), id) items = broker2.get_items_since(-1, 1000) - self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items])) + self.assertEqual(['a', 'b'], sorted([rec['name'] for rec in items])) for rec in items: if rec['name'] == 'a': - self.assertEquals(rec['created_at'], Timestamp(4).internal) + self.assertEqual(rec['created_at'], Timestamp(4).internal) if rec['name'] == 'b': - self.assertEquals(rec['created_at'], Timestamp(3).internal) - self.assertEquals(rec['content_type'], 'text/plain') + self.assertEqual(rec['created_at'], Timestamp(3).internal) + self.assertEqual(rec['content_type'], 'text/plain') items = broker2.get_items_since(-1, 1000) - self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items])) + self.assertEqual(['a', 'b'], sorted([rec['name'] for rec in items])) for rec in items: if rec['name'] == 'a': - 
self.assertEquals(rec['created_at'], Timestamp(4).internal) + self.assertEqual(rec['created_at'], Timestamp(4).internal) if rec['name'] == 'b': - self.assertEquals(rec['created_at'], Timestamp(3).internal) + self.assertEqual(rec['created_at'], Timestamp(3).internal) broker1.put_object('b', Timestamp(5).internal, 0, 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') broker2.merge_items(broker1.get_items_since( broker2.get_sync(id), 1000), id) items = broker2.get_items_since(-1, 1000) - self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items])) + self.assertEqual(['a', 'b'], sorted([rec['name'] for rec in items])) for rec in items: if rec['name'] == 'a': - self.assertEquals(rec['created_at'], Timestamp(4).internal) + self.assertEqual(rec['created_at'], Timestamp(4).internal) if rec['name'] == 'b': - self.assertEquals(rec['created_at'], Timestamp(5).internal) - self.assertEquals(rec['content_type'], 'text/plain') + self.assertEqual(rec['created_at'], Timestamp(5).internal) + self.assertEqual(rec['content_type'], 'text/plain') def test_set_storage_policy_index(self): ts = (Timestamp(t).internal for t in @@ -1407,9 +1407,9 @@ class TestContainerBroker(unittest.TestCase): broker = ContainerBroker(':memory:', account='test_account', container='test_container') broker.initialize(Timestamp('1').internal, 0) - self.assertEquals(-1, broker.get_reconciler_sync()) + self.assertEqual(-1, broker.get_reconciler_sync()) broker.update_reconciler_sync(10) - self.assertEquals(10, broker.get_reconciler_sync()) + self.assertEqual(10, broker.get_reconciler_sync()) @with_tempdir def test_legacy_pending_files(self, tempdir): diff --git a/test/unit/container/test_server.py b/test/unit/container/test_server.py index e500fdd9ed..820bdebcc0 100644 --- a/test/unit/container/test_server.py +++ b/test/unit/container/test_server.py @@ -142,9 +142,9 @@ class TestContainerController(unittest.TestCase): '/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'}) response = req.get_response(self.controller) self.assertTrue(response.status.startswith('204')) - self.assertEquals(response.headers.get('x-container-read'), '.r:*') - self.assertEquals(response.headers.get('x-container-write'), - 'account:user') + self.assertEqual(response.headers.get('x-container-read'), '.r:*') + self.assertEqual(response.headers.get('x-container-write'), + 'account:user') # Ensure we can clear acls on POST req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, @@ -168,9 +168,9 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/sda1/p/a/c2', environ={'REQUEST_METHOD': 'HEAD'}) response = req.get_response(self.controller) self.assertTrue(response.status.startswith('204')) - self.assertEquals(response.headers.get('x-container-read'), '.r:*') - self.assertEquals(response.headers.get('x-container-write'), - 'account:user') + self.assertEqual(response.headers.get('x-container-read'), '.r:*') + self.assertEqual(response.headers.get('x-container-write'), + 'account:user') def test_HEAD(self): start = int(time.time()) @@ -277,7 +277,7 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/sda1/./a/c', environ={'REQUEST_METHOD': 'HEAD', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_HEAD_insufficient_storage(self): self.controller = container_server.ContainerController( @@ -286,14 +286,14 @@ class TestContainerController(unittest.TestCase): '/sda-null/p/a/c', environ={'REQUEST_METHOD': 
'HEAD', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 507) + self.assertEqual(resp.status_int, 507) def test_HEAD_invalid_content_type(self): req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'}, headers={'Accept': 'application/plain'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 406) + self.assertEqual(resp.status_int, 406) def test_HEAD_invalid_format(self): format = '%D1%BD%8A9' # invalid UTF-8; should be %E1%BD%8A9 (E -> D) @@ -301,7 +301,7 @@ class TestContainerController(unittest.TestCase): '/sda1/p/a/c?format=' + format, environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_OPTIONS(self): server_handler = container_server.ContainerController( @@ -309,25 +309,25 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'}) req.content_length = 0 resp = server_handler.OPTIONS(req) - self.assertEquals(200, resp.status_int) + self.assertEqual(200, resp.status_int) for verb in 'OPTIONS GET POST PUT DELETE HEAD REPLICATE'.split(): self.assertTrue( verb in resp.headers['Allow'].split(', ')) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 7) - self.assertEquals(resp.headers['Server'], - (self.controller.server_type + '/' + swift_version)) + self.assertEqual(len(resp.headers['Allow'].split(', ')), 7) + self.assertEqual(resp.headers['Server'], + (self.controller.server_type + '/' + swift_version)) def test_PUT(self): req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '2'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) def test_PUT_simulated_create_race(self): state = ['initial'] @@ -376,7 +376,7 @@ class TestContainerController(unittest.TestCase): headers={'X-Timestamp': '1', 'X-Size': '0', 'X-Content-Type': 'text/plain', 'X-ETag': 'e'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_PUT_good_policy_specified(self): policy = random.choice(list(POLICIES)) @@ -386,30 +386,30 @@ class TestContainerController(unittest.TestCase): 'X-Backend-Storage-Policy-Index': policy.idx}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers.get('X-Backend-Storage-Policy-Index'), - str(policy.idx)) + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers.get('X-Backend-Storage-Policy-Index'), + str(policy.idx)) # now make sure we read it back req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.headers.get('X-Backend-Storage-Policy-Index'), - str(policy.idx)) + self.assertEqual(resp.headers.get('X-Backend-Storage-Policy-Index'), + str(policy.idx)) def test_PUT_no_policy_specified(self): # Set metadata header req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': Timestamp(1).internal}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) - 
self.assertEquals(resp.headers.get('X-Backend-Storage-Policy-Index'), - str(POLICIES.default.idx)) + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers.get('X-Backend-Storage-Policy-Index'), + str(POLICIES.default.idx)) # now make sure the default was used (pol 1) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.headers.get('X-Backend-Storage-Policy-Index'), - str(POLICIES.default.idx)) + self.assertEqual(resp.headers.get('X-Backend-Storage-Policy-Index'), + str(POLICIES.default.idx)) def test_PUT_bad_policy_specified(self): # Set metadata header @@ -418,7 +418,7 @@ class TestContainerController(unittest.TestCase): 'X-Backend-Storage-Policy-Index': 'nada'}) resp = req.get_response(self.controller) # make sure we get bad response - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) self.assertFalse('X-Backend-Storage-Policy-Index' in resp.headers) def test_PUT_no_policy_change(self): @@ -429,13 +429,13 @@ class TestContainerController(unittest.TestCase): 'X-Timestamp': next(ts), 'X-Backend-Storage-Policy-Index': policy.idx}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c') resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) # make sure we get the right index back - self.assertEquals(resp.headers.get('X-Backend-Storage-Policy-Index'), - str(policy.idx)) + self.assertEqual(resp.headers.get('X-Backend-Storage-Policy-Index'), + str(policy.idx)) # now try to update w/o changing the policy for method in ('POST', 'PUT'): @@ -444,13 +444,13 @@ class TestContainerController(unittest.TestCase): 'X-Backend-Storage-Policy-Index': policy.idx }) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int // 100, 2) + self.assertEqual(resp.status_int // 100, 2) # make sure we get the right index back req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get('X-Backend-Storage-Policy-Index'), - str(policy.idx)) + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get('X-Backend-Storage-Policy-Index'), + str(policy.idx)) def test_PUT_bad_policy_change(self): ts = (Timestamp(t).internal for t in itertools.count(time.time())) @@ -460,13 +460,13 @@ class TestContainerController(unittest.TestCase): 'X-Timestamp': next(ts), 'X-Backend-Storage-Policy-Index': policy.idx}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c') resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) # make sure we get the right index back - self.assertEquals(resp.headers.get('X-Backend-Storage-Policy-Index'), - str(policy.idx)) + self.assertEqual(resp.headers.get('X-Backend-Storage-Policy-Index'), + str(policy.idx)) other_policies = [p for p in POLICIES if p != policy] for other_policy in other_policies: @@ -476,18 +476,18 @@ class TestContainerController(unittest.TestCase): 'X-Backend-Storage-Policy-Index': other_policy.idx }) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 409) - self.assertEquals( + self.assertEqual(resp.status_int, 409) + 
self.assertEqual( resp.headers.get('X-Backend-Storage-Policy-Index'), str(policy.idx)) # and make sure there is no change! req = Request.blank('/sda1/p/a/c') resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) # make sure we get the right index back - self.assertEquals(resp.headers.get('X-Backend-Storage-Policy-Index'), - str(policy.idx)) + self.assertEqual(resp.headers.get('X-Backend-Storage-Policy-Index'), + str(policy.idx)) def test_POST_ignores_policy_change(self): ts = (Timestamp(t).internal for t in itertools.count(time.time())) @@ -496,13 +496,13 @@ class TestContainerController(unittest.TestCase): 'X-Timestamp': next(ts), 'X-Backend-Storage-Policy-Index': policy.idx}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c') resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) # make sure we get the right index back - self.assertEquals(resp.headers.get('X-Backend-Storage-Policy-Index'), - str(policy.idx)) + self.assertEqual(resp.headers.get('X-Backend-Storage-Policy-Index'), + str(policy.idx)) other_policies = [p for p in POLICIES if p != policy] for other_policy in other_policies: @@ -513,16 +513,16 @@ class TestContainerController(unittest.TestCase): }) resp = req.get_response(self.controller) # valid request - self.assertEquals(resp.status_int // 100, 2) + self.assertEqual(resp.status_int // 100, 2) # but it does nothing req = Request.blank('/sda1/p/a/c') resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) # make sure we get the right index back - self.assertEquals(resp.headers.get - ('X-Backend-Storage-Policy-Index'), - str(policy.idx)) + self.assertEqual(resp.headers.get + ('X-Backend-Storage-Policy-Index'), + str(policy.idx)) def test_PUT_no_policy_for_existing_default(self): ts = (Timestamp(t).internal for t in @@ -631,57 +631,57 @@ class TestContainerController(unittest.TestCase): headers={'X-Timestamp': Timestamp(1).internal, 'X-Container-Meta-Test': 'Value'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get('x-container-meta-test'), 'Value') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get('x-container-meta-test'), 'Value') # Set another metadata header, ensuring old one doesn't disappear req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': Timestamp(1).internal, 'X-Container-Meta-Test2': 'Value2'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get('x-container-meta-test'), 'Value') - self.assertEquals(resp.headers.get('x-container-meta-test2'), 'Value2') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get('x-container-meta-test'), 'Value') + self.assertEqual(resp.headers.get('x-container-meta-test2'), 'Value2') # Update metadata header req = 
Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': Timestamp(3).internal, 'X-Container-Meta-Test': 'New Value'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get('x-container-meta-test'), - 'New Value') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get('x-container-meta-test'), + 'New Value') # Send old update to metadata header req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': Timestamp(2).internal, 'X-Container-Meta-Test': 'Old Value'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get('x-container-meta-test'), - 'New Value') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get('x-container-meta-test'), + 'New Value') # Remove metadata header (by setting it to empty) req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': Timestamp(4).internal, 'X-Container-Meta-Test': ''}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) self.assertTrue('x-container-meta-test' not in resp.headers) def test_PUT_GET_sys_metadata(self): @@ -693,60 +693,60 @@ class TestContainerController(unittest.TestCase): headers={'X-Timestamp': Timestamp(1).internal, key: 'Value'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get(key.lower()), 'Value') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get(key.lower()), 'Value') # Set another metadata header, ensuring old one doesn't disappear req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': Timestamp(1).internal, key2: 'Value2'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get(key.lower()), 'Value') - self.assertEquals(resp.headers.get(key2.lower()), 'Value2') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get(key.lower()), 'Value') + self.assertEqual(resp.headers.get(key2.lower()), 'Value2') # Update metadata header req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': Timestamp(3).internal, key: 'New Value'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) req = 
Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get(key.lower()), - 'New Value') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get(key.lower()), + 'New Value') # Send old update to metadata header req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': Timestamp(2).internal, key: 'Old Value'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get(key.lower()), - 'New Value') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get(key.lower()), + 'New Value') # Remove metadata header (by setting it to empty) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': Timestamp(4).internal, key: ''}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) self.assertTrue(key.lower() not in resp.headers) def test_PUT_invalid_partition(self): req = Request.blank('/sda1/./a/c', environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_PUT_timestamp_not_float(self): req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT', @@ -755,7 +755,7 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': 'not-float'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_PUT_insufficient_storage(self): self.controller = container_server.ContainerController( @@ -764,59 +764,59 @@ class TestContainerController(unittest.TestCase): '/sda-null/p/a/c', environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 507) + self.assertEqual(resp.status_int, 507) def test_POST_HEAD_metadata(self): req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': Timestamp(1).internal}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # Set metadata header req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': Timestamp(1).internal, 'X-Container-Meta-Test': 'Value'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get('x-container-meta-test'), 'Value') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get('x-container-meta-test'), 'Value') # Update metadata header req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, 
headers={'X-Timestamp': Timestamp(3).internal, 'X-Container-Meta-Test': 'New Value'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get('x-container-meta-test'), - 'New Value') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get('x-container-meta-test'), + 'New Value') # Send old update to metadata header req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': Timestamp(2).internal, 'X-Container-Meta-Test': 'Old Value'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get('x-container-meta-test'), - 'New Value') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get('x-container-meta-test'), + 'New Value') # Remove metadata header (by setting it to empty) req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': Timestamp(4).internal, 'X-Container-Meta-Test': ''}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) self.assertTrue('x-container-meta-test' not in resp.headers) def test_POST_HEAD_sys_metadata(self): @@ -825,55 +825,55 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': Timestamp(1).internal}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # Set metadata header req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': Timestamp(1).internal, key: 'Value'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get(key.lower()), 'Value') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get(key.lower()), 'Value') # Update metadata header req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': Timestamp(3).internal, key: 'New Value'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get(key.lower()), - 'New Value') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get(key.lower()), + 'New Value') # Send old update to metadata header req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': Timestamp(2).internal, key: 'Old Value'}) resp = 
req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get(key.lower()), - 'New Value') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get(key.lower()), + 'New Value') # Remove metadata header (by setting it to empty) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': Timestamp(4).internal, key: ''}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) self.assertTrue(key.lower() not in resp.headers) def test_POST_invalid_partition(self): req = Request.blank('/sda1/./a/c', environ={'REQUEST_METHOD': 'POST', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_POST_timestamp_not_float(self): req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT', @@ -882,7 +882,7 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': 'not-float'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_POST_insufficient_storage(self): self.controller = container_server.ContainerController( @@ -891,7 +891,7 @@ class TestContainerController(unittest.TestCase): '/sda-null/p/a/c', environ={'REQUEST_METHOD': 'POST', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 507) + self.assertEqual(resp.status_int, 507) def test_POST_invalid_container_sync_to(self): self.controller = container_server.ContainerController( @@ -901,7 +901,7 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_TIMESTAMP': '1'}, headers={'x-container-sync-to': '192.168.0.1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_POST_after_DELETE_not_found(self): req = Request.blank('/sda1/p/a/c', @@ -916,7 +916,7 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': '3'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_DELETE_obj_not_found(self): req = Request.blank( @@ -924,17 +924,17 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_DELETE_container_not_found(self): req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def 
test_PUT_utf8(self): snowman = u'\u2603' @@ -944,7 +944,7 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_account_update_mismatched_host_device(self): req = Request.blank( @@ -957,7 +957,7 @@ class TestContainerController(unittest.TestCase): 'X-Account-Device': 'sda1,sda2'}) broker = self.controller._get_container_broker('sda1', 'p', 'a', 'c') resp = self.controller.account_update(req, 'a', 'c', broker) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_account_update_account_override_deleted(self): bindsock = listen(('127.0.0.1', 0)) @@ -975,7 +975,7 @@ class TestContainerController(unittest.TestCase): new_connect = fake_http_connect(200, count=123) swift.container.server.http_connect = new_connect resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_account_update(self): bindsock = listen(('127.0.0.1', 0)) @@ -989,16 +989,16 @@ class TestContainerController(unittest.TestCase): out.write('HTTP/1.1 %d OK\r\nContent-Length: 0\r\n\r\n' % return_code) out.flush() - self.assertEquals(inc.readline(), - 'PUT /sda1/123/a/c HTTP/1.1\r\n') + self.assertEqual(inc.readline(), + 'PUT /sda1/123/a/c HTTP/1.1\r\n') headers = {} line = inc.readline() while line and line != '\r\n': headers[line.split(':')[0].lower()] = \ line.split(':')[1].strip() line = inc.readline() - self.assertEquals(headers['x-put-timestamp'], - expected_timestamp) + self.assertEqual(headers['x-put-timestamp'], + expected_timestamp) except BaseException as err: return err return None @@ -1014,7 +1014,7 @@ class TestContainerController(unittest.TestCase): try: with Timeout(3): resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) finally: err = event.wait() if err: @@ -1024,7 +1024,7 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '2'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, @@ -1036,7 +1036,7 @@ class TestContainerController(unittest.TestCase): try: with Timeout(3): resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) finally: err = event.wait() if err: @@ -1067,37 +1067,37 @@ class TestContainerController(unittest.TestCase): headers={'x-timestamp': '1', 'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) db = self.controller._get_container_broker('sda1', 'p', 'a', 'c') info = db.get_info() - self.assertEquals(info['x_container_sync_point1'], -1) - self.assertEquals(info['x_container_sync_point2'], -1) + self.assertEqual(info['x_container_sync_point1'], -1) + self.assertEqual(info['x_container_sync_point2'], -1) db.set_x_container_sync_points(123, 456) info = db.get_info() - self.assertEquals(info['x_container_sync_point1'], 123) - self.assertEquals(info['x_container_sync_point2'], 456) + self.assertEqual(info['x_container_sync_point1'], 123) + self.assertEqual(info['x_container_sync_point2'], 456) # Set to same value req = 
Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'x-timestamp': '1', 'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) db = self.controller._get_container_broker('sda1', 'p', 'a', 'c') info = db.get_info() - self.assertEquals(info['x_container_sync_point1'], 123) - self.assertEquals(info['x_container_sync_point2'], 456) + self.assertEqual(info['x_container_sync_point1'], 123) + self.assertEqual(info['x_container_sync_point2'], 456) # Set to new value req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'x-timestamp': '1', 'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c2'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) db = self.controller._get_container_broker('sda1', 'p', 'a', 'c') info = db.get_info() - self.assertEquals(info['x_container_sync_point1'], -1) - self.assertEquals(info['x_container_sync_point2'], -1) + self.assertEqual(info['x_container_sync_point1'], -1) + self.assertEqual(info['x_container_sync_point2'], -1) def test_POST_reset_container_sync(self): req = Request.blank( @@ -1105,68 +1105,68 @@ class TestContainerController(unittest.TestCase): headers={'x-timestamp': '1', 'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) db = self.controller._get_container_broker('sda1', 'p', 'a', 'c') info = db.get_info() - self.assertEquals(info['x_container_sync_point1'], -1) - self.assertEquals(info['x_container_sync_point2'], -1) + self.assertEqual(info['x_container_sync_point1'], -1) + self.assertEqual(info['x_container_sync_point2'], -1) db.set_x_container_sync_points(123, 456) info = db.get_info() - self.assertEquals(info['x_container_sync_point1'], 123) - self.assertEquals(info['x_container_sync_point2'], 456) + self.assertEqual(info['x_container_sync_point1'], 123) + self.assertEqual(info['x_container_sync_point2'], 456) # Set to same value req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, headers={'x-timestamp': '1', 'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) db = self.controller._get_container_broker('sda1', 'p', 'a', 'c') info = db.get_info() - self.assertEquals(info['x_container_sync_point1'], 123) - self.assertEquals(info['x_container_sync_point2'], 456) + self.assertEqual(info['x_container_sync_point1'], 123) + self.assertEqual(info['x_container_sync_point2'], 456) # Set to new value req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, headers={'x-timestamp': '1', 'x-container-sync-to': 'http://127.0.0.1:12345/v1/a/c2'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) db = self.controller._get_container_broker('sda1', 'p', 'a', 'c') info = db.get_info() - self.assertEquals(info['x_container_sync_point1'], -1) - self.assertEquals(info['x_container_sync_point2'], -1) + self.assertEqual(info['x_container_sync_point1'], -1) + self.assertEqual(info['x_container_sync_point2'], -1) def test_DELETE(self): req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': '1'}) resp = 
req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '2'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}, headers={'X-Timestamp': '3'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_DELETE_PUT_recreate(self): path = '/sda1/p/a/c' req = Request.blank(path, method='PUT', headers={'X-Timestamp': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank(path, method='DELETE', headers={'X-Timestamp': '2'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank(path, method='GET') resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) # sanity + self.assertEqual(resp.status_int, 404) # sanity # backend headers expectations = { 'x-backend-put-timestamp': Timestamp(1).internal, @@ -1180,20 +1180,20 @@ class TestContainerController(unittest.TestCase): db = self.controller._get_container_broker('sda1', 'p', 'a', 'c') self.assertEqual(True, db.is_deleted()) info = db.get_info() - self.assertEquals(info['put_timestamp'], Timestamp('1').internal) - self.assertEquals(info['delete_timestamp'], Timestamp('2').internal) - self.assertEquals(info['status_changed_at'], Timestamp('2').internal) + self.assertEqual(info['put_timestamp'], Timestamp('1').internal) + self.assertEqual(info['delete_timestamp'], Timestamp('2').internal) + self.assertEqual(info['status_changed_at'], Timestamp('2').internal) # recreate req = Request.blank(path, method='PUT', headers={'X-Timestamp': '4'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) db = self.controller._get_container_broker('sda1', 'p', 'a', 'c') self.assertEqual(False, db.is_deleted()) info = db.get_info() - self.assertEquals(info['put_timestamp'], Timestamp('4').internal) - self.assertEquals(info['delete_timestamp'], Timestamp('2').internal) - self.assertEquals(info['status_changed_at'], Timestamp('4').internal) + self.assertEqual(info['put_timestamp'], Timestamp('4').internal) + self.assertEqual(info['delete_timestamp'], Timestamp('2').internal) + self.assertEqual(info['status_changed_at'], Timestamp('4').internal) for method in ('GET', 'HEAD'): req = Request.blank(path) resp = req.get_response(self.controller) @@ -1214,15 +1214,15 @@ class TestContainerController(unittest.TestCase): req = Request.blank(path, method='PUT', headers={'X-Timestamp': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) db = self.controller._get_container_broker('sda1', 'p', 'a', 'c') req = Request.blank(path, method='DELETE', headers={'X-Timestamp': '2'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank(path, method='GET') resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) # sanity + self.assertEqual(resp.status_int, 404) # sanity self.assertEqual(True, db.is_deleted()) # now save a copy of this db (and remove it 
from the "current node") db = self.controller._get_container_broker('sda1', 'p', 'a', 'c') @@ -1232,7 +1232,7 @@ class TestContainerController(unittest.TestCase): # that should make it missing on this node req = Request.blank(path, method='GET') resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) # sanity + self.assertEqual(resp.status_int, 404) # sanity # setup the race in os.path.exists (first time no, then yes) mock_called = [] @@ -1261,8 +1261,8 @@ class TestContainerController(unittest.TestCase): [(exists, db.db_file) for exists in (False, True)]) # info was updated info = db.get_info() - self.assertEquals(info['put_timestamp'], Timestamp('4').internal) - self.assertEquals(info['delete_timestamp'], Timestamp('2').internal) + self.assertEqual(info['put_timestamp'], Timestamp('4').internal) + self.assertEqual(info['delete_timestamp'], Timestamp('2').internal) def test_DELETE_not_found(self): # Even if the container wasn't previously heard of, the container @@ -1272,7 +1272,7 @@ class TestContainerController(unittest.TestCase): '/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_change_storage_policy_via_DELETE_then_PUT(self): ts = (Timestamp(t).internal for t in @@ -1344,33 +1344,33 @@ class TestContainerController(unittest.TestCase): '/sda1/p/a/c', method='PUT', headers={ 'X-Timestamp': Timestamp(2).internal}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/o', method='PUT', headers={ 'X-Timestamp': Timestamp(0).internal, 'X-Size': 1, 'X-Content-Type': 'text/plain', 'X-Etag': 'x'}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) ts = (Timestamp(t).internal for t in itertools.count(3)) req = Request.blank('/sda1/p/a/c', method='DELETE', headers={ 'X-Timestamp': next(ts)}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 409) + self.assertEqual(resp.status_int, 409) req = Request.blank('/sda1/p/a/c/o', method='DELETE', headers={ 'X-Timestamp': next(ts)}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c', method='DELETE', headers={ 'X-Timestamp': next(ts)}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c', method='GET', headers={ 'X-Timestamp': next(ts)}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_object_update_with_offset(self): ts = (Timestamp(t).internal for t in @@ -1394,7 +1394,7 @@ class TestContainerController(unittest.TestCase): 'X-Content-Type': 'text/plain', 'X-Etag': 'x'}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # check listing req = Request.blank('/sda1/p/a/c', method='GET', query_string='format=json') @@ -1417,7 +1417,7 @@ class TestContainerController(unittest.TestCase): 'X-Content-Type': 'text/html', 'X-Etag': 'y'}) self._update_object_put_headers(req) resp = 
req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # check updated listing req = Request.blank('/sda1/p/a/c', method='GET', query_string='format=json') @@ -1439,7 +1439,7 @@ class TestContainerController(unittest.TestCase): 'X-Timestamp': delete_timestamp}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) # check empty listing req = Request.blank('/sda1/p/a/c', method='GET', query_string='format=json') @@ -1457,7 +1457,7 @@ class TestContainerController(unittest.TestCase): 'X-Content-Type': 'text/enriched', 'X-Etag': 'z'}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # check un-deleted listing req = Request.blank('/sda1/p/a/c', method='GET', query_string='format=json') @@ -1479,7 +1479,7 @@ class TestContainerController(unittest.TestCase): 'X-Timestamp': delete_timestamp}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) # check empty listing req = Request.blank('/sda1/p/a/c', method='GET', query_string='format=json') @@ -1502,16 +1502,16 @@ class TestContainerController(unittest.TestCase): out.write('HTTP/1.1 %d OK\r\nContent-Length: 0\r\n\r\n' % return_code) out.flush() - self.assertEquals(inc.readline(), - 'PUT /sda1/123/a/c HTTP/1.1\r\n') + self.assertEqual(inc.readline(), + 'PUT /sda1/123/a/c HTTP/1.1\r\n') headers = {} line = inc.readline() while line and line != '\r\n': headers[line.split(':')[0].lower()] = \ line.split(':')[1].strip() line = inc.readline() - self.assertEquals(headers['x-delete-timestamp'], - expected_timestamp) + self.assertEqual(headers['x-delete-timestamp'], + expected_timestamp) except BaseException as err: return err return None @@ -1520,7 +1520,7 @@ class TestContainerController(unittest.TestCase): '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE'}, @@ -1532,7 +1532,7 @@ class TestContainerController(unittest.TestCase): try: with Timeout(3): resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) finally: err = event.wait() if err: @@ -1541,7 +1541,7 @@ class TestContainerController(unittest.TestCase): '/sda1/p/a/c', method='PUT', headers={ 'X-Timestamp': Timestamp(2).internal}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE'}, @@ -1553,7 +1553,7 @@ class TestContainerController(unittest.TestCase): try: with Timeout(3): resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) finally: err = event.wait() if err: @@ -1562,7 +1562,7 @@ class TestContainerController(unittest.TestCase): '/sda1/p/a/c', method='PUT', headers={ 'X-Timestamp': Timestamp(4).internal}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE'}, @@ -1588,7 
+1588,7 @@ class TestContainerController(unittest.TestCase): '/sda1/./a/c', environ={'REQUEST_METHOD': 'DELETE', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_DELETE_timestamp_not_float(self): req = Request.blank( @@ -1599,7 +1599,7 @@ class TestContainerController(unittest.TestCase): '/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': 'not-float'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_DELETE_insufficient_storage(self): self.controller = container_server.ContainerController( @@ -1608,7 +1608,7 @@ class TestContainerController(unittest.TestCase): '/sda-null/p/a/c', environ={'REQUEST_METHOD': 'DELETE', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 507) + self.assertEqual(resp.status_int, 507) def test_GET_over_limit(self): req = Request.blank( @@ -1616,7 +1616,7 @@ class TestContainerController(unittest.TestCase): (constraints.CONTAINER_LISTING_LIMIT + 1), environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) def test_GET_json(self): # make a container @@ -1629,8 +1629,8 @@ class TestContainerController(unittest.TestCase): '/sda1/p/a/jsonc?format=json', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(simplejson.loads(resp.body), []) + self.assertEqual(resp.status_int, 200) + self.assertEqual(simplejson.loads(resp.body), []) # fill the container for i in range(3): req = Request.blank( @@ -1642,7 +1642,7 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_SIZE': 0}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # test format json_body = [{"name": "0", "hash": "x", @@ -1664,15 +1664,15 @@ class TestContainerController(unittest.TestCase): '/sda1/p/a/jsonc?format=json', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'application/json') - self.assertEquals(simplejson.loads(resp.body), json_body) - self.assertEquals(resp.charset, 'utf-8') + self.assertEqual(resp.content_type, 'application/json') + self.assertEqual(simplejson.loads(resp.body), json_body) + self.assertEqual(resp.charset, 'utf-8') req = Request.blank( '/sda1/p/a/jsonc?format=json', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'application/json') + self.assertEqual(resp.content_type, 'application/json') for accept in ('application/json', 'application/json;q=1.0,*/*;q=0.9', '*/*;q=0.9,application/json;q=1.0', 'application/*'): @@ -1681,10 +1681,10 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'GET'}) req.accept = accept resp = req.get_response(self.controller) - self.assertEquals( + self.assertEqual( simplejson.loads(resp.body), json_body, 'Invalid body for Accept: %s' % accept) - self.assertEquals( + self.assertEqual( resp.content_type, 'application/json', 'Invalid content_type for Accept: %s' % accept) @@ -1693,7 +1693,7 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'HEAD'}) req.accept = accept resp = 
req.get_response(self.controller) - self.assertEquals( + self.assertEqual( resp.content_type, 'application/json', 'Invalid content_type for Accept: %s' % accept) @@ -1707,7 +1707,7 @@ class TestContainerController(unittest.TestCase): req = Request.blank( '/sda1/p/a/plainc', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) # fill the container for i in range(3): req = Request.blank( @@ -1719,20 +1719,20 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_SIZE': 0}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) plain_body = '0\n1\n2\n' req = Request.blank('/sda1/p/a/plainc', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'text/plain') - self.assertEquals(resp.body, plain_body) - self.assertEquals(resp.charset, 'utf-8') + self.assertEqual(resp.content_type, 'text/plain') + self.assertEqual(resp.body, plain_body) + self.assertEqual(resp.charset, 'utf-8') req = Request.blank('/sda1/p/a/plainc', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'text/plain') + self.assertEqual(resp.content_type, 'text/plain') for accept in ('', 'text/plain', 'application/xml;q=0.8,*/*;q=0.9', '*/*;q=0.9,application/xml;q=0.8', '*/*', @@ -1742,10 +1742,10 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'GET'}) req.accept = accept resp = req.get_response(self.controller) - self.assertEquals( + self.assertEqual( resp.body, plain_body, 'Invalid body for Accept: %s' % accept) - self.assertEquals( + self.assertEqual( resp.content_type, 'text/plain', 'Invalid content_type for Accept: %s' % accept) @@ -1754,7 +1754,7 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'GET'}) req.accept = accept resp = req.get_response(self.controller) - self.assertEquals( + self.assertEqual( resp.content_type, 'text/plain', 'Invalid content_type for Accept: %s' % accept) @@ -1764,17 +1764,17 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'GET'}) req.accept = 'application/json' resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'text/plain') - self.assertEquals(resp.body, plain_body) + self.assertEqual(resp.content_type, 'text/plain') + self.assertEqual(resp.body, plain_body) # test unknown format uses default plain req = Request.blank( '/sda1/p/a/plainc?format=somethingelse', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_type, 'text/plain') - self.assertEquals(resp.body, plain_body) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.content_type, 'text/plain') + self.assertEqual(resp.body, plain_body) def test_GET_json_last_modified(self): # make a container @@ -1793,7 +1793,7 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_SIZE': 0}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # test format # last_modified format must be uniform, even when there are not msecs json_body = [{"name": "0", @@ -1811,9 +1811,9 @@ class TestContainerController(unittest.TestCase): '/sda1/p/a/jsonc?format=json', 
environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'application/json') - self.assertEquals(simplejson.loads(resp.body), json_body) - self.assertEquals(resp.charset, 'utf-8') + self.assertEqual(resp.content_type, 'application/json') + self.assertEqual(simplejson.loads(resp.body), json_body) + self.assertEqual(resp.charset, 'utf-8') def test_GET_xml(self): # make a container @@ -1833,7 +1833,7 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_SIZE': 0}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) xml_body = '\n' \ '' \ '0x0' \ @@ -1855,15 +1855,15 @@ class TestContainerController(unittest.TestCase): '/sda1/p/a/xmlc?format=xml', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'application/xml') - self.assertEquals(resp.body, xml_body) - self.assertEquals(resp.charset, 'utf-8') + self.assertEqual(resp.content_type, 'application/xml') + self.assertEqual(resp.body, xml_body) + self.assertEqual(resp.charset, 'utf-8') req = Request.blank( '/sda1/p/a/xmlc?format=xml', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'application/xml') + self.assertEqual(resp.content_type, 'application/xml') for xml_accept in ( 'application/xml', 'application/xml;q=1.0,*/*;q=0.9', @@ -1873,10 +1873,10 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'GET'}) req.accept = xml_accept resp = req.get_response(self.controller) - self.assertEquals( + self.assertEqual( resp.body, xml_body, 'Invalid body for Accept: %s' % xml_accept) - self.assertEquals( + self.assertEqual( resp.content_type, 'application/xml', 'Invalid content_type for Accept: %s' % xml_accept) @@ -1885,7 +1885,7 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'HEAD'}) req.accept = xml_accept resp = req.get_response(self.controller) - self.assertEquals( + self.assertEqual( resp.content_type, 'application/xml', 'Invalid content_type for Accept: %s' % xml_accept) @@ -1894,8 +1894,8 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'GET'}) req.accept = 'text/xml' resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'text/xml') - self.assertEquals(resp.body, xml_body) + self.assertEqual(resp.content_type, 'text/xml') + self.assertEqual(resp.body, xml_body) def test_GET_marker(self): # make a container @@ -1913,13 +1913,13 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_ETAG': 'x', 'HTTP_X_SIZE': 0}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # test limit with marker req = Request.blank('/sda1/p/a/c?limit=2&marker=1', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) result = resp.body.split() - self.assertEquals(result, ['2', ]) + self.assertEqual(result, ['2', ]) def test_weird_content_types(self): snowman = u'\u2603' @@ -1936,12 +1936,12 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_ETAG': 'x', 'HTTP_X_SIZE': 0}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c?format=json', environ={'REQUEST_METHOD': 'GET'}) 
resp = req.get_response(self.controller) result = [x['content_type'] for x in simplejson.loads(resp.body)] - self.assertEquals(result, [u'\u2603', 'text/plain;charset="utf-8"']) + self.assertEqual(result, [u'\u2603', 'text/plain;charset="utf-8"']) def test_GET_accept_not_valid(self): req = Request.blank('/sda1/p/a/c', method='PUT', headers={ @@ -1951,7 +1951,7 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/sda1/p/a/c', method='GET') req.accept = 'application/xml*' resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 406) + self.assertEqual(resp.status_int, 406) def test_GET_limit(self): # make a container @@ -1971,13 +1971,13 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_SIZE': 0}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # test limit req = Request.blank( '/sda1/p/a/c?limit=2', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) result = resp.body.split() - self.assertEquals(result, ['0', '1']) + self.assertEqual(result, ['0', '1']) def test_GET_prefix(self): req = Request.blank( @@ -1995,18 +1995,18 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_SIZE': 0}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c?prefix=a', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.body.split(), ['a1', 'a2', 'a3']) + self.assertEqual(resp.body.split(), ['a1', 'a2', 'a3']) def test_GET_delimiter_too_long(self): req = Request.blank('/sda1/p/a/c?delimiter=xx', environ={'REQUEST_METHOD': 'GET', 'HTTP_X_TIMESTAMP': '0'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) def test_GET_delimiter(self): req = Request.blank( @@ -2022,12 +2022,12 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_SIZE': 0}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c?prefix=US-&delimiter=-&format=json', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals( + self.assertEqual( simplejson.loads(resp.body), [{"subdir": "US-OK-"}, {"subdir": "US-TX-"}, @@ -2047,12 +2047,12 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_SIZE': 0}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c?prefix=US-&delimiter=-&format=xml', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals( + self.assertEqual( resp.body, '' '\n' 'US-OK-' @@ -2072,7 +2072,7 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_SIZE': 0}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c?delimiter=/&format=xml', environ={'REQUEST_METHOD': 'GET'}) @@ -2082,12 +2082,12 @@ class TestContainerController(unittest.TestCase): container = dom.getElementsByTagName('container')[0] self.assertTrue(len(container.getElementsByTagName('subdir')) == 1) subdir 
= container.getElementsByTagName('subdir')[0] - self.assertEquals(unicode(subdir.attributes['name'].value), - u'<\'sub\' "dir">/') + self.assertEqual(unicode(subdir.attributes['name'].value), + u'<\'sub\' "dir">/') self.assertTrue(len(subdir.getElementsByTagName('name')) == 1) name = subdir.getElementsByTagName('name')[0] - self.assertEquals(unicode(name.childNodes[0].data), - u'<\'sub\' "dir">/') + self.assertEqual(unicode(name.childNodes[0].data), + u'<\'sub\' "dir">/') def test_GET_path(self): req = Request.blank( @@ -2103,12 +2103,12 @@ class TestContainerController(unittest.TestCase): 'HTTP_X_SIZE': 0}) self._update_object_put_headers(req) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c?path=US&format=json', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals( + self.assertEqual( simplejson.loads(resp.body), [{"name": "US/OK", "hash": "x", "bytes": 0, "content_type": "text/plain", @@ -2124,7 +2124,7 @@ class TestContainerController(unittest.TestCase): '/sda-null/p/a/c', environ={'REQUEST_METHOD': 'GET', 'HTTP_X_TIMESTAMP': '1'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 507) + self.assertEqual(resp.status_int, 507) def test_through_call(self): inbuf = BytesIO() @@ -2149,8 +2149,8 @@ class TestContainerController(unittest.TestCase): 'wsgi.multiprocess': False, 'wsgi.run_once': False}, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '404 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '404 ') def test_through_call_invalid_path(self): inbuf = BytesIO() @@ -2175,8 +2175,8 @@ class TestContainerController(unittest.TestCase): 'wsgi.multiprocess': False, 'wsgi.run_once': False}, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '400 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '400 ') def test_through_call_invalid_path_utf8(self): inbuf = BytesIO() @@ -2201,8 +2201,8 @@ class TestContainerController(unittest.TestCase): 'wsgi.multiprocess': False, 'wsgi.run_once': False}, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '412 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '412 ') def test_invalid_method_doesnt_exist(self): errbuf = StringIO() @@ -2214,8 +2214,8 @@ class TestContainerController(unittest.TestCase): self.controller.__call__({'REQUEST_METHOD': 'method_doesnt_exist', 'PATH_INFO': '/sda1/p/a/c'}, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '405 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '405 ') def test_invalid_method_is_not_public(self): errbuf = StringIO() @@ -2227,8 +2227,8 @@ class TestContainerController(unittest.TestCase): self.controller.__call__({'REQUEST_METHOD': '__init__', 'PATH_INFO': '/sda1/p/a/c'}, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '405 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '405 ') def test_params_format(self): req = Request.blank( @@ -2239,7 +2239,7 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/sda1/p/a/c?format=%s' % format, method='GET') resp = 
req.get_response(self.controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) def test_params_utf8(self): # Bad UTF8 sequence, all parameters should cause 400 error @@ -2248,14 +2248,14 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/sda1/p/a/c?%s=\xce' % param, environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 400, - "%d on param %s" % (resp.status_int, param)) + self.assertEqual(resp.status_int, 400, + "%d on param %s" % (resp.status_int, param)) # Good UTF8 sequence for delimiter, too long (1 byte delimiters only) req = Request.blank('/sda1/p/a/c?delimiter=\xce\xa9', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 412, - "%d on param delimiter" % (resp.status_int)) + self.assertEqual(resp.status_int, 412, + "%d on param delimiter" % (resp.status_int)) req = Request.blank('/sda1/p/a/c', method='PUT', headers={'X-Timestamp': Timestamp(1).internal}) req.get_response(self.controller) @@ -2265,8 +2265,8 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/sda1/p/a/c?%s=\xce\xa9' % param, environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204, - "%d on param %s" % (resp.status_int, param)) + self.assertEqual(resp.status_int, 204, + "%d on param %s" % (resp.status_int, param)) def test_put_auto_create(self): headers = {'x-timestamp': Timestamp(1).internal, @@ -2278,25 +2278,25 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'PUT'}, headers=dict(headers)) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) req = Request.blank('/sda1/p/.a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers=dict(headers)) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/.c/o', environ={'REQUEST_METHOD': 'PUT'}, headers=dict(headers)) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) req = Request.blank('/sda1/p/a/c/.o', environ={'REQUEST_METHOD': 'PUT'}, headers=dict(headers)) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_delete_auto_create(self): headers = {'x-timestamp': Timestamp(1).internal} @@ -2305,25 +2305,25 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers=dict(headers)) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) req = Request.blank('/sda1/p/.a/c/o', environ={'REQUEST_METHOD': 'DELETE'}, headers=dict(headers)) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/.c/o', environ={'REQUEST_METHOD': 'DELETE'}, headers=dict(headers)) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) req = Request.blank('/sda1/p/a/.c/.o', environ={'REQUEST_METHOD': 'DELETE'}, headers=dict(headers)) resp = req.get_response(self.controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_content_type_on_HEAD(self): Request.blank('/sda1/p/a/o', @@ 
-2335,30 +2335,30 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/sda1/p/a/o?format=xml', environ=env) resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'application/xml') - self.assertEquals(resp.charset, 'utf-8') + self.assertEqual(resp.content_type, 'application/xml') + self.assertEqual(resp.charset, 'utf-8') req = Request.blank('/sda1/p/a/o?format=json', environ=env) resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'application/json') - self.assertEquals(resp.charset, 'utf-8') + self.assertEqual(resp.content_type, 'application/json') + self.assertEqual(resp.charset, 'utf-8') req = Request.blank('/sda1/p/a/o', environ=env) resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'text/plain') - self.assertEquals(resp.charset, 'utf-8') + self.assertEqual(resp.content_type, 'text/plain') + self.assertEqual(resp.charset, 'utf-8') req = Request.blank( '/sda1/p/a/o', headers={'Accept': 'application/json'}, environ=env) resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'application/json') - self.assertEquals(resp.charset, 'utf-8') + self.assertEqual(resp.content_type, 'application/json') + self.assertEqual(resp.charset, 'utf-8') req = Request.blank( '/sda1/p/a/o', headers={'Accept': 'application/xml'}, environ=env) resp = req.get_response(self.controller) - self.assertEquals(resp.content_type, 'application/xml') - self.assertEquals(resp.charset, 'utf-8') + self.assertEqual(resp.content_type, 'application/xml') + self.assertEqual(resp.charset, 'utf-8') def test_updating_multiple_container_servers(self): http_connect_args = [] @@ -2403,8 +2403,8 @@ class TestContainerController(unittest.TestCase): http_connect_args.sort(key=operator.itemgetter('ipaddr')) - self.assertEquals(len(http_connect_args), 2) - self.assertEquals( + self.assertEqual(len(http_connect_args), 2) + self.assertEqual( http_connect_args[0], {'ipaddr': '1.2.3.4', 'port': '5', @@ -2422,7 +2422,7 @@ class TestContainerController(unittest.TestCase): 'referer': 'PUT http://localhost/sda1/p/a/c', 'user-agent': 'container-server %d' % os.getpid(), 'x-trans-id': '-'})}) - self.assertEquals( + self.assertEqual( http_connect_args[1], {'ipaddr': '6.7.8.9', 'port': '10', @@ -2445,7 +2445,7 @@ class TestContainerController(unittest.TestCase): # Test replication_server flag was set from configuration file. 
container_controller = container_server.ContainerController conf = {'devices': self.testdir, 'mount_check': 'false'} - self.assertEquals(container_controller(conf).replication_server, None) + self.assertEqual(container_controller(conf).replication_server, None) for val in [True, '1', 'True', 'true']: conf['replication_server'] = val self.assertTrue(container_controller(conf).replication_server) @@ -2462,7 +2462,7 @@ class TestContainerController(unittest.TestCase): self.assertFalse(hasattr(method, 'replication')) for method_name in repl_methods: method = getattr(self.controller, method_name) - self.assertEquals(method.replication, True) + self.assertEqual(method.replication, True) def test_correct_allowed_method(self): # Test correct work for allowed method using @@ -2568,8 +2568,8 @@ class TestContainerController(unittest.TestCase): 'wsgi.multiprocess': False, 'wsgi.run_once': False} self.controller(env, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '405 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '405 ') def test_GET_log_requests_true(self): self.controller.logger = FakeLogger() diff --git a/test/unit/container/test_sync.py b/test/unit/container/test_sync.py index 2b1637ed7e..0a97e843e2 100644 --- a/test/unit/container/test_sync.py +++ b/test/unit/container/test_sync.py @@ -85,22 +85,22 @@ class TestContainerSync(unittest.TestCase): got = flo.read(2) self.assertTrue(len(got) <= 2) - self.assertEquals(got, expect[:len(got)]) + self.assertEqual(got, expect[:len(got)]) expect = expect[len(got):] got = flo.read(5) self.assertTrue(len(got) <= 5) - self.assertEquals(got, expect[:len(got)]) + self.assertEqual(got, expect[:len(got)]) expect = expect[len(got):] - self.assertEquals(flo.read(), expect) - self.assertEquals(flo.read(), '') - self.assertEquals(flo.read(2), '') + self.assertEqual(flo.read(), expect) + self.assertEqual(flo.read(), '') + self.assertEqual(flo.read(2), '') flo = sync.FileLikeIter(iter(['123', '4567', '89', '0'])) - self.assertEquals(flo.read(), '1234567890') - self.assertEquals(flo.read(), '') - self.assertEquals(flo.read(2), '') + self.assertEqual(flo.read(), '1234567890') + self.assertEqual(flo.read(), '') + self.assertEqual(flo.read(2), '') def assertLogMessage(self, msg_level, expected, skip=0): for line in self.logger.get_lines_for_level(msg_level)[skip:]: @@ -129,8 +129,8 @@ class TestContainerSync(unittest.TestCase): self.assertTrue(mock_ic.called) conf_path, name, retry = mock_ic.call_args[0] self.assertTrue(isinstance(conf_path, ConfigString)) - self.assertEquals(conf_path.contents.getvalue(), - dedent(sync.ic_conf_body)) + self.assertEqual(conf_path.contents.getvalue(), + dedent(sync.ic_conf_body)) self.assertLogMessage('warning', 'internal_client_conf_path') self.assertLogMessage('warning', 'internal-client.conf-sample') @@ -143,7 +143,7 @@ class TestContainerSync(unittest.TestCase): self.assertTrue(cs.container_ring is cring) self.assertTrue(mock_ic.called) conf_path, name, retry = mock_ic.call_args[0] - self.assertEquals(conf_path, ic_conf_path) + self.assertEqual(conf_path, ic_conf_path) sample_conf_filename = os.path.join( os.path.dirname(test.__file__), @@ -207,12 +207,12 @@ class TestContainerSync(unittest.TestCase): sync.audit_location_generator = orig_audit_location_generator sync.ContainerBroker = orig_ContainerBroker - self.assertEquals(time_calls, [9]) - self.assertEquals(len(sleep_calls), 2) + self.assertEqual(time_calls, [9]) + 
self.assertEqual(len(sleep_calls), 2) self.assertTrue(sleep_calls[0] <= cs.interval) self.assertTrue(sleep_calls[1] == cs.interval - 1) - self.assertEquals(audit_location_generator_calls, [2]) - self.assertEquals(cs.reported, 3602) + self.assertEqual(audit_location_generator_calls, [2]) + self.assertEqual(cs.reported, 3602) def test_run_once(self): # This runs runs_once with fakes twice, the first causing an interim @@ -255,9 +255,9 @@ class TestContainerSync(unittest.TestCase): cs = sync.ContainerSync({}, container_ring=FakeRing()) sync.audit_location_generator = fake_audit_location_generator cs.run_once(1, 2, a=3, b=4, verbose=True) - self.assertEquals(time_calls, [6]) - self.assertEquals(audit_location_generator_calls, [1]) - self.assertEquals(cs.reported, 3602) + self.assertEqual(time_calls, [6]) + self.assertEqual(audit_location_generator_calls, [1]) + self.assertEqual(cs.reported, 3602) cs.run_once() except Exception as err: if str(err) != 'we are now done': @@ -267,22 +267,22 @@ class TestContainerSync(unittest.TestCase): sync.audit_location_generator = orig_audit_location_generator sync.ContainerBroker = orig_ContainerBroker - self.assertEquals(time_calls, [10]) - self.assertEquals(audit_location_generator_calls, [2]) - self.assertEquals(cs.reported, 3604) + self.assertEqual(time_calls, [10]) + self.assertEqual(audit_location_generator_calls, [2]) + self.assertEqual(cs.reported, 3604) def test_container_sync_not_db(self): cring = FakeRing() with mock.patch('swift.container.sync.InternalClient'): cs = sync.ContainerSync({}, container_ring=cring) - self.assertEquals(cs.container_failures, 0) + self.assertEqual(cs.container_failures, 0) def test_container_sync_missing_db(self): cring = FakeRing() with mock.patch('swift.container.sync.InternalClient'): cs = sync.ContainerSync({}, container_ring=cring) cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 1) + self.assertEqual(cs.container_failures, 1) def test_container_sync_not_my_db(self): # Db could be there due to handoff replication so test that we ignore @@ -302,24 +302,24 @@ class TestContainerSync(unittest.TestCase): cs._myips = ['127.0.0.1'] # No match cs._myport = 1 # No match cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 0) + self.assertEqual(cs.container_failures, 0) cs._myips = ['10.0.0.0'] # Match cs._myport = 1 # No match cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 0) + self.assertEqual(cs.container_failures, 0) cs._myips = ['127.0.0.1'] # No match cs._myport = 1000 # Match cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 0) + self.assertEqual(cs.container_failures, 0) cs._myips = ['10.0.0.0'] # Match cs._myport = 1000 # Match # This complete match will cause the 1 container failure since the # broker's info doesn't contain sync point keys cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 1) + self.assertEqual(cs.container_failures, 1) finally: sync.ContainerBroker = orig_ContainerBroker @@ -337,7 +337,7 @@ class TestContainerSync(unittest.TestCase): # This complete match will cause the 1 container failure since the # broker's info doesn't contain sync point keys cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 1) + self.assertEqual(cs.container_failures, 1) sync.ContainerBroker = lambda p: FakeContainerBroker( p, info={'account': 'a', 'container': 'c', @@ -345,7 +345,7 @@ class TestContainerSync(unittest.TestCase): # This complete match will not cause any more container failures # since 
the broker indicates deletion cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 1) + self.assertEqual(cs.container_failures, 1) finally: sync.ContainerBroker = orig_ContainerBroker @@ -365,8 +365,8 @@ class TestContainerSync(unittest.TestCase): # This complete match will be skipped since the broker's metadata # has no x-container-sync-to or x-container-sync-key cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 0) - self.assertEquals(cs.container_skips, 1) + self.assertEqual(cs.container_failures, 0) + self.assertEqual(cs.container_skips, 1) sync.ContainerBroker = lambda p: FakeContainerBroker( p, info={'account': 'a', 'container': 'c', @@ -379,8 +379,8 @@ class TestContainerSync(unittest.TestCase): # This complete match will be skipped since the broker's metadata # has no x-container-sync-key cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 0) - self.assertEquals(cs.container_skips, 2) + self.assertEqual(cs.container_failures, 0) + self.assertEqual(cs.container_skips, 2) sync.ContainerBroker = lambda p: FakeContainerBroker( p, info={'account': 'a', 'container': 'c', @@ -393,8 +393,8 @@ class TestContainerSync(unittest.TestCase): # This complete match will be skipped since the broker's metadata # has no x-container-sync-to cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 0) - self.assertEquals(cs.container_skips, 3) + self.assertEqual(cs.container_failures, 0) + self.assertEqual(cs.container_skips, 3) sync.ContainerBroker = lambda p: FakeContainerBroker( p, info={'account': 'a', 'container': 'c', @@ -409,8 +409,8 @@ class TestContainerSync(unittest.TestCase): # This complete match will cause a container failure since the # sync-to won't validate as allowed. cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 1) - self.assertEquals(cs.container_skips, 3) + self.assertEqual(cs.container_failures, 1) + self.assertEqual(cs.container_skips, 3) sync.ContainerBroker = lambda p: FakeContainerBroker( p, info={'account': 'a', 'container': 'c', @@ -425,8 +425,8 @@ class TestContainerSync(unittest.TestCase): # This complete match will succeed completely since the broker # get_items_since will return no new rows. cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 1) - self.assertEquals(cs.container_skips, 3) + self.assertEqual(cs.container_failures, 1) + self.assertEqual(cs.container_skips, 3) finally: sync.ContainerBroker = orig_ContainerBroker @@ -450,8 +450,8 @@ class TestContainerSync(unittest.TestCase): cs.allowed_sync_hosts = ['127.0.0.1'] # This sync will fail since the items_since data is bad. cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 1) - self.assertEquals(cs.container_skips, 0) + self.assertEqual(cs.container_failures, 1) + self.assertEqual(cs.container_skips, 0) # Set up fake times to make the sync short-circuit as having taken # too long @@ -468,8 +468,8 @@ class TestContainerSync(unittest.TestCase): # as to be time to move on (before it ever actually tries to do # anything). 
cs.container_sync('isa.db') - self.assertEquals(cs.container_failures, 1) - self.assertEquals(cs.container_skips, 0) + self.assertEqual(cs.container_failures, 1) + self.assertEqual(cs.container_skips, 0) finally: sync.ContainerBroker = orig_ContainerBroker sync.time = orig_time @@ -501,10 +501,10 @@ class TestContainerSync(unittest.TestCase): cs.allowed_sync_hosts = ['127.0.0.1'] cs.container_sync('isa.db') # Succeeds because no rows match - self.assertEquals(cs.container_failures, 1) - self.assertEquals(cs.container_skips, 0) - self.assertEquals(fcb.sync_point1, None) - self.assertEquals(fcb.sync_point2, -1) + self.assertEqual(cs.container_failures, 1) + self.assertEqual(cs.container_skips, 0) + self.assertEqual(fcb.sync_point1, None) + self.assertEqual(fcb.sync_point2, -1) def fake_hash_path(account, container, obj, raw_digest=False): # Ensures that all rows match for full syncing, ordinal is 0 @@ -529,10 +529,10 @@ class TestContainerSync(unittest.TestCase): cs.allowed_sync_hosts = ['127.0.0.1'] cs.container_sync('isa.db') # Succeeds because the two sync points haven't deviated yet - self.assertEquals(cs.container_failures, 1) - self.assertEquals(cs.container_skips, 0) - self.assertEquals(fcb.sync_point1, -1) - self.assertEquals(fcb.sync_point2, -1) + self.assertEqual(cs.container_failures, 1) + self.assertEqual(cs.container_skips, 0) + self.assertEqual(fcb.sync_point1, -1) + self.assertEqual(fcb.sync_point2, -1) fcb = FakeContainerBroker( 'path', @@ -550,10 +550,10 @@ class TestContainerSync(unittest.TestCase): cs.container_sync('isa.db') # Fails because container_sync_row will fail since the row has no # 'deleted' key - self.assertEquals(cs.container_failures, 2) - self.assertEquals(cs.container_skips, 0) - self.assertEquals(fcb.sync_point1, None) - self.assertEquals(fcb.sync_point2, -1) + self.assertEqual(cs.container_failures, 2) + self.assertEqual(cs.container_skips, 0) + self.assertEqual(fcb.sync_point1, None) + self.assertEqual(fcb.sync_point2, -1) def fake_delete_object(*args, **kwargs): raise ClientException @@ -577,10 +577,10 @@ class TestContainerSync(unittest.TestCase): cs.allowed_sync_hosts = ['127.0.0.1'] cs.container_sync('isa.db') # Fails because delete_object fails - self.assertEquals(cs.container_failures, 3) - self.assertEquals(cs.container_skips, 0) - self.assertEquals(fcb.sync_point1, None) - self.assertEquals(fcb.sync_point2, -1) + self.assertEqual(cs.container_failures, 3) + self.assertEqual(cs.container_skips, 0) + self.assertEqual(fcb.sync_point1, None) + self.assertEqual(fcb.sync_point2, -1) fcb = FakeContainerBroker( 'path', @@ -602,10 +602,10 @@ class TestContainerSync(unittest.TestCase): cs.allowed_sync_hosts = ['127.0.0.1'] cs.container_sync('isa.db') # Succeeds because delete_object succeeds - self.assertEquals(cs.container_failures, 3) - self.assertEquals(cs.container_skips, 0) - self.assertEquals(fcb.sync_point1, None) - self.assertEquals(fcb.sync_point2, 1) + self.assertEqual(cs.container_failures, 3) + self.assertEqual(cs.container_skips, 0) + self.assertEqual(fcb.sync_point1, None) + self.assertEqual(fcb.sync_point2, 1) def test_container_second_loop(self): cring = FakeRing() @@ -640,10 +640,10 @@ class TestContainerSync(unittest.TestCase): cs.allowed_sync_hosts = ['127.0.0.1'] cs.container_sync('isa.db') # Succeeds because no rows match - self.assertEquals(cs.container_failures, 0) - self.assertEquals(cs.container_skips, 0) - self.assertEquals(fcb.sync_point1, 1) - self.assertEquals(fcb.sync_point2, None) + self.assertEqual(cs.container_failures, 0) 
+ self.assertEqual(cs.container_skips, 0) + self.assertEqual(fcb.sync_point1, 1) + self.assertEqual(fcb.sync_point2, None) def fake_hash_path(account, container, obj, raw_digest=False): # Ensures that all rows match for second loop, ordinal is 0 and @@ -671,10 +671,10 @@ class TestContainerSync(unittest.TestCase): cs.container_sync('isa.db') # Fails because row is missing 'deleted' key # Nevertheless the fault is skipped - self.assertEquals(cs.container_failures, 1) - self.assertEquals(cs.container_skips, 0) - self.assertEquals(fcb.sync_point1, 1) - self.assertEquals(fcb.sync_point2, None) + self.assertEqual(cs.container_failures, 1) + self.assertEqual(cs.container_skips, 0) + self.assertEqual(fcb.sync_point1, 1) + self.assertEqual(fcb.sync_point2, None) fcb = FakeContainerBroker( 'path', @@ -693,10 +693,10 @@ class TestContainerSync(unittest.TestCase): cs.container_sync('isa.db') # Succeeds because row now has 'deleted' key and delete_object # succeeds - self.assertEquals(cs.container_failures, 1) - self.assertEquals(cs.container_skips, 0) - self.assertEquals(fcb.sync_point1, 1) - self.assertEquals(fcb.sync_point2, None) + self.assertEqual(cs.container_failures, 1) + self.assertEqual(cs.container_skips, 0) + self.assertEqual(fcb.sync_point1, 1) + self.assertEqual(fcb.sync_point2, None) finally: sync.ContainerBroker = orig_ContainerBroker sync.hash_path = orig_hash_path @@ -720,18 +720,18 @@ class TestContainerSync(unittest.TestCase): def fake_delete_object(path, name=None, headers=None, proxy=None, logger=None, timeout=None): - self.assertEquals(path, 'http://sync/to/path') - self.assertEquals(name, 'object') + self.assertEqual(path, 'http://sync/to/path') + self.assertEqual(name, 'object') if realm: - self.assertEquals(headers, { + self.assertEqual(headers, { 'x-container-sync-auth': 'US abcdef 90e95aabb45a6cdc0892a3db5535e7f918428c90', 'x-timestamp': '1.2'}) else: - self.assertEquals( + self.assertEqual( headers, {'x-container-sync-key': 'key', 'x-timestamp': '1.2'}) - self.assertEquals(proxy, 'http://proxy') + self.assertEqual(proxy, 'http://proxy') self.assertEqual(timeout, 5.0) self.assertEqual(logger, self.logger) @@ -749,7 +749,7 @@ class TestContainerSync(unittest.TestCase): 'key', FakeContainerBroker('broker'), {'account': 'a', 'container': 'c', 'storage_policy_index': 0}, realm, realm_key)) - self.assertEquals(cs.container_deletes, 1) + self.assertEqual(cs.container_deletes, 1) exc = [] @@ -766,9 +766,9 @@ class TestContainerSync(unittest.TestCase): 'key', FakeContainerBroker('broker'), {'account': 'a', 'container': 'c', 'storage_policy_index': 0}, realm, realm_key)) - self.assertEquals(cs.container_deletes, 1) - self.assertEquals(len(exc), 1) - self.assertEquals(str(exc[-1]), 'test exception') + self.assertEqual(cs.container_deletes, 1) + self.assertEqual(len(exc), 1) + self.assertEqual(str(exc[-1]), 'test exception') def fake_delete_object(*args, **kwargs): exc.append(ClientException('test client exception')) @@ -783,9 +783,9 @@ class TestContainerSync(unittest.TestCase): 'key', FakeContainerBroker('broker'), {'account': 'a', 'container': 'c', 'storage_policy_index': 0}, realm, realm_key)) - self.assertEquals(cs.container_deletes, 1) - self.assertEquals(len(exc), 2) - self.assertEquals(str(exc[-1]), 'test client exception') + self.assertEqual(cs.container_deletes, 1) + self.assertEqual(len(exc), 2) + self.assertEqual(str(exc[-1]), 'test client exception') def fake_delete_object(*args, **kwargs): exc.append(ClientException('test client exception', @@ -801,9 +801,9 @@ class 
TestContainerSync(unittest.TestCase): 'key', FakeContainerBroker('broker'), {'account': 'a', 'container': 'c', 'storage_policy_index': 0}, realm, realm_key)) - self.assertEquals(cs.container_deletes, 2) - self.assertEquals(len(exc), 3) - self.assertEquals(str(exc[-1]), 'test client exception: 404') + self.assertEqual(cs.container_deletes, 2) + self.assertEqual(len(exc), 3) + self.assertEqual(str(exc[-1]), 'test client exception: 404') finally: sync.uuid = orig_uuid sync.delete_object = orig_delete_object @@ -829,8 +829,8 @@ class TestContainerSync(unittest.TestCase): def fake_put_object(sync_to, name=None, headers=None, contents=None, proxy=None, logger=None, timeout=None): - self.assertEquals(sync_to, 'http://sync/to/path') - self.assertEquals(name, 'object') + self.assertEqual(sync_to, 'http://sync/to/path') + self.assertEqual(name, 'object') if realm: self.assertEqual(headers, { 'x-container-sync-auth': @@ -840,14 +840,14 @@ class TestContainerSync(unittest.TestCase): 'other-header': 'other header value', 'content-type': 'text/plain'}) else: - self.assertEquals(headers, { + self.assertEqual(headers, { 'x-container-sync-key': 'key', 'x-timestamp': '1.2', 'other-header': 'other header value', 'etag': 'etagvalue', 'content-type': 'text/plain'}) - self.assertEquals(contents.read(), 'contents') - self.assertEquals(proxy, 'http://proxy') + self.assertEqual(contents.read(), 'contents') + self.assertEqual(proxy, 'http://proxy') self.assertEqual(timeout, 5.0) self.assertEqual(logger, self.logger) @@ -876,12 +876,12 @@ class TestContainerSync(unittest.TestCase): 'key', FakeContainerBroker('broker'), {'account': 'a', 'container': 'c', 'storage_policy_index': 0}, realm, realm_key)) - self.assertEquals(cs.container_puts, 1) + self.assertEqual(cs.container_puts, 1) def fake_get_object(acct, con, obj, headers, acceptable_statuses): - self.assertEquals(headers['X-Newest'], True) - self.assertEquals(headers['X-Backend-Storage-Policy-Index'], - '0') + self.assertEqual(headers['X-Newest'], True) + self.assertEqual(headers['X-Backend-Storage-Policy-Index'], + '0') return (200, {'date': 'date value', 'last-modified': 'last modified value', @@ -902,14 +902,14 @@ class TestContainerSync(unittest.TestCase): 'key', FakeContainerBroker('broker'), {'account': 'a', 'container': 'c', 'storage_policy_index': 0}, realm, realm_key)) - self.assertEquals(cs.container_puts, 2) + self.assertEqual(cs.container_puts, 2) exc = [] def fake_get_object(acct, con, obj, headers, acceptable_statuses): - self.assertEquals(headers['X-Newest'], True) - self.assertEquals(headers['X-Backend-Storage-Policy-Index'], - '0') + self.assertEqual(headers['X-Newest'], True) + self.assertEqual(headers['X-Backend-Storage-Policy-Index'], + '0') exc.append(Exception('test exception')) raise exc[-1] @@ -922,16 +922,16 @@ class TestContainerSync(unittest.TestCase): 'key', FakeContainerBroker('broker'), {'account': 'a', 'container': 'c', 'storage_policy_index': 0}, realm, realm_key)) - self.assertEquals(cs.container_puts, 2) - self.assertEquals(len(exc), 1) - self.assertEquals(str(exc[-1]), 'test exception') + self.assertEqual(cs.container_puts, 2) + self.assertEqual(len(exc), 1) + self.assertEqual(str(exc[-1]), 'test exception') exc = [] def fake_get_object(acct, con, obj, headers, acceptable_statuses): - self.assertEquals(headers['X-Newest'], True) - self.assertEquals(headers['X-Backend-Storage-Policy-Index'], - '0') + self.assertEqual(headers['X-Newest'], True) + self.assertEqual(headers['X-Backend-Storage-Policy-Index'], + '0') 
exc.append(ClientException('test client exception')) raise exc[-1] @@ -945,14 +945,14 @@ class TestContainerSync(unittest.TestCase): 'key', FakeContainerBroker('broker'), {'account': 'a', 'container': 'c', 'storage_policy_index': 0}, realm, realm_key)) - self.assertEquals(cs.container_puts, 2) - self.assertEquals(len(exc), 1) - self.assertEquals(str(exc[-1]), 'test client exception') + self.assertEqual(cs.container_puts, 2) + self.assertEqual(len(exc), 1) + self.assertEqual(str(exc[-1]), 'test client exception') def fake_get_object(acct, con, obj, headers, acceptable_statuses): - self.assertEquals(headers['X-Newest'], True) - self.assertEquals(headers['X-Backend-Storage-Policy-Index'], - '0') + self.assertEqual(headers['X-Newest'], True) + self.assertEqual(headers['X-Backend-Storage-Policy-Index'], + '0') return (200, {'other-header': 'other header value', 'x-timestamp': '1.2', 'etag': '"etagvalue"'}, iter('contents')) @@ -970,7 +970,7 @@ class TestContainerSync(unittest.TestCase): 'key', FakeContainerBroker('broker'), {'account': 'a', 'container': 'c', 'storage_policy_index': 0}, realm, realm_key)) - self.assertEquals(cs.container_puts, 2) + self.assertEqual(cs.container_puts, 2) self.assertLogMessage('info', 'Unauth') def fake_put_object(*args, **kwargs): @@ -985,7 +985,7 @@ class TestContainerSync(unittest.TestCase): 'key', FakeContainerBroker('broker'), {'account': 'a', 'container': 'c', 'storage_policy_index': 0}, realm, realm_key)) - self.assertEquals(cs.container_puts, 2) + self.assertEqual(cs.container_puts, 2) self.assertLogMessage('info', 'Not found', 1) def fake_put_object(*args, **kwargs): @@ -1000,7 +1000,7 @@ class TestContainerSync(unittest.TestCase): 'key', FakeContainerBroker('broker'), {'account': 'a', 'container': 'c', 'storage_policy_index': 0}, realm, realm_key)) - self.assertEquals(cs.container_puts, 2) + self.assertEqual(cs.container_puts, 2) self.assertLogMessage('error', 'ERROR Syncing') finally: sync.uuid = orig_uuid diff --git a/test/unit/container/test_updater.py b/test/unit/container/test_updater.py index ae8a0d6056..c682967046 100644 --- a/test/unit/container/test_updater.py +++ b/test/unit/container/test_updater.py @@ -68,10 +68,10 @@ class TestContainerUpdater(unittest.TestCase): }) self.assertTrue(hasattr(cu, 'logger')) self.assertTrue(cu.logger is not None) - self.assertEquals(cu.devices, self.devices_dir) - self.assertEquals(cu.interval, 1) - self.assertEquals(cu.concurrency, 2) - self.assertEquals(cu.node_timeout, 5) + self.assertEqual(cu.devices, self.devices_dir) + self.assertEqual(cu.interval, 1) + self.assertEqual(cu.concurrency, 2) + self.assertEqual(cu.node_timeout, 5) self.assertTrue(cu.get_account_ring() is not None) def test_run_once(self): @@ -96,19 +96,19 @@ class TestContainerUpdater(unittest.TestCase): cb.initialize(normalize_timestamp(1), 0) cu.run_once() info = cb.get_info() - self.assertEquals(info['object_count'], 0) - self.assertEquals(info['bytes_used'], 0) - self.assertEquals(info['reported_object_count'], 0) - self.assertEquals(info['reported_bytes_used'], 0) + self.assertEqual(info['object_count'], 0) + self.assertEqual(info['bytes_used'], 0) + self.assertEqual(info['reported_object_count'], 0) + self.assertEqual(info['reported_bytes_used'], 0) cb.put_object('o', normalize_timestamp(2), 3, 'text/plain', '68b329da9893e34099c7d8ad5cb9c940') cu.run_once() info = cb.get_info() - self.assertEquals(info['object_count'], 1) - self.assertEquals(info['bytes_used'], 3) - self.assertEquals(info['reported_object_count'], 0) - 
self.assertEquals(info['reported_bytes_used'], 0) + self.assertEqual(info['object_count'], 1) + self.assertEqual(info['bytes_used'], 3) + self.assertEqual(info['reported_object_count'], 0) + self.assertEqual(info['reported_bytes_used'], 0) def accept(sock, addr, return_code): try: @@ -118,8 +118,8 @@ class TestContainerUpdater(unittest.TestCase): out.write('HTTP/1.1 %d OK\r\nContent-Length: 0\r\n\r\n' % return_code) out.flush() - self.assertEquals(inc.readline(), - 'PUT /sda1/0/a/c HTTP/1.1\r\n') + self.assertEqual(inc.readline(), + 'PUT /sda1/0/a/c HTTP/1.1\r\n') headers = {} line = inc.readline() while line and line != '\r\n': @@ -154,10 +154,10 @@ class TestContainerUpdater(unittest.TestCase): if err: raise err info = cb.get_info() - self.assertEquals(info['object_count'], 1) - self.assertEquals(info['bytes_used'], 3) - self.assertEquals(info['reported_object_count'], 1) - self.assertEquals(info['reported_bytes_used'], 3) + self.assertEqual(info['object_count'], 1) + self.assertEqual(info['bytes_used'], 3) + self.assertEqual(info['reported_object_count'], 1) + self.assertEqual(info['reported_bytes_used'], 3) @mock.patch('os.listdir') def test_listdir_with_exception(self, mock_listdir): @@ -250,10 +250,10 @@ class TestContainerUpdater(unittest.TestCase): if err: raise err info = cb.get_info() - self.assertEquals(info['object_count'], 1) - self.assertEquals(info['bytes_used'], 3) - self.assertEquals(info['reported_object_count'], 1) - self.assertEquals(info['reported_bytes_used'], 3) + self.assertEqual(info['object_count'], 1) + self.assertEqual(info['bytes_used'], 3) + self.assertEqual(info['reported_object_count'], 1) + self.assertEqual(info['reported_bytes_used'], 3) if __name__ == '__main__': From 652f0f9da408e955843f87a504b3dab5be3cff80 Mon Sep 17 00:00:00 2001 From: Pete Zaitcev Date: Wed, 5 Aug 2015 15:33:29 -0600 Subject: [PATCH 21/70] Having said H, I, J, we ought to say K In the long run, we might want to stop these updates. But this decision is above my pay grade. Change-Id: I335558e1da8052be1b215fbad51244b47af3d81b --- doc/source/howto_installmultinode.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/source/howto_installmultinode.rst b/doc/source/howto_installmultinode.rst index 8ab73232d3..1d6b0589c9 100644 --- a/doc/source/howto_installmultinode.rst +++ b/doc/source/howto_installmultinode.rst @@ -6,6 +6,13 @@ Please refer to the latest official `Openstack Installation Guides `_ for the most up-to-date documentation. +Object Storage installation guide for Openstack Kilo +---------------------------------------------------- + + * `openSUSE 13.2 and SUSE Linux Enterprise Server 12 `_ + * `RHEL 7, CentOS 7, and Fedora 21 `_ + * `Ubuntu 14.04 `_ + Object Storage installation guide for Openstack Juno ---------------------------------------------------- From 81816bebe6b3506af8cabaa7ebe265c70f3499cd Mon Sep 17 00:00:00 2001 From: kenichiro matsuda Date: Thu, 6 Aug 2015 10:13:02 +0900 Subject: [PATCH 22/70] Fix shebang of commands Fix shebang of following commands. 
$ grep '#!/usr/bin/python' swift/bin/* swift/bin/swift-account-info:#!/usr/bin/python swift/bin/swift-container-info:#!/usr/bin/python swift/bin/swift-container-sync:#!/usr/bin/python swift/bin/swift-recon:#!/usr/bin/python swift/bin/swift-ring-builder:#!/usr/bin/python swift/bin/swift-ring-builder-analyzer:#!/usr/bin/python Change-Id: I564d1d8abd76eba57730fc2f30263b0a0f809867 Closes-Bug: #1481623 --- bin/swift-account-info | 2 +- bin/swift-container-info | 2 +- bin/swift-container-sync | 2 +- bin/swift-recon | 2 +- bin/swift-ring-builder | 2 +- bin/swift-ring-builder-analyzer | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/bin/swift-account-info b/bin/swift-account-info index f7f10f855e..61c619900c 100755 --- a/bin/swift-account-info +++ b/bin/swift-account-info @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy # of the License at diff --git a/bin/swift-container-info b/bin/swift-container-info index 4956722813..8074b22ccd 100755 --- a/bin/swift-container-info +++ b/bin/swift-container-info @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy # of the License at diff --git a/bin/swift-container-sync b/bin/swift-container-sync index 14d6bc9d7c..b885015703 100755 --- a/bin/swift-container-sync +++ b/bin/swift-container-sync @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python # Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/bin/swift-recon b/bin/swift-recon index 9a068f915e..72f6eeef41 100755 --- a/bin/swift-recon +++ b/bin/swift-recon @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python # Copyright (c) 2014 Christian Schwede # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/bin/swift-ring-builder b/bin/swift-ring-builder index ed1b3843a8..4f85179951 100755 --- a/bin/swift-ring-builder +++ b/bin/swift-ring-builder @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python # Copyright (c) 2014 Christian Schwede # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/bin/swift-ring-builder-analyzer b/bin/swift-ring-builder-analyzer index 18365777f3..6a70105c14 100755 --- a/bin/swift-ring-builder-analyzer +++ b/bin/swift-ring-builder-analyzer @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/env python # Copyright (c) 2015 Samuel Merritt # # Licensed under the Apache License, Version 2.0 (the "License"); From 49b9ba37ac522502ccad66fba7bfef375eab145d Mon Sep 17 00:00:00 2001 From: Bill Huber Date: Wed, 5 Aug 2015 14:51:32 -0500 Subject: [PATCH 23/70] pep8 fix: assertEquals -> assertEqual assertEquals is deprecated in py3 in the following dir: test/functional/* Change-Id: Iee7f8ffca9838ccc521107180697d91ac9559405 --- test/functional/test_account.py | 8 +- test/functional/test_container.py | 126 +++++++++++++++--------------- test/functional/test_object.py | 102 ++++++++++++------------ 3 files changed, 118 insertions(+), 118 deletions(-) diff --git a/test/functional/test_account.py b/test/functional/test_account.py index c9d9ae6dde..e952c0923b 100755 --- a/test/functional/test_account.py +++ b/test/functional/test_account.py @@ -190,7 +190,7 @@ class TestAccount(unittest.TestCase): # cannot read account resp = 
retry(get, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # grant read access acl_user = tf.swift_test_user[2] @@ -241,7 +241,7 @@ class TestAccount(unittest.TestCase): # cannot read account resp = retry(get, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # grant read-write access acl_user = tf.swift_test_user[2] @@ -282,7 +282,7 @@ class TestAccount(unittest.TestCase): # cannot read account resp = retry(get, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # grant admin access acl_user = tf.swift_test_user[2] @@ -321,7 +321,7 @@ class TestAccount(unittest.TestCase): # and again, cannot read account resp = retry(get, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) @requires_acls def test_protected_tempurl(self): diff --git a/test/functional/test_container.py b/test/functional/test_container.py index 4d2e9b0b56..345aa0aa84 100755 --- a/test/functional/test_container.py +++ b/test/functional/test_container.py @@ -729,7 +729,7 @@ class TestContainer(unittest.TestCase): # cannot list containers resp = retry(get, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # grant read-only access acl_user = tf.swift_test_user[2] @@ -742,22 +742,22 @@ class TestContainer(unittest.TestCase): # read-only can list containers resp = retry(get, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertIn(self.name, listing) # read-only can not create containers new_container_name = str(uuid4()) resp = retry(put, new_container_name, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # but it can see newly created ones resp = retry(put, new_container_name, use_account=1) resp.read() - self.assertEquals(resp.status, 201) + self.assertEqual(resp.status, 201) resp = retry(get, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertIn(new_container_name, listing) @requires_acls @@ -788,13 +788,13 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=1) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) # cannot see metadata resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # grant read-only access acl_user = tf.swift_test_user[2] @@ -814,7 +814,7 @@ class TestContainer(unittest.TestCase): # read-only can read container metadata resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) @requires_acls @@ -844,7 +844,7 @@ class TestContainer(unittest.TestCase): # cannot list containers resp = retry(get, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # grant read-write access acl_user = tf.swift_test_user[2] @@ -857,36 +857,36 @@ class TestContainer(unittest.TestCase): # can list containers resp = retry(get, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertIn(self.name, 
listing) # can create new containers new_container_name = str(uuid4()) resp = retry(put, new_container_name, use_account=3) resp.read() - self.assertEquals(resp.status, 201) + self.assertEqual(resp.status, 201) resp = retry(get, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertIn(new_container_name, listing) # can also delete them resp = retry(delete, new_container_name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) resp = retry(get, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertNotIn(new_container_name, listing) # even if they didn't create them empty_container_name = str(uuid4()) resp = retry(put, empty_container_name, use_account=1) resp.read() - self.assertEquals(resp.status, 201) + self.assertEqual(resp.status, 201) resp = retry(delete, empty_container_name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) @requires_acls def test_read_write_acl_metadata(self): @@ -916,13 +916,13 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=1) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) # cannot see metadata resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # grant read-write access acl_user = tf.swift_test_user[2] @@ -935,7 +935,7 @@ class TestContainer(unittest.TestCase): # read-write can read container metadata resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) # read-write can also write container metadata @@ -943,20 +943,20 @@ class TestContainer(unittest.TestCase): headers = {'x-container-meta-test': new_value} resp = retry(post, self.name, headers=headers, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value) # and remove it headers = {'x-remove-container-meta-test': 'true'} resp = retry(post, self.name, headers=headers, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), None) @requires_acls @@ -986,7 +986,7 @@ class TestContainer(unittest.TestCase): # cannot list containers resp = retry(get, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # grant admin access acl_user = tf.swift_test_user[2] @@ -999,36 +999,36 @@ class TestContainer(unittest.TestCase): # can list containers resp = retry(get, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertIn(self.name, listing) # can create new containers new_container_name = str(uuid4()) resp = retry(put, new_container_name, use_account=3) resp.read() - self.assertEquals(resp.status, 201) + 
self.assertEqual(resp.status, 201) resp = retry(get, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertIn(new_container_name, listing) # can also delete them resp = retry(delete, new_container_name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) resp = retry(get, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertNotIn(new_container_name, listing) # even if they didn't create them empty_container_name = str(uuid4()) resp = retry(put, empty_container_name, use_account=1) resp.read() - self.assertEquals(resp.status, 201) + self.assertEqual(resp.status, 201) resp = retry(delete, empty_container_name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) @requires_acls def test_admin_acl_metadata(self): @@ -1058,13 +1058,13 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=1) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) # cannot see metadata resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # grant access acl_user = tf.swift_test_user[2] @@ -1077,7 +1077,7 @@ class TestContainer(unittest.TestCase): # can read container metadata resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) # can also write container metadata @@ -1085,20 +1085,20 @@ class TestContainer(unittest.TestCase): headers = {'x-container-meta-test': new_value} resp = retry(post, self.name, headers=headers, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value) # and remove it headers = {'x-remove-container-meta-test': 'true'} resp = retry(post, self.name, headers=headers, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), None) @requires_acls @@ -1132,7 +1132,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=1) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret') self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) @@ -1147,7 +1147,7 @@ class TestContainer(unittest.TestCase): # can read container metadata resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) # but not sync-key self.assertEqual(resp.getheader('X-Container-Sync-Key'), None) @@ -1169,7 +1169,7 @@ class TestContainer(unittest.TestCase): # can read container metadata resp = retry(get, self.name, use_account=3) resp.read() - 
self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) # but not sync-key self.assertEqual(resp.getheader('X-Container-Sync-Key'), None) @@ -1177,7 +1177,7 @@ class TestContainer(unittest.TestCase): # sanity check sync-key w/ account1 resp = retry(get, self.name, use_account=1) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret') # and can write @@ -1191,7 +1191,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=1) # validate w/ account1 resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value) # but can not write sync-key self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret') @@ -1207,7 +1207,7 @@ class TestContainer(unittest.TestCase): # admin can read container metadata resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value) # and ALSO sync-key self.assertEqual(resp.getheader('X-Container-Sync-Key'), 'secret') @@ -1220,7 +1220,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Sync-Key'), new_secret) @requires_acls @@ -1255,7 +1255,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=1) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe') self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe') self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) @@ -1271,7 +1271,7 @@ class TestContainer(unittest.TestCase): # can read container metadata resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) # but not container acl self.assertEqual(resp.getheader('X-Container-Read'), None) @@ -1297,7 +1297,7 @@ class TestContainer(unittest.TestCase): # can read container metadata resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), value) # but not container acl self.assertEqual(resp.getheader('X-Container-Read'), None) @@ -1306,7 +1306,7 @@ class TestContainer(unittest.TestCase): # sanity check container acls with account1 resp = retry(get, self.name, use_account=1) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe') self.assertEqual(resp.getheader('X-Container-Write'), 'jdoe') @@ -1322,7 +1322,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=1) # validate w/ account1 resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value) # but can not write container acls 
self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe') @@ -1339,7 +1339,7 @@ class TestContainer(unittest.TestCase): # admin can read container metadata resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Meta-Test'), new_value) # and ALSO container acls self.assertEqual(resp.getheader('X-Container-Read'), 'jdoe') @@ -1355,7 +1355,7 @@ class TestContainer(unittest.TestCase): self.assertEqual(resp.status, 204) resp = retry(get, self.name, use_account=3) resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) self.assertEqual(resp.getheader('X-Container-Read'), '.r:*') def test_long_name_content_type(self): @@ -1415,8 +1415,8 @@ class TestContainer(unittest.TestCase): resp = retry(head) resp.read() headers = dict((k.lower(), v) for k, v in resp.getheaders()) - self.assertEquals(headers.get('x-storage-policy'), - default_policy['name']) + self.assertEqual(headers.get('x-storage-policy'), + default_policy['name']) def test_error_invalid_storage_policy_name(self): def put(url, token, parsed, conn, headers): @@ -1453,8 +1453,8 @@ class TestContainer(unittest.TestCase): resp = retry(head) resp.read() headers = dict((k.lower(), v) for k, v in resp.getheaders()) - self.assertEquals(headers.get('x-storage-policy'), - policy['name']) + self.assertEqual(headers.get('x-storage-policy'), + policy['name']) # and test recreate with-out specifying Storage Policy resp = retry(put) @@ -1464,8 +1464,8 @@ class TestContainer(unittest.TestCase): resp = retry(head) resp.read() headers = dict((k.lower(), v) for k, v in resp.getheaders()) - self.assertEquals(headers.get('x-storage-policy'), - policy['name']) + self.assertEqual(headers.get('x-storage-policy'), + policy['name']) # delete it def delete(url, token, parsed, conn): @@ -1480,7 +1480,7 @@ class TestContainer(unittest.TestCase): resp = retry(head) resp.read() headers = dict((k.lower(), v) for k, v in resp.getheaders()) - self.assertEquals(headers.get('x-storage-policy'), None) + self.assertEqual(headers.get('x-storage-policy'), None) @requires_policies def test_conflict_change_storage_policy_with_put(self): @@ -1510,8 +1510,8 @@ class TestContainer(unittest.TestCase): resp = retry(head) resp.read() headers = dict((k.lower(), v) for k, v in resp.getheaders()) - self.assertEquals(headers.get('x-storage-policy'), - policy['name']) + self.assertEqual(headers.get('x-storage-policy'), + policy['name']) @requires_policies def test_noop_change_storage_policy_with_post(self): @@ -1547,8 +1547,8 @@ class TestContainer(unittest.TestCase): resp = retry(head) resp.read() headers = dict((k.lower(), v) for k, v in resp.getheaders()) - self.assertEquals(headers.get('x-storage-policy'), - policy['name']) + self.assertEqual(headers.get('x-storage-policy'), + policy['name']) class BaseTestContainerACLs(unittest.TestCase): diff --git a/test/functional/test_object.py b/test/functional/test_object.py index 7e781c81e6..55868098be 100755 --- a/test/functional/test_object.py +++ b/test/functional/test_object.py @@ -119,10 +119,10 @@ class TestObject(unittest.TestCase): return check_response(conn) resp = retry(put) resp.read() - self.assertEquals(resp.status, 201) + self.assertEqual(resp.status, 201) resp = retry(put) resp.read() - self.assertEquals(resp.status, 412) + self.assertEqual(resp.status, 412) def put(url, token, parsed, conn): conn.request('PUT', '%s/%s/%s' % ( @@ -133,7 +133,7 @@ class 
TestObject(unittest.TestCase): return check_response(conn) resp = retry(put) resp.read() - self.assertEquals(resp.status, 400) + self.assertEqual(resp.status, 400) def test_non_integer_x_delete_after(self): def put(url, token, parsed, conn): @@ -145,7 +145,7 @@ class TestObject(unittest.TestCase): return check_response(conn) resp = retry(put) body = resp.read() - self.assertEquals(resp.status, 400) + self.assertEqual(resp.status, 400) self.assertEqual(body, 'Non-integer X-Delete-After') def test_non_integer_x_delete_at(self): @@ -158,7 +158,7 @@ class TestObject(unittest.TestCase): return check_response(conn) resp = retry(put) body = resp.read() - self.assertEquals(resp.status, 400) + self.assertEqual(resp.status, 400) self.assertEqual(body, 'Non-integer X-Delete-At') def test_x_delete_at_in_the_past(self): @@ -171,7 +171,7 @@ class TestObject(unittest.TestCase): return check_response(conn) resp = retry(put) body = resp.read() - self.assertEquals(resp.status, 400) + self.assertEqual(resp.status, 400) self.assertEqual(body, 'X-Delete-At in past') def test_copy_object(self): @@ -543,12 +543,12 @@ class TestObject(unittest.TestCase): # cannot list objects resp = retry(get_listing, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # cannot get object resp = retry(get, self.obj, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # grant read-only access acl_user = tf.swift_test_user[2] @@ -561,30 +561,30 @@ class TestObject(unittest.TestCase): # can list objects resp = retry(get_listing, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertIn(self.obj, listing) # can get object resp = retry(get, self.obj, use_account=3) body = resp.read() - self.assertEquals(resp.status, 200) - self.assertEquals(body, 'test') + self.assertEqual(resp.status, 200) + self.assertEqual(body, 'test') # can not put an object obj_name = str(uuid4()) resp = retry(put, obj_name, use_account=3) body = resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # can not delete an object resp = retry(delete, self.obj, use_account=3) body = resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # sanity with account1 resp = retry(get_listing, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertNotIn(obj_name, listing) self.assertIn(self.obj, listing) @@ -624,12 +624,12 @@ class TestObject(unittest.TestCase): # cannot list objects resp = retry(get_listing, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # cannot get object resp = retry(get, self.obj, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # grant read-write access acl_user = tf.swift_test_user[2] @@ -642,30 +642,30 @@ class TestObject(unittest.TestCase): # can list objects resp = retry(get_listing, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertIn(self.obj, listing) # can get object resp = retry(get, self.obj, use_account=3) body = resp.read() - self.assertEquals(resp.status, 200) - self.assertEquals(body, 'test') + self.assertEqual(resp.status, 200) + self.assertEqual(body, 'test') # can put an object obj_name = str(uuid4()) resp = retry(put, obj_name, use_account=3) body = resp.read() - 
self.assertEquals(resp.status, 201) + self.assertEqual(resp.status, 201) # can delete an object resp = retry(delete, self.obj, use_account=3) body = resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) # sanity with account1 resp = retry(get_listing, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertIn(obj_name, listing) self.assertNotIn(self.obj, listing) @@ -705,12 +705,12 @@ class TestObject(unittest.TestCase): # cannot list objects resp = retry(get_listing, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # cannot get object resp = retry(get, self.obj, use_account=3) resp.read() - self.assertEquals(resp.status, 403) + self.assertEqual(resp.status, 403) # grant admin access acl_user = tf.swift_test_user[2] @@ -723,30 +723,30 @@ class TestObject(unittest.TestCase): # can list objects resp = retry(get_listing, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertIn(self.obj, listing) # can get object resp = retry(get, self.obj, use_account=3) body = resp.read() - self.assertEquals(resp.status, 200) - self.assertEquals(body, 'test') + self.assertEqual(resp.status, 200) + self.assertEqual(body, 'test') # can put an object obj_name = str(uuid4()) resp = retry(put, obj_name, use_account=3) body = resp.read() - self.assertEquals(resp.status, 201) + self.assertEqual(resp.status, 201) # can delete an object resp = retry(delete, self.obj, use_account=3) body = resp.read() - self.assertEquals(resp.status, 204) + self.assertEqual(resp.status, 204) # sanity with account1 resp = retry(get_listing, use_account=3) listing = resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) self.assertIn(obj_name, listing) self.assertNotIn(self.obj, listing) @@ -1113,78 +1113,78 @@ class TestObject(unittest.TestCase): resp = retry(put_cors_cont, '*') resp.read() - self.assertEquals(resp.status // 100, 2) + self.assertEqual(resp.status // 100, 2) resp = retry(put_obj, 'cat') resp.read() - self.assertEquals(resp.status // 100, 2) + self.assertEqual(resp.status // 100, 2) resp = retry(check_cors, 'OPTIONS', 'cat', {'Origin': 'http://m.com'}) - self.assertEquals(resp.status, 401) + self.assertEqual(resp.status, 401) resp = retry(check_cors, 'OPTIONS', 'cat', {'Origin': 'http://m.com', 'Access-Control-Request-Method': 'GET'}) - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) resp.read() headers = dict((k.lower(), v) for k, v in resp.getheaders()) - self.assertEquals(headers.get('access-control-allow-origin'), - '*') + self.assertEqual(headers.get('access-control-allow-origin'), + '*') resp = retry(check_cors, 'GET', 'cat', {'Origin': 'http://m.com'}) - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) headers = dict((k.lower(), v) for k, v in resp.getheaders()) - self.assertEquals(headers.get('access-control-allow-origin'), - '*') + self.assertEqual(headers.get('access-control-allow-origin'), + '*') resp = retry(check_cors, 'GET', 'cat', {'Origin': 'http://m.com', 'X-Web-Mode': 'True'}) - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) headers = dict((k.lower(), v) for k, v in resp.getheaders()) - self.assertEquals(headers.get('access-control-allow-origin'), - '*') + self.assertEqual(headers.get('access-control-allow-origin'), + '*') #################### resp = retry(put_cors_cont, 
'http://secret.com') resp.read() - self.assertEquals(resp.status // 100, 2) + self.assertEqual(resp.status // 100, 2) resp = retry(check_cors, 'OPTIONS', 'cat', {'Origin': 'http://m.com', 'Access-Control-Request-Method': 'GET'}) resp.read() - self.assertEquals(resp.status, 401) + self.assertEqual(resp.status, 401) if strict_cors: resp = retry(check_cors, 'GET', 'cat', {'Origin': 'http://m.com'}) resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) headers = dict((k.lower(), v) for k, v in resp.getheaders()) self.assertNotIn('access-control-allow-origin', headers) resp = retry(check_cors, 'GET', 'cat', {'Origin': 'http://secret.com'}) resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) headers = dict((k.lower(), v) for k, v in resp.getheaders()) - self.assertEquals(headers.get('access-control-allow-origin'), - 'http://secret.com') + self.assertEqual(headers.get('access-control-allow-origin'), + 'http://secret.com') else: resp = retry(check_cors, 'GET', 'cat', {'Origin': 'http://m.com'}) resp.read() - self.assertEquals(resp.status, 200) + self.assertEqual(resp.status, 200) headers = dict((k.lower(), v) for k, v in resp.getheaders()) - self.assertEquals(headers.get('access-control-allow-origin'), - 'http://m.com') + self.assertEqual(headers.get('access-control-allow-origin'), + 'http://m.com') @requires_policies def test_cross_policy_copy(self): From 239e94e62518f36601fb18119700269753d02a01 Mon Sep 17 00:00:00 2001 From: Bill Huber Date: Thu, 6 Aug 2015 09:28:51 -0500 Subject: [PATCH 24/70] pep8 fix: assertEquals -> assertEqual assertEquals is deprecated in py3 in the following dir: test/probe/* Change-Id: Ie08dd7a8a6c48e3452dfe4f2b41676330ce455d5 --- test/probe/test_account_failures.py | 82 +++++++++---------- test/probe/test_account_reaper.py | 8 +- test/probe/test_container_failures.py | 18 ++-- .../test_container_merge_policy_index.py | 2 +- test/probe/test_empty_device_handoff.py | 4 +- test/probe/test_object_failures.py | 16 ++-- test/probe/test_object_handoff.py | 8 +- 7 files changed, 69 insertions(+), 69 deletions(-) diff --git a/test/probe/test_account_failures.py b/test/probe/test_account_failures.py index ab6a48b5bc..f45394e6bf 100755 --- a/test/probe/test_account_failures.py +++ b/test/probe/test_account_failures.py @@ -35,20 +35,20 @@ class TestAccountFailures(ReplProbeTest): # Assert account level sees them headers, containers = client.get_account(self.url, self.token) - self.assertEquals(headers['x-account-container-count'], '2') - self.assertEquals(headers['x-account-object-count'], '0') - self.assertEquals(headers['x-account-bytes-used'], '0') + self.assertEqual(headers['x-account-container-count'], '2') + self.assertEqual(headers['x-account-object-count'], '0') + self.assertEqual(headers['x-account-bytes-used'], '0') found1 = False found2 = False for container in containers: if container['name'] == container1: found1 = True - self.assertEquals(container['count'], 0) - self.assertEquals(container['bytes'], 0) + self.assertEqual(container['count'], 0) + self.assertEqual(container['bytes'], 0) elif container['name'] == container2: found2 = True - self.assertEquals(container['count'], 0) - self.assertEquals(container['bytes'], 0) + self.assertEqual(container['count'], 0) + self.assertEqual(container['bytes'], 0) self.assertTrue(found1) self.assertTrue(found2) @@ -57,20 +57,20 @@ class TestAccountFailures(ReplProbeTest): # Assert account level doesn't see it yet headers, containers = 
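As background for this and the neighbouring assertEquals -> assertEqual sweeps: unittest keeps assertEquals only as a deprecated alias of assertEqual, so the rename is mechanical and the tests' behaviour is unchanged. A minimal, standalone sketch of the two spellings (a hypothetical test, not taken from the Swift tree); running a suite with python -W error::DeprecationWarning is one way such leftovers surface as hard failures on py3:

import unittest


class AliasDemo(unittest.TestCase):
    def test_preferred_spelling(self):
        self.assertEqual(2 + 2, 4)      # preferred spelling on py2 and py3

    def test_deprecated_alias(self):
        # Same check via the deprecated alias: it emits a DeprecationWarning
        # on py3 and was dropped entirely in later Python 3 releases.
        self.assertEquals(2 + 2, 4)


if __name__ == '__main__':
    unittest.main()
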
client.get_account(self.url, self.token) - self.assertEquals(headers['x-account-container-count'], '2') - self.assertEquals(headers['x-account-object-count'], '0') - self.assertEquals(headers['x-account-bytes-used'], '0') + self.assertEqual(headers['x-account-container-count'], '2') + self.assertEqual(headers['x-account-object-count'], '0') + self.assertEqual(headers['x-account-bytes-used'], '0') found1 = False found2 = False for container in containers: if container['name'] == container1: found1 = True - self.assertEquals(container['count'], 0) - self.assertEquals(container['bytes'], 0) + self.assertEqual(container['count'], 0) + self.assertEqual(container['bytes'], 0) elif container['name'] == container2: found2 = True - self.assertEquals(container['count'], 0) - self.assertEquals(container['bytes'], 0) + self.assertEqual(container['count'], 0) + self.assertEqual(container['bytes'], 0) self.assertTrue(found1) self.assertTrue(found2) @@ -79,20 +79,20 @@ class TestAccountFailures(ReplProbeTest): # Assert account level now sees the container2/object1 headers, containers = client.get_account(self.url, self.token) - self.assertEquals(headers['x-account-container-count'], '2') - self.assertEquals(headers['x-account-object-count'], '1') - self.assertEquals(headers['x-account-bytes-used'], '4') + self.assertEqual(headers['x-account-container-count'], '2') + self.assertEqual(headers['x-account-object-count'], '1') + self.assertEqual(headers['x-account-bytes-used'], '4') found1 = False found2 = False for container in containers: if container['name'] == container1: found1 = True - self.assertEquals(container['count'], 0) - self.assertEquals(container['bytes'], 0) + self.assertEqual(container['count'], 0) + self.assertEqual(container['bytes'], 0) elif container['name'] == container2: found2 = True - self.assertEquals(container['count'], 1) - self.assertEquals(container['bytes'], 4) + self.assertEqual(container['count'], 1) + self.assertEqual(container['bytes'], 4) self.assertTrue(found1) self.assertTrue(found2) @@ -111,9 +111,9 @@ class TestAccountFailures(ReplProbeTest): # Assert account level knows container1 is gone but doesn't know about # container2/object2 yet headers, containers = client.get_account(self.url, self.token) - self.assertEquals(headers['x-account-container-count'], '1') - self.assertEquals(headers['x-account-object-count'], '1') - self.assertEquals(headers['x-account-bytes-used'], '4') + self.assertEqual(headers['x-account-container-count'], '1') + self.assertEqual(headers['x-account-object-count'], '1') + self.assertEqual(headers['x-account-bytes-used'], '4') found1 = False found2 = False for container in containers: @@ -121,8 +121,8 @@ class TestAccountFailures(ReplProbeTest): found1 = True elif container['name'] == container2: found2 = True - self.assertEquals(container['count'], 1) - self.assertEquals(container['bytes'], 4) + self.assertEqual(container['count'], 1) + self.assertEqual(container['bytes'], 4) self.assertTrue(not found1) self.assertTrue(found2) @@ -131,9 +131,9 @@ class TestAccountFailures(ReplProbeTest): # Assert account level now knows about container2/object2 headers, containers = client.get_account(self.url, self.token) - self.assertEquals(headers['x-account-container-count'], '1') - self.assertEquals(headers['x-account-object-count'], '2') - self.assertEquals(headers['x-account-bytes-used'], '9') + self.assertEqual(headers['x-account-container-count'], '1') + self.assertEqual(headers['x-account-object-count'], '2') + 
self.assertEqual(headers['x-account-bytes-used'], '9') found1 = False found2 = False for container in containers: @@ -141,8 +141,8 @@ class TestAccountFailures(ReplProbeTest): found1 = True elif container['name'] == container2: found2 = True - self.assertEquals(container['count'], 2) - self.assertEquals(container['bytes'], 9) + self.assertEqual(container['count'], 2) + self.assertEqual(container['bytes'], 9) self.assertTrue(not found1) self.assertTrue(found2) @@ -154,9 +154,9 @@ class TestAccountFailures(ReplProbeTest): # new container2/object2 yet headers, containers = \ direct_client.direct_get_account(anodes[0], apart, self.account) - self.assertEquals(headers['x-account-container-count'], '2') - self.assertEquals(headers['x-account-object-count'], '1') - self.assertEquals(headers['x-account-bytes-used'], '4') + self.assertEqual(headers['x-account-container-count'], '2') + self.assertEqual(headers['x-account-object-count'], '1') + self.assertEqual(headers['x-account-bytes-used'], '4') found1 = False found2 = False for container in containers: @@ -164,8 +164,8 @@ class TestAccountFailures(ReplProbeTest): found1 = True elif container['name'] == container2: found2 = True - self.assertEquals(container['count'], 1) - self.assertEquals(container['bytes'], 4) + self.assertEqual(container['count'], 1) + self.assertEqual(container['bytes'], 4) self.assertTrue(found1) self.assertTrue(found2) @@ -175,9 +175,9 @@ class TestAccountFailures(ReplProbeTest): # Assert that server is now up to date headers, containers = \ direct_client.direct_get_account(anodes[0], apart, self.account) - self.assertEquals(headers['x-account-container-count'], '1') - self.assertEquals(headers['x-account-object-count'], '2') - self.assertEquals(headers['x-account-bytes-used'], '9') + self.assertEqual(headers['x-account-container-count'], '1') + self.assertEqual(headers['x-account-object-count'], '2') + self.assertEqual(headers['x-account-bytes-used'], '9') found1 = False found2 = False for container in containers: @@ -185,8 +185,8 @@ class TestAccountFailures(ReplProbeTest): found1 = True elif container['name'] == container2: found2 = True - self.assertEquals(container['count'], 2) - self.assertEquals(container['bytes'], 9) + self.assertEqual(container['count'], 2) + self.assertEqual(container['bytes'], 9) self.assertTrue(not found1) self.assertTrue(found2) diff --git a/test/probe/test_account_reaper.py b/test/probe/test_account_reaper.py index 7da9dcd39d..f5d2efee87 100644 --- a/test/probe/test_account_reaper.py +++ b/test/probe/test_account_reaper.py @@ -66,7 +66,7 @@ class TestAccountReaper(ReplProbeTest): direct_head_container(cnode, cpart, self.account, container) except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) delete_time = err.http_headers.get( 'X-Backend-DELETE-Timestamp') # 'X-Backend-DELETE-Timestamp' confirms it was deleted @@ -91,7 +91,7 @@ class TestAccountReaper(ReplProbeTest): direct_get_object(node, part, self.account, container, obj, headers=headers) except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) delete_time = err.http_headers.get('X-Backend-Timestamp') # 'X-Backend-Timestamp' confirms obj was deleted self.assertTrue(delete_time) @@ -114,7 +114,7 @@ class TestAccountReaper(ReplProbeTest): direct_head_container(cnode, cpart, self.account, container) except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) delete_time = 
err.http_headers.get( 'X-Backend-DELETE-Timestamp') # 'X-Backend-DELETE-Timestamp' confirms it was deleted @@ -134,7 +134,7 @@ class TestAccountReaper(ReplProbeTest): direct_get_object(node, part, self.account, container, obj, headers=headers) except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) delete_time = err.http_headers.get('X-Backend-Timestamp') # 'X-Backend-Timestamp' confirms obj was deleted self.assertTrue(delete_time) diff --git a/test/probe/test_container_failures.py b/test/probe/test_container_failures.py index 5eddad1464..d8c132c53d 100755 --- a/test/probe/test_container_failures.py +++ b/test/probe/test_container_failures.py @@ -70,7 +70,7 @@ class TestContainerFailures(ReplProbeTest): # Assert all container1 servers indicate container1 is alive and # well with object1 for cnode in cnodes: - self.assertEquals( + self.assertEqual( [o['name'] for o in direct_client.direct_get_container( cnode, cpart, self.account, container1)[1]], ['object1']) @@ -78,9 +78,9 @@ class TestContainerFailures(ReplProbeTest): # Assert account level also indicates container1 is alive and # well with object1 headers, containers = client.get_account(self.url, self.token) - self.assertEquals(headers['x-account-container-count'], '1') - self.assertEquals(headers['x-account-object-count'], '1') - self.assertEquals(headers['x-account-bytes-used'], '3') + self.assertEqual(headers['x-account-container-count'], '1') + self.assertEqual(headers['x-account-object-count'], '1') + self.assertEqual(headers['x-account-bytes-used'], '3') def test_two_nodes_fail(self): # Create container1 @@ -118,15 +118,15 @@ class TestContainerFailures(ReplProbeTest): direct_client.direct_get_container(cnode, cpart, self.account, container1) except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) else: self.fail("Expected ClientException but didn't get it") # Assert account level also indicates container1 is gone headers, containers = client.get_account(self.url, self.token) - self.assertEquals(headers['x-account-container-count'], '0') - self.assertEquals(headers['x-account-object-count'], '0') - self.assertEquals(headers['x-account-bytes-used'], '0') + self.assertEqual(headers['x-account-container-count'], '0') + self.assertEqual(headers['x-account-object-count'], '0') + self.assertEqual(headers['x-account-bytes-used'], '0') def _get_container_db_files(self, container): opart, onodes = self.container_ring.get_nodes(self.account, container) @@ -160,7 +160,7 @@ class TestContainerFailures(ReplProbeTest): try: client.delete_container(self.url, self.token, container) except client.ClientException as err: - self.assertEquals(err.http_status, 503) + self.assertEqual(err.http_status, 503) else: self.fail("Expected ClientException but didn't get it") else: diff --git a/test/probe/test_container_merge_policy_index.py b/test/probe/test_container_merge_policy_index.py index 1ce2c818dc..3472488f5f 100644 --- a/test/probe/test_container_merge_policy_index.py +++ b/test/probe/test_container_merge_policy_index.py @@ -336,7 +336,7 @@ class TestContainerMergePolicyIndex(ReplProbeTest): self.assertEqual(metadata['x-static-large-object'].lower(), 'true') for i, entry in enumerate(utils.json.loads(body)): for key in ('hash', 'bytes', 'name'): - self.assertEquals(entry[key], direct_manifest_data[i][key]) + self.assertEqual(entry[key], direct_manifest_data[i][key]) metadata, body = client.get_object( self.url, self.token, 
self.container_name, direct_manifest_name) self.assertEqual(metadata['x-static-large-object'].lower(), 'true') diff --git a/test/probe/test_empty_device_handoff.py b/test/probe/test_empty_device_handoff.py index f68ee6692b..c3138be05c 100755 --- a/test/probe/test_empty_device_handoff.py +++ b/test/probe/test_empty_device_handoff.py @@ -137,7 +137,7 @@ class TestEmptyDevice(ReplProbeTest): onode, opart, self.account, container, obj, headers={ 'X-Backend-Storage-Policy-Index': self.policy.idx}) except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) self.assertFalse(os.path.exists(obj_dir)) else: self.fail("Expected ClientException but didn't get it") @@ -169,7 +169,7 @@ class TestEmptyDevice(ReplProbeTest): another_onode, opart, self.account, container, obj, headers={ 'X-Backend-Storage-Policy-Index': self.policy.idx}) except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) else: self.fail("Expected ClientException but didn't get it") diff --git a/test/probe/test_object_failures.py b/test/probe/test_object_failures.py index eed5aca6a3..ba53177743 100755 --- a/test/probe/test_object_failures.py +++ b/test/probe/test_object_failures.py @@ -57,7 +57,7 @@ class TestObjectFailures(ReplProbeTest): self.policy.name}) client.put_object(self.url, self.token, container, obj, data) odata = client.get_object(self.url, self.token, container, obj)[-1] - self.assertEquals(odata, data) + self.assertEqual(odata, data) opart, onodes = self.object_ring.get_nodes( self.account, container, obj) onode = onodes[0] @@ -84,14 +84,14 @@ class TestObjectFailures(ReplProbeTest): odata = direct_client.direct_get_object( onode, opart, self.account, container, obj, headers={ 'X-Backend-Storage-Policy-Index': self.policy.idx})[-1] - self.assertEquals(odata, 'VERIFY') + self.assertEqual(odata, 'VERIFY') try: direct_client.direct_get_object( onode, opart, self.account, container, obj, headers={ 'X-Backend-Storage-Policy-Index': self.policy.idx}) raise Exception("Did not quarantine object") except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) def run_quarantine_range_etag(self): container = 'container-range-%s' % uuid4() @@ -111,7 +111,7 @@ class TestObjectFailures(ReplProbeTest): odata = direct_client.direct_get_object( onode, opart, self.account, container, obj, headers=req_headers)[-1] - self.assertEquals(odata, result) + self.assertEqual(odata, result) try: direct_client.direct_get_object( @@ -119,7 +119,7 @@ class TestObjectFailures(ReplProbeTest): 'X-Backend-Storage-Policy-Index': self.policy.idx}) raise Exception("Did not quarantine object") except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) def run_quarantine_zero_byte_get(self): container = 'container-zbyte-%s' % uuid4() @@ -137,7 +137,7 @@ class TestObjectFailures(ReplProbeTest): self.policy.idx}) raise Exception("Did not quarantine object") except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) def run_quarantine_zero_byte_head(self): container = 'container-zbyte-%s' % uuid4() @@ -155,7 +155,7 @@ class TestObjectFailures(ReplProbeTest): self.policy.idx}) raise Exception("Did not quarantine object") except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) def run_quarantine_zero_byte_post(self): container = 
'container-zbyte-%s' % uuid4() @@ -177,7 +177,7 @@ class TestObjectFailures(ReplProbeTest): response_timeout=1) raise Exception("Did not quarantine object") except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) def test_runner(self): self.run_quarantine() diff --git a/test/probe/test_object_handoff.py b/test/probe/test_object_handoff.py index 37fb7626b5..c7df4b9e07 100755 --- a/test/probe/test_object_handoff.py +++ b/test/probe/test_object_handoff.py @@ -102,7 +102,7 @@ class TestObjectHandoff(ReplProbeTest): onode, opart, self.account, container, obj, headers={ 'X-Backend-Storage-Policy-Index': self.policy.idx}) except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) else: self.fail("Expected ClientException but didn't get it") @@ -136,7 +136,7 @@ class TestObjectHandoff(ReplProbeTest): another_onode, opart, self.account, container, obj, headers={ 'X-Backend-Storage-Policy-Index': self.policy.idx}) except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) else: self.fail("Expected ClientException but didn't get it") @@ -160,7 +160,7 @@ class TestObjectHandoff(ReplProbeTest): try: client.head_object(self.url, self.token, container, obj) except client.ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) else: self.fail("Expected ClientException but didn't get it") @@ -206,7 +206,7 @@ class TestObjectHandoff(ReplProbeTest): another_onode, opart, self.account, container, obj, headers={ 'X-Backend-Storage-Policy-Index': self.policy.idx}) except ClientException as err: - self.assertEquals(err.http_status, 404) + self.assertEqual(err.http_status, 404) else: self.fail("Expected ClientException but didn't get it") From c35cc13b8abeb97400632cbe8ec56fc1040d8210 Mon Sep 17 00:00:00 2001 From: Bill Huber Date: Thu, 6 Aug 2015 10:01:17 -0500 Subject: [PATCH 25/70] pep8 fix: assertEquals -> assertEqual assertEquals is deprecated in py3 in the following dir: test/unit/proxy/* Change-Id: Ie2c7e73e1096233a10ee7fbf6f88386fa4d469d6 --- test/unit/proxy/controllers/test_account.py | 12 +- test/unit/proxy/controllers/test_base.py | 202 ++-- test/unit/proxy/controllers/test_container.py | 4 +- test/unit/proxy/controllers/test_obj.py | 116 +-- test/unit/proxy/test_server.py | 906 +++++++++--------- 5 files changed, 620 insertions(+), 620 deletions(-) diff --git a/test/unit/proxy/controllers/test_account.py b/test/unit/proxy/controllers/test_account.py index 1513d902dc..a73c3ca713 100644 --- a/test/unit/proxy/controllers/test_account.py +++ b/test/unit/proxy/controllers/test_account.py @@ -57,7 +57,7 @@ class TestAccountController(unittest.TestCase): with mock.patch('swift.proxy.controllers.base.http_connect', fake_http_connect(200, headers=owner_headers)): resp = controller.HEAD(req) - self.assertEquals(2, resp.status_int // 100) + self.assertEqual(2, resp.status_int // 100) for key in owner_headers: self.assertTrue(key not in resp.headers) @@ -65,7 +65,7 @@ class TestAccountController(unittest.TestCase): with mock.patch('swift.proxy.controllers.base.http_connect', fake_http_connect(200, headers=owner_headers)): resp = controller.HEAD(req) - self.assertEquals(2, resp.status_int // 100) + self.assertEqual(2, resp.status_int // 100) for key in owner_headers: self.assertTrue(key in resp.headers) @@ -79,7 +79,7 @@ class TestAccountController(unittest.TestCase): with 
mock.patch('swift.proxy.controllers.base.http_connect', fake_http_connect(404, headers=resp_headers)): resp = controller.HEAD(req) - self.assertEquals(410, resp.status_int) + self.assertEqual(410, resp.status_int) def test_long_acct_names(self): long_acct_name = '%sLongAccountName' % ( @@ -90,17 +90,17 @@ class TestAccountController(unittest.TestCase): with mock.patch('swift.proxy.controllers.base.http_connect', fake_http_connect(200)): resp = controller.HEAD(req) - self.assertEquals(400, resp.status_int) + self.assertEqual(400, resp.status_int) with mock.patch('swift.proxy.controllers.base.http_connect', fake_http_connect(200)): resp = controller.GET(req) - self.assertEquals(400, resp.status_int) + self.assertEqual(400, resp.status_int) with mock.patch('swift.proxy.controllers.base.http_connect', fake_http_connect(200)): resp = controller.POST(req) - self.assertEquals(400, resp.status_int) + self.assertEqual(400, resp.status_int) def _make_callback_func(self, context): def callback(ipaddr, port, device, partition, method, path, diff --git a/test/unit/proxy/controllers/test_base.py b/test/unit/proxy/controllers/test_base.py index 3938f03b20..48300340c8 100644 --- a/test/unit/proxy/controllers/test_base.py +++ b/test/unit/proxy/controllers/test_base.py @@ -199,34 +199,34 @@ class TestFuncs(unittest.TestCase): env = {} info_a = get_info(app, env, 'a') # Check that you got proper info - self.assertEquals(info_a['status'], 200) - self.assertEquals(info_a['bytes'], 6666) - self.assertEquals(info_a['total_object_count'], 1000) + self.assertEqual(info_a['status'], 200) + self.assertEqual(info_a['bytes'], 6666) + self.assertEqual(info_a['total_object_count'], 1000) # Make sure the env cache is set - self.assertEquals(env.get('swift.account/a'), info_a) + self.assertEqual(env.get('swift.account/a'), info_a) # Make sure the app was called self.assertEqual(app.responses.stats['account'], 1) # Do an env cached call to account info_a = get_info(app, env, 'a') # Check that you got proper info - self.assertEquals(info_a['status'], 200) - self.assertEquals(info_a['bytes'], 6666) - self.assertEquals(info_a['total_object_count'], 1000) + self.assertEqual(info_a['status'], 200) + self.assertEqual(info_a['bytes'], 6666) + self.assertEqual(info_a['total_object_count'], 1000) # Make sure the env cache is set - self.assertEquals(env.get('swift.account/a'), info_a) + self.assertEqual(env.get('swift.account/a'), info_a) # Make sure the app was NOT called AGAIN self.assertEqual(app.responses.stats['account'], 1) # This time do env cached call to account and non cached to container info_c = get_info(app, env, 'a', 'c') # Check that you got proper info - self.assertEquals(info_c['status'], 200) - self.assertEquals(info_c['bytes'], 6666) - self.assertEquals(info_c['object_count'], 1000) + self.assertEqual(info_c['status'], 200) + self.assertEqual(info_c['bytes'], 6666) + self.assertEqual(info_c['object_count'], 1000) # Make sure the env cache is set - self.assertEquals(env.get('swift.account/a'), info_a) - self.assertEquals(env.get('swift.container/a/c'), info_c) + self.assertEqual(env.get('swift.account/a'), info_a) + self.assertEqual(env.get('swift.container/a/c'), info_c) # Make sure the app was called for container self.assertEqual(app.responses.stats['container'], 1) @@ -236,12 +236,12 @@ class TestFuncs(unittest.TestCase): env = {} # abandon previous call to env info_c = get_info(app, env, 'a', 'c') # Check that you got proper info - self.assertEquals(info_c['status'], 200) - self.assertEquals(info_c['bytes'], 
6666) - self.assertEquals(info_c['object_count'], 1000) + self.assertEqual(info_c['status'], 200) + self.assertEqual(info_c['bytes'], 6666) + self.assertEqual(info_c['object_count'], 1000) # Make sure the env cache is set - self.assertEquals(env.get('swift.account/a'), info_a) - self.assertEquals(env.get('swift.container/a/c'), info_c) + self.assertEqual(env.get('swift.account/a'), info_a) + self.assertEqual(env.get('swift.container/a/c'), info_c) # check app calls both account and container self.assertEqual(app.responses.stats['account'], 1) self.assertEqual(app.responses.stats['container'], 1) @@ -251,11 +251,11 @@ class TestFuncs(unittest.TestCase): del(env['swift.account/a']) info_c = get_info(app, env, 'a', 'c') # Check that you got proper info - self.assertEquals(info_a['status'], 200) - self.assertEquals(info_c['bytes'], 6666) - self.assertEquals(info_c['object_count'], 1000) + self.assertEqual(info_a['status'], 200) + self.assertEqual(info_c['bytes'], 6666) + self.assertEqual(info_c['object_count'], 1000) # Make sure the env cache is set and account still not cached - self.assertEquals(env.get('swift.container/a/c'), info_c) + self.assertEqual(env.get('swift.container/a/c'), info_c) # no additional calls were made self.assertEqual(app.responses.stats['account'], 1) self.assertEqual(app.responses.stats['container'], 1) @@ -265,22 +265,22 @@ class TestFuncs(unittest.TestCase): env = {} info_a = get_info(app, env, 'a', ret_not_found=True) # Check that you got proper info - self.assertEquals(info_a['status'], 404) - self.assertEquals(info_a['bytes'], None) - self.assertEquals(info_a['total_object_count'], None) + self.assertEqual(info_a['status'], 404) + self.assertEqual(info_a['bytes'], None) + self.assertEqual(info_a['total_object_count'], None) # Make sure the env cache is set - self.assertEquals(env.get('swift.account/a'), info_a) + self.assertEqual(env.get('swift.account/a'), info_a) # and account was called self.assertEqual(app.responses.stats['account'], 1) # Do a cached call to account not found with ret_not_found info_a = get_info(app, env, 'a', ret_not_found=True) # Check that you got proper info - self.assertEquals(info_a['status'], 404) - self.assertEquals(info_a['bytes'], None) - self.assertEquals(info_a['total_object_count'], None) + self.assertEqual(info_a['status'], 404) + self.assertEqual(info_a['bytes'], None) + self.assertEqual(info_a['total_object_count'], None) # Make sure the env cache is set - self.assertEquals(env.get('swift.account/a'), info_a) + self.assertEqual(env.get('swift.account/a'), info_a) # add account was NOT called AGAIN self.assertEqual(app.responses.stats['account'], 1) @@ -289,16 +289,16 @@ class TestFuncs(unittest.TestCase): env = {} info_a = get_info(app, env, 'a') # Check that you got proper info - self.assertEquals(info_a, None) - self.assertEquals(env['swift.account/a']['status'], 404) + self.assertEqual(info_a, None) + self.assertEqual(env['swift.account/a']['status'], 404) # and account was called self.assertEqual(app.responses.stats['account'], 1) # Do a cached call to account not found without ret_not_found info_a = get_info(None, env, 'a') # Check that you got proper info - self.assertEquals(info_a, None) - self.assertEquals(env['swift.account/a']['status'], 404) + self.assertEqual(info_a, None) + self.assertEqual(env['swift.account/a']['status'], 404) # add account was NOT called AGAIN self.assertEqual(app.responses.stats['account'], 1) @@ -319,9 +319,9 @@ class TestFuncs(unittest.TestCase): req = Request.blank("/v1/AUTH_account/cont", 
environ={'swift.cache': FakeCache({})}) resp = get_container_info(req.environ, FakeApp()) - self.assertEquals(resp['storage_policy'], '0') - self.assertEquals(resp['bytes'], 6666) - self.assertEquals(resp['object_count'], 1000) + self.assertEqual(resp['storage_policy'], '0') + self.assertEqual(resp['bytes'], 6666) + self.assertEqual(resp['object_count'], 1000) def test_get_container_info_no_account(self): responses = DynamicResponseFactory(404, 200) @@ -336,8 +336,8 @@ class TestFuncs(unittest.TestCase): req = Request.blank("/v1/.system_account/cont") info = get_container_info(req.environ, app) self.assertEqual(info['status'], 200) - self.assertEquals(info['bytes'], 6666) - self.assertEquals(info['object_count'], 1000) + self.assertEqual(info['bytes'], 6666) + self.assertEqual(info['object_count'], 1000) def test_get_container_info_cache(self): cache_stub = { @@ -347,11 +347,11 @@ class TestFuncs(unittest.TestCase): req = Request.blank("/v1/account/cont", environ={'swift.cache': FakeCache(cache_stub)}) resp = get_container_info(req.environ, FakeApp()) - self.assertEquals(resp['storage_policy'], '0') - self.assertEquals(resp['bytes'], 3333) - self.assertEquals(resp['object_count'], 10) - self.assertEquals(resp['status'], 404) - self.assertEquals(resp['versions'], "\xe1\xbd\x8a\x39") + self.assertEqual(resp['storage_policy'], '0') + self.assertEqual(resp['bytes'], 3333) + self.assertEqual(resp['object_count'], 10) + self.assertEqual(resp['status'], 404) + self.assertEqual(resp['versions'], "\xe1\xbd\x8a\x39") def test_get_container_info_env(self): cache_key = get_container_memcache_key("account", "cont") @@ -360,7 +360,7 @@ class TestFuncs(unittest.TestCase): environ={env_key: {'bytes': 3867}, 'swift.cache': FakeCache({})}) resp = get_container_info(req.environ, 'xxx') - self.assertEquals(resp['bytes'], 3867) + self.assertEqual(resp['bytes'], 3867) def test_get_account_info_swift_source(self): app = FakeApp() @@ -373,8 +373,8 @@ class TestFuncs(unittest.TestCase): req = Request.blank("/v1/AUTH_account", environ={'swift.cache': FakeCache({})}) resp = get_account_info(req.environ, app) - self.assertEquals(resp['bytes'], 6666) - self.assertEquals(resp['total_object_count'], 1000) + self.assertEqual(resp['bytes'], 6666) + self.assertEqual(resp['total_object_count'], 1000) def test_get_account_info_cache(self): # The original test that we prefer to preserve @@ -384,9 +384,9 @@ class TestFuncs(unittest.TestCase): req = Request.blank("/v1/account/cont", environ={'swift.cache': FakeCache(cached)}) resp = get_account_info(req.environ, FakeApp()) - self.assertEquals(resp['bytes'], 3333) - self.assertEquals(resp['total_object_count'], 10) - self.assertEquals(resp['status'], 404) + self.assertEqual(resp['bytes'], 3333) + self.assertEqual(resp['total_object_count'], 10) + self.assertEqual(resp['status'], 404) # Here is a more realistic test cached = {'status': 404, @@ -397,11 +397,11 @@ class TestFuncs(unittest.TestCase): req = Request.blank("/v1/account/cont", environ={'swift.cache': FakeCache(cached)}) resp = get_account_info(req.environ, FakeApp()) - self.assertEquals(resp['status'], 404) - self.assertEquals(resp['bytes'], '3333') - self.assertEquals(resp['container_count'], 234) - self.assertEquals(resp['meta'], {}) - self.assertEquals(resp['total_object_count'], '10') + self.assertEqual(resp['status'], 404) + self.assertEqual(resp['bytes'], '3333') + self.assertEqual(resp['container_count'], 234) + self.assertEqual(resp['meta'], {}) + self.assertEqual(resp['total_object_count'], '10') def 
test_get_account_info_env(self): cache_key = get_account_memcache_key("account") @@ -410,7 +410,7 @@ class TestFuncs(unittest.TestCase): environ={env_key: {'bytes': 3867}, 'swift.cache': FakeCache({})}) resp = get_account_info(req.environ, 'xxx') - self.assertEquals(resp['bytes'], 3867) + self.assertEqual(resp['bytes'], 3867) def test_get_object_info_env(self): cached = {'status': 200, @@ -422,8 +422,8 @@ class TestFuncs(unittest.TestCase): environ={env_key: cached, 'swift.cache': FakeCache({})}) resp = get_object_info(req.environ, 'xxx') - self.assertEquals(resp['length'], 3333) - self.assertEquals(resp['type'], 'application/json') + self.assertEqual(resp['length'], 3333) + self.assertEqual(resp['type'], 'application/json') def test_get_object_info_no_env(self): app = FakeApp() @@ -433,8 +433,8 @@ class TestFuncs(unittest.TestCase): self.assertEqual(app.responses.stats['account'], 0) self.assertEqual(app.responses.stats['container'], 0) self.assertEqual(app.responses.stats['obj'], 1) - self.assertEquals(resp['length'], 5555) - self.assertEquals(resp['type'], 'text/plain') + self.assertEqual(resp['length'], 5555) + self.assertEqual(resp['type'], 'text/plain') def test_options(self): base = Controller(self.app) @@ -469,26 +469,26 @@ class TestFuncs(unittest.TestCase): def test_headers_to_container_info_missing(self): resp = headers_to_container_info({}, 404) - self.assertEquals(resp['status'], 404) - self.assertEquals(resp['read_acl'], None) - self.assertEquals(resp['write_acl'], None) + self.assertEqual(resp['status'], 404) + self.assertEqual(resp['read_acl'], None) + self.assertEqual(resp['write_acl'], None) def test_headers_to_container_info_meta(self): headers = {'X-Container-Meta-Whatevs': 14, 'x-container-meta-somethingelse': 0} resp = headers_to_container_info(headers.items(), 200) - self.assertEquals(len(resp['meta']), 2) - self.assertEquals(resp['meta']['whatevs'], 14) - self.assertEquals(resp['meta']['somethingelse'], 0) + self.assertEqual(len(resp['meta']), 2) + self.assertEqual(resp['meta']['whatevs'], 14) + self.assertEqual(resp['meta']['somethingelse'], 0) def test_headers_to_container_info_sys_meta(self): prefix = get_sys_meta_prefix('container') headers = {'%sWhatevs' % prefix: 14, '%ssomethingelse' % prefix: 0} resp = headers_to_container_info(headers.items(), 200) - self.assertEquals(len(resp['sysmeta']), 2) - self.assertEquals(resp['sysmeta']['whatevs'], 14) - self.assertEquals(resp['sysmeta']['somethingelse'], 0) + self.assertEqual(len(resp['sysmeta']), 2) + self.assertEqual(resp['sysmeta']['whatevs'], 14) + self.assertEqual(resp['sysmeta']['somethingelse'], 0) def test_headers_to_container_info_values(self): headers = { @@ -498,37 +498,37 @@ class TestFuncs(unittest.TestCase): 'x-container-meta-access-control-allow-origin': 'here', } resp = headers_to_container_info(headers.items(), 200) - self.assertEquals(resp['read_acl'], 'readvalue') - self.assertEquals(resp['write_acl'], 'writevalue') - self.assertEquals(resp['cors']['allow_origin'], 'here') + self.assertEqual(resp['read_acl'], 'readvalue') + self.assertEqual(resp['write_acl'], 'writevalue') + self.assertEqual(resp['cors']['allow_origin'], 'here') headers['x-unused-header'] = 'blahblahblah' - self.assertEquals( + self.assertEqual( resp, headers_to_container_info(headers.items(), 200)) def test_headers_to_account_info_missing(self): resp = headers_to_account_info({}, 404) - self.assertEquals(resp['status'], 404) - self.assertEquals(resp['bytes'], None) - self.assertEquals(resp['container_count'], None) + 
self.assertEqual(resp['status'], 404) + self.assertEqual(resp['bytes'], None) + self.assertEqual(resp['container_count'], None) def test_headers_to_account_info_meta(self): headers = {'X-Account-Meta-Whatevs': 14, 'x-account-meta-somethingelse': 0} resp = headers_to_account_info(headers.items(), 200) - self.assertEquals(len(resp['meta']), 2) - self.assertEquals(resp['meta']['whatevs'], 14) - self.assertEquals(resp['meta']['somethingelse'], 0) + self.assertEqual(len(resp['meta']), 2) + self.assertEqual(resp['meta']['whatevs'], 14) + self.assertEqual(resp['meta']['somethingelse'], 0) def test_headers_to_account_info_sys_meta(self): prefix = get_sys_meta_prefix('account') headers = {'%sWhatevs' % prefix: 14, '%ssomethingelse' % prefix: 0} resp = headers_to_account_info(headers.items(), 200) - self.assertEquals(len(resp['sysmeta']), 2) - self.assertEquals(resp['sysmeta']['whatevs'], 14) - self.assertEquals(resp['sysmeta']['somethingelse'], 0) + self.assertEqual(len(resp['sysmeta']), 2) + self.assertEqual(resp['sysmeta']['whatevs'], 14) + self.assertEqual(resp['sysmeta']['somethingelse'], 0) def test_headers_to_account_info_values(self): headers = { @@ -536,36 +536,36 @@ class TestFuncs(unittest.TestCase): 'x-account-container-count': '20', } resp = headers_to_account_info(headers.items(), 200) - self.assertEquals(resp['total_object_count'], '10') - self.assertEquals(resp['container_count'], '20') + self.assertEqual(resp['total_object_count'], '10') + self.assertEqual(resp['container_count'], '20') headers['x-unused-header'] = 'blahblahblah' - self.assertEquals( + self.assertEqual( resp, headers_to_account_info(headers.items(), 200)) def test_headers_to_object_info_missing(self): resp = headers_to_object_info({}, 404) - self.assertEquals(resp['status'], 404) - self.assertEquals(resp['length'], None) - self.assertEquals(resp['etag'], None) + self.assertEqual(resp['status'], 404) + self.assertEqual(resp['length'], None) + self.assertEqual(resp['etag'], None) def test_headers_to_object_info_meta(self): headers = {'X-Object-Meta-Whatevs': 14, 'x-object-meta-somethingelse': 0} resp = headers_to_object_info(headers.items(), 200) - self.assertEquals(len(resp['meta']), 2) - self.assertEquals(resp['meta']['whatevs'], 14) - self.assertEquals(resp['meta']['somethingelse'], 0) + self.assertEqual(len(resp['meta']), 2) + self.assertEqual(resp['meta']['whatevs'], 14) + self.assertEqual(resp['meta']['somethingelse'], 0) def test_headers_to_object_info_sys_meta(self): prefix = get_sys_meta_prefix('object') headers = {'%sWhatevs' % prefix: 14, '%ssomethingelse' % prefix: 0} resp = headers_to_object_info(headers.items(), 200) - self.assertEquals(len(resp['sysmeta']), 2) - self.assertEquals(resp['sysmeta']['whatevs'], 14) - self.assertEquals(resp['sysmeta']['somethingelse'], 0) + self.assertEqual(len(resp['sysmeta']), 2) + self.assertEqual(resp['sysmeta']['whatevs'], 14) + self.assertEqual(resp['sysmeta']['somethingelse'], 0) def test_headers_to_object_info_values(self): headers = { @@ -573,11 +573,11 @@ class TestFuncs(unittest.TestCase): 'content-type': 'application/json', } resp = headers_to_object_info(headers.items(), 200) - self.assertEquals(resp['length'], '1024') - self.assertEquals(resp['type'], 'application/json') + self.assertEqual(resp['length'], '1024') + self.assertEqual(resp['type'], 'application/json') headers['x-unused-header'] = 'blahblahblah' - self.assertEquals( + self.assertEqual( resp, headers_to_object_info(headers.items(), 200)) @@ -624,24 +624,24 @@ class TestFuncs(unittest.TestCase): req 
= Request.blank('/') handler = GetOrHeadHandler(None, req, None, None, None, None, {}) handler.fast_forward(50) - self.assertEquals(handler.backend_headers['Range'], 'bytes=50-') + self.assertEqual(handler.backend_headers['Range'], 'bytes=50-') handler = GetOrHeadHandler(None, req, None, None, None, None, {'Range': 'bytes=23-50'}) handler.fast_forward(20) - self.assertEquals(handler.backend_headers['Range'], 'bytes=43-50') + self.assertEqual(handler.backend_headers['Range'], 'bytes=43-50') self.assertRaises(HTTPException, handler.fast_forward, 80) handler = GetOrHeadHandler(None, req, None, None, None, None, {'Range': 'bytes=23-'}) handler.fast_forward(20) - self.assertEquals(handler.backend_headers['Range'], 'bytes=43-') + self.assertEqual(handler.backend_headers['Range'], 'bytes=43-') handler = GetOrHeadHandler(None, req, None, None, None, None, {'Range': 'bytes=-100'}) handler.fast_forward(20) - self.assertEquals(handler.backend_headers['Range'], 'bytes=-80') + self.assertEqual(handler.backend_headers['Range'], 'bytes=-80') def test_transfer_headers_with_sysmeta(self): base = Controller(self.app) diff --git a/test/unit/proxy/controllers/test_container.py b/test/unit/proxy/controllers/test_container.py index d2b7ce450e..4d43ceed75 100644 --- a/test/unit/proxy/controllers/test_container.py +++ b/test/unit/proxy/controllers/test_container.py @@ -89,7 +89,7 @@ class TestContainerController(TestRingBase): with mock.patch('swift.proxy.controllers.base.http_connect', fake_http_connect(200, 200, headers=owner_headers)): resp = controller.HEAD(req) - self.assertEquals(2, resp.status_int // 100) + self.assertEqual(2, resp.status_int // 100) for key in owner_headers: self.assertTrue(key not in resp.headers) @@ -97,7 +97,7 @@ class TestContainerController(TestRingBase): with mock.patch('swift.proxy.controllers.base.http_connect', fake_http_connect(200, 200, headers=owner_headers)): resp = controller.HEAD(req) - self.assertEquals(2, resp.status_int // 100) + self.assertEqual(2, resp.status_int // 100) for key in owner_headers: self.assertTrue(key in resp.headers) diff --git a/test/unit/proxy/controllers/test_obj.py b/test/unit/proxy/controllers/test_obj.py index 22685ad178..3cc9ce65dc 100755 --- a/test/unit/proxy/controllers/test_obj.py +++ b/test/unit/proxy/controllers/test_obj.py @@ -236,7 +236,7 @@ class BaseObjectControllerMixin(object): codes = [204] * self.replicas() with set_http_connect(*codes): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) def test_DELETE_missing_one(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE') @@ -244,14 +244,14 @@ class BaseObjectControllerMixin(object): random.shuffle(codes) with set_http_connect(*codes): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) def test_DELETE_not_found(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE') codes = [404] * (self.replicas() - 1) + [204] with set_http_connect(*codes): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_DELETE_mostly_found(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE') @@ -260,7 +260,7 @@ class BaseObjectControllerMixin(object): self.assertEqual(len(codes), self.replicas()) with set_http_connect(*codes): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) def 
test_DELETE_mostly_not_found(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE') @@ -269,7 +269,7 @@ class BaseObjectControllerMixin(object): self.assertEqual(len(codes), self.replicas()) with set_http_connect(*codes): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_DELETE_half_not_found_statuses(self): self.obj_ring.set_replicas(4) @@ -277,7 +277,7 @@ class BaseObjectControllerMixin(object): req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE') with set_http_connect(404, 204, 404, 204): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) def test_DELETE_half_not_found_headers_and_body(self): # Transformed responses have bogus bodies and headers, so make sure we @@ -292,16 +292,16 @@ class BaseObjectControllerMixin(object): with set_http_connect(*status_codes, body_iter=bodies, headers=headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 204) - self.assertEquals(resp.headers.get('Pick-Me'), 'yes') - self.assertEquals(resp.body, '') + self.assertEqual(resp.status_int, 204) + self.assertEqual(resp.headers.get('Pick-Me'), 'yes') + self.assertEqual(resp.body, '') def test_DELETE_handoff(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='DELETE') codes = [204] * self.replicas() with set_http_connect(507, *codes): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) def test_POST_non_int_delete_after(self): t = str(int(time.time() + 100)) + '.1' @@ -381,14 +381,14 @@ class BaseObjectControllerMixin(object): req = swift.common.swob.Request.blank('/v1/a/c/o', method='HEAD') with set_http_connect(200): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) def test_HEAD_x_newest(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='HEAD', headers={'X-Newest': 'true'}) with set_http_connect(200, 200, 200): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) def test_HEAD_x_newest_different_timestamps(self): req = swob.Request.blank('/v1/a/c/o', method='HEAD', @@ -475,7 +475,7 @@ class BaseObjectControllerMixin(object): def test_PUT_requires_length(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT') resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 411) + self.assertEqual(resp.status_int, 411) # end of BaseObjectControllerMixin @@ -491,7 +491,7 @@ class TestReplicatedObjController(BaseObjectControllerMixin, req.headers['content-length'] = '0' with set_http_connect(201, 201, 201): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_if_none_match(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT') @@ -499,7 +499,7 @@ class TestReplicatedObjController(BaseObjectControllerMixin, req.headers['content-length'] = '0' with set_http_connect(201, 201, 201): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_if_none_match_denied(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT') @@ -507,7 +507,7 @@ class TestReplicatedObjController(BaseObjectControllerMixin, req.headers['content-length'] = '0' with set_http_connect(201, 412, 201): resp = 
req.get_response(self.app) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) def test_PUT_if_none_match_not_star(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT') @@ -515,7 +515,7 @@ class TestReplicatedObjController(BaseObjectControllerMixin, req.headers['content-length'] = '0' with set_http_connect(): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_PUT_connect_exceptions(self): object_ring = self.app.get_object_ring(None) @@ -574,20 +574,20 @@ class TestReplicatedObjController(BaseObjectControllerMixin, req = swift.common.swob.Request.blank('/v1/a/c/o') with set_http_connect(200): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) def test_GET_error(self): req = swift.common.swob.Request.blank('/v1/a/c/o') with set_http_connect(503, 200): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) def test_GET_handoff(self): req = swift.common.swob.Request.blank('/v1/a/c/o') codes = [503] * self.obj_ring.replicas + [200] with set_http_connect(*codes): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) def test_GET_not_found(self): req = swift.common.swob.Request.blank('/v1/a/c/o') @@ -595,7 +595,7 @@ class TestReplicatedObjController(BaseObjectControllerMixin, self.obj_ring.max_more_nodes) with set_http_connect(*codes): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_POST_as_COPY_simple(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='POST') @@ -605,8 +605,8 @@ class TestReplicatedObjController(BaseObjectControllerMixin, codes = get_resp + put_resp with set_http_connect(*codes): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 202) - self.assertEquals(req.environ['QUERY_STRING'], '') + self.assertEqual(resp.status_int, 202) + self.assertEqual(req.environ['QUERY_STRING'], '') self.assertTrue('swift.post_as_copy' in req.environ) def test_POST_as_COPY_static_large_object(self): @@ -621,8 +621,8 @@ class TestReplicatedObjController(BaseObjectControllerMixin, headers = {'headers': get_headers} with set_http_connect(*codes, **headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 202) - self.assertEquals(req.environ['QUERY_STRING'], '') + self.assertEqual(resp.status_int, 202) + self.assertEqual(req.environ['QUERY_STRING'], '') self.assertTrue('swift.post_as_copy' in req.environ) def test_POST_delete_at(self): @@ -642,12 +642,12 @@ class TestReplicatedObjController(BaseObjectControllerMixin, codes = x_newest_responses + post_resp with set_http_connect(*codes, give_connect=capture_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) - self.assertEquals(req.environ['QUERY_STRING'], '') # sanity + self.assertEqual(resp.status_int, 200) + self.assertEqual(req.environ['QUERY_STRING'], '') # sanity self.assertTrue('swift.post_as_copy' in req.environ) for given_headers in post_headers: - self.assertEquals(given_headers.get('X-Delete-At'), t) + self.assertEqual(given_headers.get('X-Delete-At'), t) self.assertTrue('X-Delete-At-Host' in given_headers) self.assertTrue('X-Delete-At-Device' in given_headers) self.assertTrue('X-Delete-At-Partition' in given_headers) @@ -667,9 +667,9 @@ class 
TestReplicatedObjController(BaseObjectControllerMixin, codes = [201] * self.obj_ring.replicas with set_http_connect(*codes, give_connect=capture_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) for given_headers in put_headers: - self.assertEquals(given_headers.get('X-Delete-At'), t) + self.assertEqual(given_headers.get('X-Delete-At'), t) self.assertTrue('X-Delete-At-Host' in given_headers) self.assertTrue('X-Delete-At-Device' in given_headers) self.assertTrue('X-Delete-At-Partition' in given_headers) @@ -690,11 +690,11 @@ class TestReplicatedObjController(BaseObjectControllerMixin, with set_http_connect(*codes, give_connect=capture_headers): with mock.patch('time.time', lambda: t): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) expected_delete_at = str(int(t) + 60) for given_headers in put_headers: - self.assertEquals(given_headers.get('X-Delete-At'), - expected_delete_at) + self.assertEqual(given_headers.get('X-Delete-At'), + expected_delete_at) self.assertTrue('X-Delete-At-Host' in given_headers) self.assertTrue('X-Delete-At-Device' in given_headers) self.assertTrue('X-Delete-At-Partition' in given_headers) @@ -861,7 +861,7 @@ class TestReplicatedObjController(BaseObjectControllerMixin, codes = head_resp + put_resp with set_http_connect(*codes): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_log_info(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT') @@ -876,7 +876,7 @@ class TestReplicatedObjController(BaseObjectControllerMixin, with set_http_connect(*codes, headers=resp_headers): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 201) - self.assertEquals( + self.assertEqual( req.environ.get('swift.log_info'), ['x-copy-from:some/where']) # and then check that we don't do that for originating POSTs req = swift.common.swob.Request.blank('/v1/a/c/o') @@ -885,7 +885,7 @@ class TestReplicatedObjController(BaseObjectControllerMixin, with set_http_connect(*codes, headers=resp_headers): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 202) - self.assertEquals(req.environ.get('swift.log_info'), None) + self.assertEqual(req.environ.get('swift.log_info'), None) @patch_policies(legacy_only=True) @@ -941,38 +941,38 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): expected = {} for i, p in enumerate(putters): expected[p] = i - self.assertEquals(got, expected) + self.assertEqual(got, expected) # now lets make a handoff at the end putters[3].node_index = None got = controller._determine_chunk_destinations(putters) - self.assertEquals(got, expected) + self.assertEqual(got, expected) putters[3].node_index = 3 # now lets make a handoff at the start putters[0].node_index = None got = controller._determine_chunk_destinations(putters) - self.assertEquals(got, expected) + self.assertEqual(got, expected) putters[0].node_index = 0 # now lets make a handoff in the middle putters[2].node_index = None got = controller._determine_chunk_destinations(putters) - self.assertEquals(got, expected) + self.assertEqual(got, expected) putters[2].node_index = 0 # now lets make all of them handoffs for index in range(0, 4): putters[index].node_index = None got = controller._determine_chunk_destinations(putters) - self.assertEquals(got, expected) + self.assertEqual(got, expected) def test_GET_simple(self): req = 
swift.common.swob.Request.blank('/v1/a/c/o') get_resp = [200] * self.policy.ec_ndata with set_http_connect(*get_resp): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) def test_GET_simple_x_newest(self): req = swift.common.swob.Request.blank('/v1/a/c/o', @@ -980,14 +980,14 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): codes = [200] * self.policy.ec_ndata with set_http_connect(*codes): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) def test_GET_error(self): req = swift.common.swob.Request.blank('/v1/a/c/o') get_resp = [503] + [200] * self.policy.ec_ndata with set_http_connect(*get_resp): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) def test_GET_with_body(self): req = swift.common.swob.Request.blank('/v1/a/c/o') @@ -1021,7 +1021,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): with set_http_connect(*status_codes, body_iter=body_iter, headers=headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) self.assertEqual(len(real_body), len(resp.body)) self.assertEqual(real_body, resp.body) @@ -1035,7 +1035,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): } with set_http_connect(*codes, expect_headers=expect_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_with_explicit_commit_status(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT', @@ -1047,7 +1047,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): } with set_http_connect(*codes, expect_headers=expect_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_error(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT', @@ -1059,7 +1059,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): } with set_http_connect(*codes, expect_headers=expect_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 503) + self.assertEqual(resp.status_int, 503) def test_PUT_mostly_success(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT', @@ -1073,7 +1073,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): } with set_http_connect(*codes, expect_headers=expect_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_error_commit(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT', @@ -1085,7 +1085,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): } with set_http_connect(*codes, expect_headers=expect_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 503) + self.assertEqual(resp.status_int, 503) def test_PUT_mostly_success_commit(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT', @@ -1100,7 +1100,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): } with set_http_connect(*codes, expect_headers=expect_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_mostly_error_commit(self): req = 
swift.common.swob.Request.blank('/v1/a/c/o', method='PUT', @@ -1114,7 +1114,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): } with set_http_connect(*codes, expect_headers=expect_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 503) + self.assertEqual(resp.status_int, 503) def test_PUT_commit_timeout(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT', @@ -1127,7 +1127,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): } with set_http_connect(*codes, expect_headers=expect_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_commit_exception(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT', @@ -1140,7 +1140,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): } with set_http_connect(*codes, expect_headers=expect_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_with_body(self): req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT') @@ -1171,7 +1171,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): give_connect=capture_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) frag_archives = [] for connection_id, info in put_requests.items(): body = unchunk_body(''.join(info['chunks'])) @@ -1257,7 +1257,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): codes, expect_headers = zip(*responses) with set_http_connect(*codes, expect_headers=expect_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_COPY_cross_policy_type_from_replicated(self): self.app.per_container_info = { @@ -1493,7 +1493,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): start = time.time() resp = req.get_response(self.app) response_time = time.time() - start - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) self.assertTrue(response_time < response_sleep) def test_COPY_with_ranges(self): @@ -1528,7 +1528,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): with set_http_connect(*status_codes, body_iter=body_iter, headers=headers, expect_headers=expect_headers): resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) if __name__ == '__main__': diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 17c88ac8f6..2a7cb04328 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -449,7 +449,7 @@ class TestController(unittest.TestCase): self.controller.transfer_headers(src_headers, dst_headers) expected_headers = {'x-base-meta-owner': '', 'x-base-meta-size': '151M'} - self.assertEquals(dst_headers, expected_headers) + self.assertEqual(dst_headers, expected_headers) def check_account_info_return(self, partition, nodes, is_none=False): if is_none: @@ -464,26 +464,26 @@ class TestController(unittest.TestCase): set_http_connect(200, count=123) partition, nodes, count = \ self.controller.account_info(self.account) - self.assertEquals(count, 123) + self.assertEqual(count, 123) with save_globals(): set_http_connect(200, count='123') partition, nodes, count = \ 
self.controller.account_info(self.account) - self.assertEquals(count, 123) + self.assertEqual(count, 123) with save_globals(): cache_key = get_account_memcache_key(self.account) account_info = {'status': 200, 'container_count': 1234} self.memcache.set(cache_key, account_info) partition, nodes, count = \ self.controller.account_info(self.account) - self.assertEquals(count, 1234) + self.assertEqual(count, 1234) with save_globals(): cache_key = get_account_memcache_key(self.account) account_info = {'status': 200, 'container_count': '1234'} self.memcache.set(cache_key, account_info) partition, nodes, count = \ self.controller.account_info(self.account) - self.assertEquals(count, 1234) + self.assertEqual(count, 1234) def test_make_requests(self): with save_globals(): @@ -502,7 +502,7 @@ class TestController(unittest.TestCase): partition, nodes, count = \ self.controller.account_info(self.account, self.request) self.check_account_info_return(partition, nodes) - self.assertEquals(count, 12345) + self.assertEqual(count, 12345) # Test the internal representation in memcache # 'container_count' changed from int to str @@ -513,14 +513,14 @@ class TestController(unittest.TestCase): 'bytes': None, 'meta': {}, 'sysmeta': {}} - self.assertEquals(container_info, - self.memcache.get(cache_key)) + self.assertEqual(container_info, + self.memcache.get(cache_key)) set_http_connect() partition, nodes, count = \ self.controller.account_info(self.account, self.request) self.check_account_info_return(partition, nodes) - self.assertEquals(count, 12345) + self.assertEqual(count, 12345) # tests if 404 is cached and used def test_account_info_404(self): @@ -529,7 +529,7 @@ class TestController(unittest.TestCase): partition, nodes, count = \ self.controller.account_info(self.account, self.request) self.check_account_info_return(partition, nodes, True) - self.assertEquals(count, None) + self.assertEqual(count, None) # Test the internal representation in memcache # 'container_count' changed from 0 to None @@ -540,14 +540,14 @@ class TestController(unittest.TestCase): 'bytes': None, 'meta': {}, 'sysmeta': {}} - self.assertEquals(account_info, - self.memcache.get(cache_key)) + self.assertEqual(account_info, + self.memcache.get(cache_key)) set_http_connect() partition, nodes, count = \ self.controller.account_info(self.account, self.request) self.check_account_info_return(partition, nodes, True) - self.assertEquals(count, None) + self.assertEqual(count, None) # tests if some http status codes are not cached def test_account_info_no_cache(self): @@ -557,7 +557,7 @@ class TestController(unittest.TestCase): self.controller.account_info(self.account, self.request) self.assertEqual(len(self.memcache.keys()), 0) self.check_account_info_return(partition, nodes, True) - self.assertEquals(count, None) + self.assertEqual(count, None) with save_globals(): # We cache if we have two 404 responses - fail if only one @@ -573,7 +573,7 @@ class TestController(unittest.TestCase): partition, nodes, count = \ self.controller.account_info(self.account, self.request) self.check_account_info_return(partition, nodes, is_none=True) - self.assertEquals(count, None) + self.assertEqual(count, None) def check_container_info_return(self, ret, is_none=False): if is_none: @@ -614,7 +614,7 @@ class TestController(unittest.TestCase): self.container) cache_value = self.memcache.get(cache_key) self.assertTrue(isinstance(cache_value, dict)) - self.assertEquals(200, cache_value.get('status')) + self.assertEqual(200, cache_value.get('status')) set_http_connect() 
ret = self.controller.container_info( @@ -637,7 +637,7 @@ class TestController(unittest.TestCase): self.container) cache_value = self.memcache.get(cache_key) self.assertTrue(isinstance(cache_value, dict)) - self.assertEquals(404, cache_value.get('status')) + self.assertEqual(404, cache_value.get('status')) set_http_connect() ret = self.controller.container_info( @@ -653,7 +653,7 @@ class TestController(unittest.TestCase): self.container) cache_value = self.memcache.get(cache_key) self.assertTrue(isinstance(cache_value, dict)) - self.assertEquals(404, cache_value.get('status')) + self.assertEqual(404, cache_value.get('status')) set_http_connect() ret = self.controller.container_info( @@ -717,7 +717,7 @@ class TestProxyServer(unittest.TestCase): req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'}) app.update_request(req) resp = app.handle_request(req) - self.assertEquals(resp.status_int, 500) + self.assertEqual(resp.status_int, 500) def test_internal_method_request(self): baseapp = proxy_server.Application({}, @@ -726,7 +726,7 @@ class TestProxyServer(unittest.TestCase): account_ring=FakeRing()) resp = baseapp.handle_request( Request.blank('/v1/a', environ={'REQUEST_METHOD': '__init__'})) - self.assertEquals(resp.status, '405 Method Not Allowed') + self.assertEqual(resp.status, '405 Method Not Allowed') def test_inexistent_method_request(self): baseapp = proxy_server.Application({}, @@ -735,7 +735,7 @@ class TestProxyServer(unittest.TestCase): account_ring=FakeRing()) resp = baseapp.handle_request( Request.blank('/v1/a', environ={'REQUEST_METHOD': '!invalid'})) - self.assertEquals(resp.status, '405 Method Not Allowed') + self.assertEqual(resp.status, '405 Method Not Allowed') def test_calls_authorize_allow(self): called = [False] @@ -776,12 +776,12 @@ class TestProxyServer(unittest.TestCase): FakeRing(), FakeRing()) resp = baseapp.handle_request( Request.blank('/', environ={'CONTENT_LENGTH': '-1'})) - self.assertEquals(resp.status, '400 Bad Request') - self.assertEquals(resp.body, 'Invalid Content-Length') + self.assertEqual(resp.status, '400 Bad Request') + self.assertEqual(resp.body, 'Invalid Content-Length') resp = baseapp.handle_request( Request.blank('/', environ={'CONTENT_LENGTH': '-123'})) - self.assertEquals(resp.status, '400 Bad Request') - self.assertEquals(resp.body, 'Invalid Content-Length') + self.assertEqual(resp.status, '400 Bad Request') + self.assertEqual(resp.body, 'Invalid Content-Length') finally: rmtree(swift_dir, ignore_errors=True) @@ -834,7 +834,7 @@ class TestProxyServer(unittest.TestCase): resp = baseapp.handle_request( Request.blank('/v1/a/c/o', environ={'HTTP_HOST': 'invalid_host.com'})) - self.assertEquals(resp.status, '403 Forbidden') + self.assertEqual(resp.status, '403 Forbidden') finally: rmtree(swift_dir, ignore_errors=True) @@ -843,27 +843,27 @@ class TestProxyServer(unittest.TestCase): FakeMemcache(), container_ring=FakeRing(), account_ring=FakeRing()) - self.assertEquals(baseapp.node_timings, {}) + self.assertEqual(baseapp.node_timings, {}) req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'}) baseapp.update_request(req) resp = baseapp.handle_request(req) - self.assertEquals(resp.status_int, 503) # couldn't connect to anything + self.assertEqual(resp.status_int, 503) # couldn't connect to anything exp_timings = {} - self.assertEquals(baseapp.node_timings, exp_timings) + self.assertEqual(baseapp.node_timings, exp_timings) times = [time.time()] exp_timings = {'127.0.0.1': (0.1, times[0] + baseapp.timing_expiry)} with 
mock.patch('swift.proxy.server.time', lambda: times.pop(0)): baseapp.set_node_timing({'ip': '127.0.0.1'}, 0.1) - self.assertEquals(baseapp.node_timings, exp_timings) + self.assertEqual(baseapp.node_timings, exp_timings) nodes = [{'ip': '127.0.0.1'}, {'ip': '127.0.0.2'}, {'ip': '127.0.0.3'}] with mock.patch('swift.proxy.server.shuffle', lambda l: l): res = baseapp.sort_nodes(nodes) exp_sorting = [{'ip': '127.0.0.2'}, {'ip': '127.0.0.3'}, {'ip': '127.0.0.1'}] - self.assertEquals(res, exp_sorting) + self.assertEqual(res, exp_sorting) def test_node_affinity(self): baseapp = proxy_server.Application({'sorting_method': 'affinity', @@ -878,7 +878,7 @@ class TestProxyServer(unittest.TestCase): app_sorted = baseapp.sort_nodes(nodes) exp_sorted = [{'region': 1, 'zone': 2, 'ip': '127.0.0.2'}, {'region': 2, 'zone': 1, 'ip': '127.0.0.1'}] - self.assertEquals(exp_sorted, app_sorted) + self.assertEqual(exp_sorted, app_sorted) def test_info_defaults(self): app = proxy_server.Application({}, FakeMemcache(), @@ -1172,7 +1172,7 @@ class TestObjectController(unittest.TestCase): res = method(req) except HTTPException as res: pass - self.assertEquals(res.status_int, expected) + self.assertEqual(res.status_int, expected) # repeat test set_http_connect(*statuses, **kwargs) @@ -1185,7 +1185,7 @@ class TestObjectController(unittest.TestCase): res = method(req) except HTTPException as res: pass - self.assertEquals(res.status_int, expected) + self.assertEqual(res.status_int, expected) @unpatch_policies def test_policy_IO(self): @@ -2742,7 +2742,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}) self.app.update_request(req) controller.DELETE(req) - self.assertEquals(test_errors, []) + self.assertEqual(test_errors, []) @patch_policies([ StoragePolicy(0, 'zero', False, object_ring=FakeRing()), @@ -2803,8 +2803,8 @@ class TestObjectController(unittest.TestCase): ('GET', '/a/foo/2'), ('PUT', '/a/c/o'), ('DELETE', '/a/foo/2')] - self.assertEquals(set(exp_methods), (methods)) - self.assertEquals(authorize_call_count[0], 2) + self.assertEqual(set(exp_methods), (methods)) + self.assertEqual(authorize_call_count[0], 2) @patch_policies([ StoragePolicy(0, 'zero', False, object_ring=FakeRing()), @@ -2864,7 +2864,7 @@ class TestObjectController(unittest.TestCase): resp = controller.DELETE(req) self.assertEqual(403, resp.status_int) self.assertFalse(methods, methods) - self.assertEquals(authorize_call_count[0], 1) + self.assertEqual(authorize_call_count[0], 1) def test_PUT_auto_content_type(self): with save_globals(): @@ -2878,8 +2878,8 @@ class TestObjectController(unittest.TestCase): # servers) set_http_connect(201, 201, 201, 201, 201, give_content_type=lambda content_type: - self.assertEquals(content_type, - next(expected))) + self.assertEqual(content_type, + next(expected))) # We need into include a transfer-encoding to get past # constraints.check_object_creation() req = Request.blank('/v1/a/c/%s' % filename, {}, @@ -2889,7 +2889,7 @@ class TestObjectController(unittest.TestCase): res = controller.PUT(req) # If we don't check the response here we could miss problems # in PUT() - self.assertEquals(res.status_int, 201) + self.assertEqual(res.status_int, 201) test_content_type('test.jpg', iter(['', '', 'image/jpeg', 'image/jpeg', 'image/jpeg'])) @@ -2906,10 +2906,10 @@ class TestObjectController(unittest.TestCase): proxy_server.Application({'swift_dir': swift_dir}, FakeMemcache(), FakeLogger(), FakeRing(), FakeRing()) - self.assertEquals(proxy_server.mimetypes.guess_type('blah.foo')[0], - 
'foo/bar') - self.assertEquals(proxy_server.mimetypes.guess_type('blah.jpg')[0], - 'image/jpeg') + self.assertEqual(proxy_server.mimetypes.guess_type('blah.foo')[0], + 'foo/bar') + self.assertEqual(proxy_server.mimetypes.guess_type('blah.jpg')[0], + 'image/jpeg') finally: rmtree(swift_dir, ignore_errors=True) @@ -2926,7 +2926,7 @@ class TestObjectController(unittest.TestCase): self.app.memcache.store = {} res = controller.PUT(req) expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) + self.assertEqual(res.status[:len(expected)], expected) test_status_map((200, 200, 201, 201, 201), 201) test_status_map((200, 200, 201, 201, 500), 201) test_status_map((200, 200, 204, 404, 404), 404) @@ -2949,7 +2949,7 @@ class TestObjectController(unittest.TestCase): except HTTPException as res: pass expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) + self.assertEqual(res.status[:len(expected)], expected) test_status_map((200, 200, 201, 201, -1), 201) # connect exc # connect errors test_status_map((200, 200, Timeout(), 201, 201, ), 201) @@ -2981,7 +2981,7 @@ class TestObjectController(unittest.TestCase): except HTTPException as res: pass expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) + self.assertEqual(res.status[:len(expected)], expected) test_status_map((200, 200, 201, -1, 201), 201) test_status_map((200, 200, 201, -1, -1), 503) test_status_map((200, 200, 503, 503, -1), 503) @@ -2996,7 +2996,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'foo/bar'}) self.app.update_request(req) res = controller.PUT(req) - self.assertEquals(res.status_int, 413) + self.assertEqual(res.status_int, 413) def test_PUT_bad_content_type(self): with save_globals(): @@ -3007,7 +3007,7 @@ class TestObjectController(unittest.TestCase): 'Content-Length': 0, 'Content-Type': 'foo/bar;swift_hey=45'}) self.app.update_request(req) res = controller.PUT(req) - self.assertEquals(res.status_int, 400) + self.assertEqual(res.status_int, 400) def test_PUT_getresponse_exceptions(self): @@ -3026,8 +3026,8 @@ class TestObjectController(unittest.TestCase): except HTTPException as res: pass expected = str(expected) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) + self.assertEqual(res.status[:len(str(expected))], + str(expected)) test_status_map((200, 200, 201, 201, -1), 201) test_status_map((200, 200, 201, -1, -1), 503) test_status_map((200, 200, 503, 503, -1), 503) @@ -3044,7 +3044,7 @@ class TestObjectController(unittest.TestCase): self.app.update_request(req) res = req.get_response(self.app) expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) + self.assertEqual(res.status[:len(expected)], expected) test_status_map((200, 200, 202, 202, 202), 202) test_status_map((200, 200, 202, 202, 500), 202) test_status_map((200, 200, 202, 500, 500), 503) @@ -3194,7 +3194,7 @@ class TestObjectController(unittest.TestCase): self.app.update_request(req) res = req.get_response(self.app) expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) + self.assertEqual(res.status[:len(expected)], expected) test_status_map((200, 200, 200, 200, 200, 202, 202, 202), 202) test_status_map((200, 200, 200, 200, 200, 202, 202, 500), 202) test_status_map((200, 200, 200, 200, 200, 202, 500, 500), 503) @@ -3211,8 +3211,8 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'DELETE'}) self.app.update_request(req) res = 
req.get_response(self.app) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) + self.assertEqual(res.status[:len(str(expected))], + str(expected)) test_status_map((200, 200, 204, 204, 204), 204) test_status_map((200, 200, 204, 204, 500), 204) test_status_map((200, 200, 204, 404, 404), 404) @@ -3228,13 +3228,13 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'}) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) + self.assertEqual(res.status[:len(str(expected))], + str(expected)) if expected < 400: self.assertTrue('x-works' in res.headers) - self.assertEquals(res.headers['x-works'], 'yes') + self.assertEqual(res.headers['x-works'], 'yes') self.assertTrue('accept-ranges' in res.headers) - self.assertEquals(res.headers['accept-ranges'], 'bytes') + self.assertEqual(res.headers['accept-ranges'], 'bytes') test_status_map((200, 200, 200, 404, 404), 200) test_status_map((200, 200, 200, 500, 404), 200) @@ -3253,10 +3253,10 @@ class TestObjectController(unittest.TestCase): headers={'x-newest': 'true'}) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) - self.assertEquals(res.headers.get('last-modified'), - expected_timestamp) + self.assertEqual(res.status[:len(str(expected))], + str(expected)) + self.assertEqual(res.headers.get('last-modified'), + expected_timestamp) # acct cont obj obj obj test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1', @@ -3284,10 +3284,10 @@ class TestObjectController(unittest.TestCase): headers={'x-newest': 'true'}) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) - self.assertEquals(res.headers.get('last-modified'), - expected_timestamp) + self.assertEqual(res.status[:len(str(expected))], + str(expected)) + self.assertEqual(res.headers.get('last-modified'), + expected_timestamp) test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1', '2', '3'), '3') @@ -3310,10 +3310,10 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'}) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) - self.assertEquals(res.headers.get('last-modified'), - expected_timestamp) + self.assertEqual(res.status[:len(str(expected))], + str(expected)) + self.assertEqual(res.headers.get('last-modified'), + expected_timestamp) test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1', '2', '3'), '1') @@ -3339,7 +3339,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-Foo': 'x' * limit}) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status_int, 202) + self.assertEqual(res.status_int, 202) set_http_connect(202, 202, 202) req = Request.blank( '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, @@ -3347,7 +3347,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-Foo': 'x' * (limit + 1)}) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status_int, 400) + self.assertEqual(res.status_int, 400) def test_POST_as_copy_meta_val_len(self): with save_globals(): @@ -3359,7 +3359,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-Foo': 'x' * limit}) self.app.update_request(req) res = req.get_response(self.app) - 
self.assertEquals(res.status_int, 202) + self.assertEqual(res.status_int, 202) set_http_connect(202, 202, 202) req = Request.blank( '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, @@ -3367,7 +3367,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-Foo': 'x' * (limit + 1)}) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status_int, 400) + self.assertEqual(res.status_int, 400) def test_POST_meta_key_len(self): with save_globals(): @@ -3381,7 +3381,7 @@ class TestObjectController(unittest.TestCase): ('X-Object-Meta-' + 'x' * limit): 'x'}) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status_int, 202) + self.assertEqual(res.status_int, 202) set_http_connect(202, 202, 202) req = Request.blank( '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, @@ -3389,7 +3389,7 @@ class TestObjectController(unittest.TestCase): ('X-Object-Meta-' + 'x' * (limit + 1)): 'x'}) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status_int, 400) + self.assertEqual(res.status_int, 400) def test_POST_as_copy_meta_key_len(self): with save_globals(): @@ -3402,7 +3402,7 @@ class TestObjectController(unittest.TestCase): ('X-Object-Meta-' + 'x' * limit): 'x'}) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status_int, 202) + self.assertEqual(res.status_int, 202) set_http_connect(202, 202, 202) req = Request.blank( '/v1/a/c/o', {'REQUEST_METHOD': 'POST'}, @@ -3410,7 +3410,7 @@ class TestObjectController(unittest.TestCase): ('X-Object-Meta-' + 'x' * (limit + 1)): 'x'}) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status_int, 400) + self.assertEqual(res.status_int, 400) def test_POST_meta_count(self): with save_globals(): @@ -3423,7 +3423,7 @@ class TestObjectController(unittest.TestCase): headers=headers) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status_int, 400) + self.assertEqual(res.status_int, 400) def test_POST_meta_size(self): with save_globals(): @@ -3438,7 +3438,7 @@ class TestObjectController(unittest.TestCase): headers=headers) self.app.update_request(req) res = req.get_response(self.app) - self.assertEquals(res.status_int, 400) + self.assertEqual(res.status_int, 400) def test_PUT_not_autodetect_content_type(self): with save_globals(): @@ -3520,7 +3520,7 @@ class TestObjectController(unittest.TestCase): set_http_connect(200, 200, 201, 201, 201) # acct cont obj obj obj resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) self.app.client_timeout = 0.05 req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT', @@ -3531,7 +3531,7 @@ class TestObjectController(unittest.TestCase): set_http_connect(201, 201, 201) # obj obj obj resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 408) + self.assertEqual(resp.status_int, 408) def test_client_disconnect(self): with save_globals(): @@ -3566,7 +3566,7 @@ class TestObjectController(unittest.TestCase): set_http_connect(200, 200, 201, 201, 201) # acct cont obj obj obj resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 499) + self.assertEqual(resp.status_int, 499) def test_node_read_timeout(self): with save_globals(): @@ -3627,7 +3627,7 @@ class TestObjectController(unittest.TestCase): resp = req.get_response(self.app) got_exc = False try: - self.assertEquals('', resp.body) + self.assertEqual('', resp.body) except ChunkReadTimeout: got_exc 
= True self.assertTrue(got_exc) @@ -3637,7 +3637,7 @@ class TestObjectController(unittest.TestCase): resp = req.get_response(self.app) got_exc = False try: - self.assertEquals(resp.body, 'lalala') + self.assertEqual(resp.body, 'lalala') except ChunkReadTimeout: got_exc = True self.assertTrue(not got_exc) @@ -3647,7 +3647,7 @@ class TestObjectController(unittest.TestCase): resp = req.get_response(self.app) got_exc = False try: - self.assertEquals(resp.body, 'lalala') + self.assertEqual(resp.body, 'lalala') except ChunkReadTimeout: got_exc = True self.assertTrue(not got_exc) @@ -3657,7 +3657,7 @@ class TestObjectController(unittest.TestCase): resp = req.get_response(self.app) got_exc = False try: - self.assertEquals(resp.body, 'lalala') + self.assertEqual(resp.body, 'lalala') except ChunkReadTimeout: got_exc = True self.assertTrue(not got_exc) @@ -3696,7 +3696,7 @@ class TestObjectController(unittest.TestCase): self.app.update_request(req) set_http_connect(200, 200, 201, 201, 201, slow=0.1) resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) self.app.node_timeout = 0.1 req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -3706,14 +3706,14 @@ class TestObjectController(unittest.TestCase): self.app.update_request(req) set_http_connect(201, 201, 201, slow=1.0) resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 503) + self.assertEqual(resp.status_int, 503) def test_node_request_setting(self): baseapp = proxy_server.Application({'request_node_count': '3'}, FakeMemcache(), container_ring=FakeRing(), account_ring=FakeRing()) - self.assertEquals(baseapp.request_node_count(3), 3) + self.assertEqual(baseapp.request_node_count(3), 3) def test_iter_nodes(self): with save_globals(): @@ -3727,7 +3727,7 @@ class TestObjectController(unittest.TestCase): for node in self.app.iter_nodes(object_ring, partition): collected_nodes.append(node) - self.assertEquals(len(collected_nodes), 5) + self.assertEqual(len(collected_nodes), 5) object_ring.max_more_nodes = 20 self.app.request_node_count = lambda r: 20 @@ -3738,7 +3738,7 @@ class TestObjectController(unittest.TestCase): for node in self.app.iter_nodes(object_ring, partition): collected_nodes.append(node) - self.assertEquals(len(collected_nodes), 9) + self.assertEqual(len(collected_nodes), 9) # zero error-limited primary nodes -> no handoff warnings self.app.log_handoffs = True @@ -3751,9 +3751,9 @@ class TestObjectController(unittest.TestCase): collected_nodes = [] for node in self.app.iter_nodes(object_ring, partition): collected_nodes.append(node) - self.assertEquals(len(collected_nodes), 7) - self.assertEquals(self.app.logger.log_dict['warning'], []) - self.assertEquals(self.app.logger.get_increments(), []) + self.assertEqual(len(collected_nodes), 7) + self.assertEqual(self.app.logger.log_dict['warning'], []) + self.assertEqual(self.app.logger.get_increments(), []) # one error-limited primary node -> one handoff warning self.app.log_handoffs = True @@ -3766,11 +3766,11 @@ class TestObjectController(unittest.TestCase): collected_nodes = [] for node in self.app.iter_nodes(object_ring, partition): collected_nodes.append(node) - self.assertEquals(len(collected_nodes), 7) - self.assertEquals(self.app.logger.log_dict['warning'], [ + self.assertEqual(len(collected_nodes), 7) + self.assertEqual(self.app.logger.log_dict['warning'], [ (('Handoff requested (5)',), {})]) - self.assertEquals(self.app.logger.get_increments(), - ['handoff_count']) + 
self.assertEqual(self.app.logger.get_increments(), + ['handoff_count']) # two error-limited primary nodes -> two handoff warnings self.app.log_handoffs = True @@ -3784,13 +3784,13 @@ class TestObjectController(unittest.TestCase): collected_nodes = [] for node in self.app.iter_nodes(object_ring, partition): collected_nodes.append(node) - self.assertEquals(len(collected_nodes), 7) - self.assertEquals(self.app.logger.log_dict['warning'], [ + self.assertEqual(len(collected_nodes), 7) + self.assertEqual(self.app.logger.log_dict['warning'], [ (('Handoff requested (5)',), {}), (('Handoff requested (6)',), {})]) - self.assertEquals(self.app.logger.get_increments(), - ['handoff_count', - 'handoff_count']) + self.assertEqual(self.app.logger.get_increments(), + ['handoff_count', + 'handoff_count']) # all error-limited primary nodes -> four handoff warnings, # plus a handoff-all metric @@ -3806,18 +3806,18 @@ class TestObjectController(unittest.TestCase): collected_nodes = [] for node in self.app.iter_nodes(object_ring, partition): collected_nodes.append(node) - self.assertEquals(len(collected_nodes), 10) - self.assertEquals(self.app.logger.log_dict['warning'], [ + self.assertEqual(len(collected_nodes), 10) + self.assertEqual(self.app.logger.log_dict['warning'], [ (('Handoff requested (7)',), {}), (('Handoff requested (8)',), {}), (('Handoff requested (9)',), {}), (('Handoff requested (10)',), {})]) - self.assertEquals(self.app.logger.get_increments(), - ['handoff_count', - 'handoff_count', - 'handoff_count', - 'handoff_count', - 'handoff_all_count']) + self.assertEqual(self.app.logger.get_increments(), + ['handoff_count', + 'handoff_count', + 'handoff_count', + 'handoff_count', + 'handoff_all_count']) finally: object_ring.max_more_nodes = 0 @@ -3854,8 +3854,8 @@ class TestObjectController(unittest.TestCase): if not second_nodes: self.app.error_limit(node, 'test') second_nodes.append(node) - self.assertEquals(len(first_nodes), 6) - self.assertEquals(len(second_nodes), 7) + self.assertEqual(len(first_nodes), 6) + self.assertEqual(len(second_nodes), 7) def test_iter_nodes_with_custom_node_iter(self): object_ring = self.app.get_object_ring(None) @@ -3885,7 +3885,7 @@ class TestObjectController(unittest.TestCase): 'Object', headers=[{'X-Test': '1'}, {'X-Test': '2'}, {'X-Test': '3'}]) - self.assertEquals(resp.headers['X-Test'], '1') + self.assertEqual(resp.headers['X-Test'], '1') def test_best_response_sets_etag(self): controller = ReplicatedObjectController( @@ -3893,12 +3893,12 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'}) resp = controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3, 'Object') - self.assertEquals(resp.etag, None) + self.assertEqual(resp.etag, None) resp = controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3, 'Object', etag='68b329da9893e34099c7d8ad5cb9c940' ) - self.assertEquals(resp.etag, '68b329da9893e34099c7d8ad5cb9c940') + self.assertEqual(resp.etag, '68b329da9893e34099c7d8ad5cb9c940') def test_proxy_passes_content_type(self): with save_globals(): @@ -3906,16 +3906,16 @@ class TestObjectController(unittest.TestCase): self.app.update_request(req) set_http_connect(200, 200, 200) resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_type, 'x-application/test') + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.content_type, 'x-application/test') set_http_connect(200, 200, 200) resp = req.get_response(self.app) - 
self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_length, 0) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.content_length, 0) set_http_connect(200, 200, 200, slow=True) resp = req.get_response(self.app) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_length, 4) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.content_length, 4) def test_proxy_passes_content_length_on_head(self): with save_globals(): @@ -3926,12 +3926,12 @@ class TestObjectController(unittest.TestCase): self.app, 'account', 'container', 'object') set_http_connect(200, 200, 200) resp = controller.HEAD(req) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_length, 0) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.content_length, 0) set_http_connect(200, 200, 200, slow=True) resp = controller.HEAD(req) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_length, 4) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.content_length, 4) def test_error_limiting(self): with save_globals(): @@ -3941,7 +3941,7 @@ class TestObjectController(unittest.TestCase): object_ring = controller.app.get_object_ring(None) self.assert_status_map(controller.HEAD, (200, 200, 503, 200, 200), 200) - self.assertEquals( + self.assertEqual( node_error_count(controller.app, object_ring.devs[0]), 2) self.assertTrue( node_last_error(controller.app, object_ring.devs[0]) @@ -3949,7 +3949,7 @@ class TestObjectController(unittest.TestCase): for _junk in range(self.app.error_suppression_limit): self.assert_status_map(controller.HEAD, (200, 200, 503, 503, 503), 503) - self.assertEquals( + self.assertEqual( node_error_count(controller.app, object_ring.devs[0]), self.app.error_suppression_limit + 1) self.assert_status_map(controller.HEAD, (200, 200, 200, 200, 200), @@ -3980,7 +3980,7 @@ class TestObjectController(unittest.TestCase): object_ring = controller.app.get_object_ring(None) self.assert_status_map(controller.HEAD, (200, 200, 503, 200, 200), 200) - self.assertEquals( + self.assertEqual( node_error_count(controller.app, object_ring.devs[0]), 2) self.assertTrue( node_last_error(controller.app, object_ring.devs[0]) @@ -3988,7 +3988,7 @@ class TestObjectController(unittest.TestCase): for _junk in range(self.app.error_suppression_limit): self.assert_status_map(controller.HEAD, (200, 200, 503, 503, 503), 503) - self.assertEquals( + self.assertEqual( node_error_count(controller.app, object_ring.devs[0]), self.app.error_suppression_limit + 1) @@ -4013,9 +4013,9 @@ class TestObjectController(unittest.TestCase): # 2, not 1, because assert_status_map() calls the method twice odevs = object_ring.devs - self.assertEquals(node_error_count(controller.app, odevs[0]), 2) - self.assertEquals(node_error_count(controller.app, odevs[1]), 0) - self.assertEquals(node_error_count(controller.app, odevs[2]), 0) + self.assertEqual(node_error_count(controller.app, odevs[0]), 2) + self.assertEqual(node_error_count(controller.app, odevs[1]), 0) + self.assertEqual(node_error_count(controller.app, odevs[2]), 0) self.assertTrue( node_last_error(controller.app, odevs[0]) is not None) self.assertTrue(node_last_error(controller.app, odevs[1]) is None) @@ -4033,9 +4033,9 @@ class TestObjectController(unittest.TestCase): # 2, not 1, because assert_status_map() calls the method twice odevs = object_ring.devs - self.assertEquals(node_error_count(controller.app, odevs[0]), 0) - self.assertEquals(node_error_count(controller.app, 
odevs[1]), 0) - self.assertEquals(node_error_count(controller.app, odevs[2]), 2) + self.assertEqual(node_error_count(controller.app, odevs[0]), 0) + self.assertEqual(node_error_count(controller.app, odevs[1]), 0) + self.assertEqual(node_error_count(controller.app, odevs[2]), 2) self.assertTrue(node_last_error(controller.app, odevs[0]) is None) self.assertTrue(node_last_error(controller.app, odevs[1]) is None) self.assertTrue( @@ -4052,7 +4052,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}) self.app.update_request(req) resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) set_http_connect(404, 404, 404) # acct acct acct @@ -4060,7 +4060,7 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}) resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) set_http_connect(503, 404, 404) # acct acct acct @@ -4068,7 +4068,7 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}) resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) set_http_connect(503, 503, 404) # acct acct acct @@ -4076,7 +4076,7 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}) resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) set_http_connect(503, 503, 503) # acct acct acct @@ -4084,7 +4084,7 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}) resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) set_http_connect(200, 200, 204, 204, 204) # acct cont obj obj obj @@ -4092,7 +4092,7 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}) resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) set_http_connect(200, 404, 404, 404) # acct cont cont cont @@ -4100,7 +4100,7 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}) resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) set_http_connect(200, 503, 503, 503) # acct cont cont cont @@ -4108,7 +4108,7 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}) resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) for dev in self.app.account_ring.devs: set_node_errors( @@ -4121,7 +4121,7 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}) resp = getattr(controller, 'DELETE')(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) for dev in self.app.account_ring.devs: set_node_errors(self.app, dev, 0, last_error=None) @@ -4136,7 +4136,7 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}) resp = getattr(controller, 'DELETE')(req) - 
self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_PUT_POST_requires_container_exist(self): with save_globals(): @@ -4150,7 +4150,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'PUT'}) self.app.update_request(req) resp = controller.PUT(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) set_http_connect(200, 404, 404, 404, 200, 200) req = Request.blank('/v1/a/c/o', @@ -4158,7 +4158,7 @@ class TestObjectController(unittest.TestCase): headers={'Content-Type': 'text/plain'}) self.app.update_request(req) resp = controller.POST(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_PUT_POST_as_copy_requires_container_exist(self): with save_globals(): @@ -4169,7 +4169,7 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}) self.app.update_request(req) resp = controller.PUT(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) set_http_connect(200, 404, 404, 404, 200, 200, 200, 200, 200, 200) req = Request.blank('/v1/a/c/o', @@ -4177,7 +4177,7 @@ class TestObjectController(unittest.TestCase): headers={'Content-Type': 'text/plain'}) self.app.update_request(req) resp = controller.POST(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_bad_metadata(self): with save_globals(): @@ -4189,7 +4189,7 @@ class TestObjectController(unittest.TestCase): headers={'Content-Length': '0'}) self.app.update_request(req) resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) req = Request.blank( @@ -4199,7 +4199,7 @@ class TestObjectController(unittest.TestCase): 'a' * constraints.MAX_META_NAME_LENGTH): 'v'}) self.app.update_request(req) resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) req = Request.blank( '/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4209,7 +4209,7 @@ class TestObjectController(unittest.TestCase): 'a' * (constraints.MAX_META_NAME_LENGTH + 1)): 'v'}) self.app.update_request(req) resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) set_http_connect(201, 201, 201) req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4218,7 +4218,7 @@ class TestObjectController(unittest.TestCase): constraints.MAX_META_VALUE_LENGTH}) self.app.update_request(req) resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) req = Request.blank( '/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4227,7 +4227,7 @@ class TestObjectController(unittest.TestCase): (constraints.MAX_META_VALUE_LENGTH + 1)}) self.app.update_request(req) resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) set_http_connect(201, 201, 201) headers = {'Content-Length': '0'} @@ -4237,7 +4237,7 @@ class TestObjectController(unittest.TestCase): headers=headers) self.app.update_request(req) resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) headers = {'Content-Length': '0'} for x in range(constraints.MAX_META_COUNT + 1): @@ -4246,7 +4246,7 @@ class 
TestObjectController(unittest.TestCase): headers=headers) self.app.update_request(req) resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) set_http_connect(201, 201, 201) headers = {'Content-Length': '0'} @@ -4265,7 +4265,7 @@ class TestObjectController(unittest.TestCase): headers=headers) self.app.update_request(req) resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) headers['X-Object-Meta-a'] = \ 'a' * (constraints.MAX_META_OVERALL_SIZE - size) @@ -4273,7 +4273,7 @@ class TestObjectController(unittest.TestCase): headers=headers) self.app.update_request(req) resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) @contextmanager def controller_context(self, req, *args, **kwargs): @@ -4303,8 +4303,8 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o') def test_basic_put_with_x_copy_from_account(self): req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4315,9 +4315,9 @@ class TestObjectController(unittest.TestCase): # acct cont acc1 con1 objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') - self.assertEquals(resp.headers['x-copied-from-account'], 'a') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o') + self.assertEqual(resp.headers['x-copied-from-account'], 'a') def test_basic_put_with_x_copy_from_across_container(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4327,8 +4327,8 @@ class TestObjectController(unittest.TestCase): # acct cont conc objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c2/o') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c2/o') def test_basic_put_with_x_copy_from_across_container_and_account(self): req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4339,9 +4339,9 @@ class TestObjectController(unittest.TestCase): # acct cont acc1 con1 objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c2/o') - self.assertEquals(resp.headers['x-copied-from-account'], 'a') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c2/o') + self.assertEqual(resp.headers['x-copied-from-account'], 'a') def test_copy_non_zero_content_length(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4351,7 +4351,7 @@ class TestObjectController(unittest.TestCase): # acct cont with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def 
test_copy_non_zero_content_length_with_account(self): req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4362,7 +4362,7 @@ class TestObjectController(unittest.TestCase): # acct cont with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_copy_with_slashes_in_x_copy_from(self): # extra source path parsing @@ -4373,8 +4373,8 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2') def test_copy_with_slashes_in_x_copy_from_and_account(self): # extra source path parsing @@ -4386,9 +4386,9 @@ class TestObjectController(unittest.TestCase): # acct cont acc1 con1 objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') - self.assertEquals(resp.headers['x-copied-from-account'], 'a') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2') + self.assertEqual(resp.headers['x-copied-from-account'], 'a') def test_copy_with_spaces_in_x_copy_from(self): # space in soure path @@ -4399,8 +4399,8 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o%20o2') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o%20o2') def test_copy_with_spaces_in_x_copy_from_and_account(self): # space in soure path @@ -4412,9 +4412,9 @@ class TestObjectController(unittest.TestCase): # acct cont acc1 con1 objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o%20o2') - self.assertEquals(resp.headers['x-copied-from-account'], 'a') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o%20o2') + self.assertEqual(resp.headers['x-copied-from-account'], 'a') def test_copy_with_leading_slash_in_x_copy_from(self): # repeat tests with leading / @@ -4425,8 +4425,8 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o') def test_copy_with_leading_slash_in_x_copy_from_and_account(self): # repeat tests with leading / @@ -4438,9 +4438,9 @@ class TestObjectController(unittest.TestCase): # acct cont acc1 con1 objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') - self.assertEquals(resp.headers['x-copied-from-account'], 'a') + 
self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o') + self.assertEqual(resp.headers['x-copied-from-account'], 'a') def test_copy_with_leading_slash_and_slashes_in_x_copy_from(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4450,8 +4450,8 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2') def test_copy_with_leading_slash_and_slashes_in_x_copy_from_acct(self): req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4462,9 +4462,9 @@ class TestObjectController(unittest.TestCase): # acct cont acc1 con1 objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') - self.assertEquals(resp.headers['x-copied-from-account'], 'a') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2') + self.assertEqual(resp.headers['x-copied-from-account'], 'a') def test_copy_with_no_object_in_x_copy_from(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4476,7 +4476,7 @@ class TestObjectController(unittest.TestCase): try: controller.PUT(req) except HTTPException as resp: - self.assertEquals(resp.status_int // 100, 4) # client error + self.assertEqual(resp.status_int // 100, 4) # client error else: raise self.fail('Invalid X-Copy-From did not raise ' 'client error') @@ -4492,7 +4492,7 @@ class TestObjectController(unittest.TestCase): try: controller.PUT(req) except HTTPException as resp: - self.assertEquals(resp.status_int // 100, 4) # client error + self.assertEqual(resp.status_int // 100, 4) # client error else: raise self.fail('Invalid X-Copy-From did not raise ' 'client error') @@ -4505,7 +4505,7 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 503) + self.assertEqual(resp.status_int, 503) def test_copy_server_error_reading_source_and_account(self): req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4516,7 +4516,7 @@ class TestObjectController(unittest.TestCase): # acct cont acct cont objc objc objc with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 503) + self.assertEqual(resp.status_int, 503) def test_copy_not_found_reading_source(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4527,7 +4527,7 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_copy_not_found_reading_source_and_account(self): req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4539,7 +4539,7 @@ class TestObjectController(unittest.TestCase): # acct cont acct cont objc objc objc with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - 
self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_copy_with_some_missing_sources(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4549,7 +4549,7 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_copy_with_some_missing_sources_and_account(self): req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4560,7 +4560,7 @@ class TestObjectController(unittest.TestCase): # acct cont acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_copy_with_object_metadata(self): req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4572,10 +4572,10 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers.get('x-object-meta-test'), 'testing') - self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay') - self.assertEquals(resp.headers.get('x-delete-at'), '9876543210') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers.get('x-object-meta-test'), 'testing') + self.assertEqual(resp.headers.get('x-object-meta-ours'), 'okay') + self.assertEqual(resp.headers.get('x-delete-at'), '9876543210') def test_copy_with_object_metadata_and_account(self): req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -4588,10 +4588,10 @@ class TestObjectController(unittest.TestCase): # acct cont acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.PUT(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers.get('x-object-meta-test'), 'testing') - self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay') - self.assertEquals(resp.headers.get('x-delete-at'), '9876543210') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers.get('x-object-meta-test'), 'testing') + self.assertEqual(resp.headers.get('x-object-meta-ours'), 'okay') + self.assertEqual(resp.headers.get('x-delete-at'), '9876543210') @_limit_max_file_size def test_copy_source_larger_than_max_file_size(self): @@ -4621,7 +4621,7 @@ class TestObjectController(unittest.TestCase): resp = controller.PUT(req) except HTTPException as resp: pass - self.assertEquals(resp.status_int, 413) + self.assertEqual(resp.status_int, 413) def test_basic_COPY(self): req = Request.blank('/v1/a/c/o', @@ -4631,8 +4631,8 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o') def test_basic_COPY_account(self): req = Request.blank('/v1/a/c/o', @@ -4643,9 +4643,9 @@ class TestObjectController(unittest.TestCase): # acct cont acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - 
self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') - self.assertEquals(resp.headers['x-copied-from-account'], 'a') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o') + self.assertEqual(resp.headers['x-copied-from-account'], 'a') def test_COPY_across_containers(self): req = Request.blank('/v1/a/c/o', @@ -4655,8 +4655,8 @@ class TestObjectController(unittest.TestCase): # acct cont c2 objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o') def test_COPY_source_with_slashes_in_name(self): req = Request.blank('/v1/a/c/o/o2', @@ -4666,8 +4666,8 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2') def test_COPY_account_source_with_slashes_in_name(self): req = Request.blank('/v1/a/c/o/o2', @@ -4678,9 +4678,9 @@ class TestObjectController(unittest.TestCase): # acct cont acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') - self.assertEquals(resp.headers['x-copied-from-account'], 'a') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2') + self.assertEqual(resp.headers['x-copied-from-account'], 'a') def test_COPY_destination_leading_slash(self): req = Request.blank('/v1/a/c/o', @@ -4690,8 +4690,8 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o') def test_COPY_account_destination_leading_slash(self): req = Request.blank('/v1/a/c/o', @@ -4702,9 +4702,9 @@ class TestObjectController(unittest.TestCase): # acct cont acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o') - self.assertEquals(resp.headers['x-copied-from-account'], 'a') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o') + self.assertEqual(resp.headers['x-copied-from-account'], 'a') def test_COPY_source_with_slashes_destination_leading_slash(self): req = Request.blank('/v1/a/c/o/o2', @@ -4714,8 +4714,8 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2') def 
test_COPY_account_source_with_slashes_destination_leading_slash(self): req = Request.blank('/v1/a/c/o/o2', @@ -4726,9 +4726,9 @@ class TestObjectController(unittest.TestCase): # acct cont acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from'], 'c/o/o2') - self.assertEquals(resp.headers['x-copied-from-account'], 'a') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2') + self.assertEqual(resp.headers['x-copied-from-account'], 'a') def test_COPY_no_object_in_destination(self): req = Request.blank('/v1/a/c/o', @@ -4755,7 +4755,7 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 503) + self.assertEqual(resp.status_int, 503) def test_COPY_account_server_error_reading_source(self): req = Request.blank('/v1/a/c/o', @@ -4766,7 +4766,7 @@ class TestObjectController(unittest.TestCase): # acct cont acct cont objc objc objc with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 503) + self.assertEqual(resp.status_int, 503) def test_COPY_not_found_reading_source(self): req = Request.blank('/v1/a/c/o', @@ -4776,7 +4776,7 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_COPY_account_not_found_reading_source(self): req = Request.blank('/v1/a/c/o', @@ -4787,7 +4787,7 @@ class TestObjectController(unittest.TestCase): # acct cont acct cont objc objc objc with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_COPY_with_some_missing_sources(self): req = Request.blank('/v1/a/c/o', @@ -4797,7 +4797,7 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_COPY_account_with_some_missing_sources(self): req = Request.blank('/v1/a/c/o', @@ -4808,7 +4808,7 @@ class TestObjectController(unittest.TestCase): # acct cont acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_COPY_with_metadata(self): req = Request.blank('/v1/a/c/o', @@ -4819,11 +4819,11 @@ class TestObjectController(unittest.TestCase): # acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers.get('x-object-meta-test'), - 'testing') - self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay') - self.assertEquals(resp.headers.get('x-delete-at'), '9876543210') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers.get('x-object-meta-test'), + 'testing') + self.assertEqual(resp.headers.get('x-object-meta-ours'), 'okay') + 
self.assertEqual(resp.headers.get('x-delete-at'), '9876543210') def test_COPY_account_with_metadata(self): req = Request.blank('/v1/a/c/o', @@ -4835,11 +4835,11 @@ class TestObjectController(unittest.TestCase): # acct cont acct cont objc objc objc obj obj obj with self.controller_context(req, *status_list) as controller: resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers.get('x-object-meta-test'), - 'testing') - self.assertEquals(resp.headers.get('x-object-meta-ours'), 'okay') - self.assertEquals(resp.headers.get('x-delete-at'), '9876543210') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers.get('x-object-meta-test'), + 'testing') + self.assertEqual(resp.headers.get('x-object-meta-ours'), 'okay') + self.assertEqual(resp.headers.get('x-delete-at'), '9876543210') @_limit_max_file_size def test_COPY_source_larger_than_max_file_size(self): @@ -4865,7 +4865,7 @@ class TestObjectController(unittest.TestCase): resp = controller.COPY(req) except HTTPException as resp: pass - self.assertEquals(resp.status_int, 413) + self.assertEqual(resp.status_int, 413) @_limit_max_file_size def test_COPY_account_source_larger_than_max_file_size(self): @@ -4892,7 +4892,7 @@ class TestObjectController(unittest.TestCase): resp = controller.COPY(req) except HTTPException as resp: pass - self.assertEquals(resp.status_int, 413) + self.assertEqual(resp.status_int, 413) def test_COPY_newest(self): with save_globals(): @@ -4909,9 +4909,9 @@ class TestObjectController(unittest.TestCase): '4')) self.app.memcache.store = {} resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from-last-modified'], - '3') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from-last-modified'], + '3') def test_COPY_account_newest(self): with save_globals(): @@ -4929,9 +4929,9 @@ class TestObjectController(unittest.TestCase): '4', '4', '4')) self.app.memcache.store = {} resp = controller.COPY(req) - self.assertEquals(resp.status_int, 201) - self.assertEquals(resp.headers['x-copied-from-last-modified'], - '3') + self.assertEqual(resp.status_int, 201) + self.assertEqual(resp.headers['x-copied-from-last-modified'], + '3') def test_COPY_delete_at(self): with save_globals(): @@ -4956,8 +4956,8 @@ class TestObjectController(unittest.TestCase): for method, path, given_headers in backend_requests: if method != 'PUT': continue - self.assertEquals(given_headers.get('X-Delete-At'), - '9876543210') + self.assertEqual(given_headers.get('X-Delete-At'), + '9876543210') self.assertTrue('X-Delete-At-Host' in given_headers) self.assertTrue('X-Delete-At-Device' in given_headers) self.assertTrue('X-Delete-At-Partition' in given_headers) @@ -4987,8 +4987,8 @@ class TestObjectController(unittest.TestCase): for method, path, given_headers in backend_requests: if method != 'PUT': continue - self.assertEquals(given_headers.get('X-Delete-At'), - '9876543210') + self.assertEqual(given_headers.get('X-Delete-At'), + '9876543210') self.assertTrue('X-Delete-At-Host' in given_headers) self.assertTrue('X-Delete-At-Device' in given_headers) self.assertTrue('X-Delete-At-Partition' in given_headers) @@ -5028,7 +5028,7 @@ class TestObjectController(unittest.TestCase): self.app.memcache.store = {} self.app.update_request(req) res = controller.PUT(req) - self.assertEquals(res.status_int // 100, 2) # success + self.assertEqual(res.status_int // 100, 2) # success # test 413 entity to large set_http_connect(201, 201, 
201, 201) @@ -5042,7 +5042,7 @@ class TestObjectController(unittest.TestCase): with mock.patch('swift.common.constraints.MAX_FILE_SIZE', 10): res = controller.PUT(req) - self.assertEquals(res.status_int, 413) + self.assertEqual(res.status_int, 413) @unpatch_policies def test_chunked_put_bad_version(self): @@ -5056,7 +5056,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 412' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) @unpatch_policies def test_chunked_put_bad_path(self): @@ -5070,7 +5070,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 404' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) @unpatch_policies def test_chunked_put_bad_utf8(self): @@ -5085,7 +5085,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 412' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) @unpatch_policies def test_chunked_put_bad_path_no_controller(self): @@ -5100,7 +5100,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 412' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) @unpatch_policies def test_chunked_put_bad_method(self): @@ -5115,7 +5115,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 405' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) @unpatch_policies def test_chunked_put_unhandled_exception(self): @@ -5138,7 +5138,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 500' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) prosrv.update_request = orig_update_request @unpatch_policies @@ -5156,7 +5156,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 204' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) self.assertTrue('\r\nContent-Length: 0\r\n' in headers) @unpatch_policies @@ -5180,7 +5180,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # List account with ustr container (test plain) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5190,7 +5190,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) containers = fd.read().split('\n') self.assertTrue(ustr in containers) # List account with ustr container (test json) @@ -5202,7 +5202,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) listing = json.loads(fd.read()) self.assertTrue(ustr.decode('utf8') in [l['name'] for l in listing]) # List account with ustr container (test xml) @@ -5214,7 +5214,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], 
exp) self.assertTrue('%s' % ustr in fd.read()) # Create ustr object with ustr metadata in ustr container sock = connect_tcp(('localhost', prolis.getsockname()[1])) @@ -5227,7 +5227,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # List ustr container with ustr object (test plain) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5237,7 +5237,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) objects = fd.read().split('\n') self.assertTrue(ustr in objects) # List ustr container with ustr object (test json) @@ -5250,9 +5250,9 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) listing = json.loads(fd.read()) - self.assertEquals(listing[0]['name'], ustr.decode('utf8')) + self.assertEqual(listing[0]['name'], ustr.decode('utf8')) # List ustr container with ustr object (test xml) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5263,7 +5263,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) self.assertTrue('%s' % ustr in fd.read()) # Retrieve ustr object with ustr metadata sock = connect_tcp(('localhost', prolis.getsockname()[1])) @@ -5275,7 +5275,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) self.assertTrue('\r\nX-Object-Meta-%s: %s\r\n' % (quote(ustr_short).lower(), quote(ustr)) in headers) @@ -5296,7 +5296,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # Ensure we get what we put sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5305,9 +5305,9 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) body = fd.read() - self.assertEquals(body, 'oh hai123456789abcdef') + self.assertEqual(body, 'oh hai123456789abcdef') @unpatch_policies def test_version_manifest(self, oc='versions', vc='vers', o='name'): @@ -5331,7 +5331,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # check that the header was set sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5340,7 +5340,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) self.assertTrue('X-Versions-Location: %s' % vc in headers) # make the container for the object versions sock = connect_tcp(('localhost', prolis.getsockname()[1])) @@ -5351,7 +5351,7 @@ 
class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # Create the versioned file sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5362,7 +5362,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # Create the object versions for segment in range(1, versions_to_create): sleep(.01) # guarantee that the timestamp changes @@ -5375,7 +5375,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # Ensure retrieving the manifest file gets the latest version sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5385,12 +5385,12 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) self.assertTrue( 'Content-Type: text/jibberish%s' % segment in headers) self.assertTrue('X-Object-Meta-Foo: barbaz' not in headers) body = fd.read() - self.assertEquals(body, '%05d' % segment) + self.assertEqual(body, '%05d' % segment) # Ensure we have the right number of versions saved sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5400,10 +5400,10 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) body = fd.read() versions = [x for x in body.split('\n') if x] - self.assertEquals(len(versions), versions_to_create - 1) + self.assertEqual(len(versions), versions_to_create - 1) # copy a version and make sure the version info is stripped sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5414,7 +5414,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response to the COPY - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('GET /v1/a/%s/copied_name HTTP/1.1\r\nHost: ' @@ -5423,9 +5423,9 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) body = fd.read() - self.assertEquals(body, '%05d' % segment) + self.assertEqual(body, '%05d' % segment) # post and make sure it's updated sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5436,7 +5436,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response to the POST - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: ' @@ -5445,11 +5445,11 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + 
self.assertEqual(headers[:len(exp)], exp) self.assertTrue('Content-Type: foo/bar' in headers) self.assertTrue('X-Object-Meta-Bar: foo' in headers) body = fd.read() - self.assertEquals(body, '%05d' % segment) + self.assertEqual(body, '%05d' % segment) # Delete the object versions for segment in range(versions_to_create - 1, 0, -1): sock = connect_tcp(('localhost', prolis.getsockname()[1])) @@ -5460,7 +5460,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # Ensure retrieving the manifest file gets the latest version sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5470,11 +5470,11 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) self.assertTrue('Content-Type: text/jibberish%s' % (segment - 1) in headers) body = fd.read() - self.assertEquals(body, '%05d' % (segment - 1)) + self.assertEqual(body, '%05d' % (segment - 1)) # Ensure we have the right number of versions saved sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5484,10 +5484,10 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) body = fd.read() versions = [x for x in body.split('\n') if x] - self.assertEquals(len(versions), segment - 1) + self.assertEqual(len(versions), segment - 1) # there is now one segment left (in the manifest) # Ensure we have no saved versions sock = connect_tcp(('localhost', prolis.getsockname()[1])) @@ -5498,7 +5498,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 204 No Content' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # delete the last verision sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5507,7 +5507,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # Ensure it's all gone sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5517,7 +5517,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 404' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # make sure dlo manifest files don't get versioned for _junk in range(1, versions_to_create): @@ -5533,7 +5533,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # Ensure we have no saved versions sock = connect_tcp(('localhost', prolis.getsockname()[1])) @@ -5544,7 +5544,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 204 No Content' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # DELETE v1/a/c/obj shouldn't delete v1/a/c/obj/sub versions sock = connect_tcp(('localhost', prolis.getsockname()[1])) @@ -5556,7 +5556,7 @@ class 
TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' @@ -5566,7 +5566,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' @@ -5576,7 +5576,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' @@ -5586,7 +5586,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' @@ -5594,7 +5594,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' @@ -5603,10 +5603,10 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx series response - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) body = fd.read() versions = [x for x in body.split('\n') if x] - self.assertEquals(len(versions), 1) + self.assertEqual(len(versions), 1) # Check for when the versions target container doesn't exist sock = connect_tcp(('localhost', prolis.getsockname()[1])) @@ -5617,7 +5617,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # Create the versioned file sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5627,7 +5627,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # Create another version sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5637,7 +5637,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 412' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # Delete the object sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() @@ -5646,7 +5646,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 2' # 2xx response - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) @unpatch_policies def test_version_manifest_utf8(self): @@ -5711,7 +5711,7 @@ class 
TestObjectController(unittest.TestCase): fd.flush() exp = 'HTTP/1.1 201' headers = readuntil2crlfs(fd) - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # put an object in it sock = connect_tcp(('localhost', prolis.getsockname()[1])) @@ -5727,7 +5727,7 @@ class TestObjectController(unittest.TestCase): fd.flush() exp = 'HTTP/1.1 201' headers = readuntil2crlfs(fd) - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) # request with both If-None-Match and Range etag = md5("abcdefghij").hexdigest() @@ -5743,7 +5743,7 @@ class TestObjectController(unittest.TestCase): fd.flush() exp = 'HTTP/1.1 304' headers = readuntil2crlfs(fd) - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) def test_mismatched_etags(self): with save_globals(): @@ -5759,7 +5759,7 @@ class TestObjectController(unittest.TestCase): '68b329da9893e34099c7d8ad5cb9c940', '68b329da9893e34099c7d8ad5cb9c941']) resp = controller.PUT(req) - self.assertEquals(resp.status_int // 100, 5) # server error + self.assertEqual(resp.status_int // 100, 5) # server error # req supplies etag, object servers return 422 - mismatch headers = {'Content-Length': '0', @@ -5773,7 +5773,7 @@ class TestObjectController(unittest.TestCase): None, None]) resp = controller.PUT(req) - self.assertEquals(resp.status_int // 100, 4) # client error + self.assertEqual(resp.status_int // 100, 4) # client error def test_response_get_accept_ranges_header(self): with save_globals(): @@ -5784,7 +5784,7 @@ class TestObjectController(unittest.TestCase): set_http_connect(200, 200, 200) resp = controller.GET(req) self.assertTrue('accept-ranges' in resp.headers) - self.assertEquals(resp.headers['accept-ranges'], 'bytes') + self.assertEqual(resp.headers['accept-ranges'], 'bytes') def test_response_head_accept_ranges_header(self): with save_globals(): @@ -5796,7 +5796,7 @@ class TestObjectController(unittest.TestCase): set_http_connect(200, 200, 200) resp = controller.HEAD(req) self.assertTrue('accept-ranges' in resp.headers) - self.assertEquals(resp.headers['accept-ranges'], 'bytes') + self.assertEqual(resp.headers['accept-ranges'], 'bytes') def test_GET_calls_authorize(self): called = [False] @@ -5918,9 +5918,9 @@ class TestObjectController(unittest.TestCase): 'X-Delete-After': '60'}) self.app.update_request(req) res = controller.POST(req) - self.assertEquals(res.status, '202 Fake') - self.assertEquals(req.headers.get('x-delete-at'), - str(int(t + 60))) + self.assertEqual(res.status, '202 Fake') + self.assertEqual(req.headers.get('x-delete-at'), + str(int(t + 60))) finally: time.time = orig_time @@ -5935,11 +5935,11 @@ class TestObjectController(unittest.TestCase): def test_connect(ipaddr, port, device, partition, method, path, headers=None, query_string=None): if method == "HEAD": - self.assertEquals(path, '/a/c/o.jpg') + self.assertEqual(path, '/a/c/o.jpg') self.assertNotEquals(None, headers['X-Backend-Storage-Policy-Index']) - self.assertEquals(1, int(headers - ['X-Backend-Storage-Policy-Index'])) + self.assertEqual(1, int(headers + ['X-Backend-Storage-Policy-Index'])) def fake_container_info(account, container, req): return {'status': 200, 'sync_key': None, 'storage_policy': '1', @@ -5971,7 +5971,7 @@ class TestObjectController(unittest.TestCase): self.app.update_request(req) self.app.memcache.store = {} res = controller.PUT(req) - self.assertEquals(201, res.status_int) + self.assertEqual(201, res.status_int) @patch_policies([ StoragePolicy(0, 'zero', False, 
object_ring=FakeRing()), @@ -6026,7 +6026,7 @@ class TestObjectController(unittest.TestCase): with mock.patch('swift.proxy.controllers.base.get_info', fake_container_info): resp = self.app.handle_request(req) - self.assertEquals(200, resp.status_int) + self.assertEqual(200, resp.status_int) expected = [('GET', '/a/c-versions')] * 2 + \ [('GET', '/a/c-versions/old_version')] * 3 + \ [('PUT', '/a/c/current_version')] * 3 + \ @@ -6104,7 +6104,7 @@ class TestObjectController(unittest.TestCase): gc.collect() else: sleep(0) - self.assertEquals( + self.assertEqual( before_request_instances, len(_request_instances)) def test_OPTIONS(self): @@ -6121,7 +6121,7 @@ class TestObjectController(unittest.TestCase): headers={'Origin': 'http://foo.com', 'Access-Control-Request-Method': 'GET'}) resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) + self.assertEqual(401, resp.status_int) def my_empty_origin_container_info(*args): return {'cors': {'allow_origin': None}} @@ -6132,7 +6132,7 @@ class TestObjectController(unittest.TestCase): headers={'Origin': 'http://foo.com', 'Access-Control-Request-Method': 'GET'}) resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) + self.assertEqual(401, resp.status_int) def my_container_info(*args): return { @@ -6149,38 +6149,38 @@ class TestObjectController(unittest.TestCase): 'Access-Control-Request-Method': 'GET'}) req.content_length = 0 resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - self.assertEquals( + self.assertEqual(200, resp.status_int) + self.assertEqual( 'https://foo.bar', resp.headers['access-control-allow-origin']) for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split(): self.assertTrue( verb in resp.headers['access-control-allow-methods']) - self.assertEquals( + self.assertEqual( len(resp.headers['access-control-allow-methods'].split(', ')), 7) - self.assertEquals('999', resp.headers['access-control-max-age']) + self.assertEqual('999', resp.headers['access-control-max-age']) req = Request.blank( '/v1/a/c/o.jpg', {'REQUEST_METHOD': 'OPTIONS'}, headers={'Origin': 'https://foo.bar'}) req.content_length = 0 resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) + self.assertEqual(401, resp.status_int) req = Request.blank('/v1/a/c/o.jpg', {'REQUEST_METHOD': 'OPTIONS'}) req.content_length = 0 resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) + self.assertEqual(200, resp.status_int) for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split(): self.assertTrue( verb in resp.headers['Allow']) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 7) + self.assertEqual(len(resp.headers['Allow'].split(', ')), 7) req = Request.blank( '/v1/a/c/o.jpg', {'REQUEST_METHOD': 'OPTIONS'}, headers={'Origin': 'http://foo.com'}) resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) + self.assertEqual(401, resp.status_int) req = Request.blank( '/v1/a/c/o.jpg', {'REQUEST_METHOD': 'OPTIONS'}, @@ -6188,7 +6188,7 @@ class TestObjectController(unittest.TestCase): 'Access-Control-Request-Method': 'GET'}) controller.app.cors_allow_origin = ['http://foo.bar', ] resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) + self.assertEqual(200, resp.status_int) def my_container_info_wildcard(*args): return { @@ -6205,15 +6205,15 @@ class TestObjectController(unittest.TestCase): 'Access-Control-Request-Method': 'GET'}) req.content_length = 0 resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - self.assertEquals('*', 
resp.headers['access-control-allow-origin']) + self.assertEqual(200, resp.status_int) + self.assertEqual('*', resp.headers['access-control-allow-origin']) for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split(): self.assertTrue( verb in resp.headers['access-control-allow-methods']) - self.assertEquals( + self.assertEqual( len(resp.headers['access-control-allow-methods'].split(', ')), 7) - self.assertEquals('999', resp.headers['access-control-max-age']) + self.assertEqual('999', resp.headers['access-control-max-age']) def test_CORS_valid(self): with save_globals(): @@ -6242,12 +6242,12 @@ class TestObjectController(unittest.TestCase): resp = cors_validation(objectGET)(controller, req) - self.assertEquals(200, resp.status_int) - self.assertEquals('http://foo.bar', - resp.headers['access-control-allow-origin']) - self.assertEquals('red', resp.headers['x-object-meta-color']) + self.assertEqual(200, resp.status_int) + self.assertEqual('http://foo.bar', + resp.headers['access-control-allow-origin']) + self.assertEqual('red', resp.headers['x-object-meta-color']) # X-Super-Secret is in the response, but not "exposed" - self.assertEquals('hush', resp.headers['x-super-secret']) + self.assertEqual('hush', resp.headers['x-super-secret']) self.assertTrue('access-control-expose-headers' in resp.headers) exposed = set( h.strip() for h in @@ -6256,7 +6256,7 @@ class TestObjectController(unittest.TestCase): 'content-type', 'expires', 'last-modified', 'pragma', 'etag', 'x-timestamp', 'x-trans-id', 'x-object-meta-color']) - self.assertEquals(expected_exposed, exposed) + self.assertEqual(expected_exposed, exposed) controller.app.strict_cors_mode = True req = Request.blank( @@ -6266,7 +6266,7 @@ class TestObjectController(unittest.TestCase): resp = cors_validation(objectGET)(controller, req) - self.assertEquals(200, resp.status_int) + self.assertEqual(200, resp.status_int) self.assertTrue('access-control-allow-origin' not in resp.headers) def test_CORS_valid_with_obj_headers(self): @@ -6297,11 +6297,11 @@ class TestObjectController(unittest.TestCase): resp = cors_validation(objectGET)(controller, req) - self.assertEquals(200, resp.status_int) - self.assertEquals('http://obj.origin', - resp.headers['access-control-allow-origin']) - self.assertEquals('x-trans-id', - resp.headers['access-control-expose-headers']) + self.assertEqual(200, resp.status_int) + self.assertEqual('http://obj.origin', + resp.headers['access-control-allow-origin']) + self.assertEqual('x-trans-id', + resp.headers['access-control-expose-headers']) def _gather_x_container_headers(self, controller_call, req, *connect_args, **kwargs): @@ -7191,14 +7191,14 @@ class TestContainerController(unittest.TestCase): 'Content-Type': 'text/plain'}) self.app.update_request(req) res = method(req) - self.assertEquals(res.status_int, expected) + self.assertEqual(res.status_int, expected) set_http_connect(*statuses, **kwargs) self.app.memcache.store = {} req = Request.blank('/v1/a/c/', headers={'Content-Length': '0', 'Content-Type': 'text/plain'}) self.app.update_request(req) res = method(req) - self.assertEquals(res.status_int, expected) + self.assertEqual(res.status_int, expected) def test_HEAD_GET(self): with save_globals(): @@ -7211,22 +7211,22 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/v1/a/c', {}) self.app.update_request(req) res = controller.HEAD(req) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) + self.assertEqual(res.status[:len(str(expected))], + str(expected)) if expected < 400: 
self.assertTrue('x-works' in res.headers) - self.assertEquals(res.headers['x-works'], 'yes') + self.assertEqual(res.headers['x-works'], 'yes') if c_expected: self.assertTrue('swift.container/a/c' in res.environ) - self.assertEquals( + self.assertEqual( res.environ['swift.container/a/c']['status'], c_expected) else: self.assertTrue('swift.container/a/c' not in res.environ) if a_expected: self.assertTrue('swift.account/a' in res.environ) - self.assertEquals(res.environ['swift.account/a']['status'], - a_expected) + self.assertEqual(res.environ['swift.account/a']['status'], + a_expected) else: self.assertTrue('swift.account/a' not in res.environ) @@ -7235,22 +7235,22 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/v1/a/c', {}) self.app.update_request(req) res = controller.GET(req) - self.assertEquals(res.status[:len(str(expected))], - str(expected)) + self.assertEqual(res.status[:len(str(expected))], + str(expected)) if expected < 400: self.assertTrue('x-works' in res.headers) - self.assertEquals(res.headers['x-works'], 'yes') + self.assertEqual(res.headers['x-works'], 'yes') if c_expected: self.assertTrue('swift.container/a/c' in res.environ) - self.assertEquals( + self.assertEqual( res.environ['swift.container/a/c']['status'], c_expected) else: self.assertTrue('swift.container/a/c' not in res.environ) if a_expected: self.assertTrue('swift.account/a' in res.environ) - self.assertEquals(res.environ['swift.account/a']['status'], - a_expected) + self.assertEqual(res.environ['swift.account/a']['status'], + a_expected) else: self.assertTrue('swift.account/a' not in res.environ) # In all the following tests cache 200 for account @@ -7296,14 +7296,14 @@ class TestContainerController(unittest.TestCase): expected_policy = POLICIES.default res = req.get_response(self.app) if expected_policy.is_deprecated: - self.assertEquals(res.status_int, 400) + self.assertEqual(res.status_int, 400) self.assertEqual(0, len(backend_requests)) expected = 'is deprecated' self.assertTrue(expected in res.body, '%r did not include %r' % ( res.body, expected)) return - self.assertEquals(res.status_int, 201) + self.assertEqual(res.status_int, 201) self.assertEqual( expected_policy.object_ring.replicas, len(backend_requests)) @@ -7343,7 +7343,7 @@ class TestContainerController(unittest.TestCase): self.app.update_request(req) res = controller.PUT(req) expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) + self.assertEqual(res.status[:len(expected)], expected) test_status_map((200, 201, 201, 201), 201, missing_container=True) test_status_map((200, 201, 201, 500), 201, missing_container=True) @@ -7403,7 +7403,7 @@ class TestContainerController(unittest.TestCase): self.app.update_request(req) res = controller.PUT(req) expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) + self.assertEqual(res.status[:len(expected)], expected) self.app.account_autocreate = True calls = [] @@ -7442,7 +7442,7 @@ class TestContainerController(unittest.TestCase): self.app.update_request(req) res = controller.POST(req) expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) + self.assertEqual(res.status[:len(expected)], expected) test_status_map((200, 201, 201, 201), 201, missing_container=True) test_status_map((200, 201, 201, 500), 201, missing_container=True) @@ -7525,28 +7525,28 @@ class TestContainerController(unittest.TestCase): environ={'REQUEST_METHOD': meth}) self.app.update_request(req) resp = getattr(controller, meth)(req) - 
self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) set_http_connect(404, 404, 404, 200, 200, 200) # Make sure it is a blank request wthout env caching req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': meth}) resp = getattr(controller, meth)(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) set_http_connect(503, 404, 404) # Make sure it is a blank request wthout env caching req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': meth}) resp = getattr(controller, meth)(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) set_http_connect(503, 404, raise_exc=True) # Make sure it is a blank request wthout env caching req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': meth}) resp = getattr(controller, meth)(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) for dev in self.app.account_ring.devs: set_node_errors(self.app, dev, @@ -7557,7 +7557,7 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': meth}) resp = getattr(controller, meth)(req) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_put_locking(self): @@ -7583,7 +7583,7 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'PUT'}) self.app.update_request(req) res = controller.PUT(req) - self.assertEquals(res.status_int, 201) + self.assertEqual(res.status_int, 201) def test_error_limiting(self): with save_globals(): @@ -7594,7 +7594,7 @@ class TestContainerController(unittest.TestCase): self.assert_status_map(controller.HEAD, (200, 503, 200, 200), 200, missing_container=False) - self.assertEquals( + self.assertEqual( node_error_count(controller.app, container_ring.devs[0]), 2) self.assertTrue( node_last_error(controller.app, container_ring.devs[0]) @@ -7602,7 +7602,7 @@ class TestContainerController(unittest.TestCase): for _junk in range(self.app.error_suppression_limit): self.assert_status_map(controller.HEAD, (200, 503, 503, 503), 503) - self.assertEquals( + self.assertEqual( node_error_count(controller.app, container_ring.devs[0]), self.app.error_suppression_limit + 1) self.assert_status_map(controller.HEAD, (200, 200, 200, 200), 503) @@ -7705,7 +7705,7 @@ class TestContainerController(unittest.TestCase): headers={test_header: test_value}) self.app.update_request(req) getattr(controller, method)(req) - self.assertEquals(test_errors, []) + self.assertEqual(test_errors, []) def test_PUT_bad_metadata(self): self.bad_metadata_helper('PUT') @@ -7720,7 +7720,7 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, @@ -7728,7 +7728,7 @@ class TestContainerController(unittest.TestCase): ('a' * constraints.MAX_META_NAME_LENGTH): 'v'}) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) req = Request.blank( '/v1/a/c', environ={'REQUEST_METHOD': method}, @@ -7736,7 +7736,7 @@ class TestContainerController(unittest.TestCase): ('a' * (constraints.MAX_META_NAME_LENGTH + 1)): 'v'}) 
self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) set_http_connect(201, 201, 201) req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, @@ -7744,14 +7744,14 @@ class TestContainerController(unittest.TestCase): 'a' * constraints.MAX_META_VALUE_LENGTH}) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, headers={'X-Container-Meta-Too-Long': 'a' * (constraints.MAX_META_VALUE_LENGTH + 1)}) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) set_http_connect(201, 201, 201) headers = {} @@ -7761,7 +7761,7 @@ class TestContainerController(unittest.TestCase): headers=headers) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) headers = {} for x in range(constraints.MAX_META_COUNT + 1): @@ -7770,7 +7770,7 @@ class TestContainerController(unittest.TestCase): headers=headers) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) set_http_connect(201, 201, 201) headers = {} @@ -7789,7 +7789,7 @@ class TestContainerController(unittest.TestCase): headers=headers) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) headers['X-Container-Meta-a'] = \ 'a' * (constraints.MAX_META_OVERALL_SIZE - size) @@ -7797,7 +7797,7 @@ class TestContainerController(unittest.TestCase): headers=headers) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_POST_calls_clean_acl(self): called = [False] @@ -7863,10 +7863,10 @@ class TestContainerController(unittest.TestCase): req = Request.blank('/v1/a/c') self.app.update_request(req) res = controller.GET(req) - self.assertEquals(res.status_int, 204) - self.assertEquals( + self.assertEqual(res.status_int, 204) + self.assertEqual( res.environ['swift.container/a/c']['status'], 204) - self.assertEquals(res.content_length, 0) + self.assertEqual(res.content_length, 0) self.assertTrue('transfer-encoding' not in res.headers) def test_GET_calls_authorize(self): @@ -7883,7 +7883,7 @@ class TestContainerController(unittest.TestCase): req.environ['swift.authorize'] = authorize self.app.update_request(req) res = controller.GET(req) - self.assertEquals(res.environ['swift.container/a/c']['status'], 201) + self.assertEqual(res.environ['swift.container/a/c']['status'], 201) self.assertTrue(called[0]) def test_HEAD_calls_authorize(self): @@ -8014,7 +8014,7 @@ class TestContainerController(unittest.TestCase): headers={'Origin': 'http://foo.com', 'Access-Control-Request-Method': 'GET'}) resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) + self.assertEqual(401, resp.status_int) def my_empty_origin_container_info(*args): return {'cors': {'allow_origin': None}} @@ -8025,7 +8025,7 @@ class TestContainerController(unittest.TestCase): headers={'Origin': 'http://foo.com', 'Access-Control-Request-Method': 'GET'}) 
resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) + self.assertEqual(401, resp.status_int) def my_container_info(*args): return { @@ -8042,39 +8042,39 @@ class TestContainerController(unittest.TestCase): 'Access-Control-Request-Method': 'GET'}) req.content_length = 0 resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - self.assertEquals( + self.assertEqual(200, resp.status_int) + self.assertEqual( 'https://foo.bar', resp.headers['access-control-allow-origin']) for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split(): self.assertTrue( verb in resp.headers['access-control-allow-methods']) - self.assertEquals( + self.assertEqual( len(resp.headers['access-control-allow-methods'].split(', ')), 6) - self.assertEquals('999', resp.headers['access-control-max-age']) + self.assertEqual('999', resp.headers['access-control-max-age']) req = Request.blank( '/v1/a/c', {'REQUEST_METHOD': 'OPTIONS'}, headers={'Origin': 'https://foo.bar'}) req.content_length = 0 resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) + self.assertEqual(401, resp.status_int) req = Request.blank('/v1/a/c', {'REQUEST_METHOD': 'OPTIONS'}) req.content_length = 0 resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) + self.assertEqual(200, resp.status_int) for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split(): self.assertTrue( verb in resp.headers['Allow']) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 6) + self.assertEqual(len(resp.headers['Allow'].split(', ')), 6) req = Request.blank( '/v1/a/c', {'REQUEST_METHOD': 'OPTIONS'}, headers={'Origin': 'http://foo.bar', 'Access-Control-Request-Method': 'GET'}) resp = controller.OPTIONS(req) - self.assertEquals(401, resp.status_int) + self.assertEqual(401, resp.status_int) req = Request.blank( '/v1/a/c', {'REQUEST_METHOD': 'OPTIONS'}, @@ -8082,7 +8082,7 @@ class TestContainerController(unittest.TestCase): 'Access-Control-Request-Method': 'GET'}) controller.app.cors_allow_origin = ['http://foo.bar', ] resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) + self.assertEqual(200, resp.status_int) def my_container_info_wildcard(*args): return { @@ -8099,15 +8099,15 @@ class TestContainerController(unittest.TestCase): 'Access-Control-Request-Method': 'GET'}) req.content_length = 0 resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - self.assertEquals('*', resp.headers['access-control-allow-origin']) + self.assertEqual(200, resp.status_int) + self.assertEqual('*', resp.headers['access-control-allow-origin']) for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split(): self.assertTrue( verb in resp.headers['access-control-allow-methods']) - self.assertEquals( + self.assertEqual( len(resp.headers['access-control-allow-methods'].split(', ')), 6) - self.assertEquals('999', resp.headers['access-control-max-age']) + self.assertEqual('999', resp.headers['access-control-max-age']) req = Request.blank( '/v1/a/c/o.jpg', @@ -8119,8 +8119,8 @@ class TestContainerController(unittest.TestCase): ) req.content_length = 0 resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) - self.assertEquals( + self.assertEqual(200, resp.status_int) + self.assertEqual( sortHeaderNames('x-foo, x-bar, x-auth-token'), sortHeaderNames(resp.headers['access-control-allow-headers'])) @@ -8149,12 +8149,12 @@ class TestContainerController(unittest.TestCase): resp = cors_validation(containerGET)(controller, req) - self.assertEquals(200, resp.status_int) - 
self.assertEquals('http://foo.bar', - resp.headers['access-control-allow-origin']) - self.assertEquals('red', resp.headers['x-container-meta-color']) + self.assertEqual(200, resp.status_int) + self.assertEqual('http://foo.bar', + resp.headers['access-control-allow-origin']) + self.assertEqual('red', resp.headers['x-container-meta-color']) # X-Super-Secret is in the response, but not "exposed" - self.assertEquals('hush', resp.headers['x-super-secret']) + self.assertEqual('hush', resp.headers['x-super-secret']) self.assertTrue('access-control-expose-headers' in resp.headers) exposed = set( h.strip() for h in @@ -8163,7 +8163,7 @@ class TestContainerController(unittest.TestCase): 'content-type', 'expires', 'last-modified', 'pragma', 'etag', 'x-timestamp', 'x-trans-id', 'x-container-meta-color']) - self.assertEquals(expected_exposed, exposed) + self.assertEqual(expected_exposed, exposed) def _gather_x_account_headers(self, controller_call, req, *connect_args, **kwargs): @@ -8350,18 +8350,18 @@ class TestAccountController(unittest.TestCase): req = Request.blank('/v1/a', {}, headers=headers) self.app.update_request(req) res = method(req) - self.assertEquals(res.status_int, expected) + self.assertEqual(res.status_int, expected) if env_expected: - self.assertEquals(res.environ['swift.account/a']['status'], - env_expected) + self.assertEqual(res.environ['swift.account/a']['status'], + env_expected) set_http_connect(*statuses) req = Request.blank('/v1/a/', {}) self.app.update_request(req) res = method(req) - self.assertEquals(res.status_int, expected) + self.assertEqual(res.status_int, expected) if env_expected: - self.assertEquals(res.environ['swift.account/a']['status'], - env_expected) + self.assertEqual(res.environ['swift.account/a']['status'], + env_expected) def test_OPTIONS(self): with save_globals(): @@ -8370,11 +8370,11 @@ class TestAccountController(unittest.TestCase): req = Request.blank('/v1/account', {'REQUEST_METHOD': 'OPTIONS'}) req.content_length = 0 resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) + self.assertEqual(200, resp.status_int) for verb in 'OPTIONS GET POST HEAD'.split(): self.assertTrue( verb in resp.headers['Allow']) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 4) + self.assertEqual(len(resp.headers['Allow'].split(', ')), 4) # Test a CORS OPTIONS request (i.e. 
including Origin and # Access-Control-Request-Method headers) @@ -8386,22 +8386,22 @@ class TestAccountController(unittest.TestCase): 'Access-Control-Request-Method': 'GET'}) req.content_length = 0 resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) + self.assertEqual(200, resp.status_int) for verb in 'OPTIONS GET POST HEAD'.split(): self.assertTrue( verb in resp.headers['Allow']) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 4) + self.assertEqual(len(resp.headers['Allow'].split(', ')), 4) self.app.allow_account_management = True controller = proxy_server.AccountController(self.app, 'account') req = Request.blank('/v1/account', {'REQUEST_METHOD': 'OPTIONS'}) req.content_length = 0 resp = controller.OPTIONS(req) - self.assertEquals(200, resp.status_int) + self.assertEqual(200, resp.status_int) for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split(): self.assertTrue( verb in resp.headers['Allow']) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 6) + self.assertEqual(len(resp.headers['Allow'].split(', ')), 6) def test_GET(self): with save_globals(): @@ -8542,7 +8542,7 @@ class TestAccountController(unittest.TestCase): req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'}) self.app.update_request(req) resp = controller.HEAD(req) - self.assertEquals(resp.status_int, 503) + self.assertEqual(resp.status_int, 503) def test_other_socket_error(self): self.app.account_ring.get_nodes('account') @@ -8553,7 +8553,7 @@ class TestAccountController(unittest.TestCase): req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'}) self.app.update_request(req) resp = controller.HEAD(req) - self.assertEquals(resp.status_int, 503) + self.assertEqual(resp.status_int, 503) def test_response_get_accept_ranges_header(self): with save_globals(): @@ -8588,7 +8588,7 @@ class TestAccountController(unittest.TestCase): self.app.update_request(req) res = controller.PUT(req) expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) + self.assertEqual(res.status[:len(expected)], expected) test_status_map((201, 201, 201), 405) self.app.allow_account_management = True test_status_map((201, 201, 201), 201) @@ -8659,7 +8659,7 @@ class TestAccountController(unittest.TestCase): headers={test_header: test_value}) self.app.update_request(req) getattr(controller, method)(req) - self.assertEquals(test_errors, []) + self.assertEqual(test_errors, []) def test_PUT_bad_metadata(self): self.bad_metadata_helper('PUT') @@ -8675,7 +8675,7 @@ class TestAccountController(unittest.TestCase): req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, @@ -8683,7 +8683,7 @@ class TestAccountController(unittest.TestCase): ('a' * constraints.MAX_META_NAME_LENGTH): 'v'}) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) req = Request.blank( '/v1/a/c', environ={'REQUEST_METHOD': method}, @@ -8691,7 +8691,7 @@ class TestAccountController(unittest.TestCase): ('a' * (constraints.MAX_META_NAME_LENGTH + 1)): 'v'}) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) 
set_http_connect(201, 201, 201) req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, @@ -8699,14 +8699,14 @@ class TestAccountController(unittest.TestCase): 'a' * constraints.MAX_META_VALUE_LENGTH}) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method}, headers={'X-Account-Meta-Too-Long': 'a' * (constraints.MAX_META_VALUE_LENGTH + 1)}) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) set_http_connect(201, 201, 201) headers = {} @@ -8716,7 +8716,7 @@ class TestAccountController(unittest.TestCase): headers=headers) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) headers = {} for x in range(constraints.MAX_META_COUNT + 1): @@ -8725,7 +8725,7 @@ class TestAccountController(unittest.TestCase): headers=headers) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) set_http_connect(201, 201, 201) headers = {} @@ -8744,7 +8744,7 @@ class TestAccountController(unittest.TestCase): headers=headers) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) set_http_connect(201, 201, 201) headers['X-Account-Meta-a'] = \ 'a' * (constraints.MAX_META_OVERALL_SIZE - size) @@ -8752,7 +8752,7 @@ class TestAccountController(unittest.TestCase): headers=headers) self.app.update_request(req) resp = getattr(controller, method)(req) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_DELETE(self): with save_globals(): @@ -8766,7 +8766,7 @@ class TestAccountController(unittest.TestCase): self.app.update_request(req) res = controller.DELETE(req) expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) + self.assertEqual(res.status[:len(expected)], expected) test_status_map((201, 201, 201), 405) self.app.allow_account_management = True test_status_map((201, 201, 201), 201) @@ -8790,7 +8790,7 @@ class TestAccountController(unittest.TestCase): self.app.update_request(req) res = controller.DELETE(req) expected = str(expected) - self.assertEquals(res.status[:len(expected)], expected) + self.assertEqual(res.status[:len(expected)], expected) test_status_map((201, 201, 201), 400) self.app.allow_account_management = True test_status_map((201, 201, 201), 400) From 1952451ed7e97e2bfa3b11dfa5000cc39f712084 Mon Sep 17 00:00:00 2001 From: Zhao Lei Date: Fri, 7 Aug 2015 21:57:08 +0800 Subject: [PATCH 26/70] Fix a spelling typo in comment s/automaticaly/automatically/ for swift/proxy/controllers/obj.py Change-Id: I405441383739637e0b13746d6f4e2bad1c874b4d Signed-off-by: Zhao Lei --- swift/proxy/controllers/obj.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/proxy/controllers/obj.py b/swift/proxy/controllers/obj.py index e86b35debe..a1563c81db 100644 --- a/swift/proxy/controllers/obj.py +++ b/swift/proxy/controllers/obj.py @@ -834,7 +834,7 @@ class BaseObjectController(Controller): data_source = iter(lambda: reader(self.app.client_chunk_size), '') update_response = lambda req, resp: resp - # check if object is set 
to be automaticaly deleted (i.e. expired) + # check if object is set to be automatically deleted (i.e. expired) req, delete_at_container, delete_at_part, \ delete_at_nodes = self._config_obj_expiration(req) From 9456af35a2832aeafc4a4e78ebd0d4142ead71cd Mon Sep 17 00:00:00 2001 From: janonymous Date: Thu, 6 Aug 2015 00:55:36 +0530 Subject: [PATCH 27/70] pep8 fix: assertEquals -> assertEqual assertEquals is deprecated in py3,changes in dir: *test/unit/obj/* *test/unit/test_locale/* Change-Id: I3dd0c1107165ac529f1cd967363e5cf408a1d02b --- test/unit/obj/test_auditor.py | 88 +-- test/unit/obj/test_diskfile.py | 90 +-- test/unit/obj/test_reconstructor.py | 2 +- test/unit/obj/test_replicator.py | 362 +++++------ test/unit/obj/test_server.py | 918 +++++++++++++-------------- test/unit/obj/test_ssync_receiver.py | 14 +- test/unit/obj/test_ssync_sender.py | 42 +- test/unit/obj/test_updater.py | 14 +- test/unit/test_locale/test_locale.py | 2 +- 9 files changed, 766 insertions(+), 766 deletions(-) diff --git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index 3454d3cad2..eb36edf500 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -84,8 +84,8 @@ class TestAuditor(unittest.TestCase): def test_worker_conf_parms(self): def check_common_defaults(): - self.assertEquals(auditor_worker.max_bytes_per_second, 10000000) - self.assertEquals(auditor_worker.log_time, 3600) + self.assertEqual(auditor_worker.max_bytes_per_second, 10000000) + self.assertEqual(auditor_worker.log_time, 3600) # test default values conf = dict( @@ -95,9 +95,9 @@ class TestAuditor(unittest.TestCase): auditor_worker = auditor.AuditorWorker(conf, self.logger, self.rcache, self.devices) check_common_defaults() - self.assertEquals(auditor_worker.diskfile_mgr.disk_chunk_size, 65536) - self.assertEquals(auditor_worker.max_files_per_second, 20) - self.assertEquals(auditor_worker.zero_byte_only_at_fps, 0) + self.assertEqual(auditor_worker.diskfile_mgr.disk_chunk_size, 65536) + self.assertEqual(auditor_worker.max_files_per_second, 20) + self.assertEqual(auditor_worker.zero_byte_only_at_fps, 0) # test specified audit value overrides conf.update({'disk_chunk_size': 4096}) @@ -105,9 +105,9 @@ class TestAuditor(unittest.TestCase): self.rcache, self.devices, zero_byte_only_at_fps=50) check_common_defaults() - self.assertEquals(auditor_worker.diskfile_mgr.disk_chunk_size, 4096) - self.assertEquals(auditor_worker.max_files_per_second, 50) - self.assertEquals(auditor_worker.zero_byte_only_at_fps, 50) + self.assertEqual(auditor_worker.diskfile_mgr.disk_chunk_size, 4096) + self.assertEqual(auditor_worker.max_files_per_second, 50) + self.assertEqual(auditor_worker.zero_byte_only_at_fps, 50) def test_object_audit_extra_data(self): def run_tests(disk_file): @@ -131,15 +131,15 @@ class TestAuditor(unittest.TestCase): auditor_worker.object_audit( AuditLocation(disk_file._datadir, 'sda', '0', policy=POLICIES.legacy)) - self.assertEquals(auditor_worker.quarantines, pre_quarantines) + self.assertEqual(auditor_worker.quarantines, pre_quarantines) os.write(writer._fd, 'extra_data') auditor_worker.object_audit( AuditLocation(disk_file._datadir, 'sda', '0', policy=POLICIES.legacy)) - self.assertEquals(auditor_worker.quarantines, - pre_quarantines + 1) + self.assertEqual(auditor_worker.quarantines, + pre_quarantines + 1) run_tests(self.disk_file) run_tests(self.disk_file_p1) @@ -168,7 +168,7 @@ class TestAuditor(unittest.TestCase): auditor_worker.object_audit( AuditLocation(self.disk_file._datadir, 'sda', '0', 
policy=POLICIES.legacy)) - self.assertEquals(auditor_worker.quarantines, pre_quarantines) + self.assertEqual(auditor_worker.quarantines, pre_quarantines) etag = md5() etag.update('1' + '0' * 1023) etag = etag.hexdigest() @@ -181,7 +181,7 @@ class TestAuditor(unittest.TestCase): auditor_worker.object_audit( AuditLocation(self.disk_file._datadir, 'sda', '0', policy=POLICIES.legacy)) - self.assertEquals(auditor_worker.quarantines, pre_quarantines + 1) + self.assertEqual(auditor_worker.quarantines, pre_quarantines + 1) def test_object_audit_no_meta(self): timestamp = str(normalize_timestamp(time.time())) @@ -197,7 +197,7 @@ class TestAuditor(unittest.TestCase): auditor_worker.object_audit( AuditLocation(self.disk_file._datadir, 'sda', '0', policy=POLICIES.legacy)) - self.assertEquals(auditor_worker.quarantines, pre_quarantines + 1) + self.assertEqual(auditor_worker.quarantines, pre_quarantines + 1) def test_object_audit_will_not_swallow_errors_in_tests(self): timestamp = str(normalize_timestamp(time.time())) @@ -232,7 +232,7 @@ class TestAuditor(unittest.TestCase): auditor_worker.failsafe_object_audit( AuditLocation(os.path.dirname(path), 'sda', '0', policy=POLICIES.legacy)) - self.assertEquals(auditor_worker.errors, 1) + self.assertEqual(auditor_worker.errors, 1) def test_generic_exception_handling(self): auditor_worker = auditor.AuditorWorker(self.conf, self.logger, @@ -256,7 +256,7 @@ class TestAuditor(unittest.TestCase): with mock.patch('swift.obj.diskfile.DiskFileManager.diskfile_cls', lambda *_: 1 / 0): auditor_worker.audit_all_objects() - self.assertEquals(auditor_worker.errors, pre_errors + 1) + self.assertEqual(auditor_worker.errors, pre_errors + 1) def test_object_run_once_pass(self): auditor_worker = auditor.AuditorWorker(self.conf, self.logger, @@ -285,10 +285,10 @@ class TestAuditor(unittest.TestCase): write_file(self.disk_file_p1) auditor_worker.audit_all_objects() - self.assertEquals(auditor_worker.quarantines, pre_quarantines) + self.assertEqual(auditor_worker.quarantines, pre_quarantines) # 1 object per policy falls into 1024 bucket - self.assertEquals(auditor_worker.stats_buckets[1024], 2) - self.assertEquals(auditor_worker.stats_buckets[10240], 0) + self.assertEqual(auditor_worker.stats_buckets[1024], 2) + self.assertEqual(auditor_worker.stats_buckets[10240], 0) # pick up some additional code coverage, large file data = '0' * 1024 * 1024 @@ -304,22 +304,22 @@ class TestAuditor(unittest.TestCase): } writer.put(metadata) auditor_worker.audit_all_objects(device_dirs=['sda', 'sdb']) - self.assertEquals(auditor_worker.quarantines, pre_quarantines) + self.assertEqual(auditor_worker.quarantines, pre_quarantines) # still have the 1024 byte object left in policy-1 (plus the # stats from the original 2) - self.assertEquals(auditor_worker.stats_buckets[1024], 3) - self.assertEquals(auditor_worker.stats_buckets[10240], 0) + self.assertEqual(auditor_worker.stats_buckets[1024], 3) + self.assertEqual(auditor_worker.stats_buckets[10240], 0) # and then policy-0 disk_file was re-written as a larger object - self.assertEquals(auditor_worker.stats_buckets['OVER'], 1) + self.assertEqual(auditor_worker.stats_buckets['OVER'], 1) # pick up even more additional code coverage, misc paths auditor_worker.log_time = -1 auditor_worker.stats_sizes = [] auditor_worker.audit_all_objects(device_dirs=['sda', 'sdb']) - self.assertEquals(auditor_worker.quarantines, pre_quarantines) - self.assertEquals(auditor_worker.stats_buckets[1024], 3) - self.assertEquals(auditor_worker.stats_buckets[10240], 0) - 
self.assertEquals(auditor_worker.stats_buckets['OVER'], 1) + self.assertEqual(auditor_worker.quarantines, pre_quarantines) + self.assertEqual(auditor_worker.stats_buckets[1024], 3) + self.assertEqual(auditor_worker.stats_buckets[10240], 0) + self.assertEqual(auditor_worker.stats_buckets['OVER'], 1) def test_object_run_logging(self): logger = FakeLogger() @@ -360,7 +360,7 @@ class TestAuditor(unittest.TestCase): writer.put(metadata) os.write(writer._fd, 'extra_data') auditor_worker.audit_all_objects() - self.assertEquals(auditor_worker.quarantines, pre_quarantines + 1) + self.assertEqual(auditor_worker.quarantines, pre_quarantines + 1) def test_object_run_once_multi_devices(self): auditor_worker = auditor.AuditorWorker(self.conf, self.logger, @@ -398,7 +398,7 @@ class TestAuditor(unittest.TestCase): writer.put(metadata) os.write(writer._fd, 'extra_data') auditor_worker.audit_all_objects() - self.assertEquals(auditor_worker.quarantines, pre_quarantines + 1) + self.assertEqual(auditor_worker.quarantines, pre_quarantines + 1) def test_object_run_fast_track_non_zero(self): self.auditor = auditor.ObjectAuditor(self.conf) @@ -611,14 +611,14 @@ class TestAuditor(unittest.TestCase): self.assertRaises(StopForever, my_auditor.run_forever, zero_byte_fps=50) - self.assertEquals(mocker.check_kwargs['zero_byte_fps'], 50) - self.assertEquals(mocker.fork_called, 0) + self.assertEqual(mocker.check_kwargs['zero_byte_fps'], 50) + self.assertEqual(mocker.fork_called, 0) self.assertRaises(SystemExit, my_auditor.run_once) - self.assertEquals(mocker.fork_called, 1) - self.assertEquals(mocker.check_kwargs['zero_byte_fps'], 89) - self.assertEquals(mocker.check_device_dir, []) - self.assertEquals(mocker.check_args, ()) + self.assertEqual(mocker.fork_called, 1) + self.assertEqual(mocker.check_kwargs['zero_byte_fps'], 89) + self.assertEqual(mocker.check_device_dir, []) + self.assertEqual(mocker.check_args, ()) device_list = ['sd%s' % i for i in string.ascii_letters[2:10]] device_string = ','.join(device_list) @@ -627,9 +627,9 @@ class TestAuditor(unittest.TestCase): mocker.fork_called = 0 self.assertRaises(SystemExit, my_auditor.run_once, devices=device_string_bogus) - self.assertEquals(mocker.fork_called, 1) - self.assertEquals(mocker.check_kwargs['zero_byte_fps'], 89) - self.assertEquals(sorted(mocker.check_device_dir), device_list) + self.assertEqual(mocker.fork_called, 1) + self.assertEqual(mocker.check_kwargs['zero_byte_fps'], 89) + self.assertEqual(sorted(mocker.check_device_dir), device_list) mocker.master = 1 @@ -638,8 +638,8 @@ class TestAuditor(unittest.TestCase): # Fork is called 2 times since the zbf process is forked just # once before self._sleep() is called and StopForever is raised # Also wait is called just once before StopForever is raised - self.assertEquals(mocker.fork_called, 2) - self.assertEquals(mocker.wait_called, 1) + self.assertEqual(mocker.fork_called, 2) + self.assertEqual(mocker.wait_called, 1) my_auditor._sleep = mocker.mock_sleep_continue @@ -650,10 +650,10 @@ class TestAuditor(unittest.TestCase): # Fork is called no. of devices + (no. 
of devices)/2 + 1 times # since zbf process is forked (no.of devices)/2 + 1 times no_devices = len(os.listdir(self.devices)) - self.assertEquals(mocker.fork_called, no_devices + no_devices / 2 - + 1) - self.assertEquals(mocker.wait_called, no_devices + no_devices / 2 - + 1) + self.assertEqual(mocker.fork_called, no_devices + no_devices / 2 + + 1) + self.assertEqual(mocker.wait_called, no_devices + no_devices / 2 + + 1) finally: os.fork = was_fork diff --git a/test/unit/obj/test_diskfile.py b/test/unit/obj/test_diskfile.py index 8a6ae0bee6..f15187e6cc 100644 --- a/test/unit/obj/test_diskfile.py +++ b/test/unit/obj/test_diskfile.py @@ -206,28 +206,28 @@ class TestDiskFileModuleMethods(unittest.TestCase): self.devices, qbit) def test_get_data_dir(self): - self.assertEquals(diskfile.get_data_dir(POLICIES[0]), - diskfile.DATADIR_BASE) - self.assertEquals(diskfile.get_data_dir(POLICIES[1]), - diskfile.DATADIR_BASE + "-1") + self.assertEqual(diskfile.get_data_dir(POLICIES[0]), + diskfile.DATADIR_BASE) + self.assertEqual(diskfile.get_data_dir(POLICIES[1]), + diskfile.DATADIR_BASE + "-1") self.assertRaises(ValueError, diskfile.get_data_dir, 'junk') self.assertRaises(ValueError, diskfile.get_data_dir, 99) def test_get_async_dir(self): - self.assertEquals(diskfile.get_async_dir(POLICIES[0]), - diskfile.ASYNCDIR_BASE) - self.assertEquals(diskfile.get_async_dir(POLICIES[1]), - diskfile.ASYNCDIR_BASE + "-1") + self.assertEqual(diskfile.get_async_dir(POLICIES[0]), + diskfile.ASYNCDIR_BASE) + self.assertEqual(diskfile.get_async_dir(POLICIES[1]), + diskfile.ASYNCDIR_BASE + "-1") self.assertRaises(ValueError, diskfile.get_async_dir, 'junk') self.assertRaises(ValueError, diskfile.get_async_dir, 99) def test_get_tmp_dir(self): - self.assertEquals(diskfile.get_tmp_dir(POLICIES[0]), - diskfile.TMP_BASE) - self.assertEquals(diskfile.get_tmp_dir(POLICIES[1]), - diskfile.TMP_BASE + "-1") + self.assertEqual(diskfile.get_tmp_dir(POLICIES[0]), + diskfile.TMP_BASE) + self.assertEqual(diskfile.get_tmp_dir(POLICIES[1]), + diskfile.TMP_BASE + "-1") self.assertRaises(ValueError, diskfile.get_tmp_dir, 'junk') self.assertRaises(ValueError, diskfile.get_tmp_dir, 99) @@ -1982,7 +1982,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): df = self._get_open_disk_file(ts=self.ts().internal, extra_metadata=orig_metadata) with df.open(): - self.assertEquals('1024', df._metadata['Content-Length']) + self.assertEqual('1024', df._metadata['Content-Length']) # write some new metadata (fast POST, don't send orig meta, at t0+1) df = self._simple_get_diskfile() df.write_metadata({'X-Timestamp': self.ts().internal, @@ -1990,11 +1990,11 @@ class DiskFileMixin(BaseDiskFileTestMixin): df = self._simple_get_diskfile() with df.open(): # non-fast-post updateable keys are preserved - self.assertEquals('text/garbage', df._metadata['Content-Type']) + self.assertEqual('text/garbage', df._metadata['Content-Type']) # original fast-post updateable keys are removed self.assertTrue('X-Object-Meta-Key1' not in df._metadata) # new fast-post updateable keys are added - self.assertEquals('Value2', df._metadata['X-Object-Meta-Key2']) + self.assertEqual('Value2', df._metadata['X-Object-Meta-Key2']) def test_disk_file_preserves_sysmeta(self): # build an object with some meta (at t0) @@ -2003,7 +2003,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): df = self._get_open_disk_file(ts=self.ts().internal, extra_metadata=orig_metadata) with df.open(): - self.assertEquals('1024', df._metadata['Content-Length']) + self.assertEqual('1024', df._metadata['Content-Length']) # 
write some new metadata (fast POST, don't send orig meta, at t0+1s) df = self._simple_get_diskfile() df.write_metadata({'X-Timestamp': self.ts().internal, @@ -2012,9 +2012,9 @@ class DiskFileMixin(BaseDiskFileTestMixin): df = self._simple_get_diskfile() with df.open(): # non-fast-post updateable keys are preserved - self.assertEquals('text/garbage', df._metadata['Content-Type']) + self.assertEqual('text/garbage', df._metadata['Content-Type']) # original sysmeta keys are preserved - self.assertEquals('Value1', df._metadata['X-Object-Sysmeta-Key1']) + self.assertEqual('Value1', df._metadata['X-Object-Sysmeta-Key1']) def test_disk_file_reader_iter(self): df = self._create_test_file('1234567890') @@ -2037,9 +2037,9 @@ class DiskFileMixin(BaseDiskFileTestMixin): df = self._create_test_file('1234567890') quarantine_msgs = [] reader = df.reader(_quarantine_hook=quarantine_msgs.append) - self.assertEquals(''.join(reader.app_iter_range(0, None)), - '1234567890') - self.assertEquals(quarantine_msgs, []) + self.assertEqual(''.join(reader.app_iter_range(0, None)), + '1234567890') + self.assertEqual(quarantine_msgs, []) df = self._simple_get_diskfile() with df.open(): reader = df.reader() @@ -2132,7 +2132,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): '5e816ff8b8b8e9a5d355497e5d9e0301\r\n']) value = header + ''.join(it) - self.assertEquals(quarantine_msgs, []) + self.assertEqual(quarantine_msgs, []) parts = map(lambda p: p.get_payload(decode=True), email.message_from_string(value).walk())[1:3] @@ -2571,7 +2571,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): metadata = {'X-Timestamp': timestamp, 'X-Object-Meta-test': 'data'} df.write_metadata(metadata) dl = os.listdir(df._datadir) - self.assertEquals(len(dl), file_count + 1) + self.assertEqual(len(dl), file_count + 1) exp_name = '%s.meta' % timestamp self.assertTrue(exp_name in set(dl)) @@ -2638,8 +2638,8 @@ class DiskFileMixin(BaseDiskFileTestMixin): if policy.policy_type == EC_POLICY: expected = ['%s#2.data' % timestamp, '%s.durable' % timestamp] - self.assertEquals(len(dl), len(expected), - 'Unexpected dir listing %s' % dl) + self.assertEqual(len(dl), len(expected), + 'Unexpected dir listing %s' % dl) self.assertEqual(sorted(expected), sorted(dl)) def test_write_cleanup(self): @@ -2657,8 +2657,8 @@ class DiskFileMixin(BaseDiskFileTestMixin): if policy.policy_type == EC_POLICY: expected = ['%s#2.data' % timestamp_2, '%s.durable' % timestamp_2] - self.assertEquals(len(dl), len(expected), - 'Unexpected dir listing %s' % dl) + self.assertEqual(len(dl), len(expected), + 'Unexpected dir listing %s' % dl) self.assertEqual(sorted(expected), sorted(dl)) def test_commit_fsync(self): @@ -2713,8 +2713,8 @@ class DiskFileMixin(BaseDiskFileTestMixin): expected = ['%s#2.data' % timestamp.internal, '%s.durable' % timestamp.internal] dl = os.listdir(df._datadir) - self.assertEquals(len(dl), len(expected), - 'Unexpected dir listing %s' % dl) + self.assertEqual(len(dl), len(expected), + 'Unexpected dir listing %s' % dl) self.assertEqual(sorted(expected), sorted(dl)) def test_number_calls_to_hash_cleanup_listdir_during_create(self): @@ -2789,7 +2789,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): df.delete(ts) exp_name = '%s.ts' % ts.internal dl = os.listdir(df._datadir) - self.assertEquals(len(dl), 1) + self.assertEqual(len(dl), 1) self.assertTrue(exp_name in set(dl), 'Expected file %s missing in %s' % (exp_name, dl)) # cleanup before next policy @@ -2801,7 +2801,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): df.delete(ts) exp_name = '%s.ts' % 
str(Timestamp(ts).internal) dl = os.listdir(df._datadir) - self.assertEquals(len(dl), 1) + self.assertEqual(len(dl), 1) self.assertTrue(exp_name in set(dl)) df = self._simple_get_diskfile() self.assertRaises(DiskFileDeleted, df.open) @@ -2812,7 +2812,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): df.delete(ts) exp_name = '%s.ts' % str(Timestamp(ts).internal) dl = os.listdir(df._datadir) - self.assertEquals(len(dl), 1) + self.assertEqual(len(dl), 1) self.assertTrue(exp_name in set(dl)) # it's pickle-format, so removing the last byte is sufficient to # corrupt it @@ -2862,7 +2862,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): for chunk in reader: pass # close is called at the end of the iterator - self.assertEquals(reader._fp, None) + self.assertEqual(reader._fp, None) error_lines = df._logger.get_lines_for_level('error') self.assertEqual(len(error_lines), 1) self.assertTrue('close failure' in error_lines[0]) @@ -2891,7 +2891,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): try: df.open() except DiskFileDeleted as d: - self.assertEquals(d.timestamp, Timestamp(10).internal) + self.assertEqual(d.timestamp, Timestamp(10).internal) else: self.fail("Expected DiskFileDeleted exception") @@ -2907,7 +2907,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): try: df.open() except DiskFileDeleted as d: - self.assertEquals(d.timestamp, Timestamp(8).internal) + self.assertEqual(d.timestamp, Timestamp(8).internal) else: self.fail("Expected DiskFileDeleted exception") @@ -2925,8 +2925,8 @@ class DiskFileMixin(BaseDiskFileTestMixin): df = self._simple_get_diskfile() with df.open(): self.assertTrue('X-Timestamp' in df._metadata) - self.assertEquals(df._metadata['X-Timestamp'], - Timestamp(10).internal) + self.assertEqual(df._metadata['X-Timestamp'], + Timestamp(10).internal) self.assertTrue('deleted' not in df._metadata) def test_ondisk_search_loop_data_meta_ts(self): @@ -2943,8 +2943,8 @@ class DiskFileMixin(BaseDiskFileTestMixin): df = self._simple_get_diskfile() with df.open(): self.assertTrue('X-Timestamp' in df._metadata) - self.assertEquals(df._metadata['X-Timestamp'], - Timestamp(10).internal) + self.assertEqual(df._metadata['X-Timestamp'], + Timestamp(10).internal) self.assertTrue('deleted' not in df._metadata) def test_ondisk_search_loop_wayward_files_ignored(self): @@ -2962,8 +2962,8 @@ class DiskFileMixin(BaseDiskFileTestMixin): df = self._simple_get_diskfile() with df.open(): self.assertTrue('X-Timestamp' in df._metadata) - self.assertEquals(df._metadata['X-Timestamp'], - Timestamp(10).internal) + self.assertEqual(df._metadata['X-Timestamp'], + Timestamp(10).internal) self.assertTrue('deleted' not in df._metadata) def test_ondisk_search_loop_listdir_error(self): @@ -3021,7 +3021,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): df.delete(ts) exp_name = '%s.ts' % str(Timestamp(ts).internal) dl = os.listdir(df._datadir) - self.assertEquals(len(dl), 1) + self.assertEqual(len(dl), 1) self.assertTrue(exp_name in set(dl)) df = self._simple_get_diskfile() exc = None @@ -3053,7 +3053,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): df.delete(ts) exp_name = '%s.ts' % str(Timestamp(ts).internal) dl = os.listdir(df._datadir) - self.assertEquals(len(dl), 1) + self.assertEqual(len(dl), 1) self.assertTrue(exp_name in set(dl)) df = self._simple_get_diskfile() exc = None @@ -3086,7 +3086,7 @@ class DiskFileMixin(BaseDiskFileTestMixin): self.fail("OSError raised when it should have been swallowed") exp_name = '%s.ts' % str(Timestamp(ts).internal) dl = os.listdir(df._datadir) - self.assertEquals(len(dl), 
file_count + 1) + self.assertEqual(len(dl), file_count + 1) self.assertTrue(exp_name in set(dl)) def _system_can_zero_copy(self): @@ -3908,7 +3908,7 @@ class TestSuffixHashes(unittest.TestCase): df_mgr.hash_cleanup_listdir, path) return files = df_mgr.hash_cleanup_listdir('/whatever') - self.assertEquals(files, output_files) + self.assertEqual(files, output_files) # hash_cleanup_listdir tests - behaviors diff --git a/test/unit/obj/test_reconstructor.py b/test/unit/obj/test_reconstructor.py index 3a8a80b261..7aa5ebc60d 100755 --- a/test/unit/obj/test_reconstructor.py +++ b/test/unit/obj/test_reconstructor.py @@ -784,7 +784,7 @@ class TestGlobalSetupObjectReconstructor(unittest.TestCase): error_lines = self.logger.get_lines_for_level('error') self.assertEqual(len(error_lines), 1) log_args, log_kwargs = self.logger.log_dict['error'][0] - self.assertEquals(str(log_kwargs['exc_info'][1]), 'Ow!') + self.assertEqual(str(log_kwargs['exc_info'][1]), 'Ow!') def test_removes_zbf(self): # After running xfs_repair, a partition directory could become a diff --git a/test/unit/obj/test_replicator.py b/test/unit/obj/test_replicator.py index fd9e1ab6ca..526ff0b7f2 100644 --- a/test/unit/obj/test_replicator.py +++ b/test/unit/obj/test_replicator.py @@ -314,36 +314,36 @@ class TestObjectReplicator(unittest.TestCase): jobs_by_pol_part = {} for job in jobs: jobs_by_pol_part[str(int(job['policy'])) + job['partition']] = job - self.assertEquals(len(jobs_to_delete), 2) + self.assertEqual(len(jobs_to_delete), 2) self.assertTrue('1', jobs_to_delete[0]['partition']) - self.assertEquals( + self.assertEqual( [node['id'] for node in jobs_by_pol_part['00']['nodes']], [1, 2]) - self.assertEquals( + self.assertEqual( [node['id'] for node in jobs_by_pol_part['01']['nodes']], [1, 2, 3]) - self.assertEquals( + self.assertEqual( [node['id'] for node in jobs_by_pol_part['02']['nodes']], [2, 3]) - self.assertEquals( + self.assertEqual( [node['id'] for node in jobs_by_pol_part['03']['nodes']], [3, 1]) - self.assertEquals( + self.assertEqual( [node['id'] for node in jobs_by_pol_part['10']['nodes']], [1, 2]) - self.assertEquals( + self.assertEqual( [node['id'] for node in jobs_by_pol_part['11']['nodes']], [1, 2, 3]) - self.assertEquals( + self.assertEqual( [node['id'] for node in jobs_by_pol_part['12']['nodes']], [2, 3]) - self.assertEquals( + self.assertEqual( [node['id'] for node in jobs_by_pol_part['13']['nodes']], [3, 1]) for part in ['00', '01', '02', '03']: for node in jobs_by_pol_part[part]['nodes']: - self.assertEquals(node['device'], 'sda') - self.assertEquals(jobs_by_pol_part[part]['path'], - os.path.join(self.objects, part[1:])) + self.assertEqual(node['device'], 'sda') + self.assertEqual(jobs_by_pol_part[part]['path'], + os.path.join(self.objects, part[1:])) for part in ['10', '11', '12', '13']: for node in jobs_by_pol_part[part]['nodes']: - self.assertEquals(node['device'], 'sda') - self.assertEquals(jobs_by_pol_part[part]['path'], - os.path.join(self.objects_1, part[1:])) + self.assertEqual(node['device'], 'sda') + self.assertEqual(jobs_by_pol_part[part]['path'], + os.path.join(self.objects_1, part[1:])) @mock.patch('swift.obj.replicator.random.shuffle', side_effect=lambda l: l) def test_collect_jobs_multi_disk(self, mock_shuffle): @@ -373,7 +373,7 @@ class TestObjectReplicator(unittest.TestCase): self.assertEqual([mock.call(jobs)], mock_shuffle.mock_calls) jobs_to_delete = [j for j in jobs if j['delete']] - self.assertEquals(len(jobs_to_delete), 4) + self.assertEqual(len(jobs_to_delete), 4) self.assertEqual([ '1', 
'2', # policy 0; 1 not on sda, 2 not on sdb '1', '2', # policy 1; 1 not on sda, 2 not on sdb @@ -387,64 +387,64 @@ class TestObjectReplicator(unittest.TestCase): str(int(job['policy'])) + job['partition'] + job['device'] ] = job - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['00sda']['nodes']], - [1, 2]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['00sdb']['nodes']], - [0, 2]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['01sda']['nodes']], - [1, 2, 3]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['01sdb']['nodes']], - [2, 3]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['02sda']['nodes']], - [2, 3]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['02sdb']['nodes']], - [2, 3, 0]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['03sda']['nodes']], - [3, 1]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['03sdb']['nodes']], - [3, 0]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['10sda']['nodes']], - [1, 2]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['10sdb']['nodes']], - [0, 2]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['11sda']['nodes']], - [1, 2, 3]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['11sdb']['nodes']], - [2, 3]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['12sda']['nodes']], - [2, 3]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['12sdb']['nodes']], - [2, 3, 0]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['13sda']['nodes']], - [3, 1]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['13sdb']['nodes']], - [3, 0]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['00sda']['nodes']], + [1, 2]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['00sdb']['nodes']], + [0, 2]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['01sda']['nodes']], + [1, 2, 3]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['01sdb']['nodes']], + [2, 3]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['02sda']['nodes']], + [2, 3]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['02sdb']['nodes']], + [2, 3, 0]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['03sda']['nodes']], + [3, 1]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['03sdb']['nodes']], + [3, 0]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['10sda']['nodes']], + [1, 2]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['10sdb']['nodes']], + [0, 2]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['11sda']['nodes']], + [1, 2, 3]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['11sdb']['nodes']], + [2, 3]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['12sda']['nodes']], + [2, 3]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['12sdb']['nodes']], + [2, 3, 0]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['13sda']['nodes']], + [3, 1]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['13sdb']['nodes']], + [3, 0]) for part in ['00', '01', '02', '03']: - self.assertEquals(jobs_by_pol_part_dev[part + 'sda']['path'], - os.path.join(self.objects, part[1:])) - self.assertEquals(jobs_by_pol_part_dev[part + 'sdb']['path'], - 
os.path.join(objects_sdb, part[1:])) + self.assertEqual(jobs_by_pol_part_dev[part + 'sda']['path'], + os.path.join(self.objects, part[1:])) + self.assertEqual(jobs_by_pol_part_dev[part + 'sdb']['path'], + os.path.join(objects_sdb, part[1:])) for part in ['10', '11', '12', '13']: - self.assertEquals(jobs_by_pol_part_dev[part + 'sda']['path'], - os.path.join(self.objects_1, part[1:])) - self.assertEquals(jobs_by_pol_part_dev[part + 'sdb']['path'], - os.path.join(objects_1_sdb, part[1:])) + self.assertEqual(jobs_by_pol_part_dev[part + 'sda']['path'], + os.path.join(self.objects_1, part[1:])) + self.assertEqual(jobs_by_pol_part_dev[part + 'sdb']['path'], + os.path.join(objects_1_sdb, part[1:])) @mock.patch('swift.obj.replicator.random.shuffle', side_effect=lambda l: l) def test_collect_jobs_multi_disk_diff_ports_normal(self, mock_shuffle): @@ -480,7 +480,7 @@ class TestObjectReplicator(unittest.TestCase): self.assertEqual([mock.call(jobs)], mock_shuffle.mock_calls) jobs_to_delete = [j for j in jobs if j['delete']] - self.assertEquals(len(jobs_to_delete), 2) + self.assertEqual(len(jobs_to_delete), 2) self.assertEqual([ '3', # policy 0; 3 not on sdc '3', # policy 1; 3 not on sdc @@ -494,36 +494,36 @@ class TestObjectReplicator(unittest.TestCase): str(int(job['policy'])) + job['partition'] + job['device'] ] = job - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['00sdc']['nodes']], - [0, 1]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['01sdc']['nodes']], - [1, 3]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['02sdc']['nodes']], - [3, 0]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['03sdc']['nodes']], - [3, 0, 1]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['10sdc']['nodes']], - [0, 1]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['11sdc']['nodes']], - [1, 3]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['12sdc']['nodes']], - [3, 0]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['13sdc']['nodes']], - [3, 0, 1]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['00sdc']['nodes']], + [0, 1]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['01sdc']['nodes']], + [1, 3]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['02sdc']['nodes']], + [3, 0]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['03sdc']['nodes']], + [3, 0, 1]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['10sdc']['nodes']], + [0, 1]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['11sdc']['nodes']], + [1, 3]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['12sdc']['nodes']], + [3, 0]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['13sdc']['nodes']], + [3, 0, 1]) for part in ['00', '01', '02', '03']: - self.assertEquals(jobs_by_pol_part_dev[part + 'sdc']['path'], - os.path.join(objects_sdc, part[1:])) + self.assertEqual(jobs_by_pol_part_dev[part + 'sdc']['path'], + os.path.join(objects_sdc, part[1:])) for part in ['10', '11', '12', '13']: - self.assertEquals(jobs_by_pol_part_dev[part + 'sdc']['path'], - os.path.join(objects_1_sdc, part[1:])) + self.assertEqual(jobs_by_pol_part_dev[part + 'sdc']['path'], + os.path.join(objects_1_sdc, part[1:])) @mock.patch('swift.obj.replicator.random.shuffle', side_effect=lambda l: l) def test_collect_jobs_multi_disk_servers_per_port(self, mock_shuffle): @@ -561,7 +561,7 @@ class 
TestObjectReplicator(unittest.TestCase): self.assertEqual([mock.call(jobs)], mock_shuffle.mock_calls) jobs_to_delete = [j for j in jobs if j['delete']] - self.assertEquals(len(jobs_to_delete), 4) + self.assertEqual(len(jobs_to_delete), 4) self.assertEqual([ '3', '0', # policy 0; 3 not on sdc, 0 not on sdd '3', '0', # policy 1; 3 not on sdc, 0 not on sdd @@ -575,70 +575,70 @@ class TestObjectReplicator(unittest.TestCase): str(int(job['policy'])) + job['partition'] + job['device'] ] = job - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['00sdc']['nodes']], - [0, 1]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['00sdd']['nodes']], - [0, 1, 2]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['01sdc']['nodes']], - [1, 3]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['01sdd']['nodes']], - [1, 2]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['02sdc']['nodes']], - [3, 0]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['02sdd']['nodes']], - [2, 0]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['03sdc']['nodes']], - [3, 0, 1]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['03sdd']['nodes']], - [0, 1]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['10sdc']['nodes']], - [0, 1]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['10sdd']['nodes']], - [0, 1, 2]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['11sdc']['nodes']], - [1, 3]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['11sdd']['nodes']], - [1, 2]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['12sdc']['nodes']], - [3, 0]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['12sdd']['nodes']], - [2, 0]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['13sdc']['nodes']], - [3, 0, 1]) - self.assertEquals([node['id'] - for node in jobs_by_pol_part_dev['13sdd']['nodes']], - [0, 1]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['00sdc']['nodes']], + [0, 1]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['00sdd']['nodes']], + [0, 1, 2]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['01sdc']['nodes']], + [1, 3]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['01sdd']['nodes']], + [1, 2]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['02sdc']['nodes']], + [3, 0]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['02sdd']['nodes']], + [2, 0]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['03sdc']['nodes']], + [3, 0, 1]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['03sdd']['nodes']], + [0, 1]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['10sdc']['nodes']], + [0, 1]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['10sdd']['nodes']], + [0, 1, 2]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['11sdc']['nodes']], + [1, 3]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['11sdd']['nodes']], + [1, 2]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['12sdc']['nodes']], + [3, 0]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['12sdd']['nodes']], + [2, 0]) + self.assertEqual([node['id'] + for node in jobs_by_pol_part_dev['13sdc']['nodes']], + [3, 0, 1]) + self.assertEqual([node['id'] + for node in 
jobs_by_pol_part_dev['13sdd']['nodes']], + [0, 1]) for part in ['00', '01', '02', '03']: - self.assertEquals(jobs_by_pol_part_dev[part + 'sdc']['path'], - os.path.join(objects_sdc, part[1:])) - self.assertEquals(jobs_by_pol_part_dev[part + 'sdd']['path'], - os.path.join(objects_sdd, part[1:])) + self.assertEqual(jobs_by_pol_part_dev[part + 'sdc']['path'], + os.path.join(objects_sdc, part[1:])) + self.assertEqual(jobs_by_pol_part_dev[part + 'sdd']['path'], + os.path.join(objects_sdd, part[1:])) for part in ['10', '11', '12', '13']: - self.assertEquals(jobs_by_pol_part_dev[part + 'sdc']['path'], - os.path.join(objects_1_sdc, part[1:])) - self.assertEquals(jobs_by_pol_part_dev[part + 'sdd']['path'], - os.path.join(objects_1_sdd, part[1:])) + self.assertEqual(jobs_by_pol_part_dev[part + 'sdc']['path'], + os.path.join(objects_1_sdc, part[1:])) + self.assertEqual(jobs_by_pol_part_dev[part + 'sdd']['path'], + os.path.join(objects_1_sdd, part[1:])) def test_collect_jobs_handoffs_first(self): self.replicator.handoffs_first = True jobs = self.replicator.collect_jobs() self.assertTrue(jobs[0]['delete']) - self.assertEquals('1', jobs[0]['partition']) + self.assertEqual('1', jobs[0]['partition']) def test_replicator_skips_bogus_partition_dirs(self): # A directory in the wrong place shouldn't crash the replicator @@ -1269,7 +1269,7 @@ class TestObjectReplicator(unittest.TestCase): self.assertFalse(process_errors) for i, result in [('0', True), ('1', False), ('2', True), ('3', True)]: - self.assertEquals(os.access( + self.assertEqual(os.access( os.path.join(self.objects, i, diskfile.HASH_FILE), os.F_OK), result) @@ -1391,15 +1391,15 @@ class TestObjectReplicator(unittest.TestCase): self.replicator.update(job) self.assertTrue(error in mock_logger.error.call_args[0][0]) self.assertTrue(expect in mock_logger.exception.call_args[0][0]) - self.assertEquals(len(self.replicator.partition_times), 1) - self.assertEquals(mock_http.call_count, len(ring._devs) - 1) + self.assertEqual(len(self.replicator.partition_times), 1) + self.assertEqual(mock_http.call_count, len(ring._devs) - 1) reqs = [] for node in job['nodes']: reqs.append(mock.call(node['ip'], node['port'], node['device'], job['partition'], 'REPLICATE', '', headers=self.headers)) if job['partition'] == '0': - self.assertEquals(self.replicator.suffix_hash, 0) + self.assertEqual(self.replicator.suffix_hash, 0) mock_http.assert_has_calls(reqs, any_order=True) mock_http.reset_mock() mock_logger.reset_mock() @@ -1411,7 +1411,7 @@ class TestObjectReplicator(unittest.TestCase): set_default(self) self.replicator.update(job) self.assertTrue(error in mock_logger.error.call_args[0][0]) - self.assertEquals(len(self.replicator.partition_times), 1) + self.assertEqual(len(self.replicator.partition_times), 1) mock_logger.reset_mock() # Check successful http_connection and exception with @@ -1422,7 +1422,7 @@ class TestObjectReplicator(unittest.TestCase): set_default(self) self.replicator.update(job) self.assertTrue(expect in mock_logger.exception.call_args[0][0]) - self.assertEquals(len(self.replicator.partition_times), 1) + self.assertEqual(len(self.replicator.partition_times), 1) mock_logger.reset_mock() # Check successful http_connection and correct @@ -1437,12 +1437,12 @@ class TestObjectReplicator(unittest.TestCase): local_job = job.copy() continue self.replicator.update(job) - self.assertEquals(mock_logger.exception.call_count, 0) - self.assertEquals(mock_logger.error.call_count, 0) - self.assertEquals(len(self.replicator.partition_times), 1) - 
self.assertEquals(self.replicator.suffix_hash, 0) - self.assertEquals(self.replicator.suffix_sync, 0) - self.assertEquals(self.replicator.suffix_count, 0) + self.assertEqual(mock_logger.exception.call_count, 0) + self.assertEqual(mock_logger.error.call_count, 0) + self.assertEqual(len(self.replicator.partition_times), 1) + self.assertEqual(self.replicator.suffix_hash, 0) + self.assertEqual(self.replicator.suffix_sync, 0) + self.assertEqual(self.replicator.suffix_count, 0) mock_logger.reset_mock() # Check successful http_connect and sync for local node @@ -1458,11 +1458,11 @@ class TestObjectReplicator(unittest.TestCase): for node in local_job['nodes']: reqs.append(mock.call(node, local_job, ['a83'])) fake_func.assert_has_calls(reqs, any_order=True) - self.assertEquals(fake_func.call_count, 2) - self.assertEquals(self.replicator.replication_count, 1) - self.assertEquals(self.replicator.suffix_sync, 2) - self.assertEquals(self.replicator.suffix_hash, 1) - self.assertEquals(self.replicator.suffix_count, 1) + self.assertEqual(fake_func.call_count, 2) + self.assertEqual(self.replicator.replication_count, 1) + self.assertEqual(self.replicator.suffix_sync, 2) + self.assertEqual(self.replicator.suffix_hash, 1) + self.assertEqual(self.replicator.suffix_count, 1) # Efficient Replication Case set_default(self) @@ -1477,11 +1477,11 @@ class TestObjectReplicator(unittest.TestCase): # The candidate nodes to replicate (i.e. dev1 and dev3) # belong to another region self.replicator.update(job) - self.assertEquals(fake_func.call_count, 1) - self.assertEquals(self.replicator.replication_count, 1) - self.assertEquals(self.replicator.suffix_sync, 1) - self.assertEquals(self.replicator.suffix_hash, 1) - self.assertEquals(self.replicator.suffix_count, 1) + self.assertEqual(fake_func.call_count, 1) + self.assertEqual(self.replicator.replication_count, 1) + self.assertEqual(self.replicator.suffix_sync, 1) + self.assertEqual(self.replicator.suffix_hash, 1) + self.assertEqual(self.replicator.suffix_count, 1) mock_http.reset_mock() mock_logger.reset_mock() diff --git a/test/unit/obj/test_server.py b/test/unit/obj/test_server.py index c289a36826..1fb966c991 100755 --- a/test/unit/obj/test_server.py +++ b/test/unit/obj/test_server.py @@ -186,7 +186,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-Two': 'Two'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) timestamp = normalize_timestamp(time()) req = Request.blank('/sda1/p/a/c/o', @@ -199,7 +199,7 @@ class TestObjectController(unittest.TestCase): 'Bar': 'barheader', 'Content-Type': 'application/x-test'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) req = Request.blank('/sda1/p/a/c/o') resp = req.get_response(self.object_controller) @@ -211,7 +211,7 @@ class TestObjectController(unittest.TestCase): "Bar" in resp.headers and "Baz" not in resp.headers and "Content-Encoding" in resp.headers) - self.assertEquals(resp.headers['Content-Type'], 'application/x-test') + self.assertEqual(resp.headers['Content-Type'], 'application/x-test') req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) @@ -224,7 +224,7 @@ class TestObjectController(unittest.TestCase): "Bar" in resp.headers and "Baz" not in resp.headers and "Content-Encoding" in resp.headers) - self.assertEquals(resp.headers['Content-Type'], 'application/x-test') + 
self.assertEqual(resp.headers['Content-Type'], 'application/x-test') timestamp = normalize_timestamp(time()) req = Request.blank('/sda1/p/a/c/o', @@ -232,7 +232,7 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': timestamp, 'Content-Type': 'application/x-test'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) req = Request.blank('/sda1/p/a/c/o') resp = req.get_response(self.object_controller) self.assertTrue("X-Object-Meta-3" not in resp.headers and @@ -240,7 +240,7 @@ class TestObjectController(unittest.TestCase): "Foo" not in resp.headers and "Bar" not in resp.headers and "Content-Encoding" not in resp.headers) - self.assertEquals(resp.headers['Content-Type'], 'application/x-test') + self.assertEqual(resp.headers['Content-Type'], 'application/x-test') # test defaults self.object_controller.allowed_headers = original_headers @@ -256,7 +256,7 @@ class TestObjectController(unittest.TestCase): }) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c/o') resp = req.get_response(self.object_controller) self.assertTrue("X-Object-Meta-1" in resp.headers and @@ -264,7 +264,7 @@ class TestObjectController(unittest.TestCase): "Content-Encoding" in resp.headers and "X-Object-Manifest" in resp.headers and "Content-Disposition" in resp.headers) - self.assertEquals(resp.headers['Content-Type'], 'application/x-test') + self.assertEqual(resp.headers['Content-Type'], 'application/x-test') timestamp = normalize_timestamp(time()) req = Request.blank('/sda1/p/a/c/o', @@ -274,7 +274,7 @@ class TestObjectController(unittest.TestCase): 'Foo': 'fooheader', 'Content-Type': 'application/x-test'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) req = Request.blank('/sda1/p/a/c/o') resp = req.get_response(self.object_controller) self.assertTrue("X-Object-Meta-1" not in resp.headers and @@ -283,7 +283,7 @@ class TestObjectController(unittest.TestCase): "X-Object-Manifest" not in resp.headers and "Content-Disposition" not in resp.headers and "X-Object-Meta-3" in resp.headers) - self.assertEquals(resp.headers['Content-Type'], 'application/x-test') + self.assertEqual(resp.headers['Content-Type'], 'application/x-test') # Test for empty metadata timestamp = normalize_timestamp(time()) @@ -296,7 +296,7 @@ class TestObjectController(unittest.TestCase): self.assertEqual(resp.status_int, 202) req = Request.blank('/sda1/p/a/c/o') resp = req.get_response(self.object_controller) - self.assertEquals(resp.headers["x-object-meta-3"], '') + self.assertEqual(resp.headers["x-object-meta-3"], '') def test_POST_old_timestamp(self): ts = time() @@ -308,7 +308,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-Two': 'Two'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # Same timestamp should result in 409 req = Request.blank('/sda1/p/a/c/o', @@ -319,7 +319,7 @@ class TestObjectController(unittest.TestCase): 'Content-Encoding': 'gzip', 'Content-Type': 'application/x-test'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 409) + self.assertEqual(resp.status_int, 409) self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp) # Earlier timestamp should result 
in 409 @@ -332,7 +332,7 @@ class TestObjectController(unittest.TestCase): 'Content-Encoding': 'gzip', 'Content-Type': 'application/x-test'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 409) + self.assertEqual(resp.status_int, 409) self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp) def test_POST_not_exist(self): @@ -344,7 +344,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-2': 'Two', 'Content-Type': 'text/plain'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_POST_invalid_path(self): timestamp = normalize_timestamp(time()) @@ -354,7 +354,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-2': 'Two', 'Content-Type': 'text/plain'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_POST_no_timestamp(self): req = Request.blank('/sda1/p/a/c/o', @@ -406,7 +406,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'text/plain', 'Content-Length': '0'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'}, @@ -419,7 +419,7 @@ class TestObjectController(unittest.TestCase): with mock.patch.object(object_server, 'http_connect', mock_http_connect(202)): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'}, @@ -432,7 +432,7 @@ class TestObjectController(unittest.TestCase): with mock.patch.object(object_server, 'http_connect', mock_http_connect(202, with_exc=True)): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'}, @@ -445,7 +445,7 @@ class TestObjectController(unittest.TestCase): with mock.patch.object(object_server, 'http_connect', mock_http_connect(500)): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) def test_POST_quarantine_zbyte(self): timestamp = normalize_timestamp(time()) @@ -454,7 +454,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/x-test'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) objfile = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o', policy=POLICIES.legacy) @@ -465,30 +465,30 @@ class TestObjectController(unittest.TestCase): os.unlink(objfile._data_file) with open(objfile._data_file, 'w') as fp: diskfile.write_metadata(fp, metadata) - self.assertEquals(os.listdir(objfile._datadir)[0], file_name) + self.assertEqual(os.listdir(objfile._datadir)[0], file_name) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': normalize_timestamp(time())}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) quar_dir = os.path.join( self.testdir, 'sda1', 'quarantined', 'objects', os.path.basename(os.path.dirname(objfile._data_file))) - self.assertEquals(os.listdir(quar_dir)[0], file_name) + 
self.assertEqual(os.listdir(quar_dir)[0], file_name) def test_PUT_invalid_path(self): req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_PUT_no_timestamp(self): req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT', 'CONTENT_LENGTH': '0'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_PUT_no_content_type(self): req = Request.blank( @@ -497,7 +497,7 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '6'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_PUT_invalid_content_type(self): req = Request.blank( @@ -507,7 +507,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': '\xff\xff'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) self.assertTrue('Content-Type' in resp.body) def test_PUT_no_content_length(self): @@ -518,7 +518,7 @@ class TestObjectController(unittest.TestCase): req.body = 'VERIFY' del req.headers['Content-Length'] resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 411) + self.assertEqual(resp.status_int, 411) def test_PUT_zero_content_length(self): req = Request.blank( @@ -526,9 +526,9 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': normalize_timestamp(time()), 'Content-Type': 'application/octet-stream'}) req.body = '' - self.assertEquals(req.headers['Content-Length'], '0') + self.assertEqual(req.headers['Content-Length'], '0') resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_bad_transfer_encoding(self): req = Request.blank( @@ -551,7 +551,7 @@ class TestObjectController(unittest.TestCase): 'If-None-Match': '*'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # File should already exist so it should fail timestamp = normalize_timestamp(time()) req = Request.blank( @@ -562,7 +562,7 @@ class TestObjectController(unittest.TestCase): 'If-None-Match': '*'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) def test_PUT_if_none_match(self): # PUT with if-none-match set and nothing there should succeed @@ -575,7 +575,7 @@ class TestObjectController(unittest.TestCase): 'If-None-Match': 'notthere'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # PUT with if-none-match of the object etag should fail timestamp = normalize_timestamp(time()) req = Request.blank( @@ -586,7 +586,7 @@ class TestObjectController(unittest.TestCase): 'If-None-Match': '0b4c12d7e0a73840c1c4f148fda3b037'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) def test_PUT_common(self): timestamp = normalize_timestamp(time()) @@ -605,22 +605,22 @@ class TestObjectController(unittest.TestCase): self.object_controller.allowed_headers = ['Custom-Header'] 
resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) objfile = os.path.join( self.testdir, 'sda1', storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p', hash_path('a', 'c', 'o')), utils.Timestamp(timestamp).internal + '.data') self.assertTrue(os.path.isfile(objfile)) - self.assertEquals(open(objfile).read(), 'VERIFY') - self.assertEquals(diskfile.read_metadata(objfile), - {'X-Timestamp': utils.Timestamp(timestamp).internal, - 'Content-Length': '6', - 'ETag': '0b4c12d7e0a73840c1c4f148fda3b037', - 'Content-Type': 'application/octet-stream', - 'name': '/a/c/o', - 'X-Object-Meta-Test': 'one', - 'Custom-Header': '*'}) + self.assertEqual(open(objfile).read(), 'VERIFY') + self.assertEqual(diskfile.read_metadata(objfile), + {'X-Timestamp': utils.Timestamp(timestamp).internal, + 'Content-Length': '6', + 'ETag': '0b4c12d7e0a73840c1c4f148fda3b037', + 'Content-Type': 'application/octet-stream', + 'name': '/a/c/o', + 'X-Object-Meta-Test': 'one', + 'Custom-Header': '*'}) def test_PUT_overwrite(self): req = Request.blank( @@ -630,7 +630,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) sleep(.00001) timestamp = normalize_timestamp(time()) req = Request.blank( @@ -640,21 +640,21 @@ class TestObjectController(unittest.TestCase): 'Content-Encoding': 'gzip'}) req.body = 'VERIFY TWO' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) objfile = os.path.join( self.testdir, 'sda1', storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p', hash_path('a', 'c', 'o')), utils.Timestamp(timestamp).internal + '.data') self.assertTrue(os.path.isfile(objfile)) - self.assertEquals(open(objfile).read(), 'VERIFY TWO') - self.assertEquals(diskfile.read_metadata(objfile), - {'X-Timestamp': utils.Timestamp(timestamp).internal, - 'Content-Length': '10', - 'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039', - 'Content-Type': 'text/plain', - 'name': '/a/c/o', - 'Content-Encoding': 'gzip'}) + self.assertEqual(open(objfile).read(), 'VERIFY TWO') + self.assertEqual(diskfile.read_metadata(objfile), + {'X-Timestamp': utils.Timestamp(timestamp).internal, + 'Content-Length': '10', + 'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039', + 'Content-Type': 'text/plain', + 'name': '/a/c/o', + 'Content-Encoding': 'gzip'}) def test_PUT_overwrite_w_delete_at(self): req = Request.blank( @@ -701,7 +701,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': normalize_timestamp(ts), @@ -709,7 +709,7 @@ class TestObjectController(unittest.TestCase): 'Content-Encoding': 'gzip'}) req.body = 'VERIFY TWO' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 409) + self.assertEqual(resp.status_int, 409) self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -719,7 +719,7 @@ class TestObjectController(unittest.TestCase): 'Content-Encoding': 'gzip'}) req.body = 'VERIFY THREE' resp = 
req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 409) + self.assertEqual(resp.status_int, 409) self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp) def test_PUT_no_etag(self): @@ -729,7 +729,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'text/plain'}) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_invalid_etag(self): req = Request.blank( @@ -739,7 +739,7 @@ class TestObjectController(unittest.TestCase): 'ETag': 'invalid'}) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 422) + self.assertEqual(resp.status_int, 422) def test_PUT_user_metadata(self): timestamp = normalize_timestamp(time()) @@ -752,22 +752,22 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-Two': 'Two'}) req.body = 'VERIFY THREE' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) objfile = os.path.join( self.testdir, 'sda1', storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p', hash_path('a', 'c', 'o')), utils.Timestamp(timestamp).internal + '.data') self.assertTrue(os.path.isfile(objfile)) - self.assertEquals(open(objfile).read(), 'VERIFY THREE') - self.assertEquals(diskfile.read_metadata(objfile), - {'X-Timestamp': utils.Timestamp(timestamp).internal, - 'Content-Length': '12', - 'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568', - 'Content-Type': 'text/plain', - 'name': '/a/c/o', - 'X-Object-Meta-1': 'One', - 'X-Object-Meta-Two': 'Two'}) + self.assertEqual(open(objfile).read(), 'VERIFY THREE') + self.assertEqual(diskfile.read_metadata(objfile), + {'X-Timestamp': utils.Timestamp(timestamp).internal, + 'Content-Length': '12', + 'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568', + 'Content-Type': 'text/plain', + 'name': '/a/c/o', + 'X-Object-Meta-1': 'One', + 'X-Object-Meta-Two': 'Two'}) def test_PUT_etag_in_footer(self): timestamp = normalize_timestamp(time()) @@ -1023,7 +1023,7 @@ class TestObjectController(unittest.TestCase): with mock.patch('xattr.getxattr', mock_get_and_setxattr): with mock.patch('xattr.setxattr', mock_get_and_setxattr): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 507) + self.assertEqual(resp.status_int, 507) def test_PUT_client_timeout(self): class FakeTimeout(BaseException): @@ -1045,7 +1045,7 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '6'}) req.environ['wsgi.input'] = WsgiBytesIO(b'VERIFY') resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 408) + self.assertEqual(resp.status_int, 408) def test_PUT_system_metadata(self): # check that sysmeta is stored in diskfile @@ -1060,23 +1060,23 @@ class TestObjectController(unittest.TestCase): 'X-Object-Sysmeta-Two': 'Two'}) req.body = 'VERIFY SYSMETA' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) objfile = os.path.join( self.testdir, 'sda1', storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p', hash_path('a', 'c', 'o')), timestamp + '.data') self.assertTrue(os.path.isfile(objfile)) - self.assertEquals(open(objfile).read(), 'VERIFY SYSMETA') - self.assertEquals(diskfile.read_metadata(objfile), - {'X-Timestamp': timestamp, - 'Content-Length': '14', - 'Content-Type': 'text/plain', - 'ETag': '1000d172764c9dbc3a5798a67ec5bb76', - 'name': '/a/c/o', - 
'X-Object-Meta-1': 'One', - 'X-Object-Sysmeta-1': 'One', - 'X-Object-Sysmeta-Two': 'Two'}) + self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA') + self.assertEqual(diskfile.read_metadata(objfile), + {'X-Timestamp': timestamp, + 'Content-Length': '14', + 'Content-Type': 'text/plain', + 'ETag': '1000d172764c9dbc3a5798a67ec5bb76', + 'name': '/a/c/o', + 'X-Object-Meta-1': 'One', + 'X-Object-Sysmeta-1': 'One', + 'X-Object-Sysmeta-Two': 'Two'}) def test_POST_system_metadata(self): # check that diskfile sysmeta is not changed by a POST @@ -1091,7 +1091,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Sysmeta-Two': 'Two'}) req.body = 'VERIFY SYSMETA' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) timestamp2 = normalize_timestamp(time()) req = Request.blank( @@ -1101,7 +1101,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Sysmeta-1': 'Not One', 'X-Object-Sysmeta-Two': 'Not Two'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) # original .data file metadata should be unchanged objfile = os.path.join( @@ -1110,16 +1110,16 @@ class TestObjectController(unittest.TestCase): hash_path('a', 'c', 'o')), timestamp1 + '.data') self.assertTrue(os.path.isfile(objfile)) - self.assertEquals(open(objfile).read(), 'VERIFY SYSMETA') - self.assertEquals(diskfile.read_metadata(objfile), - {'X-Timestamp': timestamp1, - 'Content-Length': '14', - 'Content-Type': 'text/plain', - 'ETag': '1000d172764c9dbc3a5798a67ec5bb76', - 'name': '/a/c/o', - 'X-Object-Meta-1': 'One', - 'X-Object-Sysmeta-1': 'One', - 'X-Object-Sysmeta-Two': 'Two'}) + self.assertEqual(open(objfile).read(), 'VERIFY SYSMETA') + self.assertEqual(diskfile.read_metadata(objfile), + {'X-Timestamp': timestamp1, + 'Content-Length': '14', + 'Content-Type': 'text/plain', + 'ETag': '1000d172764c9dbc3a5798a67ec5bb76', + 'name': '/a/c/o', + 'X-Object-Meta-1': 'One', + 'X-Object-Sysmeta-1': 'One', + 'X-Object-Sysmeta-Two': 'Two'}) # .meta file metadata should have only user meta items metafile = os.path.join( @@ -1128,10 +1128,10 @@ class TestObjectController(unittest.TestCase): hash_path('a', 'c', 'o')), timestamp2 + '.meta') self.assertTrue(os.path.isfile(metafile)) - self.assertEquals(diskfile.read_metadata(metafile), - {'X-Timestamp': timestamp2, - 'name': '/a/c/o', - 'X-Object-Meta-1': 'Not One'}) + self.assertEqual(diskfile.read_metadata(metafile), + {'X-Timestamp': timestamp2, + 'name': '/a/c/o', + 'X-Object-Meta-1': 'Not One'}) def test_PUT_then_fetch_system_metadata(self): timestamp = normalize_timestamp(time()) @@ -1145,22 +1145,22 @@ class TestObjectController(unittest.TestCase): 'X-Object-Sysmeta-Two': 'Two'}) req.body = 'VERIFY SYSMETA' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def check_response(resp): - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_length, 14) - self.assertEquals(resp.content_type, 'text/plain') - self.assertEquals(resp.headers['content-type'], 'text/plain') - self.assertEquals( + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.content_length, 14) + self.assertEqual(resp.content_type, 'text/plain') + self.assertEqual(resp.headers['content-type'], 'text/plain') + self.assertEqual( resp.headers['last-modified'], strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(math.ceil(float(timestamp))))) - 
self.assertEquals(resp.headers['etag'], - '"1000d172764c9dbc3a5798a67ec5bb76"') - self.assertEquals(resp.headers['x-object-meta-1'], 'One') - self.assertEquals(resp.headers['x-object-sysmeta-1'], 'One') - self.assertEquals(resp.headers['x-object-sysmeta-two'], 'Two') + self.assertEqual(resp.headers['etag'], + '"1000d172764c9dbc3a5798a67ec5bb76"') + self.assertEqual(resp.headers['x-object-meta-1'], 'One') + self.assertEqual(resp.headers['x-object-sysmeta-1'], 'One') + self.assertEqual(resp.headers['x-object-sysmeta-two'], 'Two') req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) @@ -1184,7 +1184,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Sysmeta-Two': 'Two'}) req.body = 'VERIFY SYSMETA' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) timestamp2 = normalize_timestamp(time()) req = Request.blank( @@ -1194,23 +1194,23 @@ class TestObjectController(unittest.TestCase): 'X-Object-Sysmeta-1': 'Not One', 'X-Object-Sysmeta-Two': 'Not Two'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) def check_response(resp): # user meta should be updated but not sysmeta - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_length, 14) - self.assertEquals(resp.content_type, 'text/plain') - self.assertEquals(resp.headers['content-type'], 'text/plain') - self.assertEquals( + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.content_length, 14) + self.assertEqual(resp.content_type, 'text/plain') + self.assertEqual(resp.headers['content-type'], 'text/plain') + self.assertEqual( resp.headers['last-modified'], strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(math.ceil(float(timestamp2))))) - self.assertEquals(resp.headers['etag'], - '"1000d172764c9dbc3a5798a67ec5bb76"') - self.assertEquals(resp.headers['x-object-meta-1'], 'Not One') - self.assertEquals(resp.headers['x-object-sysmeta-1'], 'One') - self.assertEquals(resp.headers['x-object-sysmeta-two'], 'Two') + self.assertEqual(resp.headers['etag'], + '"1000d172764c9dbc3a5798a67ec5bb76"') + self.assertEqual(resp.headers['x-object-meta-1'], 'Not One') + self.assertEqual(resp.headers['x-object-sysmeta-1'], 'One') + self.assertEqual(resp.headers['x-object-sysmeta-two'], 'Two') req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) @@ -1243,7 +1243,7 @@ class TestObjectController(unittest.TestCase): with mock.patch.object(self.object_controller, 'allowed_headers', ['Custom-Header']): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) objfile = os.path.join( self.testdir, 'sda1', @@ -1251,14 +1251,14 @@ class TestObjectController(unittest.TestCase): hash_path('a', 'c', 'o')), timestamp1 + '.data') # X-Static-Large-Object is disallowed. 
- self.assertEquals(diskfile.read_metadata(objfile), - {'X-Timestamp': timestamp1, - 'Content-Type': 'text/plain', - 'Content-Length': '14', - 'ETag': '1000d172764c9dbc3a5798a67ec5bb76', - 'name': '/a/c/o', - 'Custom-Header': 'custom1', - 'X-Object-Meta-1': 'meta1'}) + self.assertEqual(diskfile.read_metadata(objfile), + {'X-Timestamp': timestamp1, + 'Content-Type': 'text/plain', + 'Content-Length': '14', + 'ETag': '1000d172764c9dbc3a5798a67ec5bb76', + 'name': '/a/c/o', + 'Custom-Header': 'custom1', + 'X-Object-Meta-1': 'meta1'}) # PUT object again with X-Backend-Replication-Headers timestamp2 = normalize_timestamp(time()) @@ -1278,7 +1278,7 @@ class TestObjectController(unittest.TestCase): with mock.patch.object(self.object_controller, 'allowed_headers', ['Custom-Header']): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) objfile = os.path.join( self.testdir, 'sda1', @@ -1287,15 +1287,15 @@ class TestObjectController(unittest.TestCase): timestamp2 + '.data') # X-Static-Large-Object should be copied since it is now allowed by # replication headers. - self.assertEquals(diskfile.read_metadata(objfile), - {'X-Timestamp': timestamp2, - 'Content-Type': 'text/plain', - 'Content-Length': '14', - 'ETag': '1000d172764c9dbc3a5798a67ec5bb76', - 'name': '/a/c/o', - 'Custom-Header': 'custom1', - 'X-Object-Meta-1': 'meta1', - 'X-Static-Large-Object': 'False'}) + self.assertEqual(diskfile.read_metadata(objfile), + {'X-Timestamp': timestamp2, + 'Content-Type': 'text/plain', + 'Content-Length': '14', + 'ETag': '1000d172764c9dbc3a5798a67ec5bb76', + 'name': '/a/c/o', + 'Custom-Header': 'custom1', + 'X-Object-Meta-1': 'meta1', + 'X-Static-Large-Object': 'False'}) def test_PUT_container_connection(self): @@ -1335,7 +1335,7 @@ class TestObjectController(unittest.TestCase): object_server, 'http_connect', mock_http_connect(201)): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) timestamp = normalize_timestamp(time()) req = Request.blank( '/sda1/p/a/c/o', @@ -1351,7 +1351,7 @@ class TestObjectController(unittest.TestCase): object_server, 'http_connect', mock_http_connect(500)): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) timestamp = normalize_timestamp(time()) req = Request.blank( '/sda1/p/a/c/o', @@ -1367,7 +1367,7 @@ class TestObjectController(unittest.TestCase): object_server, 'http_connect', mock_http_connect(500, with_exc=True)): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) def test_PUT_ssync_multi_frag(self): timestamp = utils.Timestamp(time()).internal @@ -1386,7 +1386,7 @@ class TestObjectController(unittest.TestCase): req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals( + self.assertEqual( resp.status_int, expected_rsp, 'got %s != %s for frag_index=%s node_index=%s' % ( resp.status_int, expected_rsp, @@ -1442,7 +1442,7 @@ class TestObjectController(unittest.TestCase): req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) obj_dir = os.path.join( self.testdir, 'sda1', storage_directory(diskfile.get_data_dir(int(policy)), @@ -1463,12 +1463,12 @@ class TestObjectController(unittest.TestCase): # Test swift.obj.server.ObjectController.HEAD req = 
Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) self.assertFalse('X-Backend-Timestamp' in resp.headers) timestamp = normalize_timestamp(time()) @@ -1480,23 +1480,23 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-Two': 'Two'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.content_length, 6) - self.assertEquals(resp.content_type, 'application/x-test') - self.assertEquals(resp.headers['content-type'], 'application/x-test') - self.assertEquals( + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.content_length, 6) + self.assertEqual(resp.content_type, 'application/x-test') + self.assertEqual(resp.headers['content-type'], 'application/x-test') + self.assertEqual( resp.headers['last-modified'], strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(math.ceil(float(timestamp))))) - self.assertEquals(resp.headers['etag'], - '"0b4c12d7e0a73840c1c4f148fda3b037"') - self.assertEquals(resp.headers['x-object-meta-1'], 'One') - self.assertEquals(resp.headers['x-object-meta-two'], 'Two') + self.assertEqual(resp.headers['etag'], + '"0b4c12d7e0a73840c1c4f148fda3b037"') + self.assertEqual(resp.headers['x-object-meta-1'], 'One') + self.assertEqual(resp.headers['x-object-meta-two'], 'Two') objfile = os.path.join( self.testdir, 'sda1', @@ -1507,7 +1507,7 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) sleep(.00001) timestamp = normalize_timestamp(time()) @@ -1518,7 +1518,7 @@ class TestObjectController(unittest.TestCase): 'Content-length': '6'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) sleep(.00001) timestamp = normalize_timestamp(time()) @@ -1526,14 +1526,14 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': timestamp}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) - self.assertEquals(resp.headers['X-Backend-Timestamp'], - utils.Timestamp(timestamp).internal) + self.assertEqual(resp.status_int, 404) + self.assertEqual(resp.headers['X-Backend-Timestamp'], + utils.Timestamp(timestamp).internal) def test_HEAD_quarantine_zbyte(self): # Test swift.obj.server.ObjectController.GET @@ -1543,7 +1543,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/x-test'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) disk_file = 
self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o', policy=POLICIES.legacy) disk_file.open() @@ -1556,16 +1556,16 @@ class TestObjectController(unittest.TestCase): diskfile.write_metadata(fp, metadata) file_name = os.path.basename(disk_file._data_file) - self.assertEquals(os.listdir(disk_file._datadir)[0], file_name) + self.assertEqual(os.listdir(disk_file._datadir)[0], file_name) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) quar_dir = os.path.join( self.testdir, 'sda1', 'quarantined', 'objects', os.path.basename(os.path.dirname(disk_file._data_file))) - self.assertEquals(os.listdir(quar_dir)[0], file_name) + self.assertEqual(os.listdir(quar_dir)[0], file_name) def test_OPTIONS(self): conf = {'devices': self.testdir, 'mount_check': 'false'} @@ -1574,24 +1574,24 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/sda1/p/a/c/o', {'REQUEST_METHOD': 'OPTIONS'}) req.content_length = 0 resp = server_handler.OPTIONS(req) - self.assertEquals(200, resp.status_int) + self.assertEqual(200, resp.status_int) for verb in 'OPTIONS GET POST PUT DELETE HEAD REPLICATE \ SSYNC'.split(): self.assertTrue( verb in resp.headers['Allow'].split(', ')) - self.assertEquals(len(resp.headers['Allow'].split(', ')), 8) - self.assertEquals(resp.headers['Server'], - (server_handler.server_type + '/' + swift_version)) + self.assertEqual(len(resp.headers['Allow'].split(', ')), 8) + self.assertEqual(resp.headers['Server'], + (server_handler.server_type + '/' + swift_version)) def test_GET(self): # Test swift.obj.server.ObjectController.GET req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) self.assertFalse('X-Backend-Timestamp' in resp.headers) timestamp = normalize_timestamp(time()) @@ -1602,45 +1602,45 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-Two': 'Two'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.body, 'VERIFY') - self.assertEquals(resp.content_length, 6) - self.assertEquals(resp.content_type, 'application/x-test') - self.assertEquals(resp.headers['content-length'], '6') - self.assertEquals(resp.headers['content-type'], 'application/x-test') - self.assertEquals( + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.body, 'VERIFY') + self.assertEqual(resp.content_length, 6) + self.assertEqual(resp.content_type, 'application/x-test') + self.assertEqual(resp.headers['content-length'], '6') + self.assertEqual(resp.headers['content-type'], 'application/x-test') + self.assertEqual( resp.headers['last-modified'], strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(math.ceil(float(timestamp))))) - self.assertEquals(resp.headers['etag'], - '"0b4c12d7e0a73840c1c4f148fda3b037"') - self.assertEquals(resp.headers['x-object-meta-1'], 'One') - self.assertEquals(resp.headers['x-object-meta-two'], 
'Two') + self.assertEqual(resp.headers['etag'], + '"0b4c12d7e0a73840c1c4f148fda3b037"') + self.assertEqual(resp.headers['x-object-meta-1'], 'One') + self.assertEqual(resp.headers['x-object-meta-two'], 'Two') req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) req.range = 'bytes=1-3' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 206) - self.assertEquals(resp.body, 'ERI') - self.assertEquals(resp.headers['content-length'], '3') + self.assertEqual(resp.status_int, 206) + self.assertEqual(resp.body, 'ERI') + self.assertEqual(resp.headers['content-length'], '3') req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) req.range = 'bytes=1-' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 206) - self.assertEquals(resp.body, 'ERIFY') - self.assertEquals(resp.headers['content-length'], '5') + self.assertEqual(resp.status_int, 206) + self.assertEqual(resp.body, 'ERIFY') + self.assertEqual(resp.headers['content-length'], '5') req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) req.range = 'bytes=-2' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 206) - self.assertEquals(resp.body, 'FY') - self.assertEquals(resp.headers['content-length'], '2') + self.assertEqual(resp.status_int, 206) + self.assertEqual(resp.body, 'FY') + self.assertEqual(resp.headers['content-length'], '2') objfile = os.path.join( self.testdir, 'sda1', @@ -1650,7 +1650,7 @@ class TestObjectController(unittest.TestCase): os.unlink(objfile) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) sleep(.00001) timestamp = normalize_timestamp(time()) @@ -1661,7 +1661,7 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '6'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) sleep(.00001) timestamp = normalize_timestamp(time()) @@ -1669,13 +1669,13 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': timestamp}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) - self.assertEquals(resp.headers['X-Backend-Timestamp'], - utils.Timestamp(timestamp).internal) + self.assertEqual(resp.status_int, 404) + self.assertEqual(resp.headers['X-Backend-Timestamp'], + utils.Timestamp(timestamp).internal) def test_GET_if_match(self): req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -1685,44 +1685,44 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '4'}) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) etag = resp.etag req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.etag, etag) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Match': '*'}) resp 
= req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.etag, etag) req = Request.blank('/sda1/p/a/c/o2', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Match': '*'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Match': '"%s"' % etag}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.etag, etag) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Match': '"11111111111111111111111111111111"'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={ 'If-Match': '"11111111111111111111111111111111", "%s"' % etag}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, @@ -1731,7 +1731,7 @@ class TestObjectController(unittest.TestCase): '"11111111111111111111111111111111", ' '"22222222222222222222222222222222"'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) def test_GET_if_match_etag_is_at(self): headers = { @@ -1743,7 +1743,7 @@ class TestObjectController(unittest.TestCase): headers=headers) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) real_etag = resp.etag # match x-backend-etag-is-at @@ -1793,47 +1793,47 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '4'}) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) etag = resp.etag req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.etag, etag) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-Match': '*'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.etag, etag) req = Request.blank('/sda1/p/a/c/o2', environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-Match': '*'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-Match': '"%s"' % etag}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.etag, etag) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-Match': '"11111111111111111111111111111111"'}) resp = req.get_response(self.object_controller) - 
self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}, headers={ 'If-Match': '"11111111111111111111111111111111", "%s"' % etag}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}, @@ -1842,7 +1842,7 @@ class TestObjectController(unittest.TestCase): '"11111111111111111111111111111111", ' '"22222222222222222222222222222222"'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) def test_GET_if_none_match(self): req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, @@ -1853,40 +1853,40 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '4'}) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) etag = resp.etag req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.etag, etag) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-None-Match': '*'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) - self.assertEquals(resp.etag, etag) - self.assertEquals(resp.headers['Content-Type'], 'application/fizzbuzz') - self.assertEquals(resp.headers['X-Object-Meta-Soup'], 'gazpacho') + self.assertEqual(resp.status_int, 304) + self.assertEqual(resp.etag, etag) + self.assertEqual(resp.headers['Content-Type'], 'application/fizzbuzz') + self.assertEqual(resp.headers['X-Object-Meta-Soup'], 'gazpacho') req = Request.blank('/sda1/p/a/c/o2', environ={'REQUEST_METHOD': 'GET'}, headers={'If-None-Match': '*'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-None-Match': '"%s"' % etag}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 304) + self.assertEqual(resp.etag, etag) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-None-Match': '"11111111111111111111111111111111"'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.etag, etag) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, @@ -1894,8 +1894,8 @@ class TestObjectController(unittest.TestCase): '"11111111111111111111111111111111", ' '"%s"' % etag}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 304) + self.assertEqual(resp.etag, etag) def test_HEAD_if_none_match(self): req = Request.blank('/sda1/p/a/c/o', @@ -1906,41 +1906,41 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '4'}) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 
201) etag = resp.etag req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.etag, etag) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-None-Match': '*'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 304) + self.assertEqual(resp.etag, etag) req = Request.blank('/sda1/p/a/c/o2', environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-None-Match': '*'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-None-Match': '"%s"' % etag}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 304) + self.assertEqual(resp.etag, etag) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-None-Match': '"11111111111111111111111111111111"'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.etag, etag) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}, @@ -1948,8 +1948,8 @@ class TestObjectController(unittest.TestCase): '"11111111111111111111111111111111", ' '"%s"' % etag}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) - self.assertEquals(resp.etag, etag) + self.assertEqual(resp.status_int, 304) + self.assertEqual(resp.etag, etag) def test_GET_if_modified_since(self): timestamp = normalize_timestamp(time()) @@ -1960,44 +1960,44 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '4'}) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) since = strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1)) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Modified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) + self.assertEqual(resp.status_int, 304) since = \ strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 1)) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Modified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) since = \ strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1)) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Modified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) + self.assertEqual(resp.status_int, 304) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) since = resp.headers['Last-Modified'] - 
self.assertEquals(since, strftime('%a, %d %b %Y %H:%M:%S GMT', - gmtime(math.ceil(float(timestamp))))) + self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT', + gmtime(math.ceil(float(timestamp))))) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Modified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) + self.assertEqual(resp.status_int, 304) timestamp = normalize_timestamp(int(time())) req = Request.blank('/sda1/p/a/c/o2', @@ -2008,7 +2008,7 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '4'}) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) since = strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp))) @@ -2016,7 +2016,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'GET'}, headers={'If-Modified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) + self.assertEqual(resp.status_int, 304) def test_HEAD_if_modified_since(self): timestamp = normalize_timestamp(time()) @@ -2027,12 +2027,12 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '4'}) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) since = strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1)) @@ -2040,7 +2040,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-Modified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) + self.assertEqual(resp.status_int, 304) since = \ strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 1)) @@ -2048,7 +2048,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-Modified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) since = \ strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1)) @@ -2056,20 +2056,20 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-Modified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) + self.assertEqual(resp.status_int, 304) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) since = resp.headers['Last-Modified'] - self.assertEquals(since, strftime('%a, %d %b %Y %H:%M:%S GMT', - gmtime(math.ceil(float(timestamp))))) + self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT', + gmtime(math.ceil(float(timestamp))))) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-Modified-Since': since}) resp = self.object_controller.GET(req) - self.assertEquals(resp.status_int, 304) + self.assertEqual(resp.status_int, 304) timestamp = normalize_timestamp(int(time())) req = Request.blank('/sda1/p/a/c/o2', @@ -2080,7 +2080,7 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '4'}) req.body = 'test' resp = req.get_response(self.object_controller) - 
self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) since = strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp))) @@ -2088,7 +2088,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-Modified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 304) + self.assertEqual(resp.status_int, 304) def test_GET_if_unmodified_since(self): timestamp = normalize_timestamp(time()) @@ -2100,47 +2100,47 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '4'}) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) since = strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 1)) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Unmodified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) since = \ strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) - 9)) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Unmodified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) - self.assertEquals(resp.headers['Content-Type'], - 'application/cat-picture') - self.assertEquals(resp.headers['X-Object-Meta-Burr'], 'ito') + self.assertEqual(resp.status_int, 412) + self.assertEqual(resp.headers['Content-Type'], + 'application/cat-picture') + self.assertEqual(resp.headers['X-Object-Meta-Burr'], 'ito') since = \ strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(float(timestamp) + 9)) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Unmodified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) since = resp.headers['Last-Modified'] - self.assertEquals(since, strftime('%a, %d %b %Y %H:%M:%S GMT', - gmtime(math.ceil(float(timestamp))))) + self.assertEqual(since, strftime('%a, %d %b %Y %H:%M:%S GMT', + gmtime(math.ceil(float(timestamp))))) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Unmodified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) def test_HEAD_if_unmodified_since(self): timestamp = normalize_timestamp(time()) @@ -2152,7 +2152,7 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '4'}) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) since = strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(math.ceil(float(timestamp)) + 1)) @@ -2160,7 +2160,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-Unmodified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) since = strftime('%a, %d %b %Y %H:%M:%S GMT', 
gmtime(math.ceil(float(timestamp)))) @@ -2168,7 +2168,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-Unmodified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) since = strftime('%a, %d %b %Y %H:%M:%S GMT', gmtime(math.ceil(float(timestamp)) - 1)) @@ -2176,7 +2176,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'HEAD'}, headers={'If-Unmodified-Since': since}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) def test_GET_quarantine(self): # Test swift.obj.server.ObjectController.GET @@ -2186,7 +2186,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/x-test'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o', policy=POLICIES.legacy) disk_file.open() @@ -2197,19 +2197,19 @@ class TestObjectController(unittest.TestCase): metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o', 'Content-Length': 6, 'ETag': etag} diskfile.write_metadata(disk_file._fp, metadata) - self.assertEquals(os.listdir(disk_file._datadir)[0], file_name) + self.assertEqual(os.listdir(disk_file._datadir)[0], file_name) req = Request.blank('/sda1/p/a/c/o') resp = req.get_response(self.object_controller) quar_dir = os.path.join( self.testdir, 'sda1', 'quarantined', 'objects', os.path.basename(os.path.dirname(disk_file._data_file))) - self.assertEquals(os.listdir(disk_file._datadir)[0], file_name) + self.assertEqual(os.listdir(disk_file._datadir)[0], file_name) body = resp.body # actually does quarantining - self.assertEquals(body, 'VERIFY') - self.assertEquals(os.listdir(quar_dir)[0], file_name) + self.assertEqual(body, 'VERIFY') + self.assertEqual(os.listdir(quar_dir)[0], file_name) req = Request.blank('/sda1/p/a/c/o') resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_GET_quarantine_zbyte(self): # Test swift.obj.server.ObjectController.GET @@ -2219,7 +2219,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/x-test'}) req.body = 'VERIFY' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o', policy=POLICIES.legacy) disk_file.open() @@ -2230,15 +2230,15 @@ class TestObjectController(unittest.TestCase): with open(disk_file._data_file, 'w') as fp: diskfile.write_metadata(fp, metadata) - self.assertEquals(os.listdir(disk_file._datadir)[0], file_name) + self.assertEqual(os.listdir(disk_file._datadir)[0], file_name) req = Request.blank('/sda1/p/a/c/o') resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) quar_dir = os.path.join( self.testdir, 'sda1', 'quarantined', 'objects', os.path.basename(os.path.dirname(disk_file._data_file))) - self.assertEquals(os.listdir(quar_dir)[0], file_name) + self.assertEqual(os.listdir(quar_dir)[0], file_name) def test_GET_quarantine_range(self): # Test swift.obj.server.ObjectController.GET @@ -2248,7 +2248,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/x-test'}) req.body = 'VERIFY' 
resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) disk_file = self.df_mgr.get_diskfile('sda1', 'p', 'a', 'c', 'o', policy=POLICIES.legacy) disk_file.open() @@ -2259,7 +2259,7 @@ class TestObjectController(unittest.TestCase): metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o', 'Content-Length': 6, 'ETag': etag} diskfile.write_metadata(disk_file._fp, metadata) - self.assertEquals(os.listdir(disk_file._datadir)[0], file_name) + self.assertEqual(os.listdir(disk_file._datadir)[0], file_name) req = Request.blank('/sda1/p/a/c/o') req.range = 'bytes=0-4' # partial resp = req.get_response(self.object_controller) @@ -2267,11 +2267,11 @@ class TestObjectController(unittest.TestCase): self.testdir, 'sda1', 'quarantined', 'objects', os.path.basename(os.path.dirname(disk_file._data_file))) resp.body - self.assertEquals(os.listdir(disk_file._datadir)[0], file_name) + self.assertEqual(os.listdir(disk_file._datadir)[0], file_name) self.assertFalse(os.path.isdir(quar_dir)) req = Request.blank('/sda1/p/a/c/o') resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) req = Request.blank('/sda1/p/a/c/o') req.range = 'bytes=1-6' # partial @@ -2280,7 +2280,7 @@ class TestObjectController(unittest.TestCase): self.testdir, 'sda1', 'quarantined', 'objects', os.path.basename(os.path.dirname(disk_file._data_file))) resp.body - self.assertEquals(os.listdir(disk_file._datadir)[0], file_name) + self.assertEqual(os.listdir(disk_file._datadir)[0], file_name) self.assertFalse(os.path.isdir(quar_dir)) req = Request.blank('/sda1/p/a/c/o') @@ -2289,12 +2289,12 @@ class TestObjectController(unittest.TestCase): quar_dir = os.path.join( self.testdir, 'sda1', 'quarantined', 'objects', os.path.basename(os.path.dirname(disk_file._data_file))) - self.assertEquals(os.listdir(disk_file._datadir)[0], file_name) + self.assertEqual(os.listdir(disk_file._datadir)[0], file_name) resp.body self.assertTrue(os.path.isdir(quar_dir)) req = Request.blank('/sda1/p/a/c/o') resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) @mock.patch("time.time", mock_time) def test_DELETE(self): @@ -2302,12 +2302,12 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'DELETE'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) # The following should have created a tombstone file timestamp = normalize_timestamp(1000) @@ -2315,7 +2315,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': timestamp}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) ts_1000_file = os.path.join( self.testdir, 'sda1', storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p', @@ -2323,7 +2323,7 @@ class TestObjectController(unittest.TestCase): utils.Timestamp(timestamp).internal + '.ts') self.assertTrue(os.path.isfile(ts_1000_file)) # There should now be a 1000 ts file. 
- self.assertEquals(len(os.listdir(os.path.dirname(ts_1000_file))), 1) + self.assertEqual(len(os.listdir(os.path.dirname(ts_1000_file))), 1) # The following should *not* have created a tombstone file. timestamp = normalize_timestamp(999) @@ -2331,7 +2331,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': timestamp}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) ts_999_file = os.path.join( self.testdir, 'sda1', storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p', @@ -2339,7 +2339,7 @@ class TestObjectController(unittest.TestCase): utils.Timestamp(timestamp).internal + '.ts') self.assertFalse(os.path.isfile(ts_999_file)) self.assertTrue(os.path.isfile(ts_1000_file)) - self.assertEquals(len(os.listdir(os.path.dirname(ts_1000_file))), 1) + self.assertEqual(len(os.listdir(os.path.dirname(ts_1000_file))), 1) orig_timestamp = utils.Timestamp(1002).internal headers = {'X-Timestamp': orig_timestamp, @@ -2349,7 +2349,7 @@ class TestObjectController(unittest.TestCase): headers=headers) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # There should now be 1000 ts and a 1001 data file. data_1002_file = os.path.join( self.testdir, 'sda1', @@ -2357,7 +2357,7 @@ class TestObjectController(unittest.TestCase): hash_path('a', 'c', 'o')), orig_timestamp + '.data') self.assertTrue(os.path.isfile(data_1002_file)) - self.assertEquals(len(os.listdir(os.path.dirname(data_1002_file))), 1) + self.assertEqual(len(os.listdir(os.path.dirname(data_1002_file))), 1) # The following should *not* have created a tombstone file. timestamp = normalize_timestamp(1001) @@ -2365,7 +2365,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': timestamp}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 409) + self.assertEqual(resp.status_int, 409) self.assertEqual(resp.headers['X-Backend-Timestamp'], orig_timestamp) ts_1001_file = os.path.join( self.testdir, 'sda1', @@ -2374,21 +2374,21 @@ class TestObjectController(unittest.TestCase): utils.Timestamp(timestamp).internal + '.ts') self.assertFalse(os.path.isfile(ts_1001_file)) self.assertTrue(os.path.isfile(data_1002_file)) - self.assertEquals(len(os.listdir(os.path.dirname(ts_1001_file))), 1) + self.assertEqual(len(os.listdir(os.path.dirname(ts_1001_file))), 1) timestamp = normalize_timestamp(1003) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': timestamp}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) ts_1003_file = os.path.join( self.testdir, 'sda1', storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p', hash_path('a', 'c', 'o')), utils.Timestamp(timestamp).internal + '.ts') self.assertTrue(os.path.isfile(ts_1003_file)) - self.assertEquals(len(os.listdir(os.path.dirname(ts_1003_file))), 1) + self.assertEqual(len(os.listdir(os.path.dirname(ts_1003_file))), 1) def test_DELETE_container_updates(self): # Test swift.obj.server.ObjectController.DELETE and container @@ -2403,7 +2403,7 @@ class TestObjectController(unittest.TestCase): headers=headers) req.body = 'test' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) 
calls_made = [0] @@ -2420,7 +2420,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': timestamp.internal}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 409) + self.assertEqual(resp.status_int, 409) self.assertEqual(resp.headers['x-backend-timestamp'], orig_timestamp.internal) objfile = os.path.join( @@ -2429,8 +2429,8 @@ class TestObjectController(unittest.TestCase): hash_path('a', 'c', 'o')), utils.Timestamp(timestamp).internal + '.ts') self.assertFalse(os.path.isfile(objfile)) - self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) - self.assertEquals(0, calls_made[0]) + self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1) + self.assertEqual(0, calls_made[0]) # The following request should return 204, and the object should # be truly deleted (container update is performed) because this @@ -2441,15 +2441,15 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': timestamp.internal}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) objfile = os.path.join( self.testdir, 'sda1', storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p', hash_path('a', 'c', 'o')), utils.Timestamp(timestamp).internal + '.ts') self.assertTrue(os.path.isfile(objfile)) - self.assertEquals(1, calls_made[0]) - self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) + self.assertEqual(1, calls_made[0]) + self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1) # The following request should return a 404, as the object should # already have been deleted, but it should have also performed a @@ -2460,15 +2460,15 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': timestamp.internal}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) objfile = os.path.join( self.testdir, 'sda1', storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p', hash_path('a', 'c', 'o')), utils.Timestamp(timestamp).internal + '.ts') self.assertTrue(os.path.isfile(objfile)) - self.assertEquals(2, calls_made[0]) - self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) + self.assertEqual(2, calls_made[0]) + self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1) # The following request should return a 404, as the object should # already have been deleted, and it should not have performed a @@ -2479,15 +2479,15 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': timestamp.internal}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) objfile = os.path.join( self.testdir, 'sda1', storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p', hash_path('a', 'c', 'o')), utils.Timestamp(timestamp).internal + '.ts') self.assertFalse(os.path.isfile(objfile)) - self.assertEquals(2, calls_made[0]) - self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) + self.assertEqual(2, calls_made[0]) + self.assertEqual(len(os.listdir(os.path.dirname(objfile))), 1) finally: self.object_controller.container_update = orig_cu @@ -2511,7 +2511,7 @@ class TestObjectController(unittest.TestCase): resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) 
self.assertEqual(resp.status_int, 201) - self.assertEquals(1, len(container_updates)) + self.assertEqual(1, len(container_updates)) for update in container_updates: ip, port, method, path, headers = update self.assertEqual(ip, '10.0.0.1') @@ -2550,7 +2550,7 @@ class TestObjectController(unittest.TestCase): resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) - self.assertEquals(1, len(container_updates)) + self.assertEqual(1, len(container_updates)) for update in container_updates: ip, port, method, path, headers = update self.assertEqual(ip, '10.0.0.1') @@ -2588,7 +2588,7 @@ class TestObjectController(unittest.TestCase): resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) - self.assertEquals(1, len(container_updates)) + self.assertEqual(1, len(container_updates)) for update in container_updates: ip, port, method, path, headers = update self.assertEqual(ip, '10.0.0.1') @@ -2626,7 +2626,7 @@ class TestObjectController(unittest.TestCase): resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 204) - self.assertEquals(1, len(container_updates)) + self.assertEqual(1, len(container_updates)) for update in container_updates: ip, port, method, path, headers = update self.assertEqual(ip, '10.0.0.1') @@ -2657,7 +2657,7 @@ class TestObjectController(unittest.TestCase): resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 404) - self.assertEquals(1, len(container_updates)) + self.assertEqual(1, len(container_updates)) for update in container_updates: ip, port, method, path, headers = update self.assertEqual(ip, '10.0.0.1') @@ -2702,8 +2702,8 @@ class TestObjectController(unittest.TestCase): 'wsgi.multiprocess': False, 'wsgi.run_once': False}, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '400 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '400 ') def test_call_not_found(self): inbuf = WsgiBytesIO() @@ -2729,8 +2729,8 @@ class TestObjectController(unittest.TestCase): 'wsgi.multiprocess': False, 'wsgi.run_once': False}, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '404 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '404 ') def test_call_bad_method(self): inbuf = WsgiBytesIO() @@ -2756,8 +2756,8 @@ class TestObjectController(unittest.TestCase): 'wsgi.multiprocess': False, 'wsgi.run_once': False}, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '405 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '405 ') def test_call_name_collision(self): def my_check(*args): @@ -2795,8 +2795,8 @@ class TestObjectController(unittest.TestCase): 'wsgi.multiprocess': False, 'wsgi.run_once': False}, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '201 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '201 ') inbuf = WsgiBytesIO() errbuf = StringIO() @@ -2824,8 +2824,8 @@ class TestObjectController(unittest.TestCase): 'wsgi.multiprocess': False, 'wsgi.run_once': False}, start_response) - 
self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '403 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '403 ') def test_invalid_method_doesnt_exist(self): errbuf = StringIO() @@ -2838,8 +2838,8 @@ class TestObjectController(unittest.TestCase): 'REQUEST_METHOD': 'method_doesnt_exist', 'PATH_INFO': '/sda1/p/a/c/o'}, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '405 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '405 ') def test_invalid_method_is_not_public(self): errbuf = StringIO() @@ -2851,8 +2851,8 @@ class TestObjectController(unittest.TestCase): self.object_controller.__call__({'REQUEST_METHOD': '__init__', 'PATH_INFO': '/sda1/p/a/c/o'}, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '405 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '405 ') def test_chunked_put(self): listener = listen(('localhost', 0)) @@ -2870,7 +2870,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) sock = connect_tcp(('localhost', port)) fd = sock.makefile() fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n' @@ -2878,9 +2878,9 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) response = fd.read() - self.assertEquals(response, 'oh hai') + self.assertEqual(response, 'oh hai') killer.kill() def test_chunked_content_length_mismatch_zero(self): @@ -2900,7 +2900,7 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 201' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) sock = connect_tcp(('localhost', port)) fd = sock.makefile() fd.write('GET /sda1/p/a/c/o HTTP/1.1\r\nHost: localhost\r\n' @@ -2908,9 +2908,9 @@ class TestObjectController(unittest.TestCase): fd.flush() headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 200' - self.assertEquals(headers[:len(exp)], exp) + self.assertEqual(headers[:len(exp)], exp) response = fd.read() - self.assertEquals(response, 'oh hai') + self.assertEqual(response, 'oh hai') killer.kill() def test_max_object_name_length(self): @@ -2924,7 +2924,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'DATA' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/' + ('2' * (max_name_len + 1)), environ={'REQUEST_METHOD': 'PUT'}, @@ -2933,7 +2933,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'DATA' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_max_upload_time(self): @@ -2958,7 +2958,7 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': normalize_timestamp(time()), 'Content-Length': '4', 'Content-Type': 'text/plain'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) self.object_controller.max_upload_time = 0.1 req = 
Request.blank( '/sda1/p/a/c/o', @@ -2966,7 +2966,7 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': normalize_timestamp(time()), 'Content-Length': '4', 'Content-Type': 'text/plain'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 408) + self.assertEqual(resp.status_int, 408) def test_short_body(self): @@ -2990,7 +2990,7 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': normalize_timestamp(time()), 'Content-Length': '4', 'Content-Type': 'text/plain'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 499) + self.assertEqual(resp.status_int, 499) def test_bad_sinces(self): req = Request.blank( @@ -2999,17 +2999,17 @@ class TestObjectController(unittest.TestCase): 'Content-Length': '4', 'Content-Type': 'text/plain'}, body=' ') resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Unmodified-Since': 'Not a valid date'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Modified-Since': 'Not a valid date'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) too_big_date_list = list(datetime.datetime.max.timetuple()) too_big_date_list[0] += 1 # bump up the year @@ -3019,7 +3019,7 @@ class TestObjectController(unittest.TestCase): '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'If-Unmodified-Since': too_big_date}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) def test_content_encoding(self): req = Request.blank( @@ -3029,16 +3029,16 @@ class TestObjectController(unittest.TestCase): 'Content-Encoding': 'gzip'}, body=' ') resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.headers['content-encoding'], 'gzip') + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.headers['content-encoding'], 'gzip') req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.headers['content-encoding'], 'gzip') + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.headers['content-encoding'], 'gzip') def test_async_update_http_connect(self): policy = random.choice(list(POLICIES)) @@ -3059,7 +3059,7 @@ class TestObjectController(unittest.TestCase): policy) finally: object_server.http_connect = orig_http_connect - self.assertEquals( + self.assertEqual( given_args, ['127.0.0.1', '1234', 'sdc1', 1, 'PUT', '/a/c/o', { 'x-timestamp': '1', 'x-out': 'set', @@ -3129,8 +3129,8 @@ class TestObjectController(unittest.TestCase): http_connect_args.sort(key=operator.itemgetter('ipaddr')) - self.assertEquals(len(http_connect_args), 3) - self.assertEquals( + self.assertEqual(len(http_connect_args), 3) + self.assertEqual( http_connect_args[0], {'ipaddr': '1.2.3.4', 'port': '5', @@ -3149,7 +3149,7 @@ class 
TestObjectController(unittest.TestCase): 'user-agent': 'object-server %d' % os.getpid(), 'X-Backend-Storage-Policy-Index': int(policy), 'x-trans-id': '-'})}) - self.assertEquals( + self.assertEqual( http_connect_args[1], {'ipaddr': '10.1.1.1', 'port': '6001', @@ -3168,7 +3168,7 @@ class TestObjectController(unittest.TestCase): # system account storage policy is 0 'X-Backend-Storage-Policy-Index': 0, 'x-trans-id': '-'})}) - self.assertEquals( + self.assertEqual( http_connect_args[2], {'ipaddr': '10.2.2.2', 'port': '6002', @@ -3240,8 +3240,8 @@ class TestObjectController(unittest.TestCase): http_connect_args.sort(key=operator.itemgetter('ipaddr')) - self.assertEquals(len(http_connect_args), 2) - self.assertEquals( + self.assertEqual(len(http_connect_args), 2) + self.assertEqual( http_connect_args[0], {'ipaddr': '1.2.3.4', 'port': '5', @@ -3259,7 +3259,7 @@ class TestObjectController(unittest.TestCase): 'referer': 'PUT http://localhost/sda1/p/a/c/o', 'user-agent': 'object-server %d' % os.getpid(), 'x-trans-id': '-'})}) - self.assertEquals( + self.assertEqual( http_connect_args[1], {'ipaddr': '6.7.8.9', 'port': '10', @@ -3316,7 +3316,7 @@ class TestObjectController(unittest.TestCase): resp = req.get_response(self.object_controller) self.assertRaises(StopIteration, fake_conn.code_iter.next) self.assertEqual(resp.status_int, 201) - self.assertEquals(2, len(container_updates)) + self.assertEqual(2, len(container_updates)) delete_at_update, container_update = container_updates # delete_at_update ip, port, method, path, headers = delete_at_update @@ -3354,11 +3354,11 @@ class TestObjectController(unittest.TestCase): found_files.append(async_file) data = pickle.load(open(async_file)) if data['account'] == 'a': - self.assertEquals( + self.assertEqual( int(data['headers'] ['X-Backend-Storage-Policy-Index']), int(policy)) elif data['account'] == '.expiring_objects': - self.assertEquals( + self.assertEqual( int(data['headers'] ['X-Backend-Storage-Policy-Index']), 0) else: @@ -3386,7 +3386,7 @@ class TestObjectController(unittest.TestCase): object_server.http_connect = orig_http_connect utils.HASH_PATH_PREFIX = _prefix async_dir = diskfile.get_async_dir(policy) - self.assertEquals( + self.assertEqual( pickle.load(open(os.path.join( self.testdir, 'sda1', async_dir, 'a83', '06fbf0b514e5199dfc4e00f42eb5ea83-%s' % @@ -3427,7 +3427,7 @@ class TestObjectController(unittest.TestCase): 'X-Backend-Storage-Policy-Index': int(policy)}, 'sda1', policy) async_dir = diskfile.get_async_dir(policy) - self.assertEquals( + self.assertEqual( pickle.load(open(os.path.join( self.testdir, 'sda1', async_dir, 'a83', '06fbf0b514e5199dfc4e00f42eb5ea83-%s' % @@ -3530,7 +3530,7 @@ class TestObjectController(unittest.TestCase): 'x-size': '0', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e', 'x-content-type': 'text/plain', 'x-timestamp': '1'}, 'sda1', policy) - self.assertEquals(given_args, []) + self.assertEqual(given_args, []) def test_container_update_success(self): container_updates = [] @@ -3807,7 +3807,7 @@ class TestObjectController(unittest.TestCase): fake_async_update): self.object_controller.delete_at_update( 'DELETE', 2, 'a', 'c', 'o', req, 'sda1', policy) - self.assertEquals( + self.assertEqual( given_args, [ 'DELETE', '.expiring_objects', '0000000000', '0000000002-a/c/o', None, None, None, @@ -3837,7 +3837,7 @@ class TestObjectController(unittest.TestCase): int(policy)}) self.object_controller.delete_at_update( 'DELETE', -2, 'a', 'c', 'o', req, 'sda1', policy) - self.assertEquals(given_args, [ + self.assertEqual(given_args, 
[ 'DELETE', '.expiring_objects', '0000000000', '0000000000-a/c/o', None, None, None, HeaderKeyDict({ @@ -3873,7 +3873,7 @@ class TestObjectController(unittest.TestCase): 86400, 'a', 'c', 'o') self.assertEqual(expiring_obj_container, expected_exp_cont) - self.assertEquals(given_args, [ + self.assertEqual(given_args, [ 'DELETE', '.expiring_objects', '9999999999-a/c/o', None, None, None, HeaderKeyDict({ @@ -3906,7 +3906,7 @@ class TestObjectController(unittest.TestCase): 'X-Backend-Storage-Policy-Index': int(policy)}) self.object_controller.delete_at_update('PUT', 2, 'a', 'c', 'o', req, 'sda1', policy) - self.assertEquals( + self.assertEqual( given_args, [ 'PUT', '.expiring_objects', '0000000000', '0000000002-a/c/o', '127.0.0.1:1234', @@ -3943,7 +3943,7 @@ class TestObjectController(unittest.TestCase): 'X-Backend-Storage-Policy-Index': int(policy)}) self.object_controller.delete_at_update('PUT', 2, 'a', 'c', 'o', req, 'sda1', policy) - self.assertEquals( + self.assertEqual( self.logger.get_lines_for_level('warning'), ['X-Delete-At-Container header must be specified for expiring ' 'objects background PUT to work properly. Making best guess as ' @@ -3965,7 +3965,7 @@ class TestObjectController(unittest.TestCase): 'X-Backend-Storage-Policy-Index': int(policy)}) self.object_controller.delete_at_update('DELETE', 2, 'a', 'c', 'o', req, 'sda1', policy) - self.assertEquals( + self.assertEqual( given_args, [ 'DELETE', '.expiring_objects', '0000000000', '0000000002-a/c/o', None, None, @@ -3995,7 +3995,7 @@ class TestObjectController(unittest.TestCase): 'X-Backend-Storage-Policy-Index': int(policy)}) self.object_controller.delete_at_update( 'DELETE', -2, 'a', 'c', 'o', req, 'sda1', policy) - self.assertEquals(given_args, []) + self.assertEqual(given_args, []) def test_POST_calls_delete_at(self): policy = random.choice(list(POLICIES)) @@ -4015,8 +4015,8 @@ class TestObjectController(unittest.TestCase): 'X-Object-Sysmeta-Ec-Frag-Index': 2}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) - self.assertEquals(given_args, []) + self.assertEqual(resp.status_int, 201) + self.assertEqual(given_args, []) sleep(.00001) req = Request.blank( @@ -4026,8 +4026,8 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/x-test', 'X-Backend-Storage-Policy-Index': int(policy)}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) - self.assertEquals(given_args, []) + self.assertEqual(resp.status_int, 202) + self.assertEqual(given_args, []) sleep(.00001) timestamp1 = normalize_timestamp(time()) @@ -4040,8 +4040,8 @@ class TestObjectController(unittest.TestCase): 'X-Delete-At': delete_at_timestamp1, 'X-Backend-Storage-Policy-Index': int(policy)}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) - self.assertEquals( + self.assertEqual(resp.status_int, 202) + self.assertEqual( given_args, [ 'PUT', int(delete_at_timestamp1), 'a', 'c', 'o', given_args[5], 'sda1', policy]) @@ -4060,8 +4060,8 @@ class TestObjectController(unittest.TestCase): 'X-Delete-At': delete_at_timestamp2, 'X-Backend-Storage-Policy-Index': int(policy)}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) - self.assertEquals( + self.assertEqual(resp.status_int, 202) + self.assertEqual( given_args, [ 'PUT', int(delete_at_timestamp2), 'a', 'c', 'o', given_args[5], 'sda1', policy, @@ -4086,8 +4086,8 @@ class TestObjectController(unittest.TestCase): 
'X-Object-Sysmeta-Ec-Frag-Index': 4}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) - self.assertEquals(given_args, []) + self.assertEqual(resp.status_int, 201) + self.assertEqual(given_args, []) sleep(.00001) timestamp1 = normalize_timestamp(time()) @@ -4102,8 +4102,8 @@ class TestObjectController(unittest.TestCase): 'X-Object-Sysmeta-Ec-Frag-Index': 3}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) - self.assertEquals( + self.assertEqual(resp.status_int, 201) + self.assertEqual( given_args, [ 'PUT', int(delete_at_timestamp1), 'a', 'c', 'o', given_args[5], 'sda1', policy]) @@ -4125,8 +4125,8 @@ class TestObjectController(unittest.TestCase): 'X-Object-Sysmeta-Ec-Frag-Index': 3}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) - self.assertEquals( + self.assertEqual(resp.status_int, 201) + self.assertEqual( given_args, [ 'PUT', int(delete_at_timestamp2), 'a', 'c', 'o', given_args[5], 'sda1', policy, @@ -4149,13 +4149,13 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'X-Timestamp': normalize_timestamp(test_time)}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) orig_time = object_server.time.time try: @@ -4177,13 +4177,13 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'X-Timestamp': normalize_timestamp(test_time)}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) finally: object_server.time.time = orig_time @@ -4196,9 +4196,9 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'GET'}, headers={'X-Timestamp': normalize_timestamp(t)}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) - self.assertEquals(resp.headers['X-Backend-Timestamp'], - utils.Timestamp(put_timestamp)) + self.assertEqual(resp.status_int, 404) + self.assertEqual(resp.headers['X-Backend-Timestamp'], + utils.Timestamp(put_timestamp)) finally: object_server.time.time = orig_time @@ -4218,14 +4218,14 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}, headers={'X-Timestamp': normalize_timestamp(test_time)}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) orig_time = object_server.time.time try: @@ -4247,13 +4247,13 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + 
self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}, headers={'X-Timestamp': normalize_timestamp(test_time)}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) finally: object_server.time.time = orig_time @@ -4266,9 +4266,9 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'HEAD'}, headers={'X-Timestamp': normalize_timestamp(time())}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) - self.assertEquals(resp.headers['X-Backend-Timestamp'], - utils.Timestamp(put_timestamp)) + self.assertEqual(resp.status_int, 404) + self.assertEqual(resp.headers['X-Backend-Timestamp'], + utils.Timestamp(put_timestamp)) finally: object_server.time.time = orig_time @@ -4288,14 +4288,14 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': normalize_timestamp(test_time - 1500)}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 202) + self.assertEqual(resp.status_int, 202) delete_at_timestamp = int(time() + 1) delete_at_container = str( @@ -4311,7 +4311,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) orig_time = object_server.time.time try: @@ -4322,7 +4322,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'POST'}, headers={'X-Timestamp': normalize_timestamp(time())}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) finally: object_server.time.time = orig_time @@ -4342,7 +4342,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) orig_time = object_server.time.time try: @@ -4353,7 +4353,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': normalize_timestamp(time())}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) finally: object_server.time.time = orig_time @@ -4375,15 +4375,15 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) # sanity req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}, headers={'X-Timestamp': test_timestamp}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.body, 'TEST') + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.body, 'TEST') objfile = os.path.join( self.testdir, 'sda1', storage_directory(diskfile.get_data_dir(POLICIES[0]), 'p', @@ -4399,7 +4399,7 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': test_timestamp}) 
resp = req.get_response(self.object_controller) # request will 404 - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) # but file still exists self.assertTrue(os.path.isfile(objfile)) @@ -4410,7 +4410,7 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': delete_at_timestamp, 'X-If-Delete-At': int(time() + 1)}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) self.assertTrue(os.path.isfile(objfile)) # make the x-if-delete-at with all the right bits @@ -4420,7 +4420,7 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': delete_at_timestamp, 'X-If-Delete-At': delete_at_timestamp}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) self.assertFalse(os.path.isfile(objfile)) # make the x-if-delete-at with all the right bits (again) @@ -4430,7 +4430,7 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': delete_at_timestamp, 'X-If-Delete-At': delete_at_timestamp}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) self.assertFalse(os.path.isfile(objfile)) # make the x-if-delete-at for some not found @@ -4440,7 +4440,7 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': delete_at_timestamp, 'X-If-Delete-At': delete_at_timestamp}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) def test_DELETE_if_delete_at(self): test_time = time() + 10000 @@ -4451,14 +4451,14 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': normalize_timestamp(test_time - 98)}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) delete_at_timestamp = int(test_time - 1) delete_at_container = str( @@ -4474,7 +4474,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/o', @@ -4482,14 +4482,14 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': normalize_timestamp(test_time - 95), 'X-If-Delete-At': str(int(test_time))}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': normalize_timestamp(test_time - 95)}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) delete_at_timestamp = int(test_time - 1) delete_at_container = str( @@ -4505,28 +4505,28 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/o', 
environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': normalize_timestamp(test_time - 92), 'X-If-Delete-At': str(int(test_time))}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 412) + self.assertEqual(resp.status_int, 412) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': normalize_timestamp(test_time - 92), 'X-If-Delete-At': delete_at_timestamp}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 204) + self.assertEqual(resp.status_int, 204) req = Request.blank( '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': normalize_timestamp(test_time - 92), 'X-If-Delete-At': 'abc'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) def test_DELETE_calls_delete_at(self): given_args = [] @@ -4550,8 +4550,8 @@ class TestObjectController(unittest.TestCase): 'X-Delete-At-Container': delete_at_container1}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) - self.assertEquals(given_args, [ + self.assertEqual(resp.status_int, 201) + self.assertEqual(given_args, [ 'PUT', int(delete_at_timestamp1), 'a', 'c', 'o', given_args[5], 'sda1', POLICIES[0]]) @@ -4566,8 +4566,8 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': timestamp2, 'Content-Type': 'application/octet-stream'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 204) - self.assertEquals(given_args, [ + self.assertEqual(resp.status_int, 204) + self.assertEqual(given_args, [ 'DELETE', int(delete_at_timestamp1), 'a', 'c', 'o', given_args[5], 'sda1', POLICIES[0]]) @@ -4580,7 +4580,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) self.assertTrue('X-Delete-At in past' in resp.body) def test_POST_delete_at_in_past(self): @@ -4592,7 +4592,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'TEST' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) req = Request.blank( '/sda1/p/a/c/o', @@ -4600,7 +4600,7 @@ class TestObjectController(unittest.TestCase): headers={'X-Timestamp': normalize_timestamp(time() + 1), 'X-Delete-At': str(int(time() - 1))}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 400) + self.assertEqual(resp.status_int, 400) self.assertTrue('X-Delete-At in past' in resp.body) def test_REPLICATE_works(self): @@ -4620,9 +4620,9 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'REPLICATE'}, headers={}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) + self.assertEqual(resp.status_int, 200) p_data = pickle.loads(resp.body) - self.assertEquals(p_data, {1: 2}) + self.assertEqual(p_data, {1: 2}) finally: tpool.execute = was_tpool_exe diskfile.DiskFileManager._get_hashes = was_get_hashes @@ -4701,7 +4701,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream', 'Expect': '100-continue'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 507) + self.assertEqual(resp.status_int, 507) 
self.assertFalse(body_reader.read_called) finally: diskfile.fallocate = orig_fallocate @@ -4743,7 +4743,7 @@ class TestObjectController(unittest.TestCase): def test_serv_reserv(self): # Test replication_server flag was set from configuration file. conf = {'devices': self.testdir, 'mount_check': 'false'} - self.assertEquals( + self.assertEqual( object_server.ObjectController(conf).replication_server, None) for val in [True, '1', 'True', 'true']: conf['replication_server'] = val @@ -4763,7 +4763,7 @@ class TestObjectController(unittest.TestCase): self.assertFalse(hasattr(method, 'replication')) for method_name in repl_methods: method = getattr(self.object_controller, method_name) - self.assertEquals(method.replication, True) + self.assertEqual(method.replication, True) def test_correct_allowed_method(self): # Test correct work for allowed method using @@ -4885,8 +4885,8 @@ class TestObjectController(unittest.TestCase): 'wsgi.multiprocess': False, 'wsgi.run_once': False} self.object_controller(env, start_response) - self.assertEquals(errbuf.getvalue(), '') - self.assertEquals(outbuf.getvalue()[:4], '405 ') + self.assertEqual(errbuf.getvalue(), '') + self.assertEqual(outbuf.getvalue()[:4], '405 ') def test_not_utf8_and_not_logging_requests(self): inbuf = WsgiBytesIO() @@ -5053,7 +5053,7 @@ class TestObjectController(unittest.TestCase): object_dir = self.testdir + "/sda1/objects-1" self.assertFalse(os.path.isdir(object_dir)) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) self.assertTrue(os.path.isdir(object_dir)) # make sure no idx in header uses policy 0 data_dir @@ -5070,7 +5070,7 @@ class TestObjectController(unittest.TestCase): with mock.patch.object(POLICIES, 'get_by_index', lambda _: True): resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) self.assertTrue(os.path.isdir(object_dir)) def test_storage_policy_index_is_validated(self): @@ -5112,7 +5112,7 @@ class TestObjectController(unittest.TestCase): req.body = 'VERIFY' object_dir = self.testdir + "/sda1/objects-%s" % index resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 503) + self.assertEqual(resp.status_int, 503) self.assertFalse(os.path.isdir(object_dir)) def test_race_doesnt_quarantine(self): @@ -5141,7 +5141,7 @@ class TestObjectController(unittest.TestCase): 'Content-Type': 'application/octet-stream'}) req.body = 'some data' resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 201) + self.assertEqual(resp.status_int, 201) return listing with mock.patch('os.listdir', mock_listdir): @@ -5149,7 +5149,7 @@ class TestObjectController(unittest.TestCase): '/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': delete_timestamp}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 404) + self.assertEqual(resp.status_int, 404) qdir = os.path.join(self.testdir, 'sda1', 'quarantined') self.assertFalse(os.path.exists(qdir)) @@ -5157,8 +5157,8 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'HEAD'}) resp = req.get_response(self.object_controller) - self.assertEquals(resp.status_int, 200) - self.assertEquals(resp.headers['X-Timestamp'], put_timestamp) + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.headers['X-Timestamp'], put_timestamp) @patch_policies(test_policies) diff --git 
a/test/unit/obj/test_ssync_receiver.py b/test/unit/obj/test_ssync_receiver.py index 30a47a662b..9c757e0ae1 100644 --- a/test/unit/obj/test_ssync_receiver.py +++ b/test/unit/obj/test_ssync_receiver.py @@ -1205,7 +1205,7 @@ class TestReceiver(unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) - self.assertEquals(len(_PUT_request), 1) # sanity + self.assertEqual(len(_PUT_request), 1) # sanity req = _PUT_request[0] self.assertEqual(req.path, '/device/partition/a/c/o') self.assertEqual(req.content_length, 1) @@ -1321,7 +1321,7 @@ class TestReceiver(unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) - self.assertEquals(len(_PUT_request), 1) # sanity + self.assertEqual(len(_PUT_request), 1) # sanity req = _PUT_request[0] self.assertEqual(req.path, '/device/partition/a/c/o') self.assertEqual(req.content_length, 1) @@ -1378,7 +1378,7 @@ class TestReceiver(unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) - self.assertEquals(len(_PUT_request), 1) # sanity + self.assertEqual(len(_PUT_request), 1) # sanity req = _PUT_request[0] self.assertEqual(req.path, '/device/partition/a/c/o') self.assertEqual(req.content_length, 1) @@ -1423,7 +1423,7 @@ class TestReceiver(unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) - self.assertEquals(len(_DELETE_request), 1) # sanity + self.assertEqual(len(_DELETE_request), 1) # sanity req = _DELETE_request[0] self.assertEqual(req.path, '/device/partition/a/c/o') self.assertEqual(req.headers, { @@ -1459,7 +1459,7 @@ class TestReceiver(unittest.TestCase): self.assertEqual(resp.status_int, 200) self.controller.logger.exception.assert_called_once_with( 'None/device/partition EXCEPTION in replication.Receiver') - self.assertEquals(len(_BONK_request), 1) # sanity + self.assertEqual(len(_BONK_request), 1) # sanity self.assertEqual(_BONK_request[0], None) def test_UPDATES_multiple(self): @@ -1520,7 +1520,7 @@ class TestReceiver(unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) - self.assertEquals(len(_requests), 6) # sanity + self.assertEqual(len(_requests), 6) # sanity req = _requests.pop(0) self.assertEqual(req.method, 'PUT') self.assertEqual(req.path, '/device/partition/a/c/o1') @@ -1645,7 +1645,7 @@ class TestReceiver(unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertFalse(self.controller.logger.exception.called) self.assertFalse(self.controller.logger.error.called) - self.assertEquals(len(_requests), 2) # sanity + self.assertEqual(len(_requests), 2) # sanity req = _requests.pop(0) self.assertEqual(req.path, '/device/partition/a/c/o1') self.assertEqual(req.content_length, 3) diff --git a/test/unit/obj/test_ssync_sender.py b/test/unit/obj/test_ssync_sender.py index 20960e83f1..53f40c757a 100644 --- a/test/unit/obj/test_ssync_sender.py +++ b/test/unit/obj/test_ssync_sender.py @@ -159,7 +159,7 @@ class TestSender(BaseTestSender): self.sender.suffixes = ['abc'] success, candidates = self.sender() self.assertFalse(success) - self.assertEquals(candidates, {}) + 
self.assertEqual(candidates, {}) error_lines = self.daemon.logger.get_lines_for_level('error') self.assertEqual(1, len(error_lines)) self.assertEqual('1.2.3.4:5678/sda1/9 1 second: test connect', @@ -178,7 +178,7 @@ class TestSender(BaseTestSender): self.sender.suffixes = ['abc'] success, candidates = self.sender() self.assertFalse(success) - self.assertEquals(candidates, {}) + self.assertEqual(candidates, {}) error_lines = self.daemon.logger.get_lines_for_level('error') self.assertEqual(1, len(error_lines)) self.assertEqual('1.2.3.4:5678/sda1/9 test connect', @@ -193,7 +193,7 @@ class TestSender(BaseTestSender): self.sender.connect = 'cause exception' success, candidates = self.sender() self.assertFalse(success) - self.assertEquals(candidates, {}) + self.assertEqual(candidates, {}) error_lines = self.daemon.logger.get_lines_for_level('error') for line in error_lines: self.assertTrue(line.startswith( @@ -206,7 +206,7 @@ class TestSender(BaseTestSender): self.sender.connect = 'cause exception' success, candidates = self.sender() self.assertFalse(success) - self.assertEquals(candidates, {}) + self.assertEqual(candidates, {}) error_lines = self.daemon.logger.get_lines_for_level('error') for line in error_lines: self.assertTrue(line.startswith( @@ -220,7 +220,7 @@ class TestSender(BaseTestSender): self.sender.disconnect = mock.MagicMock() success, candidates = self.sender() self.assertTrue(success) - self.assertEquals(candidates, {}) + self.assertEqual(candidates, {}) self.sender.connect.assert_called_once_with() self.sender.missing_check.assert_called_once_with() self.sender.updates.assert_called_once_with() @@ -235,7 +235,7 @@ class TestSender(BaseTestSender): self.sender.failures = 1 success, candidates = self.sender() self.assertFalse(success) - self.assertEquals(candidates, {}) + self.assertEqual(candidates, {}) self.sender.connect.assert_called_once_with() self.sender.missing_check.assert_called_once_with() self.sender.updates.assert_called_once_with() @@ -270,10 +270,10 @@ class TestSender(BaseTestSender): } for method_name, expected_calls in expectations.items(): mock_method = getattr(mock_conn, method_name) - self.assertEquals(expected_calls, mock_method.mock_calls, - 'connection method "%s" got %r not %r' % ( - method_name, mock_method.mock_calls, - expected_calls)) + self.assertEqual(expected_calls, mock_method.mock_calls, + 'connection method "%s" got %r not %r' % ( + method_name, mock_method.mock_calls, + expected_calls)) def test_connect_handoff(self): node = dict(replication_ip='1.2.3.4', replication_port=5678, @@ -304,10 +304,10 @@ class TestSender(BaseTestSender): } for method_name, expected_calls in expectations.items(): mock_method = getattr(mock_conn, method_name) - self.assertEquals(expected_calls, mock_method.mock_calls, - 'connection method "%s" got %r not %r' % ( - method_name, mock_method.mock_calls, - expected_calls)) + self.assertEqual(expected_calls, mock_method.mock_calls, + 'connection method "%s" got %r not %r' % ( + method_name, mock_method.mock_calls, + expected_calls)) def test_connect_handoff_replicated(self): node = dict(replication_ip='1.2.3.4', replication_port=5678, @@ -339,10 +339,10 @@ class TestSender(BaseTestSender): } for method_name, expected_calls in expectations.items(): mock_method = getattr(mock_conn, method_name) - self.assertEquals(expected_calls, mock_method.mock_calls, - 'connection method "%s" got %r not %r' % ( - method_name, mock_method.mock_calls, - expected_calls)) + self.assertEqual(expected_calls, mock_method.mock_calls, + 'connection 
method "%s" got %r not %r' % ( + method_name, mock_method.mock_calls, + expected_calls)) def test_call(self): def patch_sender(sender): @@ -535,7 +535,7 @@ class TestSender(BaseTestSender): 'putrequest', putrequest): success, candidates = self.sender() self.assertFalse(success) - self.assertEquals(candidates, {}) + self.assertEqual(candidates, {}) error_lines = self.daemon.logger.get_lines_for_level('error') for line in error_lines: self.assertTrue(line.startswith( @@ -559,7 +559,7 @@ class TestSender(BaseTestSender): FakeBufferedHTTPConnection): success, candidates = self.sender() self.assertFalse(success) - self.assertEquals(candidates, {}) + self.assertEqual(candidates, {}) error_lines = self.daemon.logger.get_lines_for_level('error') for line in error_lines: self.assertTrue(line.startswith( @@ -586,7 +586,7 @@ class TestSender(BaseTestSender): self.daemon, node, job, ['abc']) success, candidates = self.sender() self.assertFalse(success) - self.assertEquals(candidates, {}) + self.assertEqual(candidates, {}) error_lines = self.daemon.logger.get_lines_for_level('error') for line in error_lines: self.assertTrue(line.startswith( diff --git a/test/unit/obj/test_updater.py b/test/unit/obj/test_updater.py index 2c6df8d70d..c863d2bae8 100644 --- a/test/unit/obj/test_updater.py +++ b/test/unit/obj/test_updater.py @@ -87,10 +87,10 @@ class TestObjectUpdater(unittest.TestCase): 'node_timeout': '5'}) self.assertTrue(hasattr(cu, 'logger')) self.assertTrue(cu.logger is not None) - self.assertEquals(cu.devices, self.devices_dir) - self.assertEquals(cu.interval, 1) - self.assertEquals(cu.concurrency, 2) - self.assertEquals(cu.node_timeout, 5) + self.assertEqual(cu.devices, self.devices_dir) + self.assertEqual(cu.interval, 1) + self.assertEqual(cu.concurrency, 2) + self.assertEqual(cu.node_timeout, 5) self.assertTrue(cu.get_container_ring() is not None) @mock.patch('os.listdir') @@ -183,7 +183,7 @@ class TestObjectUpdater(unittest.TestCase): 'node_timeout': '5'}) cu.logger = mock_logger = mock.MagicMock() cu.object_sweep(self.sda1) - self.assertEquals(mock_logger.warn.call_count, warn) + self.assertEqual(mock_logger.warn.call_count, warn) self.assertTrue( os.path.exists(os.path.join(self.sda1, 'not_a_dir'))) if should_skip: @@ -315,8 +315,8 @@ class TestObjectUpdater(unittest.TestCase): out.write('HTTP/1.1 %d OK\r\nContent-Length: 0\r\n\r\n' % return_code) out.flush() - self.assertEquals(inc.readline(), - 'PUT /sda1/0/a/c/o HTTP/1.1\r\n') + self.assertEqual(inc.readline(), + 'PUT /sda1/0/a/c/o HTTP/1.1\r\n') headers = swob.HeaderKeyDict() line = inc.readline() while line and line != '\r\n': diff --git a/test/unit/test_locale/test_locale.py b/test/unit/test_locale/test_locale.py index 377f9b9ecb..33544eb64b 100644 --- a/test/unit/test_locale/test_locale.py +++ b/test/unit/test_locale/test_locale.py @@ -66,7 +66,7 @@ class TestTranslations(unittest.TestCase): def test_translations(self): path = ':'.join(sys.path) translated_message = check_output(['python', __file__, path]) - self.assertEquals(translated_message, 'prova mesaĝo\n') + self.assertEqual(translated_message, 'prova mesaĝo\n') if __name__ == "__main__": From 4ac1fea5d111c669ff827f4eb29c0735cbad6ba5 Mon Sep 17 00:00:00 2001 From: Zhao Lei Date: Fri, 7 Aug 2015 22:07:01 +0800 Subject: [PATCH 28/70] Fix some spelling typo in comments s/overide/override for object-expirer.conf and sample. 
s/automaticaly/automatically for swift/proxy/controllers/obj.py Change-Id: Ife107c7a1005a5d4959288db50a7f8f33c522dd4 Signed-off-by: Zhao Lei --- doc/saio/swift/object-expirer.conf | 2 +- etc/object-expirer.conf-sample | 2 +- swift/proxy/controllers/obj.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/saio/swift/object-expirer.conf b/doc/saio/swift/object-expirer.conf index 5610f69afd..6e77e9cdf7 100644 --- a/doc/saio/swift/object-expirer.conf +++ b/doc/saio/swift/object-expirer.conf @@ -37,7 +37,7 @@ interval = 300 # config value # processes = 0 # process is which of the parts a particular process will work on -# process can also be specified on the command line and will overide the config +# process can also be specified on the command line and will override the config # value # process is "zero based", if you want to use 3 processes, you should run # processes with process set to 0, 1, and 2 diff --git a/etc/object-expirer.conf-sample b/etc/object-expirer.conf-sample index 87840a48bf..6276fd5cfa 100644 --- a/etc/object-expirer.conf-sample +++ b/etc/object-expirer.conf-sample @@ -41,7 +41,7 @@ # config value # processes = 0 # process is which of the parts a particular process will work on -# process can also be specified on the command line and will overide the config +# process can also be specified on the command line and will override the config # value # process is "zero based", if you want to use 3 processes, you should run # processes with process set to 0, 1, and 2 diff --git a/swift/proxy/controllers/obj.py b/swift/proxy/controllers/obj.py index e86b35debe..a1563c81db 100644 --- a/swift/proxy/controllers/obj.py +++ b/swift/proxy/controllers/obj.py @@ -834,7 +834,7 @@ class BaseObjectController(Controller): data_source = iter(lambda: reader(self.app.client_chunk_size), '') update_response = lambda req, resp: resp - # check if object is set to be automaticaly deleted (i.e. expired) + # check if object is set to be automatically deleted (i.e. expired) req, delete_at_container, delete_at_part, \ delete_at_nodes = self._config_obj_expiration(req) From 968c91a465627bb9317ed6e2ae9ccd818c83c0c4 Mon Sep 17 00:00:00 2001 From: Alistair Coles Date: Tue, 28 Jul 2015 10:31:54 +0100 Subject: [PATCH 29/70] Replace assertTrue(not ) with assertFalse() The replacement of assert_ with assertTrue [1] resulted in a number of tests using calls of the form assertTrue(not ). This patch replaces those with assertFalse(). 
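For reference, the mechanical pattern applied throughout these tests is simply the removal of the double negation; a minimal before/after sketch, taken from one of the call sites in test/functional/tests.py in the diff below:

    # before: double negation, harder to read
    self.assertTrue(not container.create(cfg={'no_path_quote': True}))
    # after: equivalent assertion with clearer intent
    self.assertFalse(container.create(cfg={'no_path_quote': True}))
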
[1] change I74705c6498249337bfdf955d62e0ad972035bc1f Change-Id: I78b49558f4425c2335df187b1793d1e4b3c514b1 --- test/functional/tests.py | 54 +++++++++++++------------- test/probe/test_account_failures.py | 7 ++-- test/probe/test_object_async_update.py | 2 +- 3 files changed, 32 insertions(+), 31 deletions(-) diff --git a/test/functional/tests.py b/test/functional/tests.py index 18b3d4716d..04cac14f1c 100644 --- a/test/functional/tests.py +++ b/test/functional/tests.py @@ -133,7 +133,7 @@ class TestAccount(Base): def testInvalidUTF8Path(self): invalid_utf8 = Utils.create_utf8_name()[::-1] container = self.env.account.container(invalid_utf8) - self.assertTrue(not container.create(cfg={'no_path_quote': True})) + self.assertFalse(container.create(cfg={'no_path_quote': True})) self.assert_status(412) self.assert_body('Invalid UTF8 or contains NULL') @@ -313,7 +313,7 @@ class TestAccountNoContainers(Base): def testGetRequest(self): for format_type in [None, 'json', 'xml']: - self.assertTrue(not self.env.account.containers( + self.assertFalse(self.env.account.containers( parms={'format': format_type})) if format_type is None: @@ -371,7 +371,7 @@ class TestContainer(Base): self.assertTrue(cont.create()) self.assert_status(201) else: - self.assertTrue(not cont.create()) + self.assertFalse(cont.create()) self.assert_status(400) def testFileThenContainerDelete(self): @@ -490,7 +490,7 @@ class TestContainer(Base): self.assertTrue(container.delete()) container = self.env.account.container(invalid_utf8) - self.assertTrue(not container.create(cfg={'no_path_quote': True})) + self.assertFalse(container.create(cfg={'no_path_quote': True})) self.assert_status(412) self.assertRaises(ResponseError, container.files, cfg={'no_path_quote': True}) @@ -516,8 +516,8 @@ class TestContainer(Base): cont_name = cont_name.encode('utf-8') cont = self.env.account.container(cont_name) - self.assertTrue(not cont.create(cfg={'no_path_quote': True}), - 'created container with name %s' % (cont_name)) + self.assertFalse(cont.create(cfg={'no_path_quote': True}), + 'created container with name %s' % (cont_name)) self.assert_status(404) self.assertNotIn(cont.name, self.env.account.containers()) @@ -531,7 +531,7 @@ class TestContainer(Base): def testDeleteOnContainerThatDoesNotExist(self): cont = self.env.account.container(Utils.create_name()) - self.assertTrue(not cont.delete()) + self.assertFalse(cont.delete()) self.assert_status(404) def testDeleteOnContainerWithFiles(self): @@ -540,7 +540,7 @@ class TestContainer(Base): file_item = cont.file(Utils.create_name()) file_item.write_random(self.env.file_size) self.assertIn(file_item.name, cont.files()) - self.assertTrue(not cont.delete()) + self.assertFalse(cont.delete()) self.assert_status(409) def testFileCreateInContainerThatDoesNotExist(self): @@ -625,8 +625,8 @@ class TestContainer(Base): def testTooLongName(self): cont = self.env.account.container('x' * 257) - self.assertTrue(not cont.create(), - 'created container with name %s' % (cont.name)) + self.assertFalse(cont.create(), + 'created container with name %s' % (cont.name)) self.assert_status(400) def testContainerExistenceCachingProblem(self): @@ -967,24 +967,24 @@ class TestFile(Base): # invalid source container source_cont = self.env.account.container(Utils.create_name()) file_item = source_cont.file(source_filename) - self.assertTrue(not file_item.copy( + self.assertFalse(file_item.copy( '%s%s' % (prefix, self.env.container), Utils.create_name())) self.assert_status(404) - self.assertTrue(not file_item.copy('%s%s' % 
(prefix, dest_cont), - Utils.create_name())) + self.assertFalse(file_item.copy('%s%s' % (prefix, dest_cont), + Utils.create_name())) self.assert_status(404) # invalid source object file_item = self.env.container.file(Utils.create_name()) - self.assertTrue(not file_item.copy( + self.assertFalse(file_item.copy( '%s%s' % (prefix, self.env.container), Utils.create_name())) self.assert_status(404) - self.assertTrue(not file_item.copy('%s%s' % (prefix, dest_cont), - Utils.create_name())) + self.assertFalse(file_item.copy('%s%s' % (prefix, dest_cont), + Utils.create_name())) self.assert_status(404) # invalid destination container @@ -1016,7 +1016,7 @@ class TestFile(Base): # invalid source container source_cont = self.env.account.container(Utils.create_name()) file_item = source_cont.file(source_filename) - self.assertTrue(not file_item.copy_account( + self.assertFalse(file_item.copy_account( acct, '%s%s' % (prefix, self.env.container), Utils.create_name())) @@ -1027,7 +1027,7 @@ class TestFile(Base): else: self.assert_status(404) - self.assertTrue(not file_item.copy_account( + self.assertFalse(file_item.copy_account( acct, '%s%s' % (prefix, cont), Utils.create_name())) @@ -1035,7 +1035,7 @@ class TestFile(Base): # invalid source object file_item = self.env.container.file(Utils.create_name()) - self.assertTrue(not file_item.copy_account( + self.assertFalse(file_item.copy_account( acct, '%s%s' % (prefix, self.env.container), Utils.create_name())) @@ -1046,7 +1046,7 @@ class TestFile(Base): else: self.assert_status(404) - self.assertTrue(not file_item.copy_account( + self.assertFalse(file_item.copy_account( acct, '%s%s' % (prefix, cont), Utils.create_name())) @@ -1054,7 +1054,7 @@ class TestFile(Base): # invalid destination container file_item = self.env.container.file(source_filename) - self.assertTrue(not file_item.copy_account( + self.assertFalse(file_item.copy_account( acct, '%s%s' % (prefix, Utils.create_name()), Utils.create_name())) @@ -1071,9 +1071,9 @@ class TestFile(Base): file_item.write_random() file_item = self.env.container.file(source_filename) - self.assertTrue(not file_item.copy(Utils.create_name(), - Utils.create_name(), - cfg={'no_destination': True})) + self.assertFalse(file_item.copy(Utils.create_name(), + Utils.create_name(), + cfg={'no_destination': True})) self.assert_status(412) def testCopyDestinationSlashProblems(self): @@ -1082,9 +1082,9 @@ class TestFile(Base): file_item.write_random() # no slash - self.assertTrue(not file_item.copy(Utils.create_name(), - Utils.create_name(), - cfg={'destination': Utils.create_name()})) + self.assertFalse(file_item.copy(Utils.create_name(), + Utils.create_name(), + cfg={'destination': Utils.create_name()})) self.assert_status(412) def testCopyFromHeader(self): diff --git a/test/probe/test_account_failures.py b/test/probe/test_account_failures.py index f45394e6bf..4d6b1496b9 100755 --- a/test/probe/test_account_failures.py +++ b/test/probe/test_account_failures.py @@ -123,7 +123,7 @@ class TestAccountFailures(ReplProbeTest): found2 = True self.assertEqual(container['count'], 1) self.assertEqual(container['bytes'], 4) - self.assertTrue(not found1) + self.assertFalse(found1) self.assertTrue(found2) # Run container updaters @@ -143,7 +143,7 @@ class TestAccountFailures(ReplProbeTest): found2 = True self.assertEqual(container['count'], 2) self.assertEqual(container['bytes'], 9) - self.assertTrue(not found1) + self.assertFalse(found1) self.assertTrue(found2) # Restart other primary account server @@ -187,7 +187,8 @@ class 
TestAccountFailures(ReplProbeTest): found2 = True self.assertEqual(container['count'], 2) self.assertEqual(container['bytes'], 9) - self.assertTrue(not found1) + self.assertEquals(container['bytes'], 9) + self.assertFalse(found1) self.assertTrue(found2) diff --git a/test/probe/test_object_async_update.py b/test/probe/test_object_async_update.py index df900ea808..379b0be556 100755 --- a/test/probe/test_object_async_update.py +++ b/test/probe/test_object_async_update.py @@ -54,7 +54,7 @@ class TestObjectAsyncUpdate(ReplProbeTest): self.ipport2server, self.pids) # Assert it does not know about container/obj - self.assertTrue(not direct_client.direct_get_container( + self.assertFalse(direct_client.direct_get_container( cnode, cpart, self.account, container)[1]) # Run the object-updaters From 035a411660ca02983cd486312266c67d78a2359c Mon Sep 17 00:00:00 2001 From: Thiago da Silva Date: Sun, 9 Nov 2014 13:13:27 -0500 Subject: [PATCH 30/70] versioned writes middleware Rewrite object versioning as middleware to simplify the PUT method in the object controller. The functionality remains basically the same with the only major difference being the ability to now version slo manifest files. dlo manifests are still not supported as part of this patch. Co-Authored-By: Clay Gerrard DocImpact Change-Id: Ie899290b3312e201979eafefb253d1a60b65b837 Signed-off-by: Thiago da Silva Signed-off-by: Prashanth Pai --- doc/saio/swift/container-server/1.conf | 1 - doc/saio/swift/container-server/2.conf | 1 - doc/saio/swift/container-server/3.conf | 1 - doc/saio/swift/container-server/4.conf | 1 - doc/saio/swift/proxy-server.conf | 6 +- doc/source/logs.rst | 1 + doc/source/middleware.rst | 9 + doc/source/overview_object_versioning.rst | 89 +- etc/proxy-server.conf-sample | 13 +- setup.cfg | 1 + swift/common/constraints.py | 30 +- swift/common/middleware/versioned_writes.py | 490 +++++++ swift/proxy/controllers/obj.py | 191 +-- swift/proxy/server.py | 3 + test/functional/swift_test_client.py | 22 +- test/functional/tests.py | 213 +++- test/unit/common/middleware/helpers.py | 2 +- test/unit/common/middleware/test_dlo.py | 2 +- .../middleware/test_versioned_writes.py | 558 ++++++++ test/unit/common/test_constraints.py | 18 + test/unit/common/test_wsgi.py | 12 +- test/unit/proxy/test_server.py | 1132 +++++++---------- 22 files changed, 1816 insertions(+), 980 deletions(-) create mode 100644 swift/common/middleware/versioned_writes.py create mode 100644 test/unit/common/middleware/test_versioned_writes.py diff --git a/doc/saio/swift/container-server/1.conf b/doc/saio/swift/container-server/1.conf index 3062ca3a5a..176096dbe1 100644 --- a/doc/saio/swift/container-server/1.conf +++ b/doc/saio/swift/container-server/1.conf @@ -9,7 +9,6 @@ user = log_facility = LOG_LOCAL2 recon_cache_path = /var/cache/swift eventlet_debug = true -allow_versions = true [pipeline:main] pipeline = recon container-server diff --git a/doc/saio/swift/container-server/2.conf b/doc/saio/swift/container-server/2.conf index 6365215931..7100710b3c 100644 --- a/doc/saio/swift/container-server/2.conf +++ b/doc/saio/swift/container-server/2.conf @@ -9,7 +9,6 @@ user = log_facility = LOG_LOCAL3 recon_cache_path = /var/cache/swift2 eventlet_debug = true -allow_versions = true [pipeline:main] pipeline = recon container-server diff --git a/doc/saio/swift/container-server/3.conf b/doc/saio/swift/container-server/3.conf index b925427ff0..06ec47414d 100644 --- a/doc/saio/swift/container-server/3.conf +++ b/doc/saio/swift/container-server/3.conf @@ -9,7 +9,6 @@ user 
= log_facility = LOG_LOCAL4 recon_cache_path = /var/cache/swift3 eventlet_debug = true -allow_versions = true [pipeline:main] pipeline = recon container-server diff --git a/doc/saio/swift/container-server/4.conf b/doc/saio/swift/container-server/4.conf index 16799a524a..1acc3b5c54 100644 --- a/doc/saio/swift/container-server/4.conf +++ b/doc/saio/swift/container-server/4.conf @@ -9,7 +9,6 @@ user = log_facility = LOG_LOCAL5 recon_cache_path = /var/cache/swift4 eventlet_debug = true -allow_versions = true [pipeline:main] pipeline = recon container-server diff --git a/doc/saio/swift/proxy-server.conf b/doc/saio/swift/proxy-server.conf index dd037edb8f..c25e0ed90d 100644 --- a/doc/saio/swift/proxy-server.conf +++ b/doc/saio/swift/proxy-server.conf @@ -9,7 +9,7 @@ eventlet_debug = true [pipeline:main] # Yes, proxy-logging appears twice. This is so that # middleware-originated requests get logged too. -pipeline = catch_errors gatekeeper healthcheck proxy-logging cache bulk tempurl ratelimit crossdomain tempauth staticweb container-quotas account-quotas slo dlo proxy-logging proxy-server +pipeline = catch_errors gatekeeper healthcheck proxy-logging cache bulk tempurl ratelimit crossdomain tempauth staticweb container-quotas account-quotas slo dlo versioned_writes proxy-logging proxy-server [filter:catch_errors] use = egg:swift#catch_errors @@ -60,6 +60,10 @@ use = egg:swift#memcache [filter:gatekeeper] use = egg:swift#gatekeeper +[filter:versioned_writes] +use = egg:swift#versioned_writes +allow_versioned_writes = true + [app:proxy-server] use = egg:swift#proxy allow_account_management = true diff --git a/doc/source/logs.rst b/doc/source/logs.rst index f738861843..75b669f1a5 100644 --- a/doc/source/logs.rst +++ b/doc/source/logs.rst @@ -102,6 +102,7 @@ DLO :ref:`dynamic-large-objects` LE :ref:`list_endpoints` KS :ref:`keystoneauth` RL :ref:`ratelimit` +VW :ref:`versioned_writes` ======================= ============================= diff --git a/doc/source/middleware.rst b/doc/source/middleware.rst index f78dbb1947..4e304ed6fb 100644 --- a/doc/source/middleware.rst +++ b/doc/source/middleware.rst @@ -155,6 +155,15 @@ Name Check (Forbidden Character Filter) :members: :show-inheritance: +.. _versioned_writes: + +Object Versioning +================= + +.. automodule:: swift.common.middleware.versioned_writes + :members: + :show-inheritance: + Proxy Logging ============= diff --git a/doc/source/overview_object_versioning.rst b/doc/source/overview_object_versioning.rst index cac5a898d9..78d0b07ad1 100644 --- a/doc/source/overview_object_versioning.rst +++ b/doc/source/overview_object_versioning.rst @@ -1,89 +1,6 @@ -================= Object Versioning ================= --------- -Overview --------- - -Object versioning in swift is implemented by setting a flag on the container -to tell swift to version all objects in the container. The flag is the -``X-Versions-Location`` header on the container, and its value is the -container where the versions are stored. It is recommended to use a different -``X-Versions-Location`` container for each container that is being versioned. - -When data is ``PUT`` into a versioned container (a container with the -versioning flag turned on), the existing data in the file is redirected to a -new object and the data in the ``PUT`` request is saved as the data for the -versioned object. 
The new object name (for the previous version) is -``//``, where ``length`` -is the 3-character zero-padded hexadecimal length of the ```` and -```` is the timestamp of when the previous version was created. - -A ``GET`` to a versioned object will return the current version of the object -without having to do any request redirects or metadata lookups. - -A ``POST`` to a versioned object will update the object metadata as normal, -but will not create a new version of the object. In other words, new versions -are only created when the content of the object changes. - -A ``DELETE`` to a versioned object will only remove the current version of the -object. If you have 5 total versions of the object, you must delete the -object 5 times to completely remove the object. - -Note: A large object manifest file cannot be versioned, but a large object -manifest may point to versioned segments. - --------------------------------------------------- -How to Enable Object Versioning in a Swift Cluster --------------------------------------------------- - -Set ``allow_versions`` to ``True`` in the container server config. - ------------------------ -Examples Using ``curl`` ------------------------ - -First, create a container with the ``X-Versions-Location`` header or add the -header to an existing container. Also make sure the container referenced by -the ``X-Versions-Location`` exists. In this example, the name of that -container is "versions":: - - curl -i -XPUT -H "X-Auth-Token: " \ - -H "X-Versions-Location: versions" http:///container - curl -i -XPUT -H "X-Auth-Token: " http:///versions - -Create an object (the first version):: - - curl -i -XPUT --data-binary 1 -H "X-Auth-Token: " \ - http:///container/myobject - -Now create a new version of that object:: - - curl -i -XPUT --data-binary 2 -H "X-Auth-Token: " \ - http:///container/myobject - -See a listing of the older versions of the object:: - - curl -i -H "X-Auth-Token: " \ - http:///versions?prefix=008myobject/ - -Now delete the current version of the object and see that the older version is -gone:: - - curl -i -XDELETE -H "X-Auth-Token: " \ - http:///container/myobject - curl -i -H "X-Auth-Token: " \ - http:///versions?prefix=008myobject/ - ---------------------------------------------------- -How to Disable Object Versioning in a Swift Cluster ---------------------------------------------------- - -If you want to disable all functionality, set ``allow_versions`` back to -``False`` in the container server config. - -Disable versioning a versioned container (x is any value except empty):: - - curl -i -XPOST -H "X-Auth-Token: " \ - -H "X-Remove-Versions-Location: x" http:///container +.. 
automodule:: swift.common.middleware.versioned_writes + :members: + :show-inheritance: diff --git a/etc/proxy-server.conf-sample b/etc/proxy-server.conf-sample index 55b6137ae0..b37101c37a 100644 --- a/etc/proxy-server.conf-sample +++ b/etc/proxy-server.conf-sample @@ -77,7 +77,7 @@ bind_port = 8080 # eventlet_debug = false [pipeline:main] -pipeline = catch_errors gatekeeper healthcheck proxy-logging cache container_sync bulk tempurl ratelimit tempauth container-quotas account-quotas slo dlo proxy-logging proxy-server +pipeline = catch_errors gatekeeper healthcheck proxy-logging cache container_sync bulk tempurl ratelimit tempauth container-quotas account-quotas slo dlo versioned_writes proxy-logging proxy-server [app:proxy-server] use = egg:swift#proxy @@ -703,3 +703,14 @@ use = egg:swift#xprofile # # unwind the iterator of applications # unwind = false + +# Note: Put after slo, dlo in the pipeline. +# If you don't put it in the pipeline, it will be inserted automatically. +[filter:versioned_writes] +use = egg:swift#versioned_writes +# Enables using versioned writes middleware and exposing configuration +# settings via HTTP GET /info. +# WARNING: Setting this option bypasses the "allow_versions" option +# in the container configuration file, which will be eventually +# deprecated. See documentation for more details. +# allow_versioned_writes = false diff --git a/setup.cfg b/setup.cfg index a40fc535ee..a819a57f02 100644 --- a/setup.cfg +++ b/setup.cfg @@ -95,6 +95,7 @@ paste.filter_factory = gatekeeper = swift.common.middleware.gatekeeper:filter_factory container_sync = swift.common.middleware.container_sync:filter_factory xprofile = swift.common.middleware.xprofile:filter_factory + versioned_writes = swift.common.middleware.versioned_writes:filter_factory [build_sphinx] all_files = 1 diff --git a/swift/common/constraints.py b/swift/common/constraints.py index aae5f25aac..36f9d5eae8 100644 --- a/swift/common/constraints.py +++ b/swift/common/constraints.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import functools import os import urllib import time @@ -406,28 +407,33 @@ def check_destination_header(req): '/') -def check_account_format(req, account): +def check_name_format(req, name, target_type): """ - Validate that the header contains valid account name. - We assume the caller ensures that - destination header is present in req.headers. + Validate that the header contains valid account or container name. :param req: HTTP request object - :returns: A properly encoded account name + :param name: header value to validate + :param target_type: which header is being validated (Account or Container) + :returns: A properly encoded account name or container name :raise: HTTPPreconditionFailed if account header is not well formatted. 
""" - if not account: + if not name: raise HTTPPreconditionFailed( request=req, - body='Account name cannot be empty') - if isinstance(account, unicode): - account = account.encode('utf-8') - if '/' in account: + body='%s name cannot be empty' % target_type) + if isinstance(name, unicode): + name = name.encode('utf-8') + if '/' in name: raise HTTPPreconditionFailed( request=req, - body='Account name cannot contain slashes') - return account + body='%s name cannot contain slashes' % target_type) + return name + +check_account_format = functools.partial(check_name_format, + target_type='Account') +check_container_format = functools.partial(check_name_format, + target_type='Container') def valid_api_version(version): diff --git a/swift/common/middleware/versioned_writes.py b/swift/common/middleware/versioned_writes.py new file mode 100644 index 0000000000..e3f56f6fd1 --- /dev/null +++ b/swift/common/middleware/versioned_writes.py @@ -0,0 +1,490 @@ +# Copyright (c) 2014 OpenStack Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Object versioning in swift is implemented by setting a flag on the container +to tell swift to version all objects in the container. The flag is the +``X-Versions-Location`` header on the container, and its value is the +container where the versions are stored. It is recommended to use a different +``X-Versions-Location`` container for each container that is being versioned. + +When data is ``PUT`` into a versioned container (a container with the +versioning flag turned on), the existing data in the file is redirected to a +new object and the data in the ``PUT`` request is saved as the data for the +versioned object. The new object name (for the previous version) is +``//``, where ``length`` +is the 3-character zero-padded hexadecimal length of the ```` and +```` is the timestamp of when the previous version was created. + +A ``GET`` to a versioned object will return the current version of the object +without having to do any request redirects or metadata lookups. + +A ``POST`` to a versioned object will update the object metadata as normal, +but will not create a new version of the object. In other words, new versions +are only created when the content of the object changes. + +A ``DELETE`` to a versioned object will only remove the current version of the +object. If you have 5 total versions of the object, you must delete the +object 5 times to completely remove the object. + +-------------------------------------------------- +How to Enable Object Versioning in a Swift Cluster +-------------------------------------------------- + +This middleware was written as an effort to refactor parts of the proxy server, +so this functionality was already available in previous releases and every +attempt was made to maintain backwards compatibility. To allow operators to +perform a seamless upgrade, it is not required to add the middleware to the +proxy pipeline and the flag ``allow_versions`` in the container server +configuration files are still valid. 
In future releases, ``allow_versions`` +will be deprecated in favor of adding this middleware to the pipeline to enable +or disable the feature. + +In case the middleware is added to the proxy pipeline, you must also +set ``allow_versioned_writes`` to ``True`` in the middleware options +to enable the information about this middleware to be returned in a /info +request. + +Upgrade considerations: If ``allow_versioned_writes`` is set in the filter +configuration, you can leave the ``allow_versions`` flag in the container +server configuration files untouched. If you decide to disable or remove the +``allow_versions`` flag, you must re-set any existing containers that had +the 'X-Versions-Location' flag configured so that it can now be tracked by the +versioned_writes middleware. + +----------------------- +Examples Using ``curl`` +----------------------- + +First, create a container with the ``X-Versions-Location`` header or add the +header to an existing container. Also make sure the container referenced by +the ``X-Versions-Location`` exists. In this example, the name of that +container is "versions":: + + curl -i -XPUT -H "X-Auth-Token: " \ +-H "X-Versions-Location: versions" http:///container + curl -i -XPUT -H "X-Auth-Token: " http:///versions + +Create an object (the first version):: + + curl -i -XPUT --data-binary 1 -H "X-Auth-Token: " \ +http:///container/myobject + +Now create a new version of that object:: + + curl -i -XPUT --data-binary 2 -H "X-Auth-Token: " \ +http:///container/myobject + +See a listing of the older versions of the object:: + + curl -i -H "X-Auth-Token: " \ +http:///versions?prefix=008myobject/ + +Now delete the current version of the object and see that the older version is +gone:: + + curl -i -XDELETE -H "X-Auth-Token: " \ +http:///container/myobject + curl -i -H "X-Auth-Token: " \ +http:///versions?prefix=008myobject/ + +--------------------------------------------------- +How to Disable Object Versioning in a Swift Cluster +--------------------------------------------------- + +If you want to disable all functionality, set ``allow_versioned_writes`` to +``False`` in the middleware options. 
+ +Disable versioning from a container (x is any value except empty):: + + curl -i -XPOST -H "X-Auth-Token: " \ +-H "X-Remove-Versions-Location: x" http:///container +""" + +import time +from urllib import quote, unquote +from swift.common.utils import get_logger, Timestamp, json, \ + register_swift_info, config_true_value +from swift.common.request_helpers import get_sys_meta_prefix +from swift.common.wsgi import WSGIContext, make_pre_authed_request +from swift.common.swob import Request +from swift.common.constraints import ( + check_account_format, check_container_format, check_destination_header) +from swift.proxy.controllers.base import get_container_info +from swift.common.http import ( + is_success, is_client_error, HTTP_NOT_FOUND) +from swift.common.swob import HTTPPreconditionFailed, HTTPServiceUnavailable, \ + HTTPServerError +from swift.common.exceptions import ( + ListingIterNotFound, ListingIterError) + + +class VersionedWritesContext(WSGIContext): + + def __init__(self, wsgi_app, logger): + WSGIContext.__init__(self, wsgi_app) + self.logger = logger + + def _listing_iter(self, account_name, lcontainer, lprefix, env): + for page in self._listing_pages_iter(account_name, + lcontainer, lprefix, env): + for item in page: + yield item + + def _listing_pages_iter(self, account_name, lcontainer, lprefix, env): + marker = '' + while True: + lreq = make_pre_authed_request( + env, method='GET', swift_source='VW', + path='/v1/%s/%s' % (account_name, lcontainer)) + lreq.environ['QUERY_STRING'] = \ + 'format=json&prefix=%s&marker=%s' % (quote(lprefix), + quote(marker)) + lresp = lreq.get_response(self.app) + if not is_success(lresp.status_int): + if lresp.status_int == HTTP_NOT_FOUND: + raise ListingIterNotFound() + elif is_client_error(lresp.status_int): + raise HTTPPreconditionFailed() + else: + raise ListingIterError() + + if not lresp.body: + break + + sublisting = json.loads(lresp.body) + if not sublisting: + break + marker = sublisting[-1]['name'].encode('utf-8') + yield sublisting + + def handle_obj_versions_put(self, req, object_versions, + object_name, policy_index): + ret = None + + # do a HEAD request to check object versions + _headers = {'X-Newest': 'True', + 'X-Backend-Storage-Policy-Index': policy_index, + 'x-auth-token': req.headers.get('x-auth-token')} + + # make a pre_auth request in case the user has write access + # to container, but not READ. This was allowed in previous version + # (i.e., before middleware) so keeping the same behavior here + head_req = make_pre_authed_request( + req.environ, path=req.path_info, + headers=_headers, method='HEAD', swift_source='VW') + hresp = head_req.get_response(self.app) + + is_dlo_manifest = 'X-Object-Manifest' in req.headers or \ + 'X-Object-Manifest' in hresp.headers + + # if there's an existing object, then copy it to + # X-Versions-Location + if is_success(hresp.status_int) and not is_dlo_manifest: + lcontainer = object_versions.split('/')[0] + prefix_len = '%03x' % len(object_name) + lprefix = prefix_len + object_name + '/' + ts_source = hresp.environ.get('swift_x_timestamp') + if ts_source is None: + ts_source = time.mktime(time.strptime( + hresp.headers['last-modified'], + '%a, %d %b %Y %H:%M:%S GMT')) + new_ts = Timestamp(ts_source).internal + vers_obj_name = lprefix + new_ts + copy_headers = { + 'Destination': '%s/%s' % (lcontainer, vers_obj_name), + 'x-auth-token': req.headers.get('x-auth-token')} + + # COPY implementation sets X-Newest to True when it internally + # does a GET on source object. 
So, we don't have to explicity + # set it in request headers here. + copy_req = make_pre_authed_request( + req.environ, path=req.path_info, + headers=copy_headers, method='COPY', swift_source='VW') + copy_resp = copy_req.get_response(self.app) + + if is_success(copy_resp.status_int): + # success versioning previous existing object + # return None and handle original request + ret = None + else: + if is_client_error(copy_resp.status_int): + # missing container or bad permissions + ret = HTTPPreconditionFailed(request=req) + else: + # could not copy the data, bail + ret = HTTPServiceUnavailable(request=req) + + else: + if hresp.status_int == HTTP_NOT_FOUND or is_dlo_manifest: + # nothing to version + # return None and handle original request + ret = None + else: + # if not HTTP_NOT_FOUND, return error immediately + ret = hresp + + return ret + + def handle_obj_versions_delete(self, req, object_versions, + account_name, container_name, object_name): + lcontainer = object_versions.split('/')[0] + prefix_len = '%03x' % len(object_name) + lprefix = prefix_len + object_name + '/' + item_list = [] + try: + for _item in self._listing_iter(account_name, lcontainer, lprefix, + req.environ): + item_list.append(_item) + except ListingIterNotFound: + pass + except HTTPPreconditionFailed: + return HTTPPreconditionFailed(request=req) + except ListingIterError: + return HTTPServerError(request=req) + + if item_list: + # we're about to start making COPY requests - need to validate the + # write access to the versioned container + if 'swift.authorize' in req.environ: + container_info = get_container_info( + req.environ, self.app) + req.acl = container_info.get('write_acl') + aresp = req.environ['swift.authorize'](req) + if aresp: + return aresp + + while len(item_list) > 0: + previous_version = item_list.pop() + + # there are older versions so copy the previous version to the + # current object and delete the previous version + prev_obj_name = previous_version['name'].encode('utf-8') + + copy_path = '/v1/' + account_name + '/' + \ + lcontainer + '/' + prev_obj_name + + copy_headers = {'X-Newest': 'True', + 'Destination': container_name + '/' + object_name, + 'x-auth-token': req.headers.get('x-auth-token')} + + copy_req = make_pre_authed_request( + req.environ, path=copy_path, + headers=copy_headers, method='COPY', swift_source='VW') + copy_resp = copy_req.get_response(self.app) + + # if the version isn't there, keep trying with previous version + if copy_resp.status_int == HTTP_NOT_FOUND: + continue + + if not is_success(copy_resp.status_int): + if is_client_error(copy_resp.status_int): + # some user error, maybe permissions + return HTTPPreconditionFailed(request=req) + else: + # could not copy the data, bail + return HTTPServiceUnavailable(request=req) + + # reset these because the COPY changed them + new_del_req = make_pre_authed_request( + req.environ, path=copy_path, method='DELETE', + swift_source='VW') + req = new_del_req + + # remove 'X-If-Delete-At', since it is not for the older copy + if 'X-If-Delete-At' in req.headers: + del req.headers['X-If-Delete-At'] + break + + # handle DELETE request here in case it was modified + return req.get_response(self.app) + + def handle_container_request(self, env, start_response): + app_resp = self._app_call(env) + if self._response_headers is None: + self._response_headers = [] + sysmeta_version_hdr = get_sys_meta_prefix('container') + \ + 'versions-location' + location = '' + for key, val in self._response_headers: + if key.lower() == sysmeta_version_hdr: + 
location = val + + if location: + self._response_headers.extend([('X-Versions-Location', location)]) + + start_response(self._response_status, + self._response_headers, + self._response_exc_info) + return app_resp + + +class VersionedWritesMiddleware(object): + + def __init__(self, app, conf): + self.app = app + self.conf = conf + self.logger = get_logger(conf, log_route='versioned_writes') + + def container_request(self, req, start_response, enabled): + sysmeta_version_hdr = get_sys_meta_prefix('container') + \ + 'versions-location' + + # set version location header as sysmeta + if 'X-Versions-Location' in req.headers: + val = req.headers.get('X-Versions-Location') + if val: + # diferently from previous version, we are actually + # returning an error if user tries to set versions location + # while feature is explicitly disabled. + if not config_true_value(enabled) and \ + req.method in ('PUT', 'POST'): + raise HTTPPreconditionFailed( + request=req, content_type='text/plain', + body='Versioned Writes is disabled') + + location = check_container_format(req, val) + req.headers[sysmeta_version_hdr] = location + + # reset original header to maintain sanity + # now only sysmeta is source of Versions Location + req.headers['X-Versions-Location'] = '' + + # if both headers are in the same request + # adding location takes precendence over removing + if 'X-Remove-Versions-Location' in req.headers: + del req.headers['X-Remove-Versions-Location'] + else: + # empty value is the same as X-Remove-Versions-Location + req.headers['X-Remove-Versions-Location'] = 'x' + + # handle removing versions container + val = req.headers.get('X-Remove-Versions-Location') + if val: + req.headers.update({sysmeta_version_hdr: ''}) + req.headers.update({'X-Versions-Location': ''}) + del req.headers['X-Remove-Versions-Location'] + + # send request and translate sysmeta headers from response + vw_ctx = VersionedWritesContext(self.app, self.logger) + return vw_ctx.handle_container_request(req.environ, start_response) + + def object_request(self, req, version, account, container, obj, + allow_versioned_writes): + account_name = unquote(account) + container_name = unquote(container) + object_name = unquote(obj) + container_info = None + resp = None + is_enabled = config_true_value(allow_versioned_writes) + if req.method in ('PUT', 'DELETE'): + container_info = get_container_info( + req.environ, self.app) + elif req.method == 'COPY' and 'Destination' in req.headers: + if 'Destination-Account' in req.headers: + account_name = req.headers.get('Destination-Account') + account_name = check_account_format(req, account_name) + container_name, object_name = check_destination_header(req) + req.environ['PATH_INFO'] = "/%s/%s/%s/%s" % ( + version, account_name, container_name, object_name) + container_info = get_container_info( + req.environ, self.app) + + if not container_info: + return self.app + + # To maintain backwards compatibility, container version + # location could be stored as sysmeta or not, need to check both. + # If stored as sysmeta, check if middleware is enabled. If sysmeta + # is not set, but versions property is set in container_info, then + # for backwards compatibility feature is enabled. 
+ object_versions = container_info.get( + 'sysmeta', {}).get('versions-location') + if object_versions and isinstance(object_versions, unicode): + object_versions = object_versions.encode('utf-8') + elif not object_versions: + object_versions = container_info.get('versions') + # if allow_versioned_writes is not set in the configuration files + # but 'versions' is configured, enable feature to maintain + # backwards compatibility + if not allow_versioned_writes and object_versions: + is_enabled = True + + if is_enabled and object_versions: + object_versions = unquote(object_versions) + vw_ctx = VersionedWritesContext(self.app, self.logger) + if req.method in ('PUT', 'COPY'): + policy_idx = req.headers.get( + 'X-Backend-Storage-Policy-Index', + container_info['storage_policy']) + resp = vw_ctx.handle_obj_versions_put( + req, object_versions, object_name, policy_idx) + else: # handle DELETE + resp = vw_ctx.handle_obj_versions_delete( + req, object_versions, account_name, + container_name, object_name) + + if resp: + return resp + else: + return self.app + + def __call__(self, env, start_response): + # making a duplicate, because if this is a COPY request, we will + # modify the PATH_INFO to find out if the 'Destination' is in a + # versioned container + req = Request(env.copy()) + try: + (version, account, container, obj) = req.split_path(3, 4, True) + except ValueError: + return self.app(env, start_response) + + # In case allow_versioned_writes is set in the filter configuration, + # the middleware becomes the authority on whether object + # versioning is enabled or not. In case it is not set, then + # the option in the container configuration is still checked + # for backwards compatibility + + # For a container request, first just check if option is set, + # can be either true or false. + # If set, check if enabled when actually trying to set container + # header. If not set, let request be handled by container server + # for backwards compatibility. + # For an object request, also check if option is set (either T or F). + # If set, check if enabled when checking versions container in + # sysmeta property. 
If it is not set check 'versions' property in + # container_info + allow_versioned_writes = self.conf.get('allow_versioned_writes') + if allow_versioned_writes and container and not obj: + return self.container_request(req, start_response, + allow_versioned_writes) + elif obj and req.method in ('PUT', 'COPY', 'DELETE'): + return self.object_request( + req, version, account, container, obj, + allow_versioned_writes)(env, start_response) + else: + return self.app(env, start_response) + + +def filter_factory(global_conf, **local_conf): + conf = global_conf.copy() + conf.update(local_conf) + if config_true_value(conf.get('allow_versioned_writes')): + register_swift_info('versioned_writes') + + def obj_versions_filter(app): + return VersionedWritesMiddleware(app, conf) + + return obj_versions_filter diff --git a/swift/proxy/controllers/obj.py b/swift/proxy/controllers/obj.py index e86b35debe..78af923124 100644 --- a/swift/proxy/controllers/obj.py +++ b/swift/proxy/controllers/obj.py @@ -51,13 +51,12 @@ from swift.common.constraints import check_metadata, check_object_creation, \ check_account_format from swift.common import constraints from swift.common.exceptions import ChunkReadTimeout, \ - ChunkWriteTimeout, ConnectionTimeout, ListingIterNotFound, \ - ListingIterNotAuthorized, ListingIterError, ResponseTimeout, \ + ChunkWriteTimeout, ConnectionTimeout, ResponseTimeout, \ InsufficientStorage, FooterNotSupported, MultiphasePUTNotSupported, \ PutterConnectError from swift.common.http import ( - is_success, is_client_error, is_server_error, HTTP_CONTINUE, HTTP_CREATED, - HTTP_MULTIPLE_CHOICES, HTTP_NOT_FOUND, HTTP_INTERNAL_SERVER_ERROR, + is_success, is_server_error, HTTP_CONTINUE, HTTP_CREATED, + HTTP_MULTIPLE_CHOICES, HTTP_INTERNAL_SERVER_ERROR, HTTP_SERVICE_UNAVAILABLE, HTTP_INSUFFICIENT_STORAGE, HTTP_PRECONDITION_FAILED, HTTP_CONFLICT, is_informational) from swift.common.storage_policy import (POLICIES, REPL_POLICY, EC_POLICY, @@ -139,46 +138,6 @@ class BaseObjectController(Controller): self.container_name = unquote(container_name) self.object_name = unquote(object_name) - def _listing_iter(self, lcontainer, lprefix, env): - for page in self._listing_pages_iter(lcontainer, lprefix, env): - for item in page: - yield item - - def _listing_pages_iter(self, lcontainer, lprefix, env): - lpartition = self.app.container_ring.get_part( - self.account_name, lcontainer) - marker = '' - while True: - lreq = Request.blank('i will be overridden by env', environ=env) - # Don't quote PATH_INFO, by WSGI spec - lreq.environ['PATH_INFO'] = \ - '/v1/%s/%s' % (self.account_name, lcontainer) - lreq.environ['REQUEST_METHOD'] = 'GET' - lreq.environ['QUERY_STRING'] = \ - 'format=json&prefix=%s&marker=%s' % (quote(lprefix), - quote(marker)) - container_node_iter = self.app.iter_nodes(self.app.container_ring, - lpartition) - lresp = self.GETorHEAD_base( - lreq, _('Container'), container_node_iter, lpartition, - lreq.swift_entity_path) - if 'swift.authorize' in env: - lreq.acl = lresp.headers.get('x-container-read') - aresp = env['swift.authorize'](lreq) - if aresp: - raise ListingIterNotAuthorized(aresp) - if lresp.status_int == HTTP_NOT_FOUND: - raise ListingIterNotFound() - elif not is_success(lresp.status_int): - raise ListingIterError() - if not lresp.body: - break - sublisting = json.loads(lresp.body) - if not sublisting: - break - marker = sublisting[-1]['name'].encode('utf-8') - yield sublisting - def iter_nodes_local_first(self, ring, partition): """ Yields nodes for a ring partition. 
@@ -548,71 +507,6 @@ class BaseObjectController(Controller): # until copy request handling moves to middleware return None, req, data_source, update_response - def _handle_object_versions(self, req): - """ - This method handles versionining of objects in containers that - have the feature enabled. - - When a new PUT request is sent, the proxy checks for previous versions - of that same object name. If found, it is copied to a different - container and the new version is stored in its place. - - This method was added as part of the PUT method refactoring and the - functionality is expected to be moved to middleware - """ - container_info = self.container_info( - self.account_name, self.container_name, req) - policy_index = req.headers.get('X-Backend-Storage-Policy-Index', - container_info['storage_policy']) - obj_ring = self.app.get_object_ring(policy_index) - partition, nodes = obj_ring.get_nodes( - self.account_name, self.container_name, self.object_name) - object_versions = container_info['versions'] - - # do a HEAD request for checking object versions - if object_versions and not req.environ.get('swift_versioned_copy'): - # make sure proxy-server uses the right policy index - _headers = {'X-Backend-Storage-Policy-Index': policy_index, - 'X-Newest': 'True'} - hreq = Request.blank(req.path_info, headers=_headers, - environ={'REQUEST_METHOD': 'HEAD'}) - hnode_iter = self.app.iter_nodes(obj_ring, partition) - hresp = self.GETorHEAD_base( - hreq, _('Object'), hnode_iter, partition, - hreq.swift_entity_path) - - is_manifest = 'X-Object-Manifest' in req.headers or \ - 'X-Object-Manifest' in hresp.headers - if hresp.status_int != HTTP_NOT_FOUND and not is_manifest: - # This is a version manifest and needs to be handled - # differently. First copy the existing data to a new object, - # then write the data from this request to the version manifest - # object. - lcontainer = object_versions.split('/')[0] - prefix_len = '%03x' % len(self.object_name) - lprefix = prefix_len + self.object_name + '/' - ts_source = hresp.environ.get('swift_x_timestamp') - if ts_source is None: - ts_source = time.mktime(time.strptime( - hresp.headers['last-modified'], - '%a, %d %b %Y %H:%M:%S GMT')) - new_ts = Timestamp(ts_source).internal - vers_obj_name = lprefix + new_ts - copy_headers = { - 'Destination': '%s/%s' % (lcontainer, vers_obj_name)} - copy_environ = {'REQUEST_METHOD': 'COPY', - 'swift_versioned_copy': True - } - copy_req = Request.blank(req.path_info, headers=copy_headers, - environ=copy_environ) - copy_resp = self.COPY(copy_req) - if is_client_error(copy_resp.status_int): - # missing container or bad permissions - raise HTTPPreconditionFailed(request=req) - elif not is_success(copy_resp.status_int): - # could not copy the data, bail - raise HTTPServiceUnavailable(request=req) - def _update_content_type(self, req): # Sometimes the 'content-type' header exists, but is set to None. 
req.content_type_manually_set = True @@ -819,9 +713,6 @@ class BaseObjectController(Controller): self._update_x_timestamp(req) - # check if versioning is enabled and handle copying previous version - self._handle_object_versions(req) - # check if request is a COPY of an existing object source_header = req.headers.get('X-Copy-From') if source_header: @@ -865,86 +756,10 @@ class BaseObjectController(Controller): containers = container_info['nodes'] req.acl = container_info['write_acl'] req.environ['swift_sync_key'] = container_info['sync_key'] - object_versions = container_info['versions'] if 'swift.authorize' in req.environ: aresp = req.environ['swift.authorize'](req) if aresp: return aresp - if object_versions: - # this is a version manifest and needs to be handled differently - object_versions = unquote(object_versions) - lcontainer = object_versions.split('/')[0] - prefix_len = '%03x' % len(self.object_name) - lprefix = prefix_len + self.object_name + '/' - item_list = [] - try: - for _item in self._listing_iter(lcontainer, lprefix, - req.environ): - item_list.append(_item) - except ListingIterNotFound: - # no worries, last_item is None - pass - except ListingIterNotAuthorized as err: - return err.aresp - except ListingIterError: - return HTTPServerError(request=req) - - while len(item_list) > 0: - previous_version = item_list.pop() - # there are older versions so copy the previous version to the - # current object and delete the previous version - orig_container = self.container_name - orig_obj = self.object_name - self.container_name = lcontainer - self.object_name = previous_version['name'].encode('utf-8') - - copy_path = '/v1/' + self.account_name + '/' + \ - self.container_name + '/' + self.object_name - - copy_headers = {'X-Newest': 'True', - 'Destination': orig_container + '/' + orig_obj - } - copy_environ = {'REQUEST_METHOD': 'COPY', - 'swift_versioned_copy': True - } - creq = Request.blank(copy_path, headers=copy_headers, - environ=copy_environ) - copy_resp = self.COPY(creq) - if copy_resp.status_int == HTTP_NOT_FOUND: - # the version isn't there so we'll try with previous - self.container_name = orig_container - self.object_name = orig_obj - continue - if is_client_error(copy_resp.status_int): - # some user error, maybe permissions - return HTTPPreconditionFailed(request=req) - elif not is_success(copy_resp.status_int): - # could not copy the data, bail - return HTTPServiceUnavailable(request=req) - # reset these because the COPY changed them - self.container_name = lcontainer - self.object_name = previous_version['name'].encode('utf-8') - new_del_req = Request.blank(copy_path, environ=req.environ) - container_info = self.container_info( - self.account_name, self.container_name, req) - policy_idx = container_info['storage_policy'] - obj_ring = self.app.get_object_ring(policy_idx) - # pass the policy index to storage nodes via req header - new_del_req.headers['X-Backend-Storage-Policy-Index'] = \ - policy_idx - container_partition = container_info['partition'] - containers = container_info['nodes'] - new_del_req.acl = container_info['write_acl'] - new_del_req.path_info = copy_path - req = new_del_req - # remove 'X-If-Delete-At', since it is not for the older copy - if 'X-If-Delete-At' in req.headers: - del req.headers['X-If-Delete-At'] - if 'swift.authorize' in req.environ: - aresp = req.environ['swift.authorize'](req) - if aresp: - return aresp - break if not containers: return HTTPNotFound(request=req) partition, nodes = obj_ring.get_nodes( diff --git a/swift/proxy/server.py 
b/swift/proxy/server.py index 65044a1868..d55dcdab92 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -64,6 +64,9 @@ required_filters = [ if pipe.startswith('catch_errors') else [])}, {'name': 'dlo', 'after_fn': lambda _junk: [ + 'staticweb', 'tempauth', 'keystoneauth', + 'catch_errors', 'gatekeeper', 'proxy_logging']}, + {'name': 'versioned_writes', 'after_fn': lambda _junk: [ 'staticweb', 'tempauth', 'keystoneauth', 'catch_errors', 'gatekeeper', 'proxy_logging']}] diff --git a/test/functional/swift_test_client.py b/test/functional/swift_test_client.py index c93b2eab09..750181bc06 100644 --- a/test/functional/swift_test_client.py +++ b/test/functional/swift_test_client.py @@ -236,6 +236,9 @@ class Connection(object): if not cfg.get('no_auth_token'): headers['X-Auth-Token'] = self.storage_token + if cfg.get('use_token'): + headers['X-Auth-Token'] = cfg.get('use_token') + if isinstance(hdrs, dict): headers.update(hdrs) return headers @@ -507,6 +510,18 @@ class Container(Base): return self.conn.make_request('PUT', self.path, hdrs=hdrs, parms=parms, cfg=cfg) in (201, 202) + def update_metadata(self, hdrs=None, cfg=None): + if hdrs is None: + hdrs = {} + if cfg is None: + cfg = {} + + self.conn.make_request('POST', self.path, hdrs=hdrs, cfg=cfg) + if not 200 <= self.conn.response.status <= 299: + raise ResponseError(self.conn.response, 'POST', + self.conn.make_path(self.path)) + return True + def delete(self, hdrs=None, parms=None): if hdrs is None: hdrs = {} @@ -637,6 +652,9 @@ class File(Base): else: headers['Content-Length'] = 0 + if cfg.get('use_token'): + headers['X-Auth-Token'] = cfg.get('use_token') + if cfg.get('no_content_type'): pass elif self.content_type: @@ -711,13 +729,13 @@ class File(Base): return self.conn.make_request('COPY', self.path, hdrs=headers, parms=parms) == 201 - def delete(self, hdrs=None, parms=None): + def delete(self, hdrs=None, parms=None, cfg=None): if hdrs is None: hdrs = {} if parms is None: parms = {} if self.conn.make_request('DELETE', self.path, hdrs=hdrs, - parms=parms) != 204: + cfg=cfg, parms=parms) != 204: raise ResponseError(self.conn.response, 'DELETE', self.conn.make_path(self.path)) diff --git a/test/functional/tests.py b/test/functional/tests.py index 18b3d4716d..8bc628c7c9 100644 --- a/test/functional/tests.py +++ b/test/functional/tests.py @@ -2598,7 +2598,7 @@ class TestObjectVersioningEnv(object): @classmethod def setUp(cls): cls.conn = Connection(tf.config) - cls.conn.authenticate() + cls.storage_url, cls.storage_token = cls.conn.authenticate() cls.account = Account(cls.conn, tf.config.get('account', tf.config['username'])) @@ -2628,6 +2628,30 @@ class TestObjectVersioningEnv(object): # if versioning is off, then X-Versions-Location won't persist cls.versioning_enabled = 'versions' in container_info + # setup another account to test ACLs + config2 = deepcopy(tf.config) + config2['account'] = tf.config['account2'] + config2['username'] = tf.config['username2'] + config2['password'] = tf.config['password2'] + cls.conn2 = Connection(config2) + cls.storage_url2, cls.storage_token2 = cls.conn2.authenticate() + cls.account2 = cls.conn2.get_account() + cls.account2.delete_containers() + + # setup another account with no access to anything to test ACLs + config3 = deepcopy(tf.config) + config3['account'] = tf.config['account'] + config3['username'] = tf.config['username3'] + config3['password'] = tf.config['password3'] + cls.conn3 = Connection(config3) + cls.storage_url3, cls.storage_token3 = cls.conn3.authenticate() + cls.account3 = 
cls.conn3.get_account() + + @classmethod + def tearDown(cls): + cls.account.delete_containers() + cls.account2.delete_containers() + class TestCrossPolicyObjectVersioningEnv(object): # tri-state: None initially, then True/False @@ -2650,14 +2674,14 @@ class TestCrossPolicyObjectVersioningEnv(object): cls.multiple_policies_enabled = True else: cls.multiple_policies_enabled = False - # We have to lie here that versioning is enabled. We actually - # don't know, but it does not matter. We know these tests cannot - # run without multiple policies present. If multiple policies are - # present, we won't be setting this field to any value, so it - # should all still work. - cls.versioning_enabled = True + cls.versioning_enabled = False return + if cls.versioning_enabled is None: + cls.versioning_enabled = 'versioned_writes' in cluster_info + if not cls.versioning_enabled: + return + policy = cls.policies.select() version_policy = cls.policies.exclude(name=policy['name']).select() @@ -2691,6 +2715,25 @@ class TestCrossPolicyObjectVersioningEnv(object): # if versioning is off, then X-Versions-Location won't persist cls.versioning_enabled = 'versions' in container_info + # setup another account to test ACLs + config2 = deepcopy(tf.config) + config2['account'] = tf.config['account2'] + config2['username'] = tf.config['username2'] + config2['password'] = tf.config['password2'] + cls.conn2 = Connection(config2) + cls.storage_url2, cls.storage_token2 = cls.conn2.authenticate() + cls.account2 = cls.conn2.get_account() + cls.account2.delete_containers() + + # setup another account with no access to anything to test ACLs + config3 = deepcopy(tf.config) + config3['account'] = tf.config['account'] + config3['username'] = tf.config['username3'] + config3['password'] = tf.config['password3'] + cls.conn3 = Connection(config3) + cls.storage_url3, cls.storage_token3 = cls.conn3.authenticate() + cls.account3 = cls.conn3.get_account() + class TestObjectVersioning(Base): env = TestObjectVersioningEnv @@ -2709,40 +2752,103 @@ class TestObjectVersioning(Base): def tearDown(self): super(TestObjectVersioning, self).tearDown() try: - # delete versions first! 
+ # only delete files and not container + # as they were configured in self.env self.env.versions_container.delete_files() self.env.container.delete_files() except ResponseError: pass + def test_clear_version_option(self): + # sanity + self.assertEqual(self.env.container.info()['versions'], + self.env.versions_container.name) + self.env.container.update_metadata( + hdrs={'X-Versions-Location': ''}) + self.assertEqual(self.env.container.info().get('versions'), None) + + # set location back to the way it was + self.env.container.update_metadata( + hdrs={'X-Versions-Location': self.env.versions_container.name}) + self.assertEqual(self.env.container.info()['versions'], + self.env.versions_container.name) + def test_overwriting(self): container = self.env.container versions_container = self.env.versions_container + cont_info = container.info() + self.assertEquals(cont_info['versions'], versions_container.name) + obj_name = Utils.create_name() versioned_obj = container.file(obj_name) - versioned_obj.write("aaaaa") + versioned_obj.write("aaaaa", hdrs={'Content-Type': 'text/jibberish01'}) + obj_info = versioned_obj.info() + self.assertEqual('text/jibberish01', obj_info['content_type']) self.assertEqual(0, versions_container.info()['object_count']) - - versioned_obj.write("bbbbb") + versioned_obj.write("bbbbb", hdrs={'Content-Type': 'text/jibberish02', + 'X-Object-Meta-Foo': 'Bar'}) + versioned_obj.initialize() + self.assertEqual(versioned_obj.content_type, 'text/jibberish02') + self.assertEqual(versioned_obj.metadata['foo'], 'Bar') # the old version got saved off self.assertEqual(1, versions_container.info()['object_count']) versioned_obj_name = versions_container.files()[0] - self.assertEqual( - "aaaaa", versions_container.file(versioned_obj_name).read()) + prev_version = versions_container.file(versioned_obj_name) + prev_version.initialize() + self.assertEqual("aaaaa", prev_version.read()) + self.assertEqual(prev_version.content_type, 'text/jibberish01') + + # make sure the new obj metadata did not leak to the prev. 
version + self.assertTrue('foo' not in prev_version.metadata) + + # check that POST does not create a new version + versioned_obj.sync_metadata(metadata={'fu': 'baz'}) + self.assertEqual(1, versions_container.info()['object_count']) # if we overwrite it again, there are two versions versioned_obj.write("ccccc") self.assertEqual(2, versions_container.info()['object_count']) + versioned_obj_name = versions_container.files()[1] + prev_version = versions_container.file(versioned_obj_name) + prev_version.initialize() + self.assertEqual("bbbbb", prev_version.read()) + self.assertEqual(prev_version.content_type, 'text/jibberish02') + self.assertTrue('foo' in prev_version.metadata) + self.assertTrue('fu' in prev_version.metadata) # as we delete things, the old contents return self.assertEqual("ccccc", versioned_obj.read()) + + # test copy from a different container + src_container = self.env.account.container(Utils.create_name()) + self.assertTrue(src_container.create()) + src_name = Utils.create_name() + src_obj = src_container.file(src_name) + src_obj.write("ddddd", hdrs={'Content-Type': 'text/jibberish04'}) + src_obj.copy(container.name, obj_name) + + self.assertEqual("ddddd", versioned_obj.read()) + versioned_obj.initialize() + self.assertEqual(versioned_obj.content_type, 'text/jibberish04') + + # make sure versions container has the previous version + self.assertEqual(3, versions_container.info()['object_count']) + versioned_obj_name = versions_container.files()[2] + prev_version = versions_container.file(versioned_obj_name) + prev_version.initialize() + self.assertEqual("ccccc", prev_version.read()) + + # test delete + versioned_obj.delete() + self.assertEqual("ccccc", versioned_obj.read()) versioned_obj.delete() self.assertEqual("bbbbb", versioned_obj.read()) versioned_obj.delete() self.assertEqual("aaaaa", versioned_obj.read()) + self.assertEqual(0, versions_container.info()['object_count']) versioned_obj.delete() self.assertRaises(ResponseError, versioned_obj.read) @@ -2774,6 +2880,87 @@ class TestObjectVersioning(Base): self.assertEqual(3, versions_container.info()['object_count']) self.assertEqual("112233", man_file.read()) + def test_versioning_container_acl(self): + # create versions container and DO NOT give write access to account2 + versions_container = self.env.account.container(Utils.create_name()) + self.assertTrue(versions_container.create(hdrs={ + 'X-Container-Write': '' + })) + + # check account2 cannot write to versions container + fail_obj_name = Utils.create_name() + fail_obj = versions_container.file(fail_obj_name) + self.assertRaises(ResponseError, fail_obj.write, "should fail", + cfg={'use_token': self.env.storage_token2}) + + # create container and give write access to account2 + # don't set X-Versions-Location just yet + container = self.env.account.container(Utils.create_name()) + self.assertTrue(container.create(hdrs={ + 'X-Container-Write': self.env.conn2.user_acl})) + + # check account2 cannot set X-Versions-Location on container + self.assertRaises(ResponseError, container.update_metadata, hdrs={ + 'X-Versions-Location': versions_container}, + cfg={'use_token': self.env.storage_token2}) + + # good! now let admin set the X-Versions-Location + # p.s.: sticking a 'x-remove' header here to test precedence + # of both headers. Setting the location should succeed. 
+ self.assertTrue(container.update_metadata(hdrs={ + 'X-Remove-Versions-Location': versions_container, + 'X-Versions-Location': versions_container})) + + # write object twice to container and check version + obj_name = Utils.create_name() + versioned_obj = container.file(obj_name) + self.assertTrue(versioned_obj.write("never argue with the data", + cfg={'use_token': self.env.storage_token2})) + self.assertEqual(versioned_obj.read(), "never argue with the data") + + self.assertTrue( + versioned_obj.write("we don't have no beer, just tequila", + cfg={'use_token': self.env.storage_token2})) + self.assertEqual(versioned_obj.read(), + "we don't have no beer, just tequila") + self.assertEqual(1, versions_container.info()['object_count']) + + # read the original uploaded object + for filename in versions_container.files(): + backup_file = versions_container.file(filename) + break + self.assertEqual(backup_file.read(), "never argue with the data") + + # user3 (some random user with no access to anything) + # tries to read from versioned container + self.assertRaises(ResponseError, backup_file.read, + cfg={'use_token': self.env.storage_token3}) + + # user3 cannot write or delete from source container either + self.assertRaises(ResponseError, versioned_obj.write, + "some random user trying to write data", + cfg={'use_token': self.env.storage_token3}) + self.assertRaises(ResponseError, versioned_obj.delete, + cfg={'use_token': self.env.storage_token3}) + + # user2 can't read or delete from versions-location + self.assertRaises(ResponseError, backup_file.read, + cfg={'use_token': self.env.storage_token2}) + self.assertRaises(ResponseError, backup_file.delete, + cfg={'use_token': self.env.storage_token2}) + + # but is able to delete from the source container + # this could be a helpful scenario for dev ops that want to setup + # just one container to hold object versions of multiple containers + # and each one of those containers are owned by different users + self.assertTrue(versioned_obj.delete( + cfg={'use_token': self.env.storage_token2})) + + # tear-down since we create these containers here + # and not in self.env + versions_container.delete_recursive() + container.delete_recursive() + def test_versioning_check_acl(self): container = self.env.container versions_container = self.env.versions_container diff --git a/test/unit/common/middleware/helpers.py b/test/unit/common/middleware/helpers.py index 7c1b45571e..bc6ad50fdd 100644 --- a/test/unit/common/middleware/helpers.py +++ b/test/unit/common/middleware/helpers.py @@ -76,7 +76,7 @@ class FakeSwift(object): path += '?' 
+ env['QUERY_STRING'] if 'swift.authorize' in env: - resp = env['swift.authorize']() + resp = env['swift.authorize'](swob.Request(env)) if resp: return resp(env, start_response) diff --git a/test/unit/common/middleware/test_dlo.py b/test/unit/common/middleware/test_dlo.py index c290430e08..702eb2432d 100644 --- a/test/unit/common/middleware/test_dlo.py +++ b/test/unit/common/middleware/test_dlo.py @@ -793,7 +793,7 @@ class TestDloGetManifest(DloTestCase): def test_get_with_auth_overridden(self): auth_got_called = [0] - def my_auth(): + def my_auth(req): auth_got_called[0] += 1 return None diff --git a/test/unit/common/middleware/test_versioned_writes.py b/test/unit/common/middleware/test_versioned_writes.py new file mode 100644 index 0000000000..1d38b73f68 --- /dev/null +++ b/test/unit/common/middleware/test_versioned_writes.py @@ -0,0 +1,558 @@ +# Copyright (c) 2013 OpenStack Foundation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +from swift.common import swob +from swift.common.middleware import versioned_writes +from swift.common.swob import Request +from test.unit.common.middleware.helpers import FakeSwift + + +class FakeCache(object): + + def __init__(self, val): + if 'status' not in val: + val['status'] = 200 + self.val = val + + def get(self, *args): + return self.val + + +class VersionedWritesTestCase(unittest.TestCase): + def setUp(self): + self.app = FakeSwift() + conf = {'allow_versioned_writes': 'true'} + self.vw = versioned_writes.filter_factory(conf)(self.app) + + def call_app(self, req, app=None, expect_exception=False): + if app is None: + app = self.app + + self.authorized = [] + + def authorize(req): + self.authorized.append(req) + + if 'swift.authorize' not in req.environ: + req.environ['swift.authorize'] = authorize + + req.headers.setdefault("User-Agent", "Marula Kruger") + + status = [None] + headers = [None] + + def start_response(s, h, ei=None): + status[0] = s + headers[0] = h + + body_iter = app(req.environ, start_response) + body = '' + caught_exc = None + try: + for chunk in body_iter: + body += chunk + except Exception as exc: + if expect_exception: + caught_exc = exc + else: + raise + + if expect_exception: + return status[0], headers[0], body, caught_exc + else: + return status[0], headers[0], body + + def call_vw(self, req, **kwargs): + return self.call_app(req, app=self.vw, **kwargs) + + def assertRequestEqual(self, req, other): + self.assertEqual(req.method, other.method) + self.assertEqual(req.path, other.path) + + def test_put_container(self): + self.app.register('PUT', '/v1/a/c', swob.HTTPOk, {}, 'passed') + req = Request.blank('/v1/a/c', + headers={'X-Versions-Location': 'ver_cont'}, + environ={'REQUEST_METHOD': 'PUT'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + + # check for sysmeta header + calls = self.app.calls_with_headers + method, path, req_headers = calls[0] + self.assertEquals('PUT', method) + self.assertEquals('/v1/a/c', path) + 
self.assertTrue('x-container-sysmeta-versions-location' in req_headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_container_allow_versioned_writes_false(self): + self.vw.conf = {'allow_versioned_writes': 'false'} + + # PUT/POST container must fail as 412 when allow_versioned_writes + # set to false + for method in ('PUT', 'POST'): + req = Request.blank('/v1/a/c', + headers={'X-Versions-Location': 'ver_cont'}, + environ={'REQUEST_METHOD': method}) + try: + status, headers, body = self.call_vw(req) + except swob.HTTPException as e: + pass + self.assertEquals(e.status_int, 412) + + # GET/HEAD performs as normal + self.app.register('GET', '/v1/a/c', swob.HTTPOk, {}, 'passed') + self.app.register('HEAD', '/v1/a/c', swob.HTTPOk, {}, 'passed') + + for method in ('GET', 'HEAD'): + req = Request.blank('/v1/a/c', + headers={'X-Versions-Location': 'ver_cont'}, + environ={'REQUEST_METHOD': method}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + + def test_remove_versions_location(self): + self.app.register('POST', '/v1/a/c', swob.HTTPOk, {}, 'passed') + req = Request.blank('/v1/a/c', + headers={'X-Remove-Versions-Location': 'x'}, + environ={'REQUEST_METHOD': 'POST'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + + # check for sysmeta header + calls = self.app.calls_with_headers + method, path, req_headers = calls[0] + self.assertEquals('POST', method) + self.assertEquals('/v1/a/c', path) + self.assertTrue('x-container-sysmeta-versions-location' in req_headers) + self.assertTrue('x-versions-location' in req_headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_remove_add_versions_precedence(self): + self.app.register( + 'POST', '/v1/a/c', swob.HTTPOk, + {'x-container-sysmeta-versions-location': 'ver_cont'}, + 'passed') + req = Request.blank('/v1/a/c', + headers={'X-Remove-Versions-Location': 'x', + 'X-Versions-Location': 'ver_cont'}, + environ={'REQUEST_METHOD': 'POST'}) + + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + self.assertTrue(('X-Versions-Location', 'ver_cont') in headers) + + # check for sysmeta header + calls = self.app.calls_with_headers + method, path, req_headers = calls[0] + self.assertEquals('POST', method) + self.assertEquals('/v1/a/c', path) + self.assertTrue('x-container-sysmeta-versions-location' in req_headers) + self.assertTrue('x-remove-versions-location' not in req_headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_get_container(self): + self.app.register( + 'GET', '/v1/a/c', swob.HTTPOk, + {'x-container-sysmeta-versions-location': 'ver_cont'}, None) + req = Request.blank( + '/v1/a/c', + environ={'REQUEST_METHOD': 'GET'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + self.assertTrue(('X-Versions-Location', 'ver_cont') in headers) + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_get_head(self): + self.app.register('GET', '/v1/a/c/o', swob.HTTPOk, {}, None) + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'GET'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk, {}, None) + req = 
Request.blank(
+            '/v1/a/c/o',
+            environ={'REQUEST_METHOD': 'HEAD'})
+        status, headers, body = self.call_vw(req)
+        self.assertEquals(status, '200 OK')
+        self.assertEqual(len(self.authorized), 1)
+        self.assertRequestEqual(req, self.authorized[0])
+
+    def test_put_object_no_versioning(self):
+        self.app.register(
+            'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
+
+        cache = FakeCache({})
+        req = Request.blank(
+            '/v1/a/c/o',
+            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
+                     'CONTENT_LENGTH': '100'})
+        status, headers, body = self.call_vw(req)
+        self.assertEquals(status, '200 OK')
+        self.assertEqual(len(self.authorized), 1)
+        self.assertRequestEqual(req, self.authorized[0])
+
+    def test_put_first_object_success(self):
+        self.app.register(
+            'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
+        self.app.register(
+            'HEAD', '/v1/a/c/o', swob.HTTPNotFound, {}, None)
+
+        cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}})
+        req = Request.blank(
+            '/v1/a/c/o',
+            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
+                     'CONTENT_LENGTH': '100'})
+        status, headers, body = self.call_vw(req)
+        self.assertEquals(status, '200 OK')
+        self.assertEqual(len(self.authorized), 1)
+        self.assertRequestEqual(req, self.authorized[0])
+
+    def test_PUT_versioning_with_nonzero_default_policy(self):
+        self.app.register(
+            'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed')
+        self.app.register(
+            'HEAD', '/v1/a/c/o', swob.HTTPNotFound, {}, None)
+
+        cache = FakeCache({'versions': 'ver_cont', 'storage_policy': '2'})
+        req = Request.blank(
+            '/v1/a/c/o',
+            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
+                     'CONTENT_LENGTH': '100'})
+        status, headers, body = self.call_vw(req)
+        self.assertEquals(status, '200 OK')
+
+        # check for 'X-Backend-Storage-Policy-Index' in HEAD request
+        calls = self.app.calls_with_headers
+        method, path, req_headers = calls[0]
+        self.assertEquals('HEAD', method)
+        self.assertEquals('/v1/a/c/o', path)
+        self.assertTrue('X-Backend-Storage-Policy-Index' in req_headers)
+        self.assertEquals('2',
+                          req_headers.get('X-Backend-Storage-Policy-Index'))
+        self.assertEqual(len(self.authorized), 1)
+        self.assertRequestEqual(req, self.authorized[0])
+
+    def test_put_object_no_versioning_with_container_config_true(self):
+        # set allow_versioned_writes to false and expect no COPY to occur
+        self.vw.conf = {'allow_versioned_writes': 'false'}
+        self.app.register(
+            'PUT', '/v1/a/c/o', swob.HTTPCreated, {}, 'passed')
+        self.app.register(
+            'HEAD', '/v1/a/c/o', swob.HTTPOk,
+            {'last-modified': 'Wed, 19 Nov 2014 18:19:02 GMT'}, 'passed')
+        cache = FakeCache({'versions': 'ver_cont'})
+        req = Request.blank(
+            '/v1/a/c/o',
+            environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache,
+                     'CONTENT_LENGTH': '100'})
+        status, headers, body = self.call_vw(req)
+        self.assertEquals(status, '201 Created')
+        self.assertEqual(len(self.authorized), 1)
+        self.assertRequestEqual(req, self.authorized[0])
+        called_method = [method for (method, path, hdrs) in self.app._calls]
+        self.assertTrue('COPY' not in called_method)
+
+    def test_delete_object_no_versioning_with_container_config_true(self):
+        # set allow_versioned_writes to false and expect no GET of the
+        # versions container and no COPY (just delete the object as normal)
+        self.vw.conf = {'allow_versioned_writes': 'false'}
+        self.app.register(
+            'DELETE', '/v1/a/c/o', swob.HTTPNoContent, {}, 'passed')
+        cache = FakeCache({'versions': 'ver_cont'})
+        req = Request.blank(
+            '/v1/a/c/o',
+            environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache})
+        status, headers, 
body = self.call_vw(req) + self.assertEquals(status, '204 No Content') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + called_method = \ + [method for (method, path, rheaders) in self.app._calls] + self.assertTrue('COPY' not in called_method) + self.assertTrue('GET' not in called_method) + + def test_copy_object_no_versioning_with_container_config_true(self): + # set False to versions_write obviously and expect no extra + # COPY called (just copy object as normal) + self.vw.conf = {'allow_versioned_writes': 'false'} + self.app.register( + 'COPY', '/v1/a/c/o', swob.HTTPCreated, {}, None) + cache = FakeCache({'versions': 'ver_cont'}) + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '201 Created') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + called_method = \ + [method for (method, path, rheaders) in self.app._calls] + self.assertTrue('COPY' in called_method) + self.assertEquals(called_method.count('COPY'), 1) + + def test_new_version_success(self): + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') + self.app.register( + 'HEAD', '/v1/a/c/o', swob.HTTPOk, + {'last-modified': 'Wed, 19 Nov 2014 18:19:02 GMT'}, 'passed') + self.app.register( + 'COPY', '/v1/a/c/o', swob.HTTPCreated, {}, None) + cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}}) + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache, + 'CONTENT_LENGTH': '100'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_new_version_sysmeta_precedence(self): + self.app.register( + 'PUT', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') + self.app.register( + 'HEAD', '/v1/a/c/o', swob.HTTPOk, + {'last-modified': 'Wed, 19 Nov 2014 18:19:02 GMT'}, 'passed') + self.app.register( + 'COPY', '/v1/a/c/o', swob.HTTPCreated, {}, None) + + # fill cache with two different values for versions location + # new middleware should use sysmeta first + cache = FakeCache({'versions': 'old_ver_cont', + 'sysmeta': {'versions-location': 'ver_cont'}}) + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'PUT', 'swift.cache': cache, + 'CONTENT_LENGTH': '100'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + # check that sysmeta header was used + calls = self.app.calls_with_headers + method, path, req_headers = calls[1] + self.assertEquals('COPY', method) + self.assertEquals('/v1/a/c/o', path) + self.assertTrue(req_headers['Destination'].startswith('ver_cont/')) + + def test_copy_first_version(self): + self.app.register( + 'COPY', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, 'passed') + self.app.register( + 'HEAD', '/v1/a/tgt_cont/tgt_obj', swob.HTTPNotFound, {}, None) + cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}}) + req = Request.blank( + '/v1/a/src_cont/src_obj', + environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache, + 'CONTENT_LENGTH': '100'}, + headers={'Destination': 'tgt_cont/tgt_obj'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def 
test_copy_new_version(self): + self.app.register( + 'COPY', '/v1/a/src_cont/src_obj', swob.HTTPOk, {}, 'passed') + self.app.register( + 'HEAD', '/v1/a/tgt_cont/tgt_obj', swob.HTTPOk, + {'last-modified': 'Wed, 19 Nov 2014 18:19:02 GMT'}, 'passed') + self.app.register( + 'COPY', '/v1/a/tgt_cont/tgt_obj', swob.HTTPCreated, {}, None) + cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}}) + req = Request.blank( + '/v1/a/src_cont/src_obj', + environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache, + 'CONTENT_LENGTH': '100'}, + headers={'Destination': 'tgt_cont/tgt_obj'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_copy_new_version_different_account(self): + self.app.register( + 'COPY', '/v1/src_a/src_cont/src_obj', swob.HTTPOk, {}, 'passed') + self.app.register( + 'HEAD', '/v1/tgt_a/tgt_cont/tgt_obj', swob.HTTPOk, + {'last-modified': 'Wed, 19 Nov 2014 18:19:02 GMT'}, 'passed') + self.app.register( + 'COPY', '/v1/tgt_a/tgt_cont/tgt_obj', swob.HTTPCreated, {}, None) + cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}}) + req = Request.blank( + '/v1/src_a/src_cont/src_obj', + environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache, + 'CONTENT_LENGTH': '100'}, + headers={'Destination': 'tgt_cont/tgt_obj', + 'Destination-Account': 'tgt_a'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_delete_first_object_success(self): + self.app.register( + 'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') + self.app.register( + 'GET', '/v1/a/ver_cont?format=json&prefix=001o/&marker=', + swob.HTTPNotFound, {}, None) + + cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}}) + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache, + 'CONTENT_LENGTH': '0'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_delete_latest_version_success(self): + self.app.register( + 'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') + self.app.register( + 'GET', '/v1/a/ver_cont?format=json&prefix=001o/&marker=', + swob.HTTPOk, {}, + '[{"hash": "x", ' + '"last_modified": "2014-11-21T14:14:27.409100", ' + '"bytes": 3, ' + '"name": "001o/1", ' + '"content_type": "text/plain"}, ' + '{"hash": "y", ' + '"last_modified": "2014-11-21T14:23:02.206740", ' + '"bytes": 3, ' + '"name": "001o/2", ' + '"content_type": "text/plain"}]') + self.app.register( + 'GET', '/v1/a/ver_cont?format=json&prefix=001o/' + '&marker=001o/2', + swob.HTTPNotFound, {}, None) + self.app.register( + 'COPY', '/v1/a/ver_cont/001o/2', swob.HTTPCreated, + {}, None) + self.app.register( + 'DELETE', '/v1/a/ver_cont/001o/2', swob.HTTPOk, + {}, None) + + cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}}) + req = Request.blank( + '/v1/a/c/o', + headers={'X-If-Delete-At': 1}, + environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache, + 'CONTENT_LENGTH': '0'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + # check that X-If-Delete-At was removed from DELETE request + calls = self.app.calls_with_headers + method, path, req_headers = 
calls.pop() + self.assertEquals('DELETE', method) + self.assertTrue(path.startswith('/v1/a/ver_cont/001o/2')) + self.assertFalse('x-if-delete-at' in req_headers or + 'X-If-Delete-At' in req_headers) + + def test_DELETE_on_expired_versioned_object(self): + self.app.register( + 'GET', '/v1/a/ver_cont?format=json&prefix=001o/&marker=', + swob.HTTPOk, {}, + '[{"hash": "x", ' + '"last_modified": "2014-11-21T14:14:27.409100", ' + '"bytes": 3, ' + '"name": "001o/1", ' + '"content_type": "text/plain"}, ' + '{"hash": "y", ' + '"last_modified": "2014-11-21T14:23:02.206740", ' + '"bytes": 3, ' + '"name": "001o/2", ' + '"content_type": "text/plain"}]') + self.app.register( + 'GET', '/v1/a/ver_cont?format=json&prefix=001o/' + '&marker=001o/2', + swob.HTTPNotFound, {}, None) + + # expired object + self.app.register( + 'COPY', '/v1/a/ver_cont/001o/2', swob.HTTPNotFound, + {}, None) + self.app.register( + 'COPY', '/v1/a/ver_cont/001o/1', swob.HTTPCreated, + {}, None) + self.app.register( + 'DELETE', '/v1/a/ver_cont/001o/1', swob.HTTPOk, + {}, None) + + cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}}) + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache, + 'CONTENT_LENGTH': '0'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '200 OK') + self.assertEqual(len(self.authorized), 1) + self.assertRequestEqual(req, self.authorized[0]) + + def test_denied_DELETE_of_versioned_object(self): + authorize_call = [] + self.app.register( + 'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') + self.app.register( + 'GET', '/v1/a/ver_cont?format=json&prefix=001o/&marker=', + swob.HTTPOk, {}, + '[{"hash": "x", ' + '"last_modified": "2014-11-21T14:14:27.409100", ' + '"bytes": 3, ' + '"name": "001o/1", ' + '"content_type": "text/plain"}, ' + '{"hash": "y", ' + '"last_modified": "2014-11-21T14:23:02.206740", ' + '"bytes": 3, ' + '"name": "001o/2", ' + '"content_type": "text/plain"}]') + self.app.register( + 'GET', '/v1/a/ver_cont?format=json&prefix=001o/' + '&marker=001o/2', + swob.HTTPNotFound, {}, None) + self.app.register( + 'DELETE', '/v1/a/c/o', swob.HTTPForbidden, + {}, None) + + def fake_authorize(req): + authorize_call.append(req) + return swob.HTTPForbidden() + + cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}}) + req = Request.blank( + '/v1/a/c/o', + environ={'REQUEST_METHOD': 'DELETE', 'swift.cache': cache, + 'swift.authorize': fake_authorize, + 'CONTENT_LENGTH': '0'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '403 Forbidden') + self.assertEqual(len(authorize_call), 1) + self.assertRequestEqual(req, authorize_call[0]) diff --git a/test/unit/common/test_constraints.py b/test/unit/common/test_constraints.py index 7808511425..1fd3411ad2 100644 --- a/test/unit/common/test_constraints.py +++ b/test/unit/common/test_constraints.py @@ -515,6 +515,24 @@ class TestConstraints(unittest.TestCase): constraints.check_account_format, req, req.headers['X-Copy-From-Account']) + def test_check_container_format(self): + invalid_versions_locations = ( + 'container/with/slashes', + '', # empty + ) + for versions_location in invalid_versions_locations: + req = Request.blank( + '/v/a/c/o', headers={ + 'X-Versions-Location': versions_location}) + try: + constraints.check_container_format( + req, req.headers['X-Versions-Location']) + except HTTPException as e: + self.assertTrue(e.body.startswith('Container name cannot')) + else: + self.fail('check_container_format did not raise error for %r' % + 
req.headers['X-Versions-Location']) + class TestConstraintsConfig(unittest.TestCase): diff --git a/test/unit/common/test_wsgi.py b/test/unit/common/test_wsgi.py index cf92edeb76..a165ecb5f2 100644 --- a/test/unit/common/test_wsgi.py +++ b/test/unit/common/test_wsgi.py @@ -141,6 +141,11 @@ class TestWSGI(unittest.TestCase): expected = swift.common.middleware.dlo.DynamicLargeObject self.assertTrue(isinstance(app, expected)) + app = app.app + expected = \ + swift.common.middleware.versioned_writes.VersionedWritesMiddleware + self.assert_(isinstance(app, expected)) + app = app.app expected = swift.proxy.server.Application self.assertTrue(isinstance(app, expected)) @@ -1414,6 +1419,7 @@ class TestPipelineModification(unittest.TestCase): ['swift.common.middleware.catch_errors', 'swift.common.middleware.gatekeeper', 'swift.common.middleware.dlo', + 'swift.common.middleware.versioned_writes', 'swift.proxy.server']) def test_proxy_modify_wsgi_pipeline(self): @@ -1444,6 +1450,7 @@ class TestPipelineModification(unittest.TestCase): ['swift.common.middleware.catch_errors', 'swift.common.middleware.gatekeeper', 'swift.common.middleware.dlo', + 'swift.common.middleware.versioned_writes', 'swift.common.middleware.healthcheck', 'swift.proxy.server']) @@ -1541,6 +1548,7 @@ class TestPipelineModification(unittest.TestCase): 'swift.common.middleware.catch_errors', 'swift.common.middleware.gatekeeper', 'swift.common.middleware.dlo', + 'swift.common.middleware.versioned_writes', 'swift.common.middleware.healthcheck', 'swift.proxy.server']) @@ -1554,6 +1562,7 @@ class TestPipelineModification(unittest.TestCase): 'swift.common.middleware.healthcheck', 'swift.common.middleware.catch_errors', 'swift.common.middleware.dlo', + 'swift.common.middleware.versioned_writes', 'swift.proxy.server']) def test_catch_errors_gatekeeper_configured_not_at_start(self): @@ -1566,6 +1575,7 @@ class TestPipelineModification(unittest.TestCase): 'swift.common.middleware.catch_errors', 'swift.common.middleware.gatekeeper', 'swift.common.middleware.dlo', + 'swift.common.middleware.versioned_writes', 'swift.proxy.server']) @with_tempdir @@ -1598,7 +1608,7 @@ class TestPipelineModification(unittest.TestCase): tempdir, policy.ring_name + '.ring.gz') app = wsgi.loadapp(conf_path) - proxy_app = app.app.app.app.app + proxy_app = app.app.app.app.app.app self.assertEqual(proxy_app.account_ring.serialized_path, account_ring_path) self.assertEqual(proxy_app.container_ring.serialized_path, diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 2a7cb04328..d113a70afe 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -56,7 +56,7 @@ from swift.proxy.controllers.obj import ReplicatedObjectController from swift.account import server as account_server from swift.container import server as container_server from swift.obj import server as object_server -from swift.common.middleware import proxy_logging +from swift.common.middleware import proxy_logging, versioned_writes from swift.common.middleware.acl import parse_acl, format_acl from swift.common.exceptions import ChunkReadTimeout, DiskFileNotExist, \ APIVersionError @@ -70,7 +70,7 @@ from swift.proxy.controllers.base import get_container_memcache_key, \ import swift.proxy.controllers import swift.proxy.controllers.obj from swift.common.swob import Request, Response, HTTPUnauthorized, \ - HTTPException, HTTPForbidden, HeaderKeyDict + HTTPException, HeaderKeyDict from swift.common import storage_policy from swift.common.storage_policy import 
StoragePolicy, ECStoragePolicy, \ StoragePolicyCollection, POLICIES @@ -107,7 +107,7 @@ def do_setup(the_object_server): conf = {'devices': _testdir, 'swift_dir': _testdir, 'mount_check': 'false', 'allowed_headers': 'content-encoding, x-object-manifest, content-disposition, foo', - 'allow_versions': 'True'} + 'allow_versions': 't'} prolis = listen(('localhost', 0)) acc1lis = listen(('localhost', 0)) acc2lis = listen(('localhost', 0)) @@ -2710,162 +2710,6 @@ class TestObjectController(unittest.TestCase): exp = 'HTTP/1.1 200' self.assertEqual(headers[:len(exp)], exp) - def test_expirer_DELETE_on_versioned_object(self): - test_errors = [] - - def test_connect(ipaddr, port, device, partition, method, path, - headers=None, query_string=None): - if method == 'DELETE': - if 'x-if-delete-at' in headers or 'X-If-Delete-At' in headers: - test_errors.append('X-If-Delete-At in headers') - - body = json.dumps( - [{"name": "001o/1", - "hash": "x", - "bytes": 0, - "content_type": "text/plain", - "last_modified": "1970-01-01T00:00:01.000000"}]) - body_iter = ('', '', body, '', '', '', '', '', '', '', '', '', '', '') - with save_globals(): - controller = ReplicatedObjectController( - self.app, 'a', 'c', 'o') - # HEAD HEAD GET GET HEAD GET GET GET PUT PUT - # PUT DEL DEL DEL - set_http_connect(200, 200, 200, 200, 200, 200, 200, 200, 201, 201, - 201, 204, 204, 204, - give_connect=test_connect, - body_iter=body_iter, - headers={'x-versions-location': 'foo'}) - self.app.memcache.store = {} - req = Request.blank('/v1/a/c/o', - headers={'X-If-Delete-At': 1}, - environ={'REQUEST_METHOD': 'DELETE'}) - self.app.update_request(req) - controller.DELETE(req) - self.assertEqual(test_errors, []) - - @patch_policies([ - StoragePolicy(0, 'zero', False, object_ring=FakeRing()), - StoragePolicy(1, 'one', True, object_ring=FakeRing()) - ]) - def test_DELETE_on_expired_versioned_object(self): - # reset the router post patch_policies - self.app.obj_controller_router = proxy_server.ObjectControllerRouter() - methods = set() - authorize_call_count = [0] - - def test_connect(ipaddr, port, device, partition, method, path, - headers=None, query_string=None): - methods.add((method, path)) - - def fake_container_info(account, container, req): - return {'status': 200, 'sync_key': None, - 'meta': {}, 'cors': {'allow_origin': None, - 'expose_headers': None, - 'max_age': None}, - 'sysmeta': {}, 'read_acl': None, 'object_count': None, - 'write_acl': None, 'versions': 'foo', - 'partition': 1, 'bytes': None, 'storage_policy': '1', - 'nodes': [{'zone': 0, 'ip': '10.0.0.0', 'region': 0, - 'id': 0, 'device': 'sda', 'port': 1000}, - {'zone': 1, 'ip': '10.0.0.1', 'region': 1, - 'id': 1, 'device': 'sdb', 'port': 1001}, - {'zone': 2, 'ip': '10.0.0.2', 'region': 0, - 'id': 2, 'device': 'sdc', 'port': 1002}]} - - def fake_list_iter(container, prefix, env): - object_list = [{'name': '1'}, {'name': '2'}, {'name': '3'}] - for obj in object_list: - yield obj - - def fake_authorize(req): - authorize_call_count[0] += 1 - return None # allow the request - - with save_globals(): - controller = ReplicatedObjectController( - self.app, 'a', 'c', 'o') - controller.container_info = fake_container_info - controller._listing_iter = fake_list_iter - set_http_connect(404, 404, 404, # get for the previous version - 200, 200, 200, # get for the pre-previous - 201, 201, 201, # put move the pre-previous - 204, 204, 204, # delete for the pre-previous - give_connect=test_connect) - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'DELETE', - 
'swift.authorize': fake_authorize}) - - self.app.memcache.store = {} - self.app.update_request(req) - controller.DELETE(req) - exp_methods = [('GET', '/a/foo/3'), - ('GET', '/a/foo/2'), - ('PUT', '/a/c/o'), - ('DELETE', '/a/foo/2')] - self.assertEqual(set(exp_methods), (methods)) - self.assertEqual(authorize_call_count[0], 2) - - @patch_policies([ - StoragePolicy(0, 'zero', False, object_ring=FakeRing()), - StoragePolicy(1, 'one', True, object_ring=FakeRing()) - ]) - def test_denied_DELETE_of_versioned_object(self): - # Verify that a request with read access to a versions container - # is unable to cause any write operations on the versioned container. - - # reset the router post patch_policies - self.app.obj_controller_router = proxy_server.ObjectControllerRouter() - methods = set() - authorize_call_count = [0] - - def test_connect(ipaddr, port, device, partition, method, path, - headers=None, query_string=None): - methods.add((method, path)) - - def fake_container_info(account, container, req): - return {'status': 200, 'sync_key': None, - 'meta': {}, 'cors': {'allow_origin': None, - 'expose_headers': None, - 'max_age': None}, - 'sysmeta': {}, 'read_acl': None, 'object_count': None, - 'write_acl': None, 'versions': 'foo', - 'partition': 1, 'bytes': None, 'storage_policy': '1', - 'nodes': [{'zone': 0, 'ip': '10.0.0.0', 'region': 0, - 'id': 0, 'device': 'sda', 'port': 1000}, - {'zone': 1, 'ip': '10.0.0.1', 'region': 1, - 'id': 1, 'device': 'sdb', 'port': 1001}, - {'zone': 2, 'ip': '10.0.0.2', 'region': 0, - 'id': 2, 'device': 'sdc', 'port': 1002}]} - - def fake_list_iter(container, prefix, env): - object_list = [{'name': '1'}, {'name': '2'}, {'name': '3'}] - for obj in object_list: - yield obj - - def fake_authorize(req): - # deny write access - authorize_call_count[0] += 1 - return HTTPForbidden(req) # allow the request - - with save_globals(): - controller = ReplicatedObjectController(self.app, 'a', 'c', 'o') - controller.container_info = fake_container_info - # patching _listing_iter simulates request being authorized - # to list versions container - controller._listing_iter = fake_list_iter - set_http_connect(give_connect=test_connect) - req = Request.blank('/v1/a/c/o', - environ={'REQUEST_METHOD': 'DELETE', - 'swift.authorize': fake_authorize}) - - self.app.memcache.store = {} - self.app.update_request(req) - resp = controller.DELETE(req) - self.assertEqual(403, resp.status_int) - self.assertFalse(methods, methods) - self.assertEqual(authorize_call_count[0], 1) - def test_PUT_auto_content_type(self): with save_globals(): controller = ReplicatedObjectController( @@ -5309,394 +5153,6 @@ class TestObjectController(unittest.TestCase): body = fd.read() self.assertEqual(body, 'oh hai123456789abcdef') - @unpatch_policies - def test_version_manifest(self, oc='versions', vc='vers', o='name'): - versions_to_create = 3 - # Create a container for our versioned object testing - (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, - obj2lis, obj3lis) = _test_sockets - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - pre = quote('%03x' % len(o)) - osub = '%s/sub' % o - presub = quote('%03x' % len(osub)) - osub = quote(osub) - presub = quote(presub) - oc = quote(oc) - vc = quote(vc) - fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\nX-Versions-Location: %s\r\n\r\n' - % (oc, vc)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) 
- # check that the header was set - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n\r\n\r\n' % oc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response - self.assertEqual(headers[:len(exp)], exp) - self.assertTrue('X-Versions-Location: %s' % vc in headers) - # make the container for the object versions - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\n\r\n' % vc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - # Create the versioned file - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n' - 'X-Object-Meta-Foo: barbaz\r\n\r\n00000\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - # Create the object versions - for segment in range(1, versions_to_create): - sleep(.01) # guarantee that the timestamp changes - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish%s' - '\r\n\r\n%05d\r\n' % (oc, o, segment, segment)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - # Ensure retrieving the manifest file gets the latest version - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n' - '\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEqual(headers[:len(exp)], exp) - self.assertTrue( - 'Content-Type: text/jibberish%s' % segment in headers) - self.assertTrue('X-Object-Meta-Foo: barbaz' not in headers) - body = fd.read() - self.assertEqual(body, '%05d' % segment) - # Ensure we have the right number of versions saved - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % (vc, pre, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEqual(headers[:len(exp)], exp) - body = fd.read() - versions = [x for x in body.split('\n') if x] - self.assertEqual(len(versions), versions_to_create - 1) - # copy a version and make sure the version info is stripped - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('COPY /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\nDestination: %s/copied_name\r\n' - 'Content-Length: 0\r\n\r\n' % (oc, o, oc)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response to the COPY - self.assertEqual(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s/copied_name HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: 
close\r\nX-Auth-Token: t\r\n\r\n' - % oc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEqual(headers[:len(exp)], exp) - body = fd.read() - self.assertEqual(body, '%05d' % segment) - # post and make sure it's updated - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('POST /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: ' - 't\r\nContent-Type: foo/bar\r\nContent-Length: 0\r\n' - 'X-Object-Meta-Bar: foo\r\n\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response to the POST - self.assertEqual(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEqual(headers[:len(exp)], exp) - self.assertTrue('Content-Type: foo/bar' in headers) - self.assertTrue('X-Object-Meta-Bar: foo' in headers) - body = fd.read() - self.assertEqual(body, '%05d' % segment) - # Delete the object versions - for segment in range(versions_to_create - 1, 0, -1): - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r' - '\nConnection: close\r\nX-Storage-Token: t\r\n\r\n' - % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response - self.assertEqual(headers[:len(exp)], exp) - # Ensure retrieving the manifest file gets the latest version - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Auth-Token: t\r\n\r\n' - % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 200' - self.assertEqual(headers[:len(exp)], exp) - self.assertTrue('Content-Type: text/jibberish%s' % (segment - 1) - in headers) - body = fd.read() - self.assertEqual(body, '%05d' % (segment - 1)) - # Ensure we have the right number of versions saved - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r' - '\n' % (vc, pre, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response - self.assertEqual(headers[:len(exp)], exp) - body = fd.read() - versions = [x for x in body.split('\n') if x] - self.assertEqual(len(versions), segment - 1) - # there is now one segment left (in the manifest) - # Ensure we have no saved versions - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % (vc, pre, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 204 No Content' - self.assertEqual(headers[:len(exp)], exp) - # delete the last verision - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response - self.assertEqual(headers[:len(exp)], exp) - # Ensure it's all gone - sock = connect_tcp(('localhost', prolis.getsockname()[1])) 
- fd = sock.makefile() - fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 404' - self.assertEqual(headers[:len(exp)], exp) - - # make sure dlo manifest files don't get versioned - for _junk in range(1, versions_to_create): - sleep(.01) # guarantee that the timestamp changes - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 0\r\n' - 'Content-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\nX-Object-Manifest: %s/%s/\r\n\r\n' - % (oc, o, oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - - # Ensure we have no saved versions - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % (vc, pre, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 204 No Content' - self.assertEqual(headers[:len(exp)], exp) - - # DELETE v1/a/c/obj shouldn't delete v1/a/c/obj/sub versions - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\n\r\n00000\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\n\r\n00001\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\n\r\nsub1\r\n' % (oc, osub)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n' - 'Foo: barbaz\r\n\r\nsub2\r\n' % (oc, osub)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % (oc, o)) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx series response - self.assertEqual(headers[:len(exp)], exp) - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' - % (vc, presub, osub)) - fd.flush() - headers = readuntil2crlfs(fd) - 
exp = 'HTTP/1.1 2' # 2xx series response - self.assertEqual(headers[:len(exp)], exp) - body = fd.read() - versions = [x for x in body.split('\n') if x] - self.assertEqual(len(versions), 1) - - # Check for when the versions target container doesn't exist - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%swhoops HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n' - 'Content-Length: 0\r\nX-Versions-Location: none\r\n\r\n' % oc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - # Create the versioned file - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%swhoops/foo HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\n\r\n00000\r\n' % oc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 201' - self.assertEqual(headers[:len(exp)], exp) - # Create another version - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('PUT /v1/a/%swhoops/foo HTTP/1.1\r\nHost: ' - 'localhost\r\nConnection: close\r\nX-Storage-Token: ' - 't\r\nContent-Length: 5\r\n\r\n00001\r\n' % oc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 412' - self.assertEqual(headers[:len(exp)], exp) - # Delete the object - sock = connect_tcp(('localhost', prolis.getsockname()[1])) - fd = sock.makefile() - fd.write('DELETE /v1/a/%swhoops/foo HTTP/1.1\r\nHost: localhost\r\n' - 'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % oc) - fd.flush() - headers = readuntil2crlfs(fd) - exp = 'HTTP/1.1 2' # 2xx response - self.assertEqual(headers[:len(exp)], exp) - - @unpatch_policies - def test_version_manifest_utf8(self): - oc = '0_oc_non_ascii\xc2\xa3' - vc = '0_vc_non_ascii\xc2\xa3' - o = '0_o_non_ascii\xc2\xa3' - self.test_version_manifest(oc, vc, o) - - @unpatch_policies - def test_version_manifest_utf8_container(self): - oc = '1_oc_non_ascii\xc2\xa3' - vc = '1_vc_ascii' - o = '1_o_ascii' - self.test_version_manifest(oc, vc, o) - - @unpatch_policies - def test_version_manifest_utf8_version_container(self): - oc = '2_oc_ascii' - vc = '2_vc_non_ascii\xc2\xa3' - o = '2_o_ascii' - self.test_version_manifest(oc, vc, o) - - @unpatch_policies - def test_version_manifest_utf8_containers(self): - oc = '3_oc_non_ascii\xc2\xa3' - vc = '3_vc_non_ascii\xc2\xa3' - o = '3_o_ascii' - self.test_version_manifest(oc, vc, o) - - @unpatch_policies - def test_version_manifest_utf8_object(self): - oc = '4_oc_ascii' - vc = '4_vc_ascii' - o = '4_o_non_ascii\xc2\xa3' - self.test_version_manifest(oc, vc, o) - - @unpatch_policies - def test_version_manifest_utf8_version_container_utf_object(self): - oc = '5_oc_ascii' - vc = '5_vc_non_ascii\xc2\xa3' - o = '5_o_non_ascii\xc2\xa3' - self.test_version_manifest(oc, vc, o) - - @unpatch_policies - def test_version_manifest_utf8_container_utf_object(self): - oc = '6_oc_non_ascii\xc2\xa3' - vc = '6_vc_ascii' - o = '6_o_non_ascii\xc2\xa3' - self.test_version_manifest(oc, vc, o) - @unpatch_policies def test_conditional_range_get(self): (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis, @@ -5924,129 +5380,6 @@ class TestObjectController(unittest.TestCase): finally: time.time = orig_time - @patch_policies([ - StoragePolicy(0, 'zero', False, object_ring=FakeRing()), - StoragePolicy(1, 'one', True, object_ring=FakeRing()) - ]) - def test_PUT_versioning_with_nonzero_default_policy(self): - # reset 
the router post patch_policies - self.app.obj_controller_router = proxy_server.ObjectControllerRouter() - - def test_connect(ipaddr, port, device, partition, method, path, - headers=None, query_string=None): - if method == "HEAD": - self.assertEqual(path, '/a/c/o.jpg') - self.assertNotEquals(None, - headers['X-Backend-Storage-Policy-Index']) - self.assertEqual(1, int(headers - ['X-Backend-Storage-Policy-Index'])) - - def fake_container_info(account, container, req): - return {'status': 200, 'sync_key': None, 'storage_policy': '1', - 'meta': {}, 'cors': {'allow_origin': None, - 'expose_headers': None, - 'max_age': None}, - 'sysmeta': {}, 'read_acl': None, 'object_count': None, - 'write_acl': None, 'versions': 'c-versions', - 'partition': 1, 'bytes': None, - 'nodes': [{'zone': 0, 'ip': '10.0.0.0', 'region': 0, - 'id': 0, 'device': 'sda', 'port': 1000}, - {'zone': 1, 'ip': '10.0.0.1', 'region': 1, - 'id': 1, 'device': 'sdb', 'port': 1001}, - {'zone': 2, 'ip': '10.0.0.2', 'region': 0, - 'id': 2, 'device': 'sdc', 'port': 1002}]} - with save_globals(): - controller = ReplicatedObjectController( - self.app, 'a', 'c', 'o.jpg') - - controller.container_info = fake_container_info - set_http_connect(200, 200, 200, # head: for the last version - 200, 200, 200, # get: for the last version - 201, 201, 201, # put: move the current version - 201, 201, 201, # put: save the new version - give_connect=test_connect) - req = Request.blank('/v1/a/c/o.jpg', - environ={'REQUEST_METHOD': 'PUT'}, - headers={'Content-Length': '0'}) - self.app.update_request(req) - self.app.memcache.store = {} - res = controller.PUT(req) - self.assertEqual(201, res.status_int) - - @patch_policies([ - StoragePolicy(0, 'zero', False, object_ring=FakeRing()), - StoragePolicy(1, 'one', True, object_ring=FakeRing()) - ]) - def test_cross_policy_DELETE_versioning(self): - # reset the router post patch_policies - self.app.obj_controller_router = proxy_server.ObjectControllerRouter() - requests = [] - - def capture_requests(ipaddr, port, device, partition, method, path, - headers=None, query_string=None): - requests.append((method, path, headers)) - - def fake_container_info(app, env, account, container, **kwargs): - info = {'status': 200, 'sync_key': None, 'storage_policy': None, - 'meta': {}, 'cors': {'allow_origin': None, - 'expose_headers': None, - 'max_age': None}, - 'sysmeta': {}, 'read_acl': None, 'object_count': None, - 'write_acl': None, 'versions': None, - 'partition': 1, 'bytes': None, - 'nodes': [{'zone': 0, 'ip': '10.0.0.0', 'region': 0, - 'id': 0, 'device': 'sda', 'port': 1000}, - {'zone': 1, 'ip': '10.0.0.1', 'region': 1, - 'id': 1, 'device': 'sdb', 'port': 1001}, - {'zone': 2, 'ip': '10.0.0.2', 'region': 0, - 'id': 2, 'device': 'sdc', 'port': 1002}]} - if container == 'c': - info['storage_policy'] = '1' - info['versions'] = 'c-versions' - elif container == 'c-versions': - info['storage_policy'] = '0' - else: - self.fail('Unexpected call to get_info for %r' % container) - return info - container_listing = json.dumps([{'name': 'old_version'}]) - with save_globals(): - resp_status = ( - 200, 200, # listings for versions container - 200, 200, 200, # get: for the last version - 201, 201, 201, # put: move the last version - 200, 200, 200, # delete: for the last version - ) - body_iter = iter([container_listing] + [ - '' for x in range(len(resp_status) - 1)]) - set_http_connect(*resp_status, body_iter=body_iter, - give_connect=capture_requests) - req = Request.blank('/v1/a/c/current_version', method='DELETE') - 
self.app.update_request(req) - self.app.memcache.store = {} - with mock.patch('swift.proxy.controllers.base.get_info', - fake_container_info): - resp = self.app.handle_request(req) - self.assertEqual(200, resp.status_int) - expected = [('GET', '/a/c-versions')] * 2 + \ - [('GET', '/a/c-versions/old_version')] * 3 + \ - [('PUT', '/a/c/current_version')] * 3 + \ - [('DELETE', '/a/c-versions/old_version')] * 3 - self.assertEqual(expected, [(m, p) for m, p, h in requests]) - for method, path, headers in requests: - if 'current_version' in path: - expected_storage_policy = 1 - elif 'old_version' in path: - expected_storage_policy = 0 - else: - continue - storage_policy_index = \ - int(headers['X-Backend-Storage-Policy-Index']) - self.assertEqual( - expected_storage_policy, storage_policy_index, - 'Unexpected %s request for %s ' - 'with storage policy index %s' % ( - method, path, storage_policy_index)) - @unpatch_policies def test_leak_1(self): _request_instances = weakref.WeakKeyDictionary() @@ -9186,6 +8519,465 @@ class TestSwiftInfo(unittest.TestCase): self.assertEqual(sorted_pols[2]['name'], 'migrated') +class TestSocketObjectVersions(unittest.TestCase): + + def setUp(self): + self.prolis = prolis = listen(('localhost', 0)) + self._orig_prolis = _test_sockets[0] + allowed_headers = ', '.join([ + 'content-encoding', + 'x-object-manifest', + 'content-disposition', + 'foo' + ]) + conf = {'devices': _testdir, 'swift_dir': _testdir, + 'mount_check': 'false', 'allowed_headers': allowed_headers} + prosrv = versioned_writes.VersionedWritesMiddleware( + proxy_logging.ProxyLoggingMiddleware( + _test_servers[0], conf, + logger=_test_servers[0].logger), + {}) + self.coro = spawn(wsgi.server, prolis, prosrv, NullLogger()) + # replace global prosrv with one that's filtered with version + # middleware + global _test_sockets + self.sockets = list(_test_sockets) + self.sockets[0] = prolis + _test_sockets = tuple(self.sockets) + + def tearDown(self): + self.coro.kill() + # put the global state back + global _test_sockets + self.sockets[0] = self._orig_prolis + _test_sockets = tuple(self.sockets) + + def test_version_manifest(self, oc='versions', vc='vers', o='name'): + versions_to_create = 3 + # Create a container for our versioned object testing + (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, + obj2lis, obj3lis) = _test_sockets + pre = quote('%03x' % len(o)) + osub = '%s/sub' % o + presub = quote('%03x' % len(osub)) + osub = quote(osub) + presub = quote(presub) + oc = quote(oc) + vc = quote(vc) + + def put_container(): + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n' + 'Content-Length: 0\r\nX-Versions-Location: %s\r\n\r\n' + % (oc, vc)) + fd.flush() + headers = readuntil2crlfs(fd) + fd.read() + return headers + + headers = put_container() + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + + def get_container(): + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\n' + 'X-Storage-Token: t\r\n\r\n\r\n' % oc) + fd.flush() + headers = readuntil2crlfs(fd) + body = fd.read() + return headers, body + + # check that the header was set + headers, body = get_container() + exp = 'HTTP/1.1 2' # 2xx series response + self.assertEqual(headers[:len(exp)], exp) + self.assert_('X-Versions-Location: %s' % vc in headers) + + def 
put_version_container(): + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n' + 'Content-Length: 0\r\n\r\n' % vc) + fd.flush() + headers = readuntil2crlfs(fd) + fd.read() + return headers + + # make the container for the object versions + headers = put_version_container() + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + + def put(version): + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish%s' + '\r\n\r\n%05d\r\n' % (oc, o, version, version)) + fd.flush() + headers = readuntil2crlfs(fd) + fd.read() + return headers + + def get(container=oc, obj=o): + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n' + '\r\n' % (container, obj)) + fd.flush() + headers = readuntil2crlfs(fd) + body = fd.read() + return headers, body + + # Create the versioned file + headers = put(0) + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + + # Create the object versions + for version in range(1, versions_to_create): + sleep(.01) # guarantee that the timestamp changes + headers = put(version) + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + + # Ensure retrieving the manifest file gets the latest version + headers, body = get() + exp = 'HTTP/1.1 200' + self.assertEqual(headers[:len(exp)], exp) + self.assert_('Content-Type: text/jibberish%s' % version in headers) + self.assert_('X-Object-Meta-Foo: barbaz' not in headers) + self.assertEqual(body, '%05d' % version) + + def get_version_container(): + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\n' + 'X-Storage-Token: t\r\n\r\n' % vc) + fd.flush() + headers = readuntil2crlfs(fd) + body = fd.read() + return headers, body + + # Ensure we have the right number of versions saved + headers, body = get_version_container() + exp = 'HTTP/1.1 200' + self.assertEqual(headers[:len(exp)], exp) + versions = [x for x in body.split('\n') if x] + self.assertEqual(len(versions), versions_to_create - 1) + + def delete(): + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r' + '\nConnection: close\r\nX-Storage-Token: t\r\n\r\n' + % (oc, o)) + fd.flush() + headers = readuntil2crlfs(fd) + fd.read() + return headers + + def copy(): + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('COPY /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\nDestination: %s/copied_name\r\n' + 'Content-Length: 0\r\n\r\n' % (oc, o, oc)) + fd.flush() + headers = readuntil2crlfs(fd) + fd.read() + return headers + + # copy a version and make sure the version info is stripped + headers = copy() + exp = 'HTTP/1.1 2' # 2xx series response to the COPY + self.assertEqual(headers[:len(exp)], exp) + + def get_copy(): + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s/copied_name HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\n' + 'X-Auth-Token: 
t\r\n\r\n' % oc) + fd.flush() + headers = readuntil2crlfs(fd) + body = fd.read() + return headers, body + + headers, body = get_copy() + exp = 'HTTP/1.1 200' + self.assertEqual(headers[:len(exp)], exp) + self.assertEqual(body, '%05d' % version) + + def post(): + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('POST /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: ' + 't\r\nContent-Type: foo/bar\r\nContent-Length: 0\r\n' + 'X-Object-Meta-Bar: foo\r\n\r\n' % (oc, o)) + fd.flush() + headers = readuntil2crlfs(fd) + fd.read() + return headers + + # post and make sure it's updated + headers = post() + exp = 'HTTP/1.1 2' # 2xx series response to the POST + self.assertEqual(headers[:len(exp)], exp) + + headers, body = get() + self.assert_('Content-Type: foo/bar' in headers) + self.assert_('X-Object-Meta-Bar: foo' in headers) + self.assertEqual(body, '%05d' % version) + + # check container listing + headers, body = get_container() + exp = 'HTTP/1.1 200' + self.assertEqual(headers[:len(exp)], exp) + + # Delete the object versions + for segment in range(versions_to_create - 1, 0, -1): + + headers = delete() + exp = 'HTTP/1.1 2' # 2xx series response + self.assertEqual(headers[:len(exp)], exp) + + # Ensure retrieving the manifest file gets the latest version + headers, body = get() + exp = 'HTTP/1.1 200' + self.assertEqual(headers[:len(exp)], exp) + self.assert_('Content-Type: text/jibberish%s' % (segment - 1) + in headers) + self.assertEqual(body, '%05d' % (segment - 1)) + # Ensure we have the right number of versions saved + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r' + '\n' % (vc, pre, o)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 2' # 2xx series response + self.assertEqual(headers[:len(exp)], exp) + body = fd.read() + versions = [x for x in body.split('\n') if x] + self.assertEqual(len(versions), segment - 1) + + # there is now one version left (in the manifest) + # Ensure we have no saved versions + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' + % (vc, pre, o)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 204 No Content' + self.assertEqual(headers[:len(exp)], exp) + + # delete the last version + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % (oc, o)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 2' # 2xx series response + self.assertEqual(headers[:len(exp)], exp) + + # Ensure it's all gone + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' + % (oc, o)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 404' + self.assertEqual(headers[:len(exp)], exp) + + # make sure manifest files will be ignored + for _junk in range(1, versions_to_create): + sleep(.01) # guarantee that the timestamp changes + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: 
close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 0\r\n' + 'Content-Type: text/jibberish0\r\n' + 'Foo: barbaz\r\nX-Object-Manifest: %s/%s/\r\n\r\n' + % (oc, o, oc, o)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nhost: ' + 'localhost\r\nconnection: close\r\nx-auth-token: t\r\n\r\n' + % (vc, pre, o)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 204 No Content' + self.assertEqual(headers[:len(exp)], exp) + + # DELETE v1/a/c/obj shouldn't delete v1/a/c/obj/sub versions + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n' + 'Foo: barbaz\r\n\r\n00000\r\n' % (oc, o)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n' + 'Foo: barbaz\r\n\r\n00001\r\n' % (oc, o)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n' + 'Foo: barbaz\r\n\r\nsub1\r\n' % (oc, osub)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n' + 'Foo: barbaz\r\n\r\nsub2\r\n' % (oc, osub)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % (oc, o)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 2' # 2xx series response + self.assertEqual(headers[:len(exp)], exp) + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n' + % (vc, presub, osub)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 2' # 2xx series response + self.assertEqual(headers[:len(exp)], exp) + body = fd.read() + versions = [x for x in body.split('\n') if x] + self.assertEqual(len(versions), 1) + + # Check for when the versions target container doesn't exist + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%swhoops HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n' + 'Content-Length: 0\r\nX-Versions-Location: none\r\n\r\n' % oc) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + 
self.assertEqual(headers[:len(exp)], exp) + # Create the versioned file + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%swhoops/foo HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 5\r\n\r\n00000\r\n' % oc) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + # Create another version + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('PUT /v1/a/%swhoops/foo HTTP/1.1\r\nHost: ' + 'localhost\r\nConnection: close\r\nX-Storage-Token: ' + 't\r\nContent-Length: 5\r\n\r\n00001\r\n' % oc) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 412' + self.assertEqual(headers[:len(exp)], exp) + # Delete the object + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + fd.write('DELETE /v1/a/%swhoops/foo HTTP/1.1\r\nHost: localhost\r\n' + 'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % oc) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 2' # 2xx response + self.assertEqual(headers[:len(exp)], exp) + + def test_version_manifest_utf8(self): + oc = '0_oc_non_ascii\xc2\xa3' + vc = '0_vc_non_ascii\xc2\xa3' + o = '0_o_non_ascii\xc2\xa3' + self.test_version_manifest(oc, vc, o) + + def test_version_manifest_utf8_container(self): + oc = '1_oc_non_ascii\xc2\xa3' + vc = '1_vc_ascii' + o = '1_o_ascii' + self.test_version_manifest(oc, vc, o) + + def test_version_manifest_utf8_version_container(self): + oc = '2_oc_ascii' + vc = '2_vc_non_ascii\xc2\xa3' + o = '2_o_ascii' + self.test_version_manifest(oc, vc, o) + + def test_version_manifest_utf8_containers(self): + oc = '3_oc_non_ascii\xc2\xa3' + vc = '3_vc_non_ascii\xc2\xa3' + o = '3_o_ascii' + self.test_version_manifest(oc, vc, o) + + def test_version_manifest_utf8_object(self): + oc = '4_oc_ascii' + vc = '4_vc_ascii' + o = '4_o_non_ascii\xc2\xa3' + self.test_version_manifest(oc, vc, o) + + def test_version_manifest_utf8_version_container_utf_object(self): + oc = '5_oc_ascii' + vc = '5_vc_non_ascii\xc2\xa3' + o = '5_o_non_ascii\xc2\xa3' + self.test_version_manifest(oc, vc, o) + + def test_version_manifest_utf8_container_utf_object(self): + oc = '6_oc_non_ascii\xc2\xa3' + vc = '6_vc_ascii' + o = '6_o_non_ascii\xc2\xa3' + self.test_version_manifest(oc, vc, o) + + if __name__ == '__main__': setup() try: From 7064706b2721c36c5181267f58704b9344ad4ac0 Mon Sep 17 00:00:00 2001 From: Samuel Merritt Date: Tue, 11 Aug 2015 10:17:20 -0500 Subject: [PATCH 31/70] Fix 500 in versioned writes with bad Destination When this code lived in the proxy, it was protected by an "except HTTPException" clause in proxy.Application.handle_request(). When it moved to middleware, it lost that, and then things like constraints.check_name_format that raised HTTPException would cause 500s. The HTTPException would make it all the way out to catch_errors and get translated to a 500. This commit just wraps a couple try/excepts around the bits in versioned writes that can raise HTTPException. I tried to make it use wsgify so I could get that for free, but that wound up being a real pain because env/start_response are plumbed through pretty much the whole versioned-writes middleware. 
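
The wrapping pattern is roughly the sketch below (a minimal illustration
only, not the exact change; the middleware and method names are
placeholders):

    from swift.common.swob import HTTPException, HTTPPreconditionFailed

    class ExampleMiddleware(object):        # illustrative name only
        def __init__(self, app):
            self.app = app

        def handle_request(self, env, start_response):
            # anything in here may raise HTTPException, for example a
            # constraint check on a malformed Destination header
            raise HTTPPreconditionFailed(body='bad Destination')

        def __call__(self, env, start_response):
            try:
                return self.handle_request(env, start_response)
            except HTTPException as error_response:
                # swob exceptions are WSGI apps themselves, so returning
                # them here yields the intended error instead of a 500
                return error_response(env, start_response)
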
Closes-Bug: 1483705 Change-Id: Ife165bf709e64f313ed07c779b21914045e51f25 --- swift/common/middleware/versioned_writes.py | 18 ++++++++++++------ .../common/middleware/test_versioned_writes.py | 18 +++++++++++++----- 2 files changed, 25 insertions(+), 11 deletions(-) diff --git a/swift/common/middleware/versioned_writes.py b/swift/common/middleware/versioned_writes.py index e3f56f6fd1..6a25c66461 100644 --- a/swift/common/middleware/versioned_writes.py +++ b/swift/common/middleware/versioned_writes.py @@ -119,7 +119,7 @@ from swift.common.utils import get_logger, Timestamp, json, \ register_swift_info, config_true_value from swift.common.request_helpers import get_sys_meta_prefix from swift.common.wsgi import WSGIContext, make_pre_authed_request -from swift.common.swob import Request +from swift.common.swob import Request, HTTPException from swift.common.constraints import ( check_account_format, check_container_format, check_destination_header) from swift.proxy.controllers.base import get_container_info @@ -468,12 +468,18 @@ class VersionedWritesMiddleware(object): # container_info allow_versioned_writes = self.conf.get('allow_versioned_writes') if allow_versioned_writes and container and not obj: - return self.container_request(req, start_response, - allow_versioned_writes) + try: + return self.container_request(req, start_response, + allow_versioned_writes) + except HTTPException as error_response: + return error_response(env, start_response) elif obj and req.method in ('PUT', 'COPY', 'DELETE'): - return self.object_request( - req, version, account, container, obj, - allow_versioned_writes)(env, start_response) + try: + return self.object_request( + req, version, account, container, obj, + allow_versioned_writes)(env, start_response) + except HTTPException as error_response: + return error_response(env, start_response) else: return self.app(env, start_response) diff --git a/test/unit/common/middleware/test_versioned_writes.py b/test/unit/common/middleware/test_versioned_writes.py index 1d38b73f68..52cb6fd44b 100644 --- a/test/unit/common/middleware/test_versioned_writes.py +++ b/test/unit/common/middleware/test_versioned_writes.py @@ -108,11 +108,8 @@ class VersionedWritesTestCase(unittest.TestCase): req = Request.blank('/v1/a/c', headers={'X-Versions-Location': 'ver_cont'}, environ={'REQUEST_METHOD': method}) - try: - status, headers, body = self.call_vw(req) - except swob.HTTPException as e: - pass - self.assertEquals(e.status_int, 412) + status, headers, body = self.call_vw(req) + self.assertEquals(status, "412 Precondition Failed") # GET/HEAD performs as normal self.app.register('GET', '/v1/a/c', swob.HTTPOk, {}, 'passed') @@ -414,6 +411,17 @@ class VersionedWritesTestCase(unittest.TestCase): self.assertEqual(len(self.authorized), 1) self.assertRequestEqual(req, self.authorized[0]) + def test_copy_new_version_bogus_account(self): + cache = FakeCache({'sysmeta': {'versions-location': 'ver_cont'}}) + req = Request.blank( + '/v1/src_a/src_cont/src_obj', + environ={'REQUEST_METHOD': 'COPY', 'swift.cache': cache, + 'CONTENT_LENGTH': '100'}, + headers={'Destination': 'tgt_cont/tgt_obj', + 'Destination-Account': '/im/on/a/boat'}) + status, headers, body = self.call_vw(req) + self.assertEquals(status, '412 Precondition Failed') + def test_delete_first_object_success(self): self.app.register( 'DELETE', '/v1/a/c/o', swob.HTTPOk, {}, 'passed') From 5b246e875fe9ac3d764ea581ad52b04238f5bcc8 Mon Sep 17 00:00:00 2001 From: Kota Tsuyuzaki Date: Wed, 22 Jul 2015 10:39:22 -0700 Subject: [PATCH 32/70] 
Fix EC range GET/COPY handling

When a range GET (or COPY) of an EC object is requested and the
requested range starts beyond the last segment alignment (i.e.
ceil(object_size/segment_size) * segment_size), the proxy server
returns the original content length with no body, whereas Swift should
return an error message as the body and the length of that message as
the content length.

The current behavior can cause some clients (e.g. curl) to hang.

This patch makes the proxy return a correct response even when such an
out-of-range request is made.

Co-Authored-By: Clay Gerrard
Change-Id: I21f81c842f563ac4dddc69011ed759b744bb20bd
Closes-Bug: #1475499
---
 swift/proxy/controllers/base.py         |  2 +-
 swift/proxy/controllers/obj.py          | 13 ++--
 test/unit/proxy/controllers/test_obj.py | 88 +++++++++++++++++++++++++
 3 files changed, 95 insertions(+), 8 deletions(-)

diff --git a/swift/proxy/controllers/base.py b/swift/proxy/controllers/base.py
index 554469cc06..9449607d36 100644
--- a/swift/proxy/controllers/base.py
+++ b/swift/proxy/controllers/base.py
@@ -1016,7 +1016,7 @@ class ResumingGetter(object):
 
                 self.statuses.append(possible_source.status)
                 self.reasons.append(possible_source.reason)
-                self.bodies.append('')
+                self.bodies.append(None)
                 self.source_headers.append(possible_source.getheaders())
                 sources.append((possible_source, node))
                 if not self.newest:  # one good source is enough
diff --git a/swift/proxy/controllers/obj.py b/swift/proxy/controllers/obj.py
index 2aac83f2e5..7ce8463496 100644
--- a/swift/proxy/controllers/obj.py
+++ b/swift/proxy/controllers/obj.py
@@ -2029,13 +2029,12 @@ class ECObjectController(BaseObjectController):
         # EC fragment archives each have different bytes, hence different
         # etags. However, they all have the original object's etag stored in
         # sysmeta, so we copy that here so the client gets it.
- resp.headers['Etag'] = resp.headers.get( - 'X-Object-Sysmeta-Ec-Etag') - resp.headers['Content-Length'] = resp.headers.get( - 'X-Object-Sysmeta-Ec-Content-Length') - resp.fix_conditional_response() - - return resp + if is_success(resp.status_int): + resp.headers['Etag'] = resp.headers.get( + 'X-Object-Sysmeta-Ec-Etag') + resp.headers['Content-Length'] = resp.headers.get( + 'X-Object-Sysmeta-Ec-Content-Length') + resp.fix_conditional_response() def _connect_put_node(self, node_iter, part, path, headers, logger_thread_locals): diff --git a/test/unit/proxy/controllers/test_obj.py b/test/unit/proxy/controllers/test_obj.py index 3cc9ce65dc..93c16215c4 100755 --- a/test/unit/proxy/controllers/test_obj.py +++ b/test/unit/proxy/controllers/test_obj.py @@ -1467,6 +1467,34 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): self.assertEqual(1, len(error_lines)) self.assertTrue('retrying' in error_lines[0]) + def test_fix_response_HEAD(self): + headers = {'X-Object-Sysmeta-Ec-Content-Length': '10', + 'X-Object-Sysmeta-Ec-Etag': 'foo'} + + # sucsessful HEAD + responses = [(200, '', headers)] + status_codes, body_iter, headers = zip(*responses) + req = swift.common.swob.Request.blank('/v1/a/c/o', method='HEAD') + with set_http_connect(*status_codes, body_iter=body_iter, + headers=headers): + resp = req.get_response(self.app) + self.assertEquals(resp.status_int, 200) + self.assertEquals(resp.body, '') + # 200OK shows original object content length + self.assertEquals(resp.headers['Content-Length'], '10') + self.assertEquals(resp.headers['Etag'], 'foo') + + # not found HEAD + responses = [(404, '', {})] * self.replicas() * 2 + status_codes, body_iter, headers = zip(*responses) + req = swift.common.swob.Request.blank('/v1/a/c/o', method='HEAD') + with set_http_connect(*status_codes, body_iter=body_iter, + headers=headers): + resp = req.get_response(self.app) + self.assertEquals(resp.status_int, 404) + # 404 shows actual response body size (i.e. 0 for HEAD) + self.assertEquals(resp.headers['Content-Length'], '0') + def test_PUT_with_slow_commits(self): # It's important that this timeout be much less than the delay in # the slow commit responses so that the slow commits are not waited @@ -1530,6 +1558,66 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 201) + def test_GET_with_invalid_ranges(self): + # reall body size is segment_size - 10 (just 1 segment) + segment_size = self.policy.ec_segment_size + real_body = ('a' * segment_size)[:-10] + + # range is out of real body but in segment size + self._test_invalid_ranges('GET', real_body, + segment_size, '%s-' % (segment_size - 10)) + # range is out of both real body and segment size + self._test_invalid_ranges('GET', real_body, + segment_size, '%s-' % (segment_size + 10)) + + def test_COPY_with_invalid_ranges(self): + # reall body size is segment_size - 10 (just 1 segment) + segment_size = self.policy.ec_segment_size + real_body = ('a' * segment_size)[:-10] + + # range is out of real body but in segment size + self._test_invalid_ranges('COPY', real_body, + segment_size, '%s-' % (segment_size - 10)) + # range is out of both real body and segment size + self._test_invalid_ranges('COPY', real_body, + segment_size, '%s-' % (segment_size + 10)) + + def _test_invalid_ranges(self, method, real_body, segment_size, req_range): + # make a request with range starts from more than real size. 
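+        # i.e. the first byte of the requested range lies at or beyond the
+        # end of the real object body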
+ req = swift.common.swob.Request.blank( + '/v1/a/c/o', method=method, + headers={'Destination': 'c1/o', + 'Range': 'bytes=%s' % (req_range)}) + + fragments = self.policy.pyeclib_driver.encode(real_body) + fragment_payloads = [fragments] + + node_fragments = zip(*fragment_payloads) + self.assertEqual(len(node_fragments), self.replicas()) # sanity + headers = {'X-Object-Sysmeta-Ec-Content-Length': str(len(real_body))} + start = int(req_range.split('-')[0]) + self.assertTrue(start >= 0) # sanity + title, exp = swob.RESPONSE_REASONS[416] + range_not_satisfiable_body = \ + '

<html><h1>%s</h1><p>%s</p></html>
' % (title, exp) + if start >= segment_size: + responses = [(416, range_not_satisfiable_body, headers) + for i in range(POLICIES.default.ec_ndata)] + else: + responses = [(200, ''.join(node_fragments[i]), headers) + for i in range(POLICIES.default.ec_ndata)] + status_codes, body_iter, headers = zip(*responses) + expect_headers = { + 'X-Obj-Metadata-Footer': 'yes', + 'X-Obj-Multiphase-Commit': 'yes' + } + with set_http_connect(*status_codes, body_iter=body_iter, + headers=headers, expect_headers=expect_headers): + resp = req.get_response(self.app) + self.assertEquals(resp.status_int, 416) + self.assertEquals(resp.content_length, len(range_not_satisfiable_body)) + self.assertEquals(resp.body, range_not_satisfiable_body) + if __name__ == '__main__': unittest.main() From fa35e38c9f83f704eb087f80157cbe33b23b9db2 Mon Sep 17 00:00:00 2001 From: Kai Zhang Date: Fri, 14 Aug 2015 16:46:35 -0700 Subject: [PATCH 33/70] Fix some minor typos Fixed some typos in function name and comments. Change-Id: Ida76ab4b331a51b71e57650702acc136e66ba4b2 --- swift/proxy/controllers/obj.py | 2 +- test/probe/test_reconstructor_durable.py | 2 +- test/probe/test_reconstructor_revert.py | 4 ++-- test/unit/obj/test_diskfile.py | 2 +- test/unit/proxy/test_server.py | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/swift/proxy/controllers/obj.py b/swift/proxy/controllers/obj.py index 2aac83f2e5..9633c4b271 100644 --- a/swift/proxy/controllers/obj.py +++ b/swift/proxy/controllers/obj.py @@ -623,7 +623,7 @@ class BaseObjectController(Controller): """ This method is responsible for establishing connection with storage nodes and sending the data to each one of those - nodes. The process of transfering data is specific to each + nodes. The process of transferring data is specific to each Storage Policy, thus it is required for each policy specific ObjectController to provide their own implementation of this method. diff --git a/test/probe/test_reconstructor_durable.py b/test/probe/test_reconstructor_durable.py index cbb94163e9..ccd9e1c78c 100644 --- a/test/probe/test_reconstructor_durable.py +++ b/test/probe/test_reconstructor_durable.py @@ -95,7 +95,7 @@ class TestReconstructorPropDurable(ECProbeTest): if e.errno != errno.ENOENT: raise - # fire up reconstructor to propogate the .durable + # fire up reconstructor to propagate the .durable self.reconstructor.once() # fragment is still exactly as it was before! diff --git a/test/probe/test_reconstructor_revert.py b/test/probe/test_reconstructor_revert.py index 1daf7a3725..5e10c1337e 100755 --- a/test/probe/test_reconstructor_revert.py +++ b/test/probe/test_reconstructor_revert.py @@ -159,7 +159,7 @@ class TestReconstructorRevert(ECProbeTest): hnode_id = (hnode['port'] - 6000) / 10 self.reconstructor.once(number=hnode_id) - # first threee primaries have data again + # first three primaries have data again for onode in (onodes[0], onodes[2]): self.direct_get(onode, opart) @@ -218,7 +218,7 @@ class TestReconstructorRevert(ECProbeTest): # enable the first node again self.revive_drive(p_dev2) - # propogate the delete... + # propagate the delete... 
# fire up reconstructor on handoff nodes only for hnode in hnodes: hnode_id = (hnode['port'] - 6000) / 10 diff --git a/test/unit/obj/test_diskfile.py b/test/unit/obj/test_diskfile.py index 6e9c498028..47ef9b102d 100644 --- a/test/unit/obj/test_diskfile.py +++ b/test/unit/obj/test_diskfile.py @@ -3598,7 +3598,7 @@ class TestECDiskFile(DiskFileMixin, unittest.TestCase): ts.internal + '.durable', ]) - def test_purge_non_existant_fragment_index(self): + def test_purge_non_existent_fragment_index(self): ts = self.ts() frag_index = 7 df = self._simple_get_diskfile(frag_index=frag_index) diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 85c27fa2ed..153e45c0c0 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -6081,7 +6081,7 @@ class TestObjectECRangedGET(unittest.TestCase): self.assertEqual(gotten_obj, self.obj[3783:7879]) def test_aligned_left(self): - # Firts byte is aligned to a segment boundary, last byte is not + # First byte is aligned to a segment boundary, last byte is not status, headers, gotten_obj = self._get_obj("bytes=0-5500") self.assertEqual(status, 206) self.assertEqual(headers['Content-Length'], "5501") From b75d2a4e37d4c86763a2cc56c6dd53ebe2e0de19 Mon Sep 17 00:00:00 2001 From: Bill Huber Date: Mon, 17 Aug 2015 13:54:44 -0500 Subject: [PATCH 34/70] Quorum on durable response is too low Increase the .durable quorum from 2 to "parity + 1" to guarantee that we will never fail to rebuild an object. Otherwise, with low durable responses back (< parity + 1), the putter objects return with failed attribute set to true, thereby failing the rebuild of fragments for an object. Change-Id: I80d666f61273e589d0990baa78fd657b3470785d Closes-Bug: 1484565 --- swift/proxy/controllers/obj.py | 7 +++---- test/unit/proxy/controllers/test_obj.py | 17 ++++++++++++++++- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/swift/proxy/controllers/obj.py b/swift/proxy/controllers/obj.py index 2aac83f2e5..043a7f3b0a 100644 --- a/swift/proxy/controllers/obj.py +++ b/swift/proxy/controllers/obj.py @@ -2370,10 +2370,9 @@ class ECObjectController(BaseObjectController): need_quorum = False # The .durable file will propagate in a replicated fashion; if # one exists, the reconstructor will spread it around. Thus, we - # don't require as many .durable files to be successfully - # written as we do fragment archives in order to call the PUT a - # success. - min_conns = 2 + # require "parity + 1" .durable files to be successfully written + # as we do fragment archives in order to call the PUT a success. 
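+            # (e.g. an EC 10+4 policy now needs 5 durable acknowledgements
+            # where it previously needed only 2)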
+ min_conns = policy.ec_nparity + 1 putters = [p for p in putters if not p.failed] # ignore response etags, and quorum boolean statuses, reasons, bodies, _etags, _quorum = \ diff --git a/test/unit/proxy/controllers/test_obj.py b/test/unit/proxy/controllers/test_obj.py index 3cc9ce65dc..ea4b165c70 100755 --- a/test/unit/proxy/controllers/test_obj.py +++ b/test/unit/proxy/controllers/test_obj.py @@ -1479,7 +1479,7 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): codes = [FakeStatus(201, response_sleep=response_sleep) for i in range(self.replicas())] # swap out some with regular fast responses - number_of_fast_responses_needed_to_be_quick_enough = 2 + number_of_fast_responses_needed_to_be_quick_enough = 5 fast_indexes = random.sample( range(self.replicas()), number_of_fast_responses_needed_to_be_quick_enough) @@ -1496,6 +1496,21 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 201) self.assertTrue(response_time < response_sleep) + def test_PUT_with_less_durable_responses(self): + req = swift.common.swob.Request.blank('/v1/a/c/o', method='PUT', + body='') + + codes = [201] * self.policy.ec_nparity + codes += [503] * (self.policy.ec_ndata - 1) + random.shuffle(codes) + expect_headers = { + 'X-Obj-Metadata-Footer': 'yes', + 'X-Obj-Multiphase-Commit': 'yes' + } + with set_http_connect(*codes, expect_headers=expect_headers): + resp = req.get_response(self.app) + self.assertEqual(resp.status_int, 503) + def test_COPY_with_ranges(self): req = swift.common.swob.Request.blank( '/v1/a/c/o', method='COPY', From eeb0fa40a19917e6548f95f0bd3c08736928449b Mon Sep 17 00:00:00 2001 From: Christian Schwede Date: Thu, 6 Aug 2015 07:21:15 +0000 Subject: [PATCH 35/70] Make swift-ring-builder filename usage more consistent Sometimes the given argument is internally altered and another filename is used without a note to the operator. Even worse, a given .ring.gz filename is sometimes written out as builder file, without updating the corresponding .builder file. There is already a method to parse the given argv and return the name of the builder and ring file. However, it's rarely used and no warning is given to the user if it is altered. This patch uses the already parsed builder and ring file name instead of argv[1], and also adds a note to the user if the used filename is differently to the one given as argument. Closes-Bug: 1482096 Change-Id: I2f8ef23aeab8b07caaa799f7dcd57e684b4b2ad2 --- swift/cli/ringbuilder.py | 42 +++++++++++++++++-------------- test/unit/cli/test_ringbuilder.py | 17 +++++++++++++ 2 files changed, 40 insertions(+), 19 deletions(-) diff --git a/swift/cli/ringbuilder.py b/swift/cli/ringbuilder.py index 25440530b1..c020df6c67 100755 --- a/swift/cli/ringbuilder.py +++ b/swift/cli/ringbuilder.py @@ -403,14 +403,15 @@ swift-ring-builder create print(Commands.create.__doc__.strip()) exit(EXIT_ERROR) builder = RingBuilder(int(argv[3]), float(argv[4]), int(argv[5])) - backup_dir = pathjoin(dirname(argv[1]), 'backups') + backup_dir = pathjoin(dirname(builder_file), 'backups') try: mkdir(backup_dir) except OSError as err: if err.errno != EEXIST: raise - builder.save(pathjoin(backup_dir, '%d.' % time() + basename(argv[1]))) - builder.save(argv[1]) + builder.save(pathjoin(backup_dir, + '%d.' % time() + basename(builder_file))) + builder.save(builder_file) exit(EXIT_SUCCESS) def default(): @@ -418,7 +419,7 @@ swift-ring-builder create swift-ring-builder Shows information about the ring and the devices within. 
""" - print('%s, build version %d' % (argv[1], builder.version)) + print('%s, build version %d' % (builder_file, builder.version)) regions = 0 zones = 0 balance = 0 @@ -546,7 +547,7 @@ swift-ring-builder list_parts if not builder._replica2part2dev: print('Specified builder file \"%s\" is not rebalanced yet. ' - 'Please rebalance first.' % argv[1]) + 'Please rebalance first.' % builder_file) exit(EXIT_ERROR) devs = _parse_list_parts_values(argv[3:]) @@ -612,7 +613,7 @@ swift-ring-builder add print('The on-disk ring builder is unchanged.') exit(EXIT_ERROR) - builder.save(argv[1]) + builder.save(builder_file) exit(EXIT_SUCCESS) def set_weight(): @@ -644,7 +645,7 @@ swift-ring-builder set_weight _parse_set_weight_values(argv[3:]) - builder.save(argv[1]) + builder.save(builder_file) exit(EXIT_SUCCESS) def set_info(): @@ -689,7 +690,7 @@ swift-ring-builder set_info print(err) exit(EXIT_ERROR) - builder.save(argv[1]) + builder.save(builder_file) exit(EXIT_SUCCESS) def remove(): @@ -754,7 +755,7 @@ swift-ring-builder search print('%s marked for removal and will ' 'be removed next rebalance.' % format_device(dev)) - builder.save(argv[1]) + builder.save(builder_file) exit(EXIT_SUCCESS) def rebalance(): @@ -856,9 +857,9 @@ swift-ring-builder rebalance [options] ts = time() builder.get_ring().save( pathjoin(backup_dir, '%d.' % ts + basename(ring_file))) - builder.save(pathjoin(backup_dir, '%d.' % ts + basename(argv[1]))) + builder.save(pathjoin(backup_dir, '%d.' % ts + basename(builder_file))) builder.get_ring().save(ring_file) - builder.save(argv[1]) + builder.save(builder_file) exit(status) def dispersion(): @@ -893,7 +894,7 @@ swift-ring-builder dispersion [options] status = EXIT_SUCCESS if not builder._replica2part2dev: print('Specified builder file \"%s\" is not rebalanced yet. ' - 'Please rebalance first.' % argv[1]) + 'Please rebalance first.' % builder_file) exit(EXIT_ERROR) usage = Commands.dispersion.__doc__.strip() parser = optparse.OptionParser(usage) @@ -1015,7 +1016,7 @@ swift-ring-builder write_builder [min_part_hours] def pretend_min_part_hours_passed(): builder.pretend_min_part_hours_passed() - builder.save(argv[1]) + builder.save(builder_file) exit(EXIT_SUCCESS) def set_min_part_hours(): @@ -1031,7 +1032,7 @@ swift-ring-builder set_min_part_hours builder.change_min_part_hours(int(argv[3])) print('The minimum number of hours before a partition can be ' 'reassigned is now set to %s' % argv[3]) - builder.save(argv[1]) + builder.save(builder_file) exit(EXIT_SUCCESS) def set_replicas(): @@ -1063,7 +1064,7 @@ swift-ring-builder set_replicas builder.set_replicas(new_replicas) print('The replica count is now %.6f.' 
% builder.replicas) print('The change will take effect after the next rebalance.') - builder.save(argv[1]) + builder.save(builder_file) exit(EXIT_SUCCESS) def set_overload(): @@ -1106,7 +1107,7 @@ swift-ring-builder set_overload [%] print('The overload factor is now %0.2f%% (%.6f)' % ( builder.overload * 100, builder.overload)) print('The change will take effect after the next rebalance.') - builder.save(argv[1]) + builder.save(builder_file) exit(status) @@ -1139,6 +1140,9 @@ def main(arguments=None): exit(EXIT_SUCCESS) builder_file, ring_file = parse_builder_ring_filename_args(argv) + if builder_file != argv[1]: + print('Note: using %s instead of %s as builder file' % ( + builder_file, argv[1])) try: builder = RingBuilder.load(builder_file) @@ -1151,10 +1155,10 @@ def main(arguments=None): exit(EXIT_ERROR) except Exception as e: print('Problem occurred while reading builder file: %s. %s' % - (argv[1], e)) + (builder_file, e)) exit(EXIT_ERROR) - backup_dir = pathjoin(dirname(argv[1]), 'backups') + backup_dir = pathjoin(dirname(builder_file), 'backups') try: mkdir(backup_dir) except OSError as err: @@ -1167,7 +1171,7 @@ def main(arguments=None): command = argv[2] if argv[0].endswith('-safe'): try: - with lock_parent_directory(abspath(argv[1]), 15): + with lock_parent_directory(abspath(builder_file), 15): Commands.__dict__.get(command, Commands.unknown.im_func)() except exceptions.LockTimeout: print("Ring/builder dir currently locked.") diff --git a/test/unit/cli/test_ringbuilder.py b/test/unit/cli/test_ringbuilder.py index ec51d13f06..cf9efe59b0 100644 --- a/test/unit/cli/test_ringbuilder.py +++ b/test/unit/cli/test_ringbuilder.py @@ -1741,6 +1741,23 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin): err = exc self.assertEquals(err.code, 2) + def test_use_ringfile_as_builderfile(self): + mock_stdout = six.StringIO() + mock_stderr = six.StringIO() + + argv = ["", "object.ring.gz"] + + try: + with mock.patch("sys.stdout", mock_stdout): + with mock.patch("sys.stderr", mock_stderr): + ringbuilder.main(argv) + except SystemExit: + pass + expected = "Note: using object.builder instead of object.ring.gz " \ + "as builder file\n" \ + "Ring Builder file does not exist: object.builder\n" + self.assertEqual(expected, mock_stdout.getvalue()) + class TestRebalanceCommand(unittest.TestCase, RunSwiftRingBuilderMixin): From 79ba4a85983641e539b620bd143e62673c98416e Mon Sep 17 00:00:00 2001 From: Hisashi Osanai Date: Wed, 3 Dec 2014 06:15:16 +0900 Subject: [PATCH 36/70] Enable Object Replicator's failure count in recon This patch makes the count of object replication failure in recon. And "failure_nodes" is added to Account Replicator and Container Replicator. Recon shows the count of object repliction failure as follows: $ curl http://:/recon/replication/object { "replication_last": 1416334368.60865, "replication_stats": { "attempted": 13346, "failure": 870, "failure_nodes": { "192.168.0.1": {"sdb1": 3}, "192.168.0.2": {"sdb1": 851, "sdc1": 1, "sdd1": 8}, "192.168.0.3": {"sdb1": 3, "sdc1": 4} }, "hashmatch": 0, "remove": 0, "rsync": 0, "start": 1416354240.9761429, "success": 1908 }, "replication_time": 2316.5563162644703, "object_replication_last": 1416334368.60865, "object_replication_time": 2316.5563162644703 } Note that 'object_replication_last' and 'object_replication_time' are considered to be transitional and will be removed in the subsequent releases. Use 'replication_last' and 'replication_time' instead. 
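
For example, the per-device failure counts in the JSON above can be
totalled with a few lines of Python (illustrative only; the host and
port below are placeholders for a real object server):

    import json
    import urllib2   # Python 2, matching the rest of the tree

    resp = urllib2.urlopen(
        'http://127.0.0.1:6000/recon/replication/object')
    stats = json.load(resp)['replication_stats']
    failures = sum(count
                   for devs in stats['failure_nodes'].values()
                   for count in devs.values())
    # this should equal stats['failure'] (870 in the example above)
    print('%d failed replication attempts' % failures)
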
Additionaly this patch adds the count in swift-recon and it will be showed as follows: $ swift-recon object -r ======================================================================== ======= --> Starting reconnaissance on 4 hosts ======================================================================== ======= [2014-11-27 16:14:09] Checking on replication [replication_failure] low: 0, high: 0, avg: 0.0, total: 0, Failed: 0.0%, no_result: 0, reported: 4 [replication_success] low: 3, high: 3, avg: 3.0, total: 12, Failed: 0.0%, no_result: 0, reported: 4 [replication_time] low: 0, high: 0, avg: 0.0, total: 0, Failed: 0.0%, no_result: 0, reported: 4 [replication_attempted] low: 1, high: 1, avg: 1.0, total: 4, Failed: 0.0%, no_result: 0, reported: 4 Oldest completion was 2014-11-27 16:09:45 (4 minutes ago) by 192.168.0.4:6002. Most recent completion was 2014-11-27 16:14:19 (-10 seconds ago) by 192.168.0.1:6002. ======================================================================== ======= In case there is a cluster which has servers, a server runs with this patch and the other servers run without this patch. If swift-recon executes on the server which runs with this patch, there are unnecessary information on the output such as [failure], [success] and [attempted]. Because other servers which run without this patch are not able to send a response with information that this patch needs. Therefore once you apply this patch, you also apply this patch to other servers before you execute swift-recon. DocImpact Change-Id: Iecd33655ae2568482833131f422679996c374d78 Co-Authored-By: Kenichiro Matsuda Co-Authored-By: Brian Cline Implements: blueprint enable-object-replication-failure-in-recon --- doc/source/admin_guide.rst | 6 +- swift/cli/recon.py | 71 ++--------- swift/common/db_replicator.py | 37 +++++- swift/common/middleware/recon.py | 16 +-- swift/obj/replicator.py | 136 ++++++++++++++++++++-- test/unit/cli/test_recon.py | 38 ------ test/unit/common/middleware/test_recon.py | 45 ++++++- test/unit/obj/test_replicator.py | 3 + 8 files changed, 222 insertions(+), 130 deletions(-) mode change 100755 => 100644 swift/cli/recon.py diff --git a/doc/source/admin_guide.rst b/doc/source/admin_guide.rst index d50efc4ef4..7d396664df 100644 --- a/doc/source/admin_guide.rst +++ b/doc/source/admin_guide.rst @@ -549,12 +549,16 @@ Request URI Description /recon/sockstat returns consumable info from /proc/net/sockstat|6 /recon/devices returns list of devices and devices dir i.e. /srv/node /recon/async returns count of async pending -/recon/replication returns object replication times (for backward compatibility) +/recon/replication returns object replication info (for backward compatibility) /recon/replication/ returns replication info for given type (account, container, object) /recon/auditor/ returns auditor stats on last reported scan for given type (account, container, object) /recon/updater/ returns last updater sweep times for given type (container, object) ========================= ======================================================================================== +Note that 'object_replication_last' and 'object_replication_time' in object +replication info are considered to be transitional and will be removed in +the subsequent releases. Use 'replication_last' and 'replication_time' instead. 
+ This information can also be queried via the swift-recon command line utility:: fhines@ubuntu:~$ swift-recon -h diff --git a/swift/cli/recon.py b/swift/cli/recon.py old mode 100755 new mode 100644 index 79e0721c04..c405b9fb43 --- a/swift/cli/recon.py +++ b/swift/cli/recon.py @@ -460,12 +460,14 @@ class SwiftRecon(object): recon.scout, hosts): if status == 200: stats['replication_time'].append( - response.get('replication_time')) - repl_stats = response['replication_stats'] + response.get('replication_time', + response.get('object_replication_time', 0))) + repl_stats = response.get('replication_stats') if repl_stats: for stat_key in ['attempted', 'failure', 'success']: stats[stat_key].append(repl_stats.get(stat_key)) - last = response.get('replication_last', 0) + last = response.get('replication_last', + response.get('object_replication_last', 0)) if last < least_recent_time: least_recent_time = last least_recent_url = url @@ -506,62 +508,6 @@ class SwiftRecon(object): elapsed, elapsed_unit, host)) print("=" * 79) - def object_replication_check(self, hosts): - """ - Obtain and print replication statistics from object servers - - :param hosts: set of hosts to check. in the format of: - set([('127.0.0.1', 6020), ('127.0.0.2', 6030)]) - """ - stats = {} - recon = Scout("replication", self.verbose, self.suppress_errors, - self.timeout) - print("[%s] Checking on replication" % self._ptime()) - least_recent_time = 9999999999 - least_recent_url = None - most_recent_time = 0 - most_recent_url = None - for url, response, status, ts_start, ts_end in self.pool.imap( - recon.scout, hosts): - if status == 200: - stats[url] = response['object_replication_time'] - last = response.get('object_replication_last', 0) - if last < least_recent_time: - least_recent_time = last - least_recent_url = url - if last > most_recent_time: - most_recent_time = last - most_recent_url = url - times = [x for x in stats.values() if x is not None] - if len(stats) > 0 and len(times) > 0: - computed = self._gen_stats(times, 'replication_time') - if computed['reported'] > 0: - self._print_stats(computed) - else: - print("[replication_time] - No hosts returned valid data.") - else: - print("[replication_time] - No hosts returned valid data.") - if least_recent_url is not None: - host = urlparse(least_recent_url).netloc - if not least_recent_time: - print('Oldest completion was NEVER by %s.' % host) - else: - elapsed = time.time() - least_recent_time - elapsed, elapsed_unit = seconds2timeunit(elapsed) - print('Oldest completion was %s (%d %s ago) by %s.' % ( - time.strftime('%Y-%m-%d %H:%M:%S', - time.gmtime(least_recent_time)), - elapsed, elapsed_unit, host)) - if most_recent_url is not None: - host = urlparse(most_recent_url).netloc - elapsed = time.time() - most_recent_time - elapsed, elapsed_unit = seconds2timeunit(elapsed) - print('Most recent completion was %s (%d %s ago) by %s.' 
% ( - time.strftime('%Y-%m-%d %H:%M:%S', - time.gmtime(most_recent_time)), - elapsed, elapsed_unit, host)) - print("=" * 79) - def updater_check(self, hosts): """ Obtain and print updater statistics @@ -1072,7 +1018,7 @@ class SwiftRecon(object): if options.all: if self.server_type == 'object': self.async_check(hosts) - self.object_replication_check(hosts) + self.replication_check(hosts) self.object_auditor_check(hosts) self.updater_check(hosts) self.expirer_check(hosts) @@ -1102,10 +1048,7 @@ class SwiftRecon(object): if options.unmounted: self.umount_check(hosts) if options.replication: - if self.server_type == 'object': - self.object_replication_check(hosts) - else: - self.replication_check(hosts) + self.replication_check(hosts) if options.auditor: if self.server_type == 'object': self.object_auditor_check(hosts) diff --git a/swift/common/db_replicator.py b/swift/common/db_replicator.py index 151a070c07..7a6e8d549f 100644 --- a/swift/common/db_replicator.py +++ b/swift/common/db_replicator.py @@ -187,7 +187,8 @@ class Replicator(Daemon): self.stats = {'attempted': 0, 'success': 0, 'failure': 0, 'ts_repl': 0, 'no_change': 0, 'hashmatch': 0, 'rsync': 0, 'diff': 0, 'remove': 0, 'empty': 0, 'remote_merge': 0, - 'start': time.time(), 'diff_capped': 0} + 'start': time.time(), 'diff_capped': 0, + 'failure_nodes': {}} def _report_stats(self): """Report the current stats to the logs.""" @@ -212,6 +213,13 @@ class Replicator(Daemon): ('no_change', 'hashmatch', 'rsync', 'diff', 'ts_repl', 'empty', 'diff_capped')])) + def _add_failure_stats(self, failure_devs_info): + for node, dev in failure_devs_info: + self.stats['failure'] += 1 + failure_devs = self.stats['failure_nodes'].setdefault(node, {}) + failure_devs.setdefault(dev, 0) + failure_devs[dev] += 1 + def _rsync_file(self, db_file, remote_file, whole_file=True, different_region=False): """ @@ -479,7 +487,10 @@ class Replicator(Daemon): quarantine_db(broker.db_file, broker.db_type) else: self.logger.exception(_('ERROR reading db %s'), object_file) - self.stats['failure'] += 1 + nodes = self.ring.get_part_nodes(int(partition)) + self._add_failure_stats([(failure_dev['replication_ip'], + failure_dev['device']) + for failure_dev in nodes]) self.logger.increment('failures') return # The db is considered deleted if the delete_timestamp value is greater @@ -494,6 +505,7 @@ class Replicator(Daemon): self.logger.timing_since('timing', start_time) return responses = [] + failure_devs_info = set() nodes = self.ring.get_part_nodes(int(partition)) local_dev = None for node in nodes: @@ -532,7 +544,8 @@ class Replicator(Daemon): self.logger.exception(_('ERROR syncing %(file)s with node' ' %(node)s'), {'file': object_file, 'node': node}) - self.stats['success' if success else 'failure'] += 1 + if not success: + failure_devs_info.add((node['replication_ip'], node['device'])) self.logger.increment('successes' if success else 'failures') responses.append(success) try: @@ -543,7 +556,17 @@ class Replicator(Daemon): if not shouldbehere and all(responses): # If the db shouldn't be on this node and has been successfully # synced to all of its peers, it can be removed. 
- self.delete_db(broker) + if not self.delete_db(broker): + failure_devs_info.update( + [(failure_dev['replication_ip'], failure_dev['device']) + for failure_dev in repl_nodes]) + + target_devs_info = set([(target_dev['replication_ip'], + target_dev['device']) + for target_dev in repl_nodes]) + self.stats['success'] += len(target_devs_info - failure_devs_info) + self._add_failure_stats(failure_devs_info) + self.logger.timing_since('timing', start_time) def delete_db(self, broker): @@ -558,9 +581,11 @@ class Replicator(Daemon): if err.errno not in (errno.ENOENT, errno.ENOTEMPTY): self.logger.exception( _('ERROR while trying to clean up %s') % suf_dir) + return False self.stats['remove'] += 1 device_name = self.extract_device(object_file) self.logger.increment('removes.' + device_name) + return True def extract_device(self, object_file): """ @@ -592,6 +617,10 @@ class Replicator(Daemon): node['replication_port']): if self.mount_check and not ismount( os.path.join(self.root, node['device'])): + self._add_failure_stats( + [(failure_dev['replication_ip'], + failure_dev['device']) + for failure_dev in self.ring.devs if failure_dev]) self.logger.warn( _('Skipping %(device)s as it is not mounted') % node) continue diff --git a/swift/common/middleware/recon.py b/swift/common/middleware/recon.py index b7a508f921..b0d1a1a526 100644 --- a/swift/common/middleware/recon.py +++ b/swift/common/middleware/recon.py @@ -134,19 +134,19 @@ class ReconMiddleware(object): def get_replication_info(self, recon_type): """get replication info""" + replication_list = ['replication_time', + 'replication_stats', + 'replication_last'] if recon_type == 'account': - return self._from_recon_cache(['replication_time', - 'replication_stats', - 'replication_last'], + return self._from_recon_cache(replication_list, self.account_recon_cache) elif recon_type == 'container': - return self._from_recon_cache(['replication_time', - 'replication_stats', - 'replication_last'], + return self._from_recon_cache(replication_list, self.container_recon_cache) elif recon_type == 'object': - return self._from_recon_cache(['object_replication_time', - 'object_replication_last'], + replication_list += ['object_replication_time', + 'object_replication_last'] + return self._from_recon_cache(replication_list, self.object_recon_cache) else: return None diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index 70b55046cb..639c67b032 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -101,6 +101,30 @@ class ObjectReplicator(Daemon): conf.get('handoff_delete', 'auto'), 0) self._diskfile_mgr = DiskFileManager(conf, self.logger) + def _zero_stats(self): + """Zero out the stats.""" + self.stats = {'attempted': 0, 'success': 0, 'failure': 0, + 'hashmatch': 0, 'rsync': 0, 'remove': 0, + 'start': time.time(), 'failure_nodes': {}} + + def _add_failure_stats(self, failure_devs_info): + for node, dev in failure_devs_info: + self.stats['failure'] += 1 + failure_devs = self.stats['failure_nodes'].setdefault(node, {}) + failure_devs.setdefault(dev, 0) + failure_devs[dev] += 1 + + def _get_my_replication_ips(self): + my_replication_ips = set() + ips = whataremyips() + for policy in POLICIES: + self.load_object_ring(policy) + for local_dev in [dev for dev in policy.object_ring.devs + if dev and dev['replication_ip'] in ips and + dev['replication_port'] == self.port]: + my_replication_ips.add(local_dev['replication_ip']) + return list(my_replication_ips) + # Just exists for doc anchor point def sync(self, node, job, suffixes, *args, 
**kwargs): """ @@ -243,6 +267,7 @@ class ObjectReplicator(Daemon): self.replication_count += 1 self.logger.increment('partition.delete.count.%s' % (job['device'],)) self.headers['X-Backend-Storage-Policy-Index'] = int(job['policy']) + failure_devs_info = set() begin = time.time() try: responses = [] @@ -251,6 +276,7 @@ class ObjectReplicator(Daemon): delete_objs = None if suffixes: for node in job['nodes']: + self.stats['rsync'] += 1 kwargs = {} if node['region'] in synced_remote_regions and \ self.conf.get('sync_method', 'rsync') == 'ssync': @@ -271,6 +297,9 @@ class ObjectReplicator(Daemon): if node['region'] != job['region']: synced_remote_regions[node['region']] = \ candidates.keys() + else: + failure_devs_info.add((node['replication_ip'], + node['device'])) responses.append(success) for region, cand_objs in synced_remote_regions.items(): if delete_objs is None: @@ -286,11 +315,23 @@ class ObjectReplicator(Daemon): delete_handoff = len(responses) == len(job['nodes']) and \ all(responses) if delete_handoff: + self.stats['remove'] += 1 if (self.conf.get('sync_method', 'rsync') == 'ssync' and delete_objs is not None): self.logger.info(_("Removing %s objects"), len(delete_objs)) - self.delete_handoff_objs(job, delete_objs) + _junk, error_paths = self.delete_handoff_objs( + job, delete_objs) + # if replication works for a hand-off device and it faild, + # the remote devices which are target of the replication + # from the hand-off device will be marked. Because cleanup + # after replication failed means replicator needs to + # replicate again with the same info. + if error_paths: + failure_devs_info.update( + [(failure_dev['replication_ip'], + failure_dev['device']) + for failure_dev in job['nodes']]) else: self.delete_partition(job['path']) elif not suffixes: @@ -298,14 +339,21 @@ class ObjectReplicator(Daemon): except (Exception, Timeout): self.logger.exception(_("Error syncing handoff partition")) finally: + target_devs_info = set([(target_dev['replication_ip'], + target_dev['device']) + for target_dev in job['nodes']]) + self.stats['success'] += len(target_devs_info - failure_devs_info) + self._add_failure_stats(failure_devs_info) self.partition_times.append(time.time() - begin) self.logger.timing_since('partition.delete.timing', begin) def delete_partition(self, path): self.logger.info(_("Removing partition: %s"), path) - tpool.execute(shutil.rmtree, path, ignore_errors=True) + tpool.execute(shutil.rmtree, path) def delete_handoff_objs(self, job, delete_objs): + success_paths = [] + error_paths = [] for object_hash in delete_objs: object_path = storage_directory(job['obj_path'], job['partition'], object_hash) @@ -313,11 +361,14 @@ class ObjectReplicator(Daemon): suffix_dir = dirname(object_path) try: os.rmdir(suffix_dir) + success_paths.append(object_path) except OSError as e: if e.errno not in (errno.ENOENT, errno.ENOTEMPTY): + error_paths.append(object_path) self.logger.exception( "Unexpected error trying to cleanup suffix dir:%r", suffix_dir) + return success_paths, error_paths def update(self, job): """ @@ -328,6 +379,8 @@ class ObjectReplicator(Daemon): self.replication_count += 1 self.logger.increment('partition.update.count.%s' % (job['device'],)) self.headers['X-Backend-Storage-Policy-Index'] = int(job['policy']) + target_devs_info = set() + failure_devs_info = set() begin = time.time() try: hashed, local_hash = tpool_reraise( @@ -346,6 +399,7 @@ class ObjectReplicator(Daemon): while attempts_left > 0: # If this throws StopIteration it will be caught way below node = 
next(nodes) + target_devs_info.add((node['replication_ip'], node['device'])) attempts_left -= 1 # if we have already synced to this remote region, # don't sync again on this replication pass @@ -361,12 +415,16 @@ class ObjectReplicator(Daemon): self.logger.error(_('%(ip)s/%(device)s responded' ' as unmounted'), node) attempts_left += 1 + failure_devs_info.add((node['replication_ip'], + node['device'])) continue if resp.status != HTTP_OK: self.logger.error(_("Invalid response %(resp)s " "from %(ip)s"), {'resp': resp.status, 'ip': node['replication_ip']}) + failure_devs_info.add((node['replication_ip'], + node['device'])) continue remote_hash = pickle.loads(resp.read()) del resp @@ -374,6 +432,7 @@ class ObjectReplicator(Daemon): local_hash[suffix] != remote_hash.get(suffix, -1)] if not suffixes: + self.stats['hashmatch'] += 1 continue hashed, recalc_hash = tpool_reraise( self._diskfile_mgr._get_hashes, @@ -384,6 +443,7 @@ class ObjectReplicator(Daemon): suffixes = [suffix for suffix in local_hash if local_hash[suffix] != remote_hash.get(suffix, -1)] + self.stats['rsync'] += 1 success, _junk = self.sync(node, job, suffixes) with Timeout(self.http_timeout): conn = http_connect( @@ -392,18 +452,26 @@ class ObjectReplicator(Daemon): '/' + '-'.join(suffixes), headers=self.headers) conn.getresponse().read() + if not success: + failure_devs_info.add((node['replication_ip'], + node['device'])) # add only remote region when replicate succeeded if success and node['region'] != job['region']: synced_remote_regions.add(node['region']) self.suffix_sync += len(suffixes) self.logger.update_stats('suffix.syncs', len(suffixes)) except (Exception, Timeout): + failure_devs_info.add((node['replication_ip'], + node['device'])) self.logger.exception(_("Error syncing with node: %s") % node) self.suffix_count += len(local_hash) except (Exception, Timeout): + failure_devs_info.update(target_devs_info) self.logger.exception(_("Error syncing partition")) finally: + self.stats['success'] += len(target_devs_info - failure_devs_info) + self._add_failure_stats(failure_devs_info) self.partition_times.append(time.time() - begin) self.logger.timing_since('partition.update.timing', begin) @@ -481,6 +549,9 @@ class ObjectReplicator(Daemon): using replication style storage policy """ jobs = [] + self.all_devs_info.update( + [(dev['replication_ip'], dev['device']) + for dev in policy.object_ring.devs if dev]) data_dir = get_data_dir(policy) for local_dev in [dev for dev in policy.object_ring.devs if (dev @@ -494,6 +565,11 @@ class ObjectReplicator(Daemon): obj_path = join(dev_path, data_dir) tmp_path = join(dev_path, get_tmp_dir(policy)) if self.mount_check and not ismount(dev_path): + self._add_failure_stats( + [(failure_dev['replication_ip'], + failure_dev['device']) + for failure_dev in policy.object_ring.devs + if failure_dev]) self.logger.warn(_('%s is not mounted'), local_dev['device']) continue unlink_older_than(tmp_path, time.time() - self.reclaim_age) @@ -508,6 +584,7 @@ class ObjectReplicator(Daemon): and partition not in override_partitions): continue + part_nodes = None try: job_path = join(obj_path, partition) part_nodes = policy.object_ring.get_part_nodes( @@ -524,6 +601,17 @@ class ObjectReplicator(Daemon): partition=partition, region=local_dev['region'])) except ValueError: + if part_nodes: + self._add_failure_stats( + [(failure_dev['replication_ip'], + failure_dev['device']) + for failure_dev in nodes]) + else: + self._add_failure_stats( + [(failure_dev['replication_ip'], + failure_dev['device']) + for 
failure_dev in policy.object_ring.devs + if failure_dev]) continue return jobs @@ -569,19 +657,31 @@ class ObjectReplicator(Daemon): self.replication_count = 0 self.last_replication_count = -1 self.partition_times = [] + self.my_replication_ips = self._get_my_replication_ips() + self.all_devs_info = set() stats = eventlet.spawn(self.heartbeat) lockup_detector = eventlet.spawn(self.detect_lockups) eventlet.sleep() # Give spawns a cycle + current_nodes = None try: self.run_pool = GreenPool(size=self.concurrency) jobs = self.collect_jobs(override_devices=override_devices, override_partitions=override_partitions, override_policies=override_policies) for job in jobs: + current_nodes = job['nodes'] + if override_devices and job['device'] not in override_devices: + continue + if override_partitions and \ + job['partition'] not in override_partitions: + continue dev_path = join(self.devices_dir, job['device']) if self.mount_check and not ismount(dev_path): + self._add_failure_stats([(failure_dev['replication_ip'], + failure_dev['device']) + for failure_dev in job['nodes']]) self.logger.warn(_('%s is not mounted'), job['device']) continue if not self.check_ring(job['policy'].object_ring): @@ -603,18 +703,26 @@ class ObjectReplicator(Daemon): self.run_pool.spawn(self.update_deleted, job) else: self.run_pool.spawn(self.update, job) + current_nodes = None with Timeout(self.lockup_timeout): self.run_pool.waitall() except (Exception, Timeout): + if current_nodes: + self._add_failure_stats([(failure_dev['replication_ip'], + failure_dev['device']) + for failure_dev in current_nodes]) + else: + self._add_failure_stats(self.all_devs_info) self.logger.exception(_("Exception in top-level replication loop")) self.kill_coros() finally: stats.kill() lockup_detector.kill() self.stats_line() + self.stats['attempted'] = self.replication_count def run_once(self, *args, **kwargs): - start = time.time() + self._zero_stats() self.logger.info(_("Running object replicator in script mode.")) override_devices = list_from_csv(kwargs.get('devices')) @@ -631,27 +739,35 @@ class ObjectReplicator(Daemon): override_devices=override_devices, override_partitions=override_partitions, override_policies=override_policies) - total = (time.time() - start) / 60 + total = (time.time() - self.stats['start']) / 60 self.logger.info( _("Object replication complete (once). (%.02f minutes)"), total) if not (override_partitions or override_devices): - dump_recon_cache({'object_replication_time': total, - 'object_replication_last': time.time()}, + replication_last = time.time() + dump_recon_cache({'replication_stats': self.stats, + 'replication_time': total, + 'replication_last': replication_last, + 'object_replication_time': total, + 'object_replication_last': replication_last}, self.rcache, self.logger) def run_forever(self, *args, **kwargs): self.logger.info(_("Starting object replicator in daemon mode.")) # Run the replicator continually while True: - start = time.time() + self._zero_stats() self.logger.info(_("Starting object replication pass.")) # Run the replicator self.replicate() - total = (time.time() - start) / 60 + total = (time.time() - self.stats['start']) / 60 self.logger.info( _("Object replication complete. 
(%.02f minutes)"), total) - dump_recon_cache({'object_replication_time': total, - 'object_replication_last': time.time()}, + replication_last = time.time() + dump_recon_cache({'replication_stats': self.stats, + 'replication_time': total, + 'replication_last': replication_last, + 'object_replication_time': total, + 'object_replication_last': replication_last}, self.rcache, self.logger) self.logger.debug('Replication sleeping for %s seconds.', self.interval) diff --git a/test/unit/cli/test_recon.py b/test/unit/cli/test_recon.py index 6559b615f0..345097c63f 100644 --- a/test/unit/cli/test_recon.py +++ b/test/unit/cli/test_recon.py @@ -578,44 +578,6 @@ class TestReconCommands(unittest.TestCase): cli.disk_usage([('127.0.0.1', 6010)], 5, 0) mock_print.assert_has_calls(expected_calls) - @mock.patch('__builtin__.print') - @mock.patch('time.time') - def test_object_replication_check(self, mock_now, mock_print): - now = 1430000000.0 - - def dummy_request(*args, **kwargs): - return [ - ('http://127.0.0.1:6010/recon/replication/object', - {"object_replication_time": 61, - "object_replication_last": now}, - 200, - 0, - 0), - ('http://127.0.0.1:6020/recon/replication/object', - {"object_replication_time": 23, - "object_replication_last": now}, - 200, - 0, - 0), - ] - - cli = recon.SwiftRecon() - cli.pool.imap = dummy_request - - default_calls = [ - mock.call('[replication_time] low: 23, high: 61, avg: 42.0, ' + - 'total: 84, Failed: 0.0%, no_result: 0, reported: 2'), - mock.call('Oldest completion was 2015-04-25 22:13:20 ' + - '(42 seconds ago) by 127.0.0.1:6010.'), - mock.call('Most recent completion was 2015-04-25 22:13:20 ' + - '(42 seconds ago) by 127.0.0.1:6010.'), - ] - - mock_now.return_value = now + 42 - cli.object_replication_check([('127.0.0.1', 6010), - ('127.0.0.1', 6020)]) - mock_print.assert_has_calls(default_calls) - @mock.patch('__builtin__.print') @mock.patch('time.time') def test_replication_check(self, mock_now, mock_print): diff --git a/test/unit/common/middleware/test_recon.py b/test/unit/common/middleware/test_recon.py index 520e2edaf6..8ea659dcaf 100644 --- a/test/unit/common/middleware/test_recon.py +++ b/test/unit/common/middleware/test_recon.py @@ -506,6 +506,9 @@ class TestReconSuccess(TestCase): "attempted": 1, "diff": 0, "diff_capped": 0, "empty": 0, "failure": 0, "hashmatch": 0, + "failure_nodes": { + "192.168.0.1": 0, + "192.168.0.2": 0}, "no_change": 2, "remote_merge": 0, "remove": 0, "rsync": 0, "start": 1333044050.855202, @@ -523,6 +526,9 @@ class TestReconSuccess(TestCase): "attempted": 1, "diff": 0, "diff_capped": 0, "empty": 0, "failure": 0, "hashmatch": 0, + "failure_nodes": { + "192.168.0.1": 0, + "192.168.0.2": 0}, "no_change": 2, "remote_merge": 0, "remove": 0, "rsync": 0, "start": 1333044050.855202, @@ -537,6 +543,9 @@ class TestReconSuccess(TestCase): "attempted": 179, "diff": 0, "diff_capped": 0, "empty": 0, "failure": 0, "hashmatch": 0, + "failure_nodes": { + "192.168.0.1": 0, + "192.168.0.2": 0}, "no_change": 358, "remote_merge": 0, "remove": 0, "rsync": 0, "start": 5.5, "success": 358, @@ -555,6 +564,9 @@ class TestReconSuccess(TestCase): "attempted": 179, "diff": 0, "diff_capped": 0, "empty": 0, "failure": 0, "hashmatch": 0, + "failure_nodes": { + "192.168.0.1": 0, + "192.168.0.2": 0}, "no_change": 358, "remote_merge": 0, "remove": 0, "rsync": 0, "start": 5.5, "success": 358, @@ -562,17 +574,40 @@ class TestReconSuccess(TestCase): "replication_last": 1357969645.25}) def test_get_replication_object(self): - from_cache_response = {"object_replication_time": 
200.0, - "object_replication_last": 1357962809.15} + from_cache_response = { + "replication_time": 0.2615511417388916, + "replication_stats": { + "attempted": 179, + "failure": 0, "hashmatch": 0, + "failure_nodes": { + "192.168.0.1": 0, + "192.168.0.2": 0}, + "remove": 0, "rsync": 0, + "start": 1333044050.855202, "success": 358}, + "replication_last": 1357969645.25, + "object_replication_time": 0.2615511417388916, + "object_replication_last": 1357969645.25} self.fakecache.fakeout_calls = [] self.fakecache.fakeout = from_cache_response rv = self.app.get_replication_info('object') self.assertEquals(self.fakecache.fakeout_calls, - [((['object_replication_time', + [((['replication_time', 'replication_stats', + 'replication_last', 'object_replication_time', 'object_replication_last'], '/var/cache/swift/object.recon'), {})]) - self.assertEquals(rv, {'object_replication_time': 200.0, - 'object_replication_last': 1357962809.15}) + self.assertEquals(rv, { + "replication_time": 0.2615511417388916, + "replication_stats": { + "attempted": 179, + "failure": 0, "hashmatch": 0, + "failure_nodes": { + "192.168.0.1": 0, + "192.168.0.2": 0}, + "remove": 0, "rsync": 0, + "start": 1333044050.855202, "success": 358}, + "replication_last": 1357969645.25, + "object_replication_time": 0.2615511417388916, + "object_replication_last": 1357969645.25}) def test_get_updater_info_container(self): from_cache_response = {"container_updater_sweep": 18.476239919662476} diff --git a/test/unit/obj/test_replicator.py b/test/unit/obj/test_replicator.py index 526ff0b7f2..d7316ab4bf 100644 --- a/test/unit/obj/test_replicator.py +++ b/test/unit/obj/test_replicator.py @@ -205,6 +205,8 @@ class TestObjectReplicator(unittest.TestCase): def _create_replicator(self): self.replicator = object_replicator.ObjectReplicator(self.conf) self.replicator.logger = self.logger + self.replicator._zero_stats() + self.replicator.all_devs_info = set() self.df_mgr = diskfile.DiskFileManager(self.conf, self.logger) def test_run_once(self): @@ -771,6 +773,7 @@ class TestObjectReplicator(unittest.TestCase): self.conf['sync_method'] = 'ssync' self.replicator = object_replicator.ObjectReplicator(self.conf) self.replicator.logger = debug_logger() + self.replicator._zero_stats() with mock.patch('swift.obj.replicator.http_connect', mock_http_connect(200)): From 1a81cda8b7eba0d25ea7341da756bb588cae8d73 Mon Sep 17 00:00:00 2001 From: Hamdi Roumani Date: Mon, 3 Aug 2015 17:45:56 -0400 Subject: [PATCH 37/70] Doc instructions for post rebase steps Improve the 'first_contribution_swift' by adding instructions for how to rebuild swift following a rebase. Change-Id: If5c91dc4e1e8d1712bbd8b326c675967fb4b8c15 --- doc/source/first_contribution_swift.rst | 37 +++++++++++++++++++++---- 1 file changed, 32 insertions(+), 5 deletions(-) diff --git a/doc/source/first_contribution_swift.rst b/doc/source/first_contribution_swift.rst index bbd3c65ff8..b4aa948ed7 100644 --- a/doc/source/first_contribution_swift.rst +++ b/doc/source/first_contribution_swift.rst @@ -118,6 +118,36 @@ After you proposed your changes to Swift, you can track the review in: * ``_ +.. _post-rebase-instructions: + +------------------------ +Post rebase instructions +------------------------ + +After rebasing, the following steps should be performed to rebuild the swift +installation. Note that these commands should be performed from the root of the +swift repo directory (e.g. 
$HOME/swift/): + + ``sudo python setup.py develop`` + + ``sudo pip install -r test-requirements.txt`` + +If using TOX, depending on the changes made during the rebase, you may need to +rebuild the TOX environment (generally this will be the case if +test-requirements.txt was updated such that a new version of a package is +required), this can be accomplished using the '-r' argument to the TOX cli: + + ``tox -r`` + +You can include any of the other TOX arguments as well, for example, to run the +pep8 suite and rebuild the TOX environment the following can be used: + + ``tox -r -e pep8`` + +The rebuild option only needs to be specified once for a particular build (e.g. +pep8), that is further invocations of the same build will not require this +until the next rebase. + --------------- Troubleshooting --------------- @@ -145,8 +175,5 @@ your commit using: This happens because `git rebase` will retrieve code for a different version of Swift in the development stream, but the start scripts under `/usr/local/bin` have -not been updated. The solution is to execute the following command under the swift -directory (which contains `setup.py`): - - ``sudo python setup.py develop`` - +not been updated. The solution is to follow the steps described in the +:ref:`post-rebase-instructions` section. From eaa006464cb98a2f36809edfa13bd3dcaebc9952 Mon Sep 17 00:00:00 2001 From: John Dickinson Date: Mon, 17 Aug 2015 22:13:42 -0700 Subject: [PATCH 38/70] move global statement up a few lines Change-Id: I190d2d530c6c0525d988cc88b0965b48a291fffb --- test/unit/proxy/test_server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 153e45c0c0..26903d0a92 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -8523,6 +8523,7 @@ class TestSwiftInfo(unittest.TestCase): class TestSocketObjectVersions(unittest.TestCase): def setUp(self): + global _test_sockets self.prolis = prolis = listen(('localhost', 0)) self._orig_prolis = _test_sockets[0] allowed_headers = ', '.join([ @@ -8541,7 +8542,6 @@ class TestSocketObjectVersions(unittest.TestCase): self.coro = spawn(wsgi.server, prolis, prosrv, NullLogger()) # replace global prosrv with one that's filtered with version # middleware - global _test_sockets self.sockets = list(_test_sockets) self.sockets[0] = prolis _test_sockets = tuple(self.sockets) From d6267c1f95a7fcd2b0e4f15d5efc6bcfe2114de0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Contini?= Date: Wed, 12 Aug 2015 15:00:45 -0400 Subject: [PATCH 39/70] Keep user id and project id in subrequests env Keep HTTP_X_USER_ID and HTTP_X_PROJECT_ID to be available as user_id and project_id in storage.objects.outgoing.bytes in ceilometer when downloading a multipart object. 
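
For illustration, a minimal sketch of the allowlist-copy pattern this change extends; the helper and key list below are simplified stand-ins, not Swift's actual make_env code:

    # Only an explicit allowlist of keys is copied from the parent WSGI
    # environment into the sub-request environment; everything else is
    # deliberately dropped.
    PASSTHROUGH_KEYS = (
        'REQUEST_METHOD', 'PATH_INFO', 'QUERY_STRING',
        'HTTP_X_USER_ID', 'HTTP_X_PROJECT_ID',  # the newly preserved keys
    )

    def copy_subrequest_env(env):
        return dict((k, env[k]) for k in PASSTHROUGH_KEYS if k in env)

    parent_env = {'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c/o',
                  'HTTP_X_USER_ID': '1234', 'HTTP_X_PROJECT_ID': '5678',
                  'CONTENT_LENGTH': '42'}
    sub_env = copy_subrequest_env(parent_env)
    assert 'HTTP_X_USER_ID' in sub_env and 'CONTENT_LENGTH' not in sub_env
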
Change-Id: I0f4734f021e5d6e84d48ed9bebeb321d7a9590ad Closes-Bug: #1477283 --- swift/common/wsgi.py | 2 +- test/unit/common/test_wsgi.py | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/swift/common/wsgi.py b/swift/common/wsgi.py index fbb3d5d009..3a75e7197f 100644 --- a/swift/common/wsgi.py +++ b/swift/common/wsgi.py @@ -1090,7 +1090,7 @@ def make_env(env, method=None, path=None, agent='Swift', query_string=None, 'HTTP_ORIGIN', 'HTTP_ACCESS_CONTROL_REQUEST_METHOD', 'SERVER_PROTOCOL', 'swift.cache', 'swift.source', 'swift.trans_id', 'swift.authorize_override', - 'swift.authorize'): + 'swift.authorize', 'HTTP_X_USER_ID', 'HTTP_X_PROJECT_ID'): if name in env: newenv[name] = env[name] if method: diff --git a/test/unit/common/test_wsgi.py b/test/unit/common/test_wsgi.py index a165ecb5f2..7a5b03696e 100644 --- a/test/unit/common/test_wsgi.py +++ b/test/unit/common/test_wsgi.py @@ -816,6 +816,16 @@ class TestWSGI(unittest.TestCase): self.assertEquals(r.environ['SCRIPT_NAME'], '') self.assertEquals(r.environ['PATH_INFO'], '/override') + def test_make_env_keep_user_project_id(self): + oldenv = {'HTTP_X_USER_ID': '1234', 'HTTP_X_PROJECT_ID': '5678'} + newenv = wsgi.make_env(oldenv) + + self.assertTrue('HTTP_X_USER_ID' in newenv) + self.assertEquals(newenv['HTTP_X_USER_ID'], '1234') + + self.assertTrue('HTTP_X_PROJECT_ID' in newenv) + self.assertEquals(newenv['HTTP_X_PROJECT_ID'], '5678') + class TestServersPerPortStrategy(unittest.TestCase): def setUp(self): From 8aaacbf88d9ea4432e58dd261b98ff95d7c69be6 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Mon, 27 Jul 2015 18:55:01 +0200 Subject: [PATCH 40/70] pep8: Fix hacking H232 warnings (octal) Fix warnings "H232: Python 3.x incompatible octal 000001234 should be written as 0o1234". Change-Id: I9a7bbb034357783885ac3e18fe1e9e32a5951616 --- test/unit/container/test_reconciler.py | 4 ++-- tox.ini | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/test/unit/container/test_reconciler.py b/test/unit/container/test_reconciler.py index 00aba94459..1b41227608 100644 --- a/test/unit/container/test_reconciler.py +++ b/test/unit/container/test_reconciler.py @@ -207,8 +207,8 @@ class TestReconcilerUtils(unittest.TestCase): self.assertEqual(got['account'], 'AUTH_bob') self.assertEqual(got['container'], 'con') self.assertEqual(got['obj'], 'obj') - self.assertEqual(got['q_ts'], 0000001234.20190) - self.assertEqual(got['q_record'], 0000001234.20192) + self.assertEqual(got['q_ts'], 1234.20190) + self.assertEqual(got['q_record'], 1234.20192) self.assertEqual(got['q_op'], 'PUT') # negative test diff --git a/tox.ini b/tox.ini index 249855208d..a98035cd44 100644 --- a/tox.ini +++ b/tox.ini @@ -59,7 +59,6 @@ commands = bandit -c bandit.yaml -r swift bin -n 5 -p gate # F812: list comprehension redefines ... 
# H101: Use TODO(NAME) # H202: assertRaises Exception too broad -# H232: Python 3.x incompatible octal 000001234 should be written as 0o1234 # H233: Python 3.x incompatible use of print operator # H234: assertEquals is deprecated, use assertEqual # H235: assert_ is deprecated, use assertTrue @@ -72,6 +71,6 @@ commands = bandit -c bandit.yaml -r swift bin -n 5 -p gate # H501: Do not use self.__dict__ for string formatting # H702: Formatting operation should be outside of localization method call # H703: Multiple positional placeholders -ignore = F402,F812,H101,H202,H232,H233,H234,H235,H301,H306,H401,H403,H404,H405,H501,H702,H703 +ignore = F402,F812,H101,H202,H233,H234,H235,H301,H306,H401,H403,H404,H405,H501,H702,H703 exclude = .venv,.tox,dist,doc,*egg show-source = True From 7bea148d2fe22daa9d1fbbc5cd16ebd390b64b1b Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Sun, 16 Aug 2015 10:34:26 +0200 Subject: [PATCH 41/70] pep8: replace deprecated calls to assert_() The TestCase.assert_() has been deprecated in Python 2.7. Replace it with assertTrue() or even better methods (assertIn, assertNotIn, assertIsInstance) which provide better error messages. Change-Id: I21c730351470031a2dabe5238693095eabdb8964 --- test/functional/tests.py | 8 ++++---- test/unit/common/test_wsgi.py | 2 +- test/unit/proxy/test_server.py | 14 +++++++------- tox.ini | 3 +-- 4 files changed, 13 insertions(+), 14 deletions(-) diff --git a/test/functional/tests.py b/test/functional/tests.py index 56e7fdc6bd..8c64d8d12d 100644 --- a/test/functional/tests.py +++ b/test/functional/tests.py @@ -441,14 +441,14 @@ class TestContainer(Base): def testListDelimiter(self): cont = self.env.account.container(Utils.create_name()) - self.assert_(cont.create()) + self.assertTrue(cont.create()) delimiter = '-' files = ['test', delimiter.join(['test', 'bar']), delimiter.join(['test', 'foo'])] for f in files: file_item = cont.file(f) - self.assert_(file_item.write_random()) + self.assertTrue(file_item.write_random()) results = cont.files() results = cont.files(parms={'delimiter': delimiter}) @@ -456,13 +456,13 @@ class TestContainer(Base): def testListDelimiterAndPrefix(self): cont = self.env.account.container(Utils.create_name()) - self.assert_(cont.create()) + self.assertTrue(cont.create()) delimiter = 'a' files = ['bar', 'bazar'] for f in files: file_item = cont.file(f) - self.assert_(file_item.write_random()) + self.assertTrue(file_item.write_random()) results = cont.files(parms={'delimiter': delimiter, 'prefix': 'ba'}) self.assertEqual(results, ['bar', 'baza']) diff --git a/test/unit/common/test_wsgi.py b/test/unit/common/test_wsgi.py index 7a5b03696e..1e786e273c 100644 --- a/test/unit/common/test_wsgi.py +++ b/test/unit/common/test_wsgi.py @@ -144,7 +144,7 @@ class TestWSGI(unittest.TestCase): app = app.app expected = \ swift.common.middleware.versioned_writes.VersionedWritesMiddleware - self.assert_(isinstance(app, expected)) + self.assertIsInstance(app, expected) app = app.app expected = swift.proxy.server.Application diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 26903d0a92..04eb3d6eb7 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -8597,7 +8597,7 @@ class TestSocketObjectVersions(unittest.TestCase): headers, body = get_container() exp = 'HTTP/1.1 2' # 2xx series response self.assertEqual(headers[:len(exp)], exp) - self.assert_('X-Versions-Location: %s' % vc in headers) + self.assertIn('X-Versions-Location: %s' % vc, headers) def put_version_container(): sock = 
connect_tcp(('localhost', prolis.getsockname()[1])) @@ -8654,8 +8654,8 @@ class TestSocketObjectVersions(unittest.TestCase): headers, body = get() exp = 'HTTP/1.1 200' self.assertEqual(headers[:len(exp)], exp) - self.assert_('Content-Type: text/jibberish%s' % version in headers) - self.assert_('X-Object-Meta-Foo: barbaz' not in headers) + self.assertIn('Content-Type: text/jibberish%s' % version, headers) + self.assertNotIn('X-Object-Meta-Foo: barbaz', headers) self.assertEqual(body, '%05d' % version) def get_version_container(): @@ -8738,8 +8738,8 @@ class TestSocketObjectVersions(unittest.TestCase): self.assertEqual(headers[:len(exp)], exp) headers, body = get() - self.assert_('Content-Type: foo/bar' in headers) - self.assert_('X-Object-Meta-Bar: foo' in headers) + self.assertIn('Content-Type: foo/bar', headers) + self.assertIn('X-Object-Meta-Bar: foo', headers) self.assertEqual(body, '%05d' % version) # check container listing @@ -8758,8 +8758,8 @@ class TestSocketObjectVersions(unittest.TestCase): headers, body = get() exp = 'HTTP/1.1 200' self.assertEqual(headers[:len(exp)], exp) - self.assert_('Content-Type: text/jibberish%s' % (segment - 1) - in headers) + self.assertIn('Content-Type: text/jibberish%s' % (segment - 1), + headers) self.assertEqual(body, '%05d' % (segment - 1)) # Ensure we have the right number of versions saved sock = connect_tcp(('localhost', prolis.getsockname()[1])) diff --git a/tox.ini b/tox.ini index a98035cd44..50236788c4 100644 --- a/tox.ini +++ b/tox.ini @@ -61,7 +61,6 @@ commands = bandit -c bandit.yaml -r swift bin -n 5 -p gate # H202: assertRaises Exception too broad # H233: Python 3.x incompatible use of print operator # H234: assertEquals is deprecated, use assertEqual -# H235: assert_ is deprecated, use assertTrue # H301: one import per line # H306: imports not in alphabetical order (time, os) # H401: docstring should not start with a space @@ -71,6 +70,6 @@ commands = bandit -c bandit.yaml -r swift bin -n 5 -p gate # H501: Do not use self.__dict__ for string formatting # H702: Formatting operation should be outside of localization method call # H703: Multiple positional placeholders -ignore = F402,F812,H101,H202,H233,H234,H235,H301,H306,H401,H403,H404,H405,H501,H702,H703 +ignore = F402,F812,H101,H202,H233,H234,H301,H306,H401,H403,H404,H405,H501,H702,H703 exclude = .venv,.tox,dist,doc,*egg show-source = True From c7eb589c6ca2fe4f2950cae22a18513e4dec6c46 Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Sun, 16 Aug 2015 11:06:52 +0200 Subject: [PATCH 42/70] pep8: Fix usage of the l10n _('...') function Fix the pep8 warning H702 "Formatting operation should be outside of localization method call". For the logger, pass parameters as indexed parameters instead of using the string str%args operator, the logger is more reliable in case of formatting error. Change-Id: If418bc155f6a6c0a00f63e3d87ebe4addf4aae55 --- swift/common/manager.py | 6 +++--- swift/obj/auditor.py | 4 ++-- test/unit/obj/test_auditor.py | 8 +++++--- tox.ini | 3 +-- 4 files changed, 11 insertions(+), 10 deletions(-) diff --git a/swift/common/manager.py b/swift/common/manager.py index eb1e93ae20..ca1bc3ca26 100644 --- a/swift/common/manager.py +++ b/swift/common/manager.py @@ -462,10 +462,10 @@ class Server(object): # maybe there's a config file(s) out there, but I couldn't find it! 
if not kwargs.get('quiet'): if number: - print(_('Unable to locate config number %s for %s' % ( - number, self.server))) + print(_('Unable to locate config number %s for %s') + % (number, self.server)) else: - print(_('Unable to locate config for %s' % (self.server))) + print(_('Unable to locate config for %s') % self.server) if kwargs.get('verbose') and not kwargs.get('quiet'): if found_conf_files: print(_('Found configs:')) diff --git a/swift/obj/auditor.py b/swift/obj/auditor.py index 1e218a2a1b..4875bb2520 100644 --- a/swift/obj/auditor.py +++ b/swift/obj/auditor.py @@ -331,7 +331,7 @@ class ObjectAuditor(Daemon): try: self.audit_loop(parent, zbo_fps, **kwargs) except (Exception, Timeout) as err: - self.logger.exception(_('ERROR auditing: %s' % err)) + self.logger.exception(_('ERROR auditing: %s'), err) self._sleep() def run_once(self, *args, **kwargs): @@ -352,4 +352,4 @@ class ObjectAuditor(Daemon): self.audit_loop(parent, zbo_fps, override_devices=override_devices, **kwargs) except (Exception, Timeout) as err: - self.logger.exception(_('ERROR auditing: %s' % err)) + self.logger.exception(_('ERROR auditing: %s'), err) diff --git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index eb36edf500..2429879e5d 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -546,6 +546,8 @@ class TestAuditor(unittest.TestCase): class Bogus(Exception): pass + loop_error = Bogus('exception') + class ObjectAuditorMock(object): check_args = () check_kwargs = {} @@ -568,7 +570,7 @@ class TestAuditor(unittest.TestCase): def mock_audit_loop_error(self, parent, zbo_fps, override_devices=None, **kwargs): - raise Bogus('exception') + raise loop_error def mock_fork(self): self.fork_called += 1 @@ -602,11 +604,11 @@ class TestAuditor(unittest.TestCase): my_auditor._sleep = mocker.mock_sleep_stop my_auditor.run_once(zero_byte_fps=50) my_auditor.logger.exception.assert_called_once_with( - 'ERROR auditing: exception') + 'ERROR auditing: %s', loop_error) my_auditor.logger.exception.reset_mock() self.assertRaises(StopForever, my_auditor.run_forever) my_auditor.logger.exception.assert_called_once_with( - 'ERROR auditing: exception') + 'ERROR auditing: %s', loop_error) my_auditor.audit_loop = real_audit_loop self.assertRaises(StopForever, diff --git a/tox.ini b/tox.ini index 50236788c4..53d3bf78a8 100644 --- a/tox.ini +++ b/tox.ini @@ -68,8 +68,7 @@ commands = bandit -c bandit.yaml -r swift bin -n 5 -p gate # H404: multi line docstring should start without a leading new line # H405: multi line docstring summary not separated with an empty line # H501: Do not use self.__dict__ for string formatting -# H702: Formatting operation should be outside of localization method call # H703: Multiple positional placeholders -ignore = F402,F812,H101,H202,H233,H234,H301,H306,H401,H403,H404,H405,H501,H702,H703 +ignore = F402,F812,H101,H202,H233,H234,H301,H306,H401,H403,H404,H405,H501,H703 exclude = .venv,.tox,dist,doc,*egg show-source = True From 183508710d011ec30ebba2186c58fd28d449e4bb Mon Sep 17 00:00:00 2001 From: Victor Stinner Date: Sun, 16 Aug 2015 11:18:49 +0200 Subject: [PATCH 43/70] pep8: Don't override '_' symbol Fix pep8 warning F402 "import '_' from line 51 shadowed by loop variable". 
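
A minimal, self-contained sketch of why the F402 shadowing matters when '_' doubles as the gettext alias (simplified example; it mirrors the memcached.py loop changed below):

    from gettext import gettext as _  # '_' is the translation function
    import io

    def flush_lines(fp, count):
        # BAD: the throwaway loop variable rebinds '_' in this scope,
        # shadowing the imported translation function.
        for _ in range(count):
            fp.readline()
        return _('done')  # '_' is now an int, so calling it raises TypeError

    def flush_lines_fixed(fp, count):
        # GOOD: a named loop variable keeps '_' bound to gettext.
        for line in range(count):
            fp.readline()
        return _('done')

    print(flush_lines_fixed(io.StringIO(u'a\nb\n'), 2))  # -> 'done'
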
Change-Id: I139060ff5d298a8b0f0f8e529a5737478fb5daf5 --- swift/common/memcached.py | 2 +- tox.ini | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/swift/common/memcached.py b/swift/common/memcached.py index d367b80c89..f3d0eae5d6 100644 --- a/swift/common/memcached.py +++ b/swift/common/memcached.py @@ -443,7 +443,7 @@ class MemcacheRing(object): with Timeout(self._io_timeout): sock.sendall(msg) # Wait for the set to complete - for _ in range(len(mapping)): + for line in range(len(mapping)): fp.readline() self._return_conn(server, fp, sock) return diff --git a/tox.ini b/tox.ini index 53d3bf78a8..14fbe0b180 100644 --- a/tox.ini +++ b/tox.ini @@ -55,7 +55,6 @@ commands = bandit -c bandit.yaml -r swift bin -n 5 -p gate [flake8] # it's not a bug that we aren't using all of hacking, ignore: -# F402: import '_' from line 51 shadowed by loop variable # F812: list comprehension redefines ... # H101: Use TODO(NAME) # H202: assertRaises Exception too broad @@ -69,6 +68,6 @@ commands = bandit -c bandit.yaml -r swift bin -n 5 -p gate # H405: multi line docstring summary not separated with an empty line # H501: Do not use self.__dict__ for string formatting # H703: Multiple positional placeholders -ignore = F402,F812,H101,H202,H233,H234,H301,H306,H401,H403,H404,H405,H501,H703 +ignore = F812,H101,H202,H233,H234,H301,H306,H401,H403,H404,H405,H501,H703 exclude = .venv,.tox,dist,doc,*egg show-source = True From 47dc31940d64b0c87e99cc73eddcdd01dd8b3ad4 Mon Sep 17 00:00:00 2001 From: Samuel Merritt Date: Wed, 19 Aug 2015 10:42:28 -0700 Subject: [PATCH 44/70] Add OpenStack release names to changelog I can never remember the mapping of real version numbers to the OpenStack names. Change-Id: Ib7c5ae4ff2a33018364698eb34f8df1622719fab --- CHANGELOG | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index 1e7bd5ff36..3625a077ed 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,4 +1,4 @@ -swift (2.3.0) +swift (2.3.0, OpenStack Kilo) * Erasure Code support (beta) @@ -164,7 +164,7 @@ swift (2.2.1) * Various other minor bug fixes and improvements. -swift (2.2.0) +swift (2.2.0, OpenStack Juno) * Added support for Keystone v3 auth. @@ -338,7 +338,7 @@ swift (2.0.0) * Various other minor bug fixes and improvements -swift (1.13.1) +swift (1.13.1, OpenStack Icehouse) * Change the behavior of CORS responses to better match the spec @@ -605,7 +605,7 @@ swift (1.11.0) * Various other bug fixes and improvements -swift (1.10.0) +swift (1.10.0, OpenStack Havana) * Added support for pooling memcache connections @@ -776,7 +776,7 @@ swift (1.9.0) * Various other minor bug fixes and improvements -swift (1.8.0) +swift (1.8.0, OpenStack Grizzly) * Make rings' replica count adjustable @@ -947,7 +947,7 @@ swift (1.7.5) * Various other minor bug fixes and improvements -swift (1.7.4) +swift (1.7.4, OpenStack Folsom) * Fix issue where early client disconnects may have caused a memory leak @@ -1153,7 +1153,7 @@ swift (1.5.0) * Various other minor bug fixes and improvements -swift (1.4.8) +swift (1.4.8, OpenStack Essex) * Added optional max_containers_per_account restriction @@ -1296,7 +1296,7 @@ swift (1.4.4) * Query only specific zone via swift-recon. -swift (1.4.3) +swift (1.4.3, OpenStack Diablo) * Additional quarantine catching code. @@ -1421,3 +1421,13 @@ swift (1.4.0) * Stats uploaders now allow overrides for source_filename_pattern and new_log_cutoff values. 
+ +--- + +Changelog entries for previous versions are incomplete + +swift (1.3.0, OpenStack Cactus) + +swift (1.2.0, OpenStack Bexar) + +swift (1.0.0, OpenStack Austin) From 923238aa1ba0963c414ba5321cd3910b2910f4ed Mon Sep 17 00:00:00 2001 From: janonymous Date: Tue, 28 Jul 2015 20:35:25 +0530 Subject: [PATCH 45/70] test/(functional/probe):Replace python print operator with print function (pep H233, py33) 'print' function is compatible with 2.x and 3.x python versions Link : https://www.python.org/dev/peps/pep-3105/ Python 2.6 has a __future__ import that removes print as language syntax, letting you use the functional form instead Change-Id: I416c6ac21ccbfb91ec328ffb1ed21e492ef52d58 --- test/__init__.py | 17 +++++++---------- test/functional/__init__.py | 33 +++++++++++++++++---------------- test/probe/brain.py | 6 +++--- test/probe/common.py | 18 +++++++++--------- 4 files changed, 36 insertions(+), 38 deletions(-) diff --git a/test/__init__.py b/test/__init__.py index 3bd25b1407..b3ebefe70c 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -15,7 +15,7 @@ # See http://code.google.com/p/python-nose/issues/detail?id=373 # The code below enables nosetests to work with i18n _() blocks - +from __future__ import print_function import sys import os try: @@ -63,15 +63,12 @@ def get_config(section_name=None, defaults=None): config = readconf(config_file, section_name) except SystemExit: if not os.path.exists(config_file): - print >>sys.stderr, \ - 'Unable to read test config %s - file not found' \ - % config_file + print('Unable to read test config %s - file not found' + % config_file, file=sys.stderr) elif not os.access(config_file, os.R_OK): - print >>sys.stderr, \ - 'Unable to read test config %s - permission denied' \ - % config_file + print('Unable to read test config %s - permission denied' + % config_file, file=sys.stderr) else: - print >>sys.stderr, \ - 'Unable to read test config %s - section %s not found' \ - % (config_file, section_name) + print('Unable to read test config %s - section %s not found' + % (config_file, section_name), file=sys.stderr) return config diff --git a/test/functional/__init__.py b/test/functional/__init__.py index 8f16f5ac23..f07d162691 100644 --- a/test/functional/__init__.py +++ b/test/functional/__init__.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import print_function import mock import os import sys @@ -128,7 +129,7 @@ class InProcessException(BaseException): def _info(msg): - print >> sys.stderr, msg + print(msg, file=sys.stderr) def _debug(msg): @@ -501,7 +502,7 @@ def get_cluster_info(): # Most likely the swift cluster has "expose_info = false" set # in its proxy-server.conf file, so we'll just do the best we # can. - print >>sys.stderr, "** Swift Cluster not exposing /info **" + print("** Swift Cluster not exposing /info **", file=sys.stderr) # Finally, we'll allow any constraint present in the swift-constraints # section of test.conf to override everything. 
Note that only those @@ -513,8 +514,8 @@ def get_cluster_info(): except KeyError: pass except ValueError: - print >>sys.stderr, "Invalid constraint value: %s = %s" % ( - k, test_constraints[k]) + print("Invalid constraint value: %s = %s" % ( + k, test_constraints[k]), file=sys.stderr) eff_constraints.update(test_constraints) # Just make it look like these constraints were loaded from a /info call, @@ -564,8 +565,8 @@ def setup_package(): in_process_setup(the_object_server=( mem_object_server if in_mem_obj else object_server)) except InProcessException as exc: - print >> sys.stderr, ('Exception during in-process setup: %s' - % str(exc)) + print(('Exception during in-process setup: %s' + % str(exc)), file=sys.stderr) raise global web_front_end @@ -674,20 +675,19 @@ def setup_package(): global skip skip = not all([swift_test_auth, swift_test_user[0], swift_test_key[0]]) if skip: - print >>sys.stderr, 'SKIPPING FUNCTIONAL TESTS DUE TO NO CONFIG' + print('SKIPPING FUNCTIONAL TESTS DUE TO NO CONFIG', file=sys.stderr) global skip2 skip2 = not all([not skip, swift_test_user[1], swift_test_key[1]]) if not skip and skip2: - print >>sys.stderr, \ - 'SKIPPING SECOND ACCOUNT FUNCTIONAL TESTS' \ - ' DUE TO NO CONFIG FOR THEM' + print('SKIPPING SECOND ACCOUNT FUNCTIONAL TESTS ' + 'DUE TO NO CONFIG FOR THEM', file=sys.stderr) global skip3 skip3 = not all([not skip, swift_test_user[2], swift_test_key[2]]) if not skip and skip3: - print >>sys.stderr, \ - 'SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS DUE TO NO CONFIG FOR THEM' + print('SKIPPING THIRD ACCOUNT FUNCTIONAL TESTS' + 'DUE TO NO CONFIG FOR THEM', file=sys.stderr) global skip_if_not_v3 skip_if_not_v3 = (swift_test_auth_version != '3' @@ -695,16 +695,17 @@ def setup_package(): swift_test_user[3], swift_test_key[3]])) if not skip and skip_if_not_v3: - print >>sys.stderr, \ - 'SKIPPING FUNCTIONAL TESTS SPECIFIC TO AUTH VERSION 3' + print('SKIPPING FUNCTIONAL TESTS SPECIFIC TO AUTH VERSION 3', + file=sys.stderr) global skip_service_tokens skip_service_tokens = not all([not skip, swift_test_user[4], swift_test_key[4], swift_test_tenant[4], swift_test_service_prefix]) if not skip and skip_service_tokens: - print >>sys.stderr, \ - 'SKIPPING FUNCTIONAL TESTS SPECIFIC TO SERVICE TOKENS' + print( + 'SKIPPING FUNCTIONAL TESTS SPECIFIC TO SERVICE TOKENS', + file=sys.stderr) if policy_specified: policies = FunctionalStoragePolicyCollection.from_info() diff --git a/test/probe/brain.py b/test/probe/brain.py index bec97c78bc..9ec907c0a2 100644 --- a/test/probe/brain.py +++ b/test/probe/brain.py @@ -11,7 +11,7 @@ # implied. # See the License for the specific language governing permissions and # limitations under the License. - +from __future__ import print_function import sys import itertools import uuid @@ -226,8 +226,8 @@ def main(): try: brain.run(command, *args) except ClientException as e: - print '**WARNING**: %s raised %s' % (command, e) - print 'STATUS'.join(['*' * 25] * 2) + print('**WARNING**: %s raised %s' % (command, e)) + print('STATUS'.join(['*' * 25] * 2)) brain.servers.status() sys.exit() diff --git a/test/probe/common.py b/test/probe/common.py index 07977f5cd7..1479ba9ddc 100644 --- a/test/probe/common.py +++ b/test/probe/common.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
- +from __future__ import print_function import os from subprocess import Popen, PIPE import sys @@ -86,9 +86,9 @@ def check_server(ipport, ipport2server, pids, timeout=CHECK_SERVER_TIMEOUT): break except Exception as err: if time() > try_until: - print err - print 'Giving up on %s:%s after %s seconds.' % ( - server, ipport, timeout) + print(err) + print('Giving up on %s:%s after %s seconds.' % ( + server, ipport, timeout)) raise err sleep(0.1) else: @@ -102,8 +102,8 @@ def check_server(ipport, ipport2server, pids, timeout=CHECK_SERVER_TIMEOUT): return url, token, account except Exception as err: if time() > try_until: - print err - print 'Giving up on proxy:8080 after 30 seconds.' + print(err) + print('Giving up on proxy:8080 after 30 seconds.') raise err sleep(0.1) return None @@ -258,7 +258,7 @@ def get_policy(**kwargs): def resetswift(): p = Popen("resetswift 2>&1", shell=True, stdout=PIPE) stdout, _stderr = p.communicate() - print stdout + print(stdout) Manager(['all']).stop() @@ -407,11 +407,11 @@ if __name__ == "__main__": force_validate=True) except SkipTest as err: sys.exit('%s ERROR: %s' % (server, err)) - print '%s OK' % server + print('%s OK' % server) for policy in POLICIES: try: get_ring(policy.ring_name, 3, 4, server='object', force_validate=True) except SkipTest as err: sys.exit('object ERROR (%s): %s' % (policy.name, err)) - print 'object OK (%s)' % policy.name + print('object OK (%s)' % policy.name) From 4b6836b3bc465e9c2c6d2974c193a49b973c4e4e Mon Sep 17 00:00:00 2001 From: Akihito Takai Date: Thu, 20 Aug 2015 18:40:52 +0900 Subject: [PATCH 46/70] Fix the comment in [1]. (line 259) parameter(op) of object_update method is 'PUT' or 'DELETE' not 'POST' or 'DELETE'. [1]: swift/obj/updater.py Change-Id: I876a620ba8e09e69fba7156b12e69445c229e160 --- swift/obj/updater.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/obj/updater.py b/swift/obj/updater.py index a07a72be9b..d588de72b1 100644 --- a/swift/obj/updater.py +++ b/swift/obj/updater.py @@ -256,7 +256,7 @@ class ObjectUpdater(Daemon): :param node: node dictionary from the container ring :param part: partition that holds the container - :param op: operation performed (ex: 'POST' or 'DELETE') + :param op: operation performed (ex: 'PUT' or 'DELETE') :param obj: object name being updated :param headers_out: headers to send with the update """ From be66aa8e76d2994eb8a0a944e0dea26ffbb3c159 Mon Sep 17 00:00:00 2001 From: Samuel Merritt Date: Thu, 20 Aug 2015 13:24:38 -0700 Subject: [PATCH 47/70] Fix 500 for bogus Range request to 0-byte object. The proxy was trying to pop a byterange off a Range header that didn't contain syntactically-valid byteranges. This worked about as well as you'd expect. Now we detect the bogus value and remove the header entirely. Change-Id: I24b92f900d33ec79880c7db2870378489d5a6810 --- swift/proxy/controllers/base.py | 7 +++++- test/unit/proxy/test_server.py | 40 +++++++++++++++++++++++++++++++++ 2 files changed, 46 insertions(+), 1 deletion(-) diff --git a/swift/proxy/controllers/base.py b/swift/proxy/controllers/base.py index 554469cc06..70940f9c16 100644 --- a/swift/proxy/controllers/base.py +++ b/swift/proxy/controllers/base.py @@ -696,7 +696,12 @@ class ResumingGetter(object): If we have no Range header, this is a no-op. 
""" if 'Range' in self.backend_headers: - req_range = Range(self.backend_headers['Range']) + try: + req_range = Range(self.backend_headers['Range']) + except ValueError: + # there's a Range header, but it's garbage, so get rid of it + self.backend_headers.pop('Range') + return begin, end = req_range.ranges.pop(0) if len(req_range.ranges) > 0: self.backend_headers['Range'] = str(req_range) diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 04eb3d6eb7..539d6ef102 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1457,6 +1457,46 @@ class TestObjectController(unittest.TestCase): 'bytes 4123-4523/5800') self.assertEqual(second_range_body, obj[4123:4524]) + @unpatch_policies + def test_GET_bad_range_zero_byte(self): + prolis = _test_sockets[0] + prosrv = _test_servers[0] + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + + path = '/v1/a/c/o.zerobyte' + fd.write('PUT %s HTTP/1.1\r\n' + 'Host: localhost\r\n' + 'Connection: close\r\n' + 'X-Storage-Token: t\r\n' + 'Content-Length: 0\r\n' + 'Content-Type: application/octet-stream\r\n' + '\r\n' % (path,)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + + # bad byte-range + req = Request.blank( + path, + environ={'REQUEST_METHOD': 'GET'}, + headers={'Content-Type': 'application/octet-stream', + 'Range': 'bytes=spaghetti-carbonara'}) + res = req.get_response(prosrv) + self.assertEqual(res.status_int, 200) + self.assertEqual(res.body, '') + + # not a byte-range + req = Request.blank( + path, + environ={'REQUEST_METHOD': 'GET'}, + headers={'Content-Type': 'application/octet-stream', + 'Range': 'Kotta'}) + res = req.get_response(prosrv) + self.assertEqual(res.status_int, 200) + self.assertEqual(res.body, '') + @unpatch_policies def test_GET_ranges_resuming(self): prolis = _test_sockets[0] From ab163702de733be39ba5e7024c7a8dd4c86bc29b Mon Sep 17 00:00:00 2001 From: Pradeep Kumar Singh Date: Tue, 14 Jul 2015 10:03:18 +0530 Subject: [PATCH 48/70] Emit warning log in object replicator When the object-replicator encounters handoffs_first and handoff_delete options as enabled it should emit a log warning indicating that it should be changed back to the default before the next "normal" rebalance. Closes-Bug: #1457262 Change-Id: If9dc2796c18ed3cf13da920831e2d5c2ae9f12a0 --- swift/obj/replicator.py | 8 ++++++-- test/unit/obj/test_replicator.py | 17 +++++++++++++++++ 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index 70b55046cb..08d50f1e1f 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -53,13 +53,13 @@ class ObjectReplicator(Daemon): caller to do this in a loop. 
""" - def __init__(self, conf): + def __init__(self, conf, logger=None): """ :param conf: configuration object obtained from ConfigParser :param logger: logging object """ self.conf = conf - self.logger = get_logger(conf, log_route='object-replicator') + self.logger = logger or get_logger(conf, log_route='object-replicator') self.devices_dir = conf.get('devices', '/srv/node') self.mount_check = config_true_value(conf.get('mount_check', 'true')) self.vm_test_mode = config_true_value(conf.get('vm_test_mode', 'no')) @@ -99,6 +99,10 @@ class ObjectReplicator(Daemon): False)) self.handoff_delete = config_auto_int_value( conf.get('handoff_delete', 'auto'), 0) + if any((self.handoff_delete, self.handoffs_first)): + self.logger.warn('handoffs_first and handoff_delete should be' + ' changed back to the default before the next' + ' normal rebalance') self._diskfile_mgr = DiskFileManager(conf, self.logger) # Just exists for doc anchor point diff --git a/test/unit/obj/test_replicator.py b/test/unit/obj/test_replicator.py index 526ff0b7f2..2734f247a5 100644 --- a/test/unit/obj/test_replicator.py +++ b/test/unit/obj/test_replicator.py @@ -184,6 +184,23 @@ class TestObjectReplicator(unittest.TestCase): def tearDown(self): rmtree(self.testdir, ignore_errors=1) + def test_handoff_replication_setting_warnings(self): + conf = {'handoffs_first': 'true'} + replicator = object_replicator.ObjectReplicator( + conf, logger=self.logger) + self.assertTrue(replicator.handoffs_first) + log_message = 'handoffs_first and handoff_delete should'\ + ' be changed back to the default before the'\ + ' next normal rebalance' + expected = [log_message] + self.assertEqual(self.logger.get_lines_for_level('warning'), expected) + conf = {'handoff_delete': '2'} + replicator = object_replicator.ObjectReplicator( + conf, logger=self.logger) + self.assertEqual(replicator.handoff_delete, 2) + expected.append(log_message) + self.assertEqual(self.logger.get_lines_for_level('warning'), expected) + def _write_disk_data(self, disk_name): os.mkdir(os.path.join(self.devices, disk_name)) objects = os.path.join(self.devices, disk_name, From a1ceab5a92dfdafd5abffed272ac2cece5302f32 Mon Sep 17 00:00:00 2001 From: Carlos Cavanna Date: Fri, 21 Aug 2015 14:14:31 -0400 Subject: [PATCH 49/70] New troubleshooting case in documentation. Added a new troubleshooting case for the "First Contribution to Swift" documentation page. Change-Id: I182ba702b49b28409fe56becae93326e5f63dcd0 --- doc/source/first_contribution_swift.rst | 27 ++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/doc/source/first_contribution_swift.rst b/doc/source/first_contribution_swift.rst index b4aa948ed7..a1e3930cb6 100644 --- a/doc/source/first_contribution_swift.rst +++ b/doc/source/first_contribution_swift.rst @@ -152,7 +152,7 @@ until the next rebase. Troubleshooting --------------- -You may run into the following error when starting Swift if you rebase +You may run into the following errors when starting Swift if you rebase your commit using: ``git rebase`` @@ -173,6 +173,31 @@ your commit using: pkg_resources.DistributionNotFound: swift==2.3.1.devXXX (where XXX represents a dev version of Swift). +.. 
code-block:: python + + Traceback (most recent call last): + File "/usr/local/bin/swift-proxy-server", line 10, in + execfile(__file__) + File "/home/swift/swift/bin/swift-proxy-server", line 23, in + sys.exit(run_wsgi(conf_file, 'proxy-server', **options)) + File "/home/swift/swift/swift/common/wsgi.py", line 888, in run_wsgi + loadapp(conf_path, global_conf=global_conf) + File "/home/swift/swift/swift/common/wsgi.py", line 390, in loadapp + func(PipelineWrapper(ctx)) + File "/home/swift/swift/swift/proxy/server.py", line 602, in modify_wsgi_pipeline + ctx = pipe.create_filter(filter_name) + File "/home/swift/swift/swift/common/wsgi.py", line 329, in create_filter + global_conf=self.context.global_conf) + File "/usr/lib/python2.7/dist-packages/paste/deploy/loadwsgi.py", line 296, in loadcontext + global_conf=global_conf) + File "/usr/lib/python2.7/dist-packages/paste/deploy/loadwsgi.py", line 328, in _loadegg + return loader.get_context(object_type, name, global_conf) + File "/usr/lib/python2.7/dist-packages/paste/deploy/loadwsgi.py", line 620, in get_context + object_type, name=name) + File "/usr/lib/python2.7/dist-packages/paste/deploy/loadwsgi.py", line 659, in find_egg_entry_point + for prot in protocol_options] or '(no entry points)')))) + LookupError: Entry point 'versioned_writes' not found in egg 'swift' (dir: /home/swift/swift; protocols: paste.filter_factory, paste.filter_app_factory; entry_points: ) + This happens because `git rebase` will retrieve code for a different version of Swift in the development stream, but the start scripts under `/usr/local/bin` have not been updated. The solution is to follow the steps described in the From 8b1df9918bf6c22b49b161a402199b532bfd8266 Mon Sep 17 00:00:00 2001 From: Clay Gerrard Date: Fri, 21 Aug 2015 14:04:46 -0700 Subject: [PATCH 50/70] Minor cleanup handoff mode warnings * message is a little clearer * test is a little stronger Change-Id: I745cde7f4a46dafc80ab42d39e6ccc92aa3b746e --- swift/obj/replicator.py | 7 +++--- test/unit/__init__.py | 2 ++ test/unit/obj/test_replicator.py | 41 ++++++++++++++++++++------------ 3 files changed, 32 insertions(+), 18 deletions(-) diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index 08d50f1e1f..0cecc3bd54 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -100,9 +100,10 @@ class ObjectReplicator(Daemon): self.handoff_delete = config_auto_int_value( conf.get('handoff_delete', 'auto'), 0) if any((self.handoff_delete, self.handoffs_first)): - self.logger.warn('handoffs_first and handoff_delete should be' - ' changed back to the default before the next' - ' normal rebalance') + self.logger.warn('Handoff only mode is not intended for normal ' + 'operation, please disable handoffs_first and ' + 'handoff_delete before the next ' + 'normal rebalance') self._diskfile_mgr = DiskFileManager(conf, self.logger) # Just exists for doc anchor point diff --git a/test/unit/__init__.py b/test/unit/__init__.py index a6c5001c3c..ae9723a346 100644 --- a/test/unit/__init__.py +++ b/test/unit/__init__.py @@ -510,6 +510,8 @@ class FakeLogger(logging.Logger, object): self.lines_dict = {'critical': [], 'error': [], 'info': [], 'warning': [], 'debug': [], 'notice': []} + clear = _clear # this is a public interface + def get_lines_for_level(self, level): if level not in self.lines_dict: raise KeyError( diff --git a/test/unit/obj/test_replicator.py b/test/unit/obj/test_replicator.py index 2734f247a5..2919eb26cb 100644 --- a/test/unit/obj/test_replicator.py +++ b/test/unit/obj/test_replicator.py 
@@ -185,21 +185,32 @@ class TestObjectReplicator(unittest.TestCase): rmtree(self.testdir, ignore_errors=1) def test_handoff_replication_setting_warnings(self): - conf = {'handoffs_first': 'true'} - replicator = object_replicator.ObjectReplicator( - conf, logger=self.logger) - self.assertTrue(replicator.handoffs_first) - log_message = 'handoffs_first and handoff_delete should'\ - ' be changed back to the default before the'\ - ' next normal rebalance' - expected = [log_message] - self.assertEqual(self.logger.get_lines_for_level('warning'), expected) - conf = {'handoff_delete': '2'} - replicator = object_replicator.ObjectReplicator( - conf, logger=self.logger) - self.assertEqual(replicator.handoff_delete, 2) - expected.append(log_message) - self.assertEqual(self.logger.get_lines_for_level('warning'), expected) + conf_tests = [ + # (config, expected_warning) + ({}, False), + ({'handoff_delete': 'auto'}, False), + ({'handoffs_first': 'no'}, False), + ({'handoff_delete': '2'}, True), + ({'handoffs_first': 'yes'}, True), + ({'handoff_delete': '1', 'handoffs_first': 'yes'}, True), + ] + log_message = 'Handoff only mode is not intended for normal ' \ + 'operation, please disable handoffs_first and ' \ + 'handoff_delete before the next normal rebalance' + for config, expected_warning in conf_tests: + self.logger.clear() + object_replicator.ObjectReplicator(config, logger=self.logger) + warning_log_lines = self.logger.get_lines_for_level('warning') + if expected_warning: + expected_log_lines = [log_message] + else: + expected_log_lines = [] + self.assertEqual(expected_log_lines, warning_log_lines, + 'expected %s != %s for config %r' % ( + expected_log_lines, + warning_log_lines, + config, + )) def _write_disk_data(self, disk_name): os.mkdir(os.path.join(self.devices, disk_name)) From 69e7424d3cb6c59341d739c3d41f42397031d96a Mon Sep 17 00:00:00 2001 From: Eran Rom Date: Tue, 28 Jul 2015 09:41:12 +0300 Subject: [PATCH 51/70] Add container sync probe test to SAIO default set SAIO Configuration and documentation changes enabling to run the container sync probe test by default Change-Id: Iccf59533d0d4fe72549d318339ab125d04dde006 Related-Bug: #1476623 --- doc/saio/swift/container-sync-realms.conf | 5 +++++ doc/saio/swift/proxy-server.conf | 6 +++++- doc/source/development_saio.rst | 4 ++++ 3 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 doc/saio/swift/container-sync-realms.conf diff --git a/doc/saio/swift/container-sync-realms.conf b/doc/saio/swift/container-sync-realms.conf new file mode 100644 index 0000000000..503a71c4f1 --- /dev/null +++ b/doc/saio/swift/container-sync-realms.conf @@ -0,0 +1,5 @@ +[saio] +key = changeme +key2 = changeme +cluster_saio_endpoint = http://127.0.0.1:8080/v1/ + diff --git a/doc/saio/swift/proxy-server.conf b/doc/saio/swift/proxy-server.conf index c25e0ed90d..d9e5c95148 100644 --- a/doc/saio/swift/proxy-server.conf +++ b/doc/saio/swift/proxy-server.conf @@ -9,7 +9,7 @@ eventlet_debug = true [pipeline:main] # Yes, proxy-logging appears twice. This is so that # middleware-originated requests get logged too. 
-pipeline = catch_errors gatekeeper healthcheck proxy-logging cache bulk tempurl ratelimit crossdomain tempauth staticweb container-quotas account-quotas slo dlo versioned_writes proxy-logging proxy-server +pipeline = catch_errors gatekeeper healthcheck proxy-logging cache bulk tempurl ratelimit crossdomain container_sync tempauth staticweb container-quotas account-quotas slo dlo versioned_writes proxy-logging proxy-server [filter:catch_errors] use = egg:swift#catch_errors @@ -35,6 +35,10 @@ use = egg:swift#dlo [filter:slo] use = egg:swift#slo +[filter:container_sync] +use = egg:swift#container_sync +current = //saio/saio_endpoint + [filter:tempurl] use = egg:swift#tempurl diff --git a/doc/source/development_saio.rst b/doc/source/development_saio.rst index e406fb02ef..0f391266ce 100644 --- a/doc/source/development_saio.rst +++ b/doc/source/development_saio.rst @@ -371,6 +371,10 @@ commands are as follows: .. literalinclude:: /../saio/swift/container-reconciler.conf + #. ``/etc/swift/container-sync-realms.conf`` + + .. literalinclude:: /../saio/swift/container-sync-realms.conf + #. ``/etc/swift/account-server/1.conf`` .. literalinclude:: /../saio/swift/account-server/1.conf From 4500ff340f021e4e5f6fd53960de03cb3c207925 Mon Sep 17 00:00:00 2001 From: Kota Tsuyuzaki Date: Wed, 13 May 2015 00:43:59 -0700 Subject: [PATCH 52/70] Fix the missing SLO state on fast-post When using fast-post and POST (i.e. metadata update) is requested to a SLO manifest files, current Swift drops the 'X-Static-Large-Object' header from the existing metadata. It results in breaking the SLO state because the manifest missing the 'X-Static-Large-Object' metadata will be maintained as a normal files. This patch fixes object-server to keep the existing 'X-Static-Large-Object' flag and then keep the SLO state. Change-Id: Ib1eb569071372c322dd105c52baeeb094003291e Closes-bug: #1453807 --- swift/obj/server.py | 6 ++++++ test/functional/tests.py | 4 ++-- test/unit/obj/test_server.py | 7 +++++-- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/swift/obj/server.py b/swift/obj/server.py index d16ec50f54..e0d43920c5 100644 --- a/swift/obj/server.py +++ b/swift/obj/server.py @@ -444,6 +444,11 @@ class ObjectController(BaseStorageServer): override = key.lower().replace(override_prefix, 'x-') update_headers[override] = val + def _preserve_slo_manifest(self, update_metadata, orig_metadata): + if 'X-Static-Large-Object' in orig_metadata: + update_metadata['X-Static-Large-Object'] = \ + orig_metadata['X-Static-Large-Object'] + @public @timing_stats() def POST(self, request): @@ -473,6 +478,7 @@ class ObjectController(BaseStorageServer): request=request, headers={'X-Backend-Timestamp': orig_timestamp.internal}) metadata = {'X-Timestamp': req_timestamp.internal} + self._preserve_slo_manifest(metadata, orig_metadata) metadata.update(val for val in request.headers.items() if is_user_meta('object', val[0])) for header_key in self.allowed_headers: diff --git a/test/functional/tests.py b/test/functional/tests.py index 8c64d8d12d..d72e665f60 100644 --- a/test/functional/tests.py +++ b/test/functional/tests.py @@ -2450,8 +2450,6 @@ class TestSlo(Base): self.fail("COPY didn't copy the manifest (invalid json on GET)") def _make_manifest(self): - # To avoid the bug 1453807 on fast-post, make a new manifest - # for post test. 
file_item = self.env.container.file("manifest-post") seg_info = self.env.seg_info file_item.write( @@ -2473,6 +2471,7 @@ class TestSlo(Base): updated = self.env.container.file("manifest-post") updated.info() updated.header_fields([('user-meta', 'x-object-meta-post')]) # sanity + updated.header_fields([('slo', 'x-static-large-object')]) updated_contents = updated.read(parms={'multipart-manifest': 'get'}) try: json.loads(updated_contents) @@ -2493,6 +2492,7 @@ class TestSlo(Base): updated.info() updated.header_fields( [('user-meta', 'x-object-meta-post')]) # sanity + updated.header_fields([('slo', 'x-static-large-object')]) updated_contents = updated.read( parms={'multipart-manifest': 'get'}) try: diff --git a/test/unit/obj/test_server.py b/test/unit/obj/test_server.py index 1fb966c991..3a5119369b 100755 --- a/test/unit/obj/test_server.py +++ b/test/unit/obj/test_server.py @@ -253,6 +253,7 @@ class TestObjectController(unittest.TestCase): 'X-Object-Manifest': 'c/bar', 'Content-Encoding': 'gzip', 'Content-Disposition': 'bar', + 'X-Static-Large-Object': 'True', }) req.body = 'VERIFY' resp = req.get_response(self.object_controller) @@ -263,7 +264,8 @@ class TestObjectController(unittest.TestCase): "Foo" not in resp.headers and "Content-Encoding" in resp.headers and "X-Object-Manifest" in resp.headers and - "Content-Disposition" in resp.headers) + "Content-Disposition" in resp.headers and + "X-Static-Large-Object" in resp.headers) self.assertEqual(resp.headers['Content-Type'], 'application/x-test') timestamp = normalize_timestamp(time()) @@ -282,7 +284,8 @@ class TestObjectController(unittest.TestCase): "Content-Encoding" not in resp.headers and "X-Object-Manifest" not in resp.headers and "Content-Disposition" not in resp.headers and - "X-Object-Meta-3" in resp.headers) + "X-Object-Meta-3" in resp.headers and + "X-Static-Large-Object" in resp.headers) self.assertEqual(resp.headers['Content-Type'], 'application/x-test') # Test for empty metadata From ae129bf5e8c918efd8ee66279f98ee85e7c97b24 Mon Sep 17 00:00:00 2001 From: Kota Tsuyuzaki Date: Fri, 15 May 2015 06:44:12 -0700 Subject: [PATCH 53/70] eventlet.posthook related code needn't anymore In the past, older Swift seemed to use eventlet.posthooks scheme for the logging on proxy with posthooklogger method. However, for now, Swift attaches no method to eventlet.posthooks everywhere so we don't have to maintain the posthooks anymore. Change-Id: Ie63941a202d448532d980252a4d25575f8edab9c --- swift/common/wsgi.py | 4 ++-- swift/proxy/controllers/obj.py | 7 +++++++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/swift/common/wsgi.py b/swift/common/wsgi.py index 3a75e7197f..e7504355b6 100644 --- a/swift/common/wsgi.py +++ b/swift/common/wsgi.py @@ -1084,8 +1084,8 @@ def make_env(env, method=None, path=None, agent='Swift', query_string=None, :returns: Fresh WSGI environment. 
""" newenv = {} - for name in ('eventlet.posthooks', 'HTTP_USER_AGENT', 'HTTP_HOST', - 'PATH_INFO', 'QUERY_STRING', 'REMOTE_USER', 'REQUEST_METHOD', + for name in ('HTTP_USER_AGENT', 'HTTP_HOST', 'PATH_INFO', + 'QUERY_STRING', 'REMOTE_USER', 'REQUEST_METHOD', 'SCRIPT_NAME', 'SERVER_NAME', 'SERVER_PORT', 'HTTP_ORIGIN', 'HTTP_ACCESS_CONTROL_REQUEST_METHOD', 'SERVER_PROTOCOL', 'swift.cache', 'swift.source', diff --git a/swift/proxy/controllers/obj.py b/swift/proxy/controllers/obj.py index 9633c4b271..ac45324f87 100644 --- a/swift/proxy/controllers/obj.py +++ b/swift/proxy/controllers/obj.py @@ -818,6 +818,13 @@ class BaseObjectController(Controller): self.object_name = dest_object # re-write the existing request as a PUT instead of creating a new one # since this one is already attached to the posthooklogger + # TODO: Swift now has proxy-logging middleware instead of + # posthooklogger used in before. i.e. we don't have to + # keep the code depends on evnetlet.posthooks sequence, IMHO. + # However, creating a new sub request might + # cause the possibility to hide some bugs behindes the request + # so that we should discuss whichi is suitable (new-sub-request + # vs re-write-existing-request) for Swift. [kota_] req.method = 'PUT' req.path_info = '/v1/%s/%s/%s' % \ (dest_account, dest_container, dest_object) From a38f63e1c6b8b85b1675aa900e239a2e9906811e Mon Sep 17 00:00:00 2001 From: Clay Gerrard Date: Fri, 21 Aug 2015 18:14:55 -0700 Subject: [PATCH 54/70] Use correct Storage-Policy header for REPLICATE requests Under some concurrency the object-replicator could potentially send the wrong X-Backed-Storage-Policy-Index header to it's partner nodes during replication if there were multiple storage policies on the same node because of a race where multiple jobs being processed concurrently would mutate some shared state on the ObjectReplicator instance. Instead of using shared stated on the ObjectReplicator instance when mutating the default headers send with REPLICATION requests each job will copy them into a local where they can safely be updated. 
Change-Id: I5522db57af7e308b1f9d4181f14ea14e386a71fd --- swift/obj/replicator.py | 14 ++-- test/unit/obj/test_replicator.py | 106 ++++++++++++++++++++++++++++++- 2 files changed, 111 insertions(+), 9 deletions(-) diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index 6526b14038..8ae393088b 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -90,7 +90,7 @@ class ObjectReplicator(Daemon): self.node_timeout = float(conf.get('node_timeout', 10)) self.sync_method = getattr(self, conf.get('sync_method') or 'rsync') self.network_chunk_size = int(conf.get('network_chunk_size', 65536)) - self.headers = { + self.default_headers = { 'Content-Length': '0', 'user-agent': 'object-replicator %s' % os.getpid()} self.rsync_error_log_line_length = \ @@ -270,7 +270,8 @@ class ObjectReplicator(Daemon): if len(suff) == 3 and isdir(join(path, suff))] self.replication_count += 1 self.logger.increment('partition.delete.count.%s' % (job['device'],)) - self.headers['X-Backend-Storage-Policy-Index'] = int(job['policy']) + headers = dict(self.default_headers) + headers['X-Backend-Storage-Policy-Index'] = int(job['policy']) failure_devs_info = set() begin = time.time() try: @@ -296,7 +297,7 @@ class ObjectReplicator(Daemon): node['replication_ip'], node['replication_port'], node['device'], job['partition'], 'REPLICATE', - '/' + '-'.join(suffixes), headers=self.headers) + '/' + '-'.join(suffixes), headers=headers) conn.getresponse().read() if node['region'] != job['region']: synced_remote_regions[node['region']] = \ @@ -382,7 +383,8 @@ class ObjectReplicator(Daemon): """ self.replication_count += 1 self.logger.increment('partition.update.count.%s' % (job['device'],)) - self.headers['X-Backend-Storage-Policy-Index'] = int(job['policy']) + headers = dict(self.default_headers) + headers['X-Backend-Storage-Policy-Index'] = int(job['policy']) target_devs_info = set() failure_devs_info = set() begin = time.time() @@ -414,7 +416,7 @@ class ObjectReplicator(Daemon): resp = http_connect( node['replication_ip'], node['replication_port'], node['device'], job['partition'], 'REPLICATE', - '', headers=self.headers).getresponse() + '', headers=headers).getresponse() if resp.status == HTTP_INSUFFICIENT_STORAGE: self.logger.error(_('%(ip)s/%(device)s responded' ' as unmounted'), node) @@ -454,7 +456,7 @@ class ObjectReplicator(Daemon): node['replication_ip'], node['replication_port'], node['device'], job['partition'], 'REPLICATE', '/' + '-'.join(suffixes), - headers=self.headers) + headers=headers) conn.getresponse().read() if not success: failure_devs_info.add((node['replication_ip'], diff --git a/test/unit/obj/test_replicator.py b/test/unit/obj/test_replicator.py index e90b2bc137..27f06c9608 100644 --- a/test/unit/obj/test_replicator.py +++ b/test/unit/obj/test_replicator.py @@ -22,12 +22,14 @@ import six.moves.cPickle as pickle import time import tempfile from contextlib import contextmanager, closing +from collections import defaultdict from errno import ENOENT, ENOTEMPTY, ENOTDIR from eventlet.green import subprocess from eventlet import Timeout, tpool -from test.unit import debug_logger, patch_policies +from test.unit import (debug_logger, patch_policies, make_timestamp_iter, + mocked_http_conn) from swift.common import utils from swift.common.utils import hash_path, mkdirs, normalize_timestamp, \ storage_directory @@ -76,6 +78,7 @@ class MockProcess(object): ret_code = None ret_log = None check_args = None + captured_log = None class Stream(object): @@ -99,20 +102,32 @@ class MockProcess(object): 
if targ not in args[0]: process_errors.append("Invalid: %s not in %s" % (targ, args)) + self.captured_info = { + 'rsync_args': args[0], + } self.stdout = self.Stream() def wait(self): - return next(self.ret_code) + # the _mock_process context manager assures this class attribute is a + # mutable list and takes care of resetting it + rv = next(self.ret_code) + if self.captured_log is not None: + self.captured_info['ret_code'] = rv + self.captured_log.append(self.captured_info) + return rv @contextmanager def _mock_process(ret): + captured_log = [] + MockProcess.captured_log = captured_log orig_process = subprocess.Popen MockProcess.ret_code = (i[0] for i in ret) MockProcess.ret_log = (i[1] for i in ret) MockProcess.check_args = (i[2] for i in ret) object_replicator.subprocess.Popen = MockProcess - yield + yield captured_log + MockProcess.captured_log = None object_replicator.subprocess.Popen = orig_process @@ -180,8 +195,10 @@ class TestObjectReplicator(unittest.TestCase): swift_dir=self.testdir, devices=self.devices, mount_check='false', timeout='300', stats_interval='1', sync_method='rsync') self._create_replicator() + self.ts = make_timestamp_iter() def tearDown(self): + self.assertFalse(process_errors) rmtree(self.testdir, ignore_errors=1) def test_handoff_replication_setting_warnings(self): @@ -659,6 +676,89 @@ class TestObjectReplicator(unittest.TestCase): self.assertTrue(jobs[0]['delete']) self.assertEqual('1', jobs[0]['partition']) + def test_handoffs_first_mode_will_process_all_jobs_after_handoffs(self): + # make a object in the handoff & primary partition + expected_suffix_paths = [] + for policy in POLICIES: + # primary + ts = next(self.ts) + df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o', policy) + with df.create() as w: + w.write('asdf') + w.put({'X-Timestamp': ts.internal}) + w.commit(ts) + expected_suffix_paths.append(os.path.dirname(df._datadir)) + # handoff + ts = next(self.ts) + df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o', policy) + with df.create() as w: + w.write('asdf') + w.put({'X-Timestamp': ts.internal}) + w.commit(ts) + expected_suffix_paths.append(os.path.dirname(df._datadir)) + + # rsync will be called for all parts we created objects in + process_arg_checker = [ + # (return_code, stdout, ) + (0, '', []), + (0, '', []), + (0, '', []), # handoff job "first" policy + (0, '', []), + (0, '', []), + (0, '', []), # handoff job "second" policy + (0, '', []), + (0, '', []), # update job "first" policy + (0, '', []), + (0, '', []), # update job "second" policy + ] + # each handoff partition node gets one replicate request for after + # rsync (2 * 3), each primary partition with objects gets two + # replicate requests (pre-flight and post sync) to each of each + # partners (2 * 2 * 2), the 2 remaining empty parts (2 & 3) get a + # pre-flight replicate request per node for each storage policy + # (2 * 2 * 2) - so 6 + 8 + 8 == 22 + replicate_responses = [200] * 22 + stub_body = pickle.dumps({}) + with _mock_process(process_arg_checker) as rsync_log, \ + mock.patch('swift.obj.replicator.whataremyips', + side_effect=_ips), \ + mocked_http_conn(*replicate_responses, + body=stub_body) as conn_log: + self.replicator.handoffs_first = True + self.replicator.replicate() + # all jobs processed! 
+ self.assertEqual(self.replicator.job_count, + self.replicator.replication_count) + + # sanity, all the handoffs suffixes we filled in were rsync'd + found_rsync_suffix_paths = set() + for subprocess_info in rsync_log: + local_path, remote_path = subprocess_info['rsync_args'][-2:] + found_rsync_suffix_paths.add(local_path) + self.assertEqual(set(expected_suffix_paths), found_rsync_suffix_paths) + # sanity, all nodes got replicated + found_replicate_calls = defaultdict(int) + for req in conn_log.requests: + self.assertEqual(req['method'], 'REPLICATE') + found_replicate_key = ( + int(req['headers']['X-Backend-Storage-Policy-Index']), + req['path']) + found_replicate_calls[found_replicate_key] += 1 + expected_replicate_calls = { + (0, '/sda/1/a83'): 3, + (1, '/sda/1/a83'): 3, + (0, '/sda/0'): 2, + (0, '/sda/0/a83'): 2, + (1, '/sda/0'): 2, + (1, '/sda/0/a83'): 2, + (0, '/sda/2'): 2, + (1, '/sda/2'): 2, + (0, '/sda/3'): 2, + (1, '/sda/3'): 2, + } + self.assertEquals(dict(found_replicate_calls), + expected_replicate_calls) + def test_replicator_skips_bogus_partition_dirs(self): # A directory in the wrong place shouldn't crash the replicator rmtree(self.objects) From 6151c6c45da481c0ea9a4fb876d14b39c1e347bf Mon Sep 17 00:00:00 2001 From: Tim Burke Date: Mon, 24 Aug 2015 20:36:22 +0000 Subject: [PATCH 55/70] Fix use of delimiter in account listings Previously, account listings that used the delimiter query param could omit some containers if they ended with the character that follows the delimiter. See If196e3075612b121ef8da4a9128167d00a248c27 for the corresponding fix for container listings. Change-Id: I57fcb97e51f653f5f4e306a632fcb3a0fb148c4e --- swift/account/backend.py | 12 ++++++++++-- test/unit/account/test_backend.py | 15 ++++++++------- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/swift/account/backend.py b/swift/account/backend.py index 3e3947f565..c91f37fe9b 100644 --- a/swift/account/backend.py +++ b/swift/account/backend.py @@ -380,6 +380,7 @@ class AccountBroker(DatabaseBroker): :returns: list of tuples of (name, object_count, bytes_used, 0) """ + delim_force_gte = False (marker, end_marker, prefix, delimiter) = utf8encode( marker, end_marker, prefix, delimiter) self._commit_puts_stale_ok() @@ -392,12 +393,17 @@ class AccountBroker(DatabaseBroker): query = """ SELECT name, object_count, bytes_used, 0 FROM container - WHERE deleted = 0 AND """ + WHERE """ query_args = [] if end_marker: query += ' name < ? AND' query_args.append(end_marker) - if marker and marker >= prefix: + if delim_force_gte: + query += ' name >= ? AND' + query_args.append(marker) + # Always set back to False + delim_force_gte = False + elif marker and marker >= prefix: query += ' name > ? 
AND' query_args.append(marker) elif prefix: @@ -437,6 +443,8 @@ class AccountBroker(DatabaseBroker): end = name.find(delimiter, len(prefix)) if end > 0: marker = name[:end] + chr(ord(delimiter) + 1) + # we want result to be inclusive of delim+1 + delim_force_gte = True dir_name = name[:end + 1] if dir_name != orig_marker: results.append([dir_name, 0, 0, 1]) diff --git a/test/unit/account/test_backend.py b/test/unit/account/test_backend.py index 5571b50e15..8226195484 100644 --- a/test/unit/account/test_backend.py +++ b/test/unit/account/test_backend.py @@ -486,6 +486,11 @@ class TestAccountBroker(unittest.TestCase): POLICIES.default.idx) broker.put_container('a-b', Timestamp(time()).internal, 0, 0, 0, POLICIES.default.idx) + # NB: ord(".") == ord("-") + 1 + broker.put_container('a.', Timestamp(time()).internal, 0, 0, 0, + POLICIES.default.idx) + broker.put_container('a.b', Timestamp(time()).internal, 0, 0, 0, + POLICIES.default.idx) broker.put_container('b', Timestamp(time()).internal, 0, 0, 0, POLICIES.default.idx) broker.put_container('b-a', Timestamp(time()).internal, 0, 0, 0, @@ -495,20 +500,16 @@ class TestAccountBroker(unittest.TestCase): broker.put_container('c', Timestamp(time()).internal, 0, 0, 0, POLICIES.default.idx) listing = broker.list_containers_iter(15, None, None, None, None) - self.assertEqual(len(listing), 10) self.assertEqual([row[0] for row in listing], - ['a', 'a-', 'a-a', 'a-a-a', 'a-a-b', 'a-b', 'b', - 'b-a', 'b-b', 'c']) + ['a', 'a-', 'a-a', 'a-a-a', 'a-a-b', 'a-b', 'a.', + 'a.b', 'b', 'b-a', 'b-b', 'c']) listing = broker.list_containers_iter(15, None, None, '', '-') - self.assertEqual(len(listing), 5) self.assertEqual([row[0] for row in listing], - ['a', 'a-', 'b', 'b-', 'c']) + ['a', 'a-', 'a.', 'a.b', 'b', 'b-', 'c']) listing = broker.list_containers_iter(15, None, None, 'a-', '-') - self.assertEqual(len(listing), 4) self.assertEqual([row[0] for row in listing], ['a-', 'a-a', 'a-a-', 'a-b']) listing = broker.list_containers_iter(15, None, None, 'b-', '-') - self.assertEqual(len(listing), 2) self.assertEqual([row[0] for row in listing], ['b-a', 'b-b']) def test_chexor(self): From edde5584affaa983d8db1d294bf1e20a2d4bbb50 Mon Sep 17 00:00:00 2001 From: Hisashi Osanai Date: Tue, 25 Aug 2015 07:52:18 +0900 Subject: [PATCH 56/70] Fix typo of a comment in replicator The typo was introduced by patch 138342 (sorry) so I fix it. Change-Id: Id5126802d281ef7ee9be128bd2152c0d2584160e --- swift/obj/replicator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/swift/obj/replicator.py b/swift/obj/replicator.py index 6526b14038..d2369a190e 100644 --- a/swift/obj/replicator.py +++ b/swift/obj/replicator.py @@ -326,7 +326,7 @@ class ObjectReplicator(Daemon): len(delete_objs)) _junk, error_paths = self.delete_handoff_objs( job, delete_objs) - # if replication works for a hand-off device and it faild, + # if replication works for a hand-off device and it failed, # the remote devices which are target of the replication # from the hand-off device will be marked. Because cleanup # after replication failed means replicator needs to From 6a35d479e8952c854fee6fbef9fe0397a289a6e5 Mon Sep 17 00:00:00 2001 From: Matthew Oliver Date: Tue, 25 Aug 2015 11:24:49 +1000 Subject: [PATCH 57/70] Follow up patch to fix a multiline import NITPIC This change cleans up test/unit/obj/test_replicator.py's imports to use only 1 version of multiline import syntaxes (' \' vs '()'). I don't really mind which, but we should be consistant, at least in the same file. 
This is a follow up for patch 215857. Change-Id: Ie2d328c25865b19092c493981a803ee246a9d7a5 --- test/unit/obj/test_replicator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/unit/obj/test_replicator.py b/test/unit/obj/test_replicator.py index 27f06c9608..6220e9bff5 100644 --- a/test/unit/obj/test_replicator.py +++ b/test/unit/obj/test_replicator.py @@ -31,8 +31,8 @@ from eventlet import Timeout, tpool from test.unit import (debug_logger, patch_policies, make_timestamp_iter, mocked_http_conn) from swift.common import utils -from swift.common.utils import hash_path, mkdirs, normalize_timestamp, \ - storage_directory +from swift.common.utils import (hash_path, mkdirs, normalize_timestamp, + storage_directory) from swift.common import ring from swift.obj import diskfile, replicator as object_replicator from swift.common.storage_policy import StoragePolicy, POLICIES From a7b84f4c51207608adc3a5de7f4f9a629053c0ab Mon Sep 17 00:00:00 2001 From: Samuel Merritt Date: Mon, 24 Aug 2015 23:11:34 -0700 Subject: [PATCH 58/70] Allow pep8 of a single file Now you can run $ tox -e pep8 path/to/file.py [path/to/file2.py [...]] to run pep8 against just those files[1]. This is quite a bit faster than a full pep8 run, and the faster feedback is nice when you're fiddling with some formatting to placate pep8. Of course, you can still run "tox -e pep8" to check the whole source tree, just as before this commit. [1] It'll still run against bin/swift* as well, but that's still a lot faster than running against all our .py files. Change-Id: I81b4363fb95a34ff0f5c346b2b24f2047154f502 --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 50236788c4..ca70f1eb28 100644 --- a/tox.ini +++ b/tox.ini @@ -27,7 +27,7 @@ downloadcache = ~/cache/pip [testenv:pep8] commands = - flake8 swift test doc setup.py + flake8 {posargs:swift test doc setup.py} flake8 --filename=swift* bin [testenv:py3pep8] From 25dc7224b63b039cb3778cca7f9f81f954c6d94e Mon Sep 17 00:00:00 2001 From: Alistair Coles Date: Tue, 25 Aug 2015 11:05:41 +0100 Subject: [PATCH 59/70] Fix swob.Range docstring Bad ranges cause a ValueError to be raised, not an empty ranges list. Change-Id: I118bd2f7dc08ff5198870f4093c6eb350506c8ed --- swift/common/swob.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/swift/common/swob.py b/swift/common/swob.py index 434fc015f4..fdcbaf3e9b 100644 --- a/swift/common/swob.py +++ b/swift/common/swob.py @@ -480,8 +480,8 @@ class Range(object): After initialization, "range.ranges" is populated with a list of (start, end) tuples denoting the requested ranges. - If there were any syntactically-invalid byte-range-spec values, - "range.ranges" will be an empty list, per the relevant RFC: + If there were any syntactically-invalid byte-range-spec values, the + constructor will raise a ValueError, per the relevant RFC: "The recipient of a byte-range-set that includes one or more syntactically invalid byte-range-spec values MUST ignore the header field that includes From 17efa343c605d0361b3f423696babbab3f3d972d Mon Sep 17 00:00:00 2001 From: Kota Tsuyuzaki Date: Mon, 6 Jul 2015 13:21:40 -0700 Subject: [PATCH 60/70] Fix EC GET backend stream iteration state In EC case, When GET object requested, proxy-server always makes a log line "Client disconnected on read" even though the request succeeded. That is because ECAppIter class doesn't maintain a bunch of backend stream when closing the app_iter. 
It will cause unfortunately GeneratorExit on backend stream ResumingGetter. This patch fixes to set non_client_disconnected to propagate the state to the backend streams when the range iteration stopped successful. Co-Authored-By: Clay Gerrard Change-Id: I77af9807816bea1444d66534a17e2a210bcf09f8 Closes-Bug: #1472201 --- swift/proxy/controllers/base.py | 1 + swift/proxy/controllers/obj.py | 2 + test/unit/proxy/test_server.py | 98 ++++++++++++++++++++++++++++++++- 3 files changed, 100 insertions(+), 1 deletion(-) diff --git a/swift/proxy/controllers/base.py b/swift/proxy/controllers/base.py index 554469cc06..51831416b7 100644 --- a/swift/proxy/controllers/base.py +++ b/swift/proxy/controllers/base.py @@ -922,6 +922,7 @@ class ResumingGetter(object): 'part_iter': part_iter} self.pop_range() except StopIteration: + req.environ['swift.non_client_disconnect'] = True return except ChunkReadTimeout: diff --git a/swift/proxy/controllers/obj.py b/swift/proxy/controllers/obj.py index 2aac83f2e5..e2a15d0303 100644 --- a/swift/proxy/controllers/obj.py +++ b/swift/proxy/controllers/obj.py @@ -1290,6 +1290,8 @@ class ECAppIter(object): # 100-byte object with 1024-byte segments. That's not # what we're dealing with here, though. if client_asked_for_range and not satisfiable: + req.environ[ + 'swift.non_client_disconnect'] = True raise HTTPRequestedRangeNotSatisfiable( request=req, headers=resp_headers) self.learned_content_type = content_type diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py index 85c27fa2ed..7460a95bf3 100644 --- a/test/unit/proxy/test_server.py +++ b/test/unit/proxy/test_server.py @@ -1133,6 +1133,8 @@ class TestObjectController(unittest.TestCase): logger=debug_logger('proxy-ut'), account_ring=FakeRing(), container_ring=FakeRing()) + # clear proxy logger result for each test + _test_servers[0].logger._clear() def tearDown(self): self.app.account_ring.set_replicas(3) @@ -2015,6 +2017,7 @@ class TestObjectController(unittest.TestCase): obj = '0123456' * 11 * 17 prolis = _test_sockets[0] + prosrv = _test_servers[0] sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('PUT /v1/a/ec-con/go-get-it HTTP/1.1\r\n' @@ -2054,6 +2057,10 @@ class TestObjectController(unittest.TestCase): break gotten_obj += buf self.assertEqual(gotten_obj, obj) + error_lines = prosrv.logger.get_lines_for_level('error') + warn_lines = prosrv.logger.get_lines_for_level('warning') + self.assertEquals(len(error_lines), 0) # sanity + self.assertEquals(len(warn_lines), 0) # sanity @unpatch_policies def test_conditional_GET_ec(self): @@ -2079,7 +2086,7 @@ class TestObjectController(unittest.TestCase): exp = 'HTTP/1.1 201' self.assertEqual(headers[:len(exp)], exp) - for verb in ('GET', 'HEAD'): + for verb, body in (('GET', obj), ('HEAD', '')): # If-Match req = Request.blank( '/v1/a/ec-con/conditionals', @@ -2087,6 +2094,7 @@ class TestObjectController(unittest.TestCase): headers={'If-Match': etag}) resp = req.get_response(prosrv) self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.body, body) req = Request.blank( '/v1/a/ec-con/conditionals', @@ -2101,6 +2109,7 @@ class TestObjectController(unittest.TestCase): headers={'If-Match': "*"}) resp = req.get_response(prosrv) self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.body, body) # If-None-Match req = Request.blank( @@ -2116,6 +2125,7 @@ class TestObjectController(unittest.TestCase): headers={'If-None-Match': not_etag}) resp = req.get_response(prosrv) self.assertEqual(resp.status_int, 200) + 
self.assertEqual(resp.body, body) req = Request.blank( '/v1/a/ec-con/conditionals', @@ -2123,6 +2133,10 @@ class TestObjectController(unittest.TestCase): headers={'If-None-Match': "*"}) resp = req.get_response(prosrv) self.assertEqual(resp.status_int, 304) + error_lines = prosrv.logger.get_lines_for_level('error') + warn_lines = prosrv.logger.get_lines_for_level('warning') + self.assertEquals(len(error_lines), 0) # sanity + self.assertEquals(len(warn_lines), 0) # sanity @unpatch_policies def test_GET_ec_big(self): @@ -2136,6 +2150,7 @@ class TestObjectController(unittest.TestCase): "object is too small for proper testing") prolis = _test_sockets[0] + prosrv = _test_servers[0] sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('PUT /v1/a/ec-con/big-obj-get HTTP/1.1\r\n' @@ -2177,6 +2192,10 @@ class TestObjectController(unittest.TestCase): # of garbage and demolishes your terminal's scrollback buffer. self.assertEqual(len(gotten_obj), len(obj)) self.assertEqual(gotten_obj, obj) + error_lines = prosrv.logger.get_lines_for_level('error') + warn_lines = prosrv.logger.get_lines_for_level('warning') + self.assertEquals(len(error_lines), 0) # sanity + self.assertEquals(len(warn_lines), 0) # sanity @unpatch_policies def test_GET_ec_failure_handling(self): @@ -2261,6 +2280,7 @@ class TestObjectController(unittest.TestCase): obj = '0123456' * 11 * 17 prolis = _test_sockets[0] + prosrv = _test_servers[0] sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('PUT /v1/a/ec-con/go-head-it HTTP/1.1\r\n' @@ -2292,12 +2312,17 @@ class TestObjectController(unittest.TestCase): self.assertEqual(str(len(obj)), headers['Content-Length']) self.assertEqual(md5(obj).hexdigest(), headers['Etag']) self.assertEqual('chartreuse', headers['X-Object-Meta-Color']) + error_lines = prosrv.logger.get_lines_for_level('error') + warn_lines = prosrv.logger.get_lines_for_level('warning') + self.assertEquals(len(error_lines), 0) # sanity + self.assertEquals(len(warn_lines), 0) # sanity @unpatch_policies def test_GET_ec_404(self): self.put_container("ec", "ec-con") prolis = _test_sockets[0] + prosrv = _test_servers[0] sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('GET /v1/a/ec-con/yes-we-have-no-bananas HTTP/1.1\r\n' @@ -2309,12 +2334,17 @@ class TestObjectController(unittest.TestCase): headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 404' self.assertEqual(headers[:len(exp)], exp) + error_lines = prosrv.logger.get_lines_for_level('error') + warn_lines = prosrv.logger.get_lines_for_level('warning') + self.assertEquals(len(error_lines), 0) # sanity + self.assertEquals(len(warn_lines), 0) # sanity @unpatch_policies def test_HEAD_ec_404(self): self.put_container("ec", "ec-con") prolis = _test_sockets[0] + prosrv = _test_servers[0] sock = connect_tcp(('localhost', prolis.getsockname()[1])) fd = sock.makefile() fd.write('HEAD /v1/a/ec-con/yes-we-have-no-bananas HTTP/1.1\r\n' @@ -2326,6 +2356,10 @@ class TestObjectController(unittest.TestCase): headers = readuntil2crlfs(fd) exp = 'HTTP/1.1 404' self.assertEqual(headers[:len(exp)], exp) + error_lines = prosrv.logger.get_lines_for_level('error') + warn_lines = prosrv.logger.get_lines_for_level('warning') + self.assertEquals(len(error_lines), 0) # sanity + self.assertEquals(len(warn_lines), 0) # sanity def test_PUT_expect_header_zero_content_length(self): test_errors = [] @@ -5381,6 +5415,62 @@ class TestObjectController(unittest.TestCase): finally: time.time = orig_time + 
@unpatch_policies + def test_ec_client_disconnect(self): + prolis = _test_sockets[0] + + # create connection + sock = connect_tcp(('localhost', prolis.getsockname()[1])) + fd = sock.makefile() + + # create container + fd.write('PUT /v1/a/ec-discon HTTP/1.1\r\n' + 'Host: localhost\r\n' + 'Content-Length: 0\r\n' + 'X-Storage-Token: t\r\n' + 'X-Storage-Policy: ec\r\n' + '\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 2' + self.assertEqual(headers[:len(exp)], exp) + + # create object + obj = 'a' * 4 * 64 * 2 ** 10 + fd.write('PUT /v1/a/ec-discon/test HTTP/1.1\r\n' + 'Host: localhost\r\n' + 'Content-Length: %d\r\n' + 'X-Storage-Token: t\r\n' + 'Content-Type: donuts\r\n' + '\r\n%s' % (len(obj), obj)) + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 201' + self.assertEqual(headers[:len(exp)], exp) + + # get object + fd.write('GET /v1/a/ec-discon/test HTTP/1.1\r\n' + 'Host: localhost\r\n' + 'Connection: close\r\n' + 'X-Storage-Token: t\r\n' + '\r\n') + fd.flush() + headers = readuntil2crlfs(fd) + exp = 'HTTP/1.1 200' + self.assertEqual(headers[:len(exp)], exp) + + # read most of the object, and disconnect + fd.read(10) + fd.close() + sock.close() + sleep(0) + + # check for disconnect message! + expected = ['Client disconnected on read'] * 2 + self.assertEqual( + _test_servers[0].logger.get_lines_for_level('warning'), + expected) + @unpatch_policies def test_leak_1(self): _request_instances = weakref.WeakKeyDictionary() @@ -5944,12 +6034,18 @@ class TestECMismatchedFA(unittest.TestCase): class TestObjectECRangedGET(unittest.TestCase): def setUp(self): + _test_servers[0].logger._clear() self.app = proxy_server.Application( None, FakeMemcache(), logger=debug_logger('proxy-ut'), account_ring=FakeRing(), container_ring=FakeRing()) + def tearDown(self): + prosrv = _test_servers[0] + self.assertFalse(prosrv.logger.get_lines_for_level('error')) + self.assertFalse(prosrv.logger.get_lines_for_level('warning')) + @classmethod def setUpClass(cls): cls.obj_name = 'range-get-test' From 8086a0e53406dc95856df2d3e015d3eaaf81380a Mon Sep 17 00:00:00 2001 From: Tushar Gohad Date: Tue, 18 Aug 2015 07:24:19 +0000 Subject: [PATCH 61/70] Restrict PyECLib version to 1.0.7 v1.0.9 rev of PyECLib replaces Jerasure with a native EC implementation (liberasurecode_rs_vand) as the default EC scheme. Going forward, Jerasure will not be bundled with PyPI version of PyECLib as it used to be, until v1.0.7. This is an interim change to Swift requirements until we get v1.0.9 PyECLib included into global-requirements and ready patches that change Swift default ec_type (for doc, config samples and unit tests) from "jerasure_rs_vand" to "liberasurecode_rs_vand." Change-Id: Ica4fee2cdea2bc7f5edd0e51ad637a4457faf3b4 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 35aab42f4b..e85555288d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,4 +10,4 @@ pastedeploy>=1.3.3 simplejson>=2.0.9 six>=1.9.0 xattr>=0.4 -PyECLib>=1.0.7 +PyECLib==1.0.7 # BSD From c690bcb68331818a04c94741b25f40cc40f7b3c4 Mon Sep 17 00:00:00 2001 From: Kazuhiro MIYAHARA Date: Mon, 17 Aug 2015 16:50:56 +0900 Subject: [PATCH 62/70] Fix dispersion-reports error message MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This patch fixes Swift to show message "No objects to query. Has swift-dispersion-populate been run?" for "swift-dispersion-report —object-only” with no container for object dispersion. 
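As a minimal sketch of the error handling described above (the helper name and the conn object are illustrative only; conn is assumed to expose the SimpleClient-style get_container() call shown in the diff below), the missing-container case is now detected from the HTTPError status code rather than from swiftclient's ClientException:

    import sys

    from eventlet.green import urllib2

    def dispersion_object_names(conn, container):
        # List dispersion_* objects; a 404 for the container just means
        # swift-dispersion-populate has not been run yet.
        try:
            return [o['name'] for o in conn.get_container(
                container, prefix='dispersion_', full_listing=True)[1]]
        except urllib2.HTTPError as err:
            if err.getcode() != 404:
                raise
            sys.stderr.write('No objects to query. Has '
                             'swift-dispersion-populate been run?\n')
            return []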
Change-Id: I82da56709cfc296a27f5180681709bc56adbc13d Closes-Bug: #1468120 --- bin/swift-dispersion-report | 8 ++-- test/unit/common/test_internal_client.py | 55 ++++++++++++++++++++++++ 2 files changed, 60 insertions(+), 3 deletions(-) diff --git a/bin/swift-dispersion-report b/bin/swift-dispersion-report index e81cb2b86b..31a1741dda 100755 --- a/bin/swift-dispersion-report +++ b/bin/swift-dispersion-report @@ -26,6 +26,7 @@ except ImportError: from eventlet import GreenPool, hubs, patcher, Timeout from eventlet.pools import Pool +from eventlet.green import urllib2 from swift.common import direct_client try: @@ -176,9 +177,10 @@ def object_dispersion_report(coropool, connpool, account, object_ring, try: objects = [o['name'] for o in conn.get_container( container, prefix='dispersion_', full_listing=True)[1]] - except ClientException as err: - if err.http_status != 404: - raise + except urllib2.HTTPError as err: + if err.getcode() != 404: + raise err + print >>stderr, 'No objects to query. Has ' \ 'swift-dispersion-populate been run?' stderr.flush() diff --git a/test/unit/common/test_internal_client.py b/test/unit/common/test_internal_client.py index 9fed678faf..3c817fbc9b 100644 --- a/test/unit/common/test_internal_client.py +++ b/test/unit/common/test_internal_client.py @@ -1264,6 +1264,61 @@ class TestSimpleClient(unittest.TestCase): self.assertEqual(mock_urlopen.call_count, 2) self.assertEqual([None, None], retval) + @mock.patch('eventlet.green.urllib2.urlopen') + def test_request_with_retries_with_HTTPError(self, mock_urlopen): + mock_response = mock.MagicMock() + mock_response.read.return_value = '' + c = internal_client.SimpleClient(url='http://127.0.0.1', token='token') + self.assertEqual(c.retries, 5) + + for request_method in 'GET PUT POST DELETE HEAD COPY'.split(): + mock_urlopen.reset_mock() + mock_urlopen.side_effect = urllib2.HTTPError(*[None] * 5) + with mock.patch('swift.common.internal_client.sleep') \ + as mock_sleep: + self.assertRaises(urllib2.HTTPError, + c.retry_request, request_method, retries=1) + self.assertEqual(mock_sleep.call_count, 1) + self.assertEqual(mock_urlopen.call_count, 2) + + @mock.patch('eventlet.green.urllib2.urlopen') + def test_request_container_with_retries_with_HTTPError(self, + mock_urlopen): + mock_response = mock.MagicMock() + mock_response.read.return_value = '' + c = internal_client.SimpleClient(url='http://127.0.0.1', token='token') + self.assertEqual(c.retries, 5) + + for request_method in 'GET PUT POST DELETE HEAD COPY'.split(): + mock_urlopen.reset_mock() + mock_urlopen.side_effect = urllib2.HTTPError(*[None] * 5) + with mock.patch('swift.common.internal_client.sleep') \ + as mock_sleep: + self.assertRaises(urllib2.HTTPError, + c.retry_request, request_method, + container='con', retries=1) + self.assertEqual(mock_sleep.call_count, 1) + self.assertEqual(mock_urlopen.call_count, 2) + + @mock.patch('eventlet.green.urllib2.urlopen') + def test_request_object_with_retries_with_HTTPError(self, + mock_urlopen): + mock_response = mock.MagicMock() + mock_response.read.return_value = '' + c = internal_client.SimpleClient(url='http://127.0.0.1', token='token') + self.assertEqual(c.retries, 5) + + for request_method in 'GET PUT POST DELETE HEAD COPY'.split(): + mock_urlopen.reset_mock() + mock_urlopen.side_effect = urllib2.HTTPError(*[None] * 5) + with mock.patch('swift.common.internal_client.sleep') \ + as mock_sleep: + self.assertRaises(urllib2.HTTPError, + c.retry_request, request_method, + container='con', name='obj', retries=1) + 
self.assertEqual(mock_sleep.call_count, 1) + self.assertEqual(mock_urlopen.call_count, 2) + def test_proxy(self): # check that proxy arg is passed through to the urllib Request scheme = 'http' From 10b2939b433a4a79b4f7b97640b3d208cacfeffb Mon Sep 17 00:00:00 2001 From: Clay Gerrard Date: Thu, 23 Jul 2015 22:36:21 -0700 Subject: [PATCH 63/70] Disallow unsafe tempurl operations to point to unauthorized data Do not allow PUT tempurls to create pointers to other data. Specifically disallow the creation of DLO object manifests by returning an error if a non-safe tempurl request includes an X-Object-Manifest header regardless of the value of the header. This prevents discoverability attacks which can use any PUT tempurl to probe for private data by creating a DLO object manifest and then using the PUT tempurl to head the object which would 404 if the prefix does not match any object data or form a valid DLO HEAD response if it does. This also prevents a tricky and potentially unexpected consequence of PUT tempurls which would make it unsafe to allow a user to download objects created by tempurl (even if they just created them) because the result of reading the object created via tempurl may not be the data which was uploaded. [CVE-2015-5223] Co-Authored-By: Kota Tsuyuzaki Change-Id: I11e68830009d3f6bff44ae4011a41b67139146f6 Closes-Bug: 1453948 --- swift/common/middleware/tempurl.py | 31 +++++++++++++++++- test/functional/tests.py | 36 +++++++++++++++++++++ test/unit/common/middleware/test_tempurl.py | 19 +++++++++++ 3 files changed, 85 insertions(+), 1 deletion(-) diff --git a/swift/common/middleware/tempurl.py b/swift/common/middleware/tempurl.py index cf3afe3de4..83c4e7b6c1 100644 --- a/swift/common/middleware/tempurl.py +++ b/swift/common/middleware/tempurl.py @@ -122,11 +122,13 @@ from urllib import urlencode from urlparse import parse_qs from swift.proxy.controllers.base import get_account_info, get_container_info -from swift.common.swob import HeaderKeyDict, HTTPUnauthorized +from swift.common.swob import HeaderKeyDict, HTTPUnauthorized, HTTPBadRequest from swift.common.utils import split_path, get_valid_utf8_str, \ register_swift_info, get_hmac, streq_const_time, quote +DISALLOWED_INCOMING_HEADERS = 'x-object-manifest' + #: Default headers to remove from incoming requests. Simply a whitespace #: delimited list of header names and names can optionally end with '*' to #: indicate a prefix match. DEFAULT_INCOMING_ALLOW_HEADERS is a list of @@ -230,6 +232,10 @@ class TempURL(object): #: The methods allowed with Temp URLs. self.methods = methods + self.disallowed_headers = set( + 'HTTP_' + h.upper().replace('-', '_') + for h in DISALLOWED_INCOMING_HEADERS.split()) + headers = DEFAULT_INCOMING_REMOVE_HEADERS if 'incoming_remove_headers' in conf: headers = conf['incoming_remove_headers'] @@ -323,6 +329,13 @@ class TempURL(object): for hmac in hmac_vals) if not is_valid_hmac: return self._invalid(env, start_response) + # disallowed headers prevent accidently allowing upload of a pointer + # to data that the PUT tempurl would not otherwise allow access for. + # It should be safe to provide a GET tempurl for data that an + # untrusted client just uploaded with a PUT tempurl. 
+ resp = self._clean_disallowed_headers(env, start_response) + if resp: + return resp self._clean_incoming_headers(env) env['swift.authorize'] = lambda req: None env['swift.authorize_override'] = True @@ -465,6 +478,22 @@ class TempURL(object): body = '401 Unauthorized: Temp URL invalid\n' return HTTPUnauthorized(body=body)(env, start_response) + def _clean_disallowed_headers(self, env, start_response): + """ + Validate the absense of disallowed headers for "unsafe" operations. + + :returns: None for safe operations or swob.HTTPBadResponse if the + request includes disallowed headers. + """ + if env['REQUEST_METHOD'] in ('GET', 'HEAD', 'OPTIONS'): + return + for h in env: + if h in self.disallowed_headers: + return HTTPBadRequest( + body='The header %r is not allowed in this tempurl' % + h[len('HTTP_'):].title().replace('_', '-'))( + env, start_response) + def _clean_incoming_headers(self, env): """ Removes any headers from the WSGI environment as per the diff --git a/test/functional/tests.py b/test/functional/tests.py index d72e665f60..0ec9a489cc 100644 --- a/test/functional/tests.py +++ b/test/functional/tests.py @@ -3108,6 +3108,42 @@ class TestTempurl(Base): self.assertTrue(new_obj.info(parms=put_parms, cfg={'no_auth_token': True})) + def test_PUT_manifest_access(self): + new_obj = self.env.container.file(Utils.create_name()) + + # give out a signature which allows a PUT to new_obj + expires = int(time.time()) + 86400 + sig = self.tempurl_sig( + 'PUT', expires, self.env.conn.make_path(new_obj.path), + self.env.tempurl_key) + put_parms = {'temp_url_sig': sig, + 'temp_url_expires': str(expires)} + + # try to create manifest pointing to some random container + try: + new_obj.write('', { + 'x-object-manifest': '%s/foo' % 'some_random_container' + }, parms=put_parms, cfg={'no_auth_token': True}) + except ResponseError as e: + self.assertEqual(e.status, 400) + else: + self.fail('request did not error') + + # create some other container + other_container = self.env.account.container(Utils.create_name()) + if not other_container.create(): + raise ResponseError(self.conn.response) + + # try to create manifest pointing to new container + try: + new_obj.write('', { + 'x-object-manifest': '%s/foo' % other_container + }, parms=put_parms, cfg={'no_auth_token': True}) + except ResponseError as e: + self.assertEqual(e.status, 400) + else: + self.fail('request did not error') + def test_HEAD(self): expires = int(time.time()) + 86400 sig = self.tempurl_sig( diff --git a/test/unit/common/middleware/test_tempurl.py b/test/unit/common/middleware/test_tempurl.py index c2d96518c5..b5638164ee 100644 --- a/test/unit/common/middleware/test_tempurl.py +++ b/test/unit/common/middleware/test_tempurl.py @@ -649,6 +649,25 @@ class TestTempURL(unittest.TestCase): self.assertTrue('Temp URL invalid' in resp.body) self.assertTrue('Www-Authenticate' in resp.headers) + def test_disallowed_header_object_manifest(self): + self.tempurl = tempurl.filter_factory({})(self.auth) + method = 'PUT' + expires = int(time() + 86400) + path = '/v1/a/c/o' + key = 'abc' + hmac_body = '%s\n%s\n%s' % (method, expires, path) + sig = hmac.new(key, hmac_body, sha1).hexdigest() + req = self._make_request( + path, method='PUT', keys=[key], + headers={'x-object-manifest': 'private/secret'}, + environ={'QUERY_STRING': 'temp_url_sig=%s&temp_url_expires=%s' % ( + sig, expires)}) + resp = req.get_response(self.tempurl) + self.assertEquals(resp.status_int, 400) + self.assertTrue('header' in resp.body) + self.assertTrue('not allowed' in resp.body) + 
self.assertTrue('X-Object-Manifest' in resp.body) + def test_removed_incoming_header(self): self.tempurl = tempurl.filter_factory({ 'incoming_remove_headers': 'x-remove-this'})(self.auth) From d4409c0a046c6ce0e14e18c95efe2cd16cf120e8 Mon Sep 17 00:00:00 2001 From: Samuel Merritt Date: Tue, 11 Aug 2015 09:10:13 -0500 Subject: [PATCH 64/70] Better scoping for tempurls, especially container tempurls It used to be that a GET of a tempurl referencing a large object would let you download that large object regardless of where its segments lived. However, this led to some violated user expectations around container tempurls. (Note on shorthand: all tempurls reference objects. However, "account tempurl" and "container tempurl" are shorthand meaning tempurls generated using a key on the account or container, respectively.) Let's say an application is given tempurl keys to a particular container, and it does all its work therein using those keys. The user expects that, if the application is compromised, then the attacker only gains access to the "compromised-container". However, with the old behavior, the attacker could read data from *any* container like so: 1) Choose a "victim-container" to download 2) Create PUT and GET tempurl for any object name within the "compromised-container". The object doesn't need to exist; we'll create it. 3) Using the PUT tempurl, upload a DLO manifest with "X-Object-Manifest: /victim-container/" 4) Using the GET tempurl, download the object created in step 3. The result will be the concatenation of all objects in the "victim-container". Step 3 need not be for all objects in the "victim-container"; for example, a value "X-Object-Manifest: /victim-container/abc" would only be the concatenation of all objects whose names begin with "abc". By probing for object names in this way, individual objects may be found and extracted. A similar bug would exist for manifests referencing other accounts except that neither the X-Object-Manifest (DLO) nor the JSON manifest document (SLO) have a way of specifying a different account. This change makes it so that a container tempurl only grants access to objects within its container, *including* large-object segments. This breaks backward compatibility for container tempurls that may have pointed to cross container *LO's, but (a) there are security implications, and (b) container tempurls are a relatively new feature. This works by having the tempurl middleware install an authorization callback ('swift.authorize' in the WSGI environment) that limits the scope of any requests to the account or container from which the key came. This requires swift.authorize to persist for both the manifest request and all segment requests; this is done by having the proxy server restore it to the WSGI environment prior to returning from __call__. 
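For reference, a tempurl signature is produced client-side as in the sketch below; it mirrors the hmac_body / tempurl_sig helpers used in the functional tests, and the key, account and path values are placeholders. With this change, whether the key was set on the account or on the container decides how far the resulting URL reaches:

    import hmac
    from hashlib import sha1
    from time import time

    key = 'my-temp-url-key'        # account- or container-level key
    method = 'GET'
    expires = int(time()) + 86400
    path = '/v1/AUTH_test/container/object'

    # signature format checked by the tempurl middleware
    hmac_body = '%s\n%s\n%s' % (method, expires, path)
    sig = hmac.new(key, hmac_body, sha1).hexdigest()
    url = '%s?temp_url_sig=%s&temp_url_expires=%d' % (path, sig, expires)

    # With a container-level key, requests authorized via this signature
    # are now confined to that container, so e.g. DLO segments living in
    # another container come back 401 instead of being readable.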
[CVE-2015-5223] Co-Authored-By: Clay Gerrard Co-Authored-By: Alistair Coles Co-Authored-By: Christian Schwede Co-Authored-By: Matthew Oliver Change-Id: Ie6d52f7a07e87f6fec21ed8b0ec1d84be8b2b11c Closes-Bug: 1449212 --- swift/common/middleware/tempurl.py | 105 +++++++++--- swift/proxy/server.py | 11 +- test/functional/tests.py | 114 +++++++++++++ test/unit/common/middleware/test_tempurl.py | 171 +++++++++++++++----- 4 files changed, 333 insertions(+), 68 deletions(-) diff --git a/swift/common/middleware/tempurl.py b/swift/common/middleware/tempurl.py index 83c4e7b6c1..10278bf7b5 100644 --- a/swift/common/middleware/tempurl.py +++ b/swift/common/middleware/tempurl.py @@ -152,6 +152,10 @@ DEFAULT_OUTGOING_REMOVE_HEADERS = 'x-object-meta-*' DEFAULT_OUTGOING_ALLOW_HEADERS = 'x-object-meta-public-*' +CONTAINER_SCOPE = 'container' +ACCOUNT_SCOPE = 'account' + + def get_tempurl_keys_from_metadata(meta): """ Extracts the tempurl keys from metadata. @@ -172,6 +176,38 @@ def disposition_format(filename): quote(filename, safe=' /'), quote(filename)) +def authorize_same_account(account_to_match): + + def auth_callback_same_account(req): + try: + _ver, acc, _rest = req.split_path(2, 3, True) + except ValueError: + return HTTPUnauthorized(request=req) + + if acc == account_to_match: + return None + else: + return HTTPUnauthorized(request=req) + + return auth_callback_same_account + + +def authorize_same_container(account_to_match, container_to_match): + + def auth_callback_same_container(req): + try: + _ver, acc, con, _rest = req.split_path(3, 4, True) + except ValueError: + return HTTPUnauthorized(request=req) + + if acc == account_to_match and con == container_to_match: + return None + else: + return HTTPUnauthorized(request=req) + + return auth_callback_same_container + + class TempURL(object): """ WSGI Middleware to grant temporary URLs specific access to Swift @@ -304,10 +340,10 @@ class TempURL(object): return self.app(env, start_response) if not temp_url_sig or not temp_url_expires: return self._invalid(env, start_response) - account = self._get_account(env) + account, container = self._get_account_and_container(env) if not account: return self._invalid(env, start_response) - keys = self._get_keys(env, account) + keys = self._get_keys(env) if not keys: return self._invalid(env, start_response) if env['REQUEST_METHOD'] == 'HEAD': @@ -322,11 +358,16 @@ class TempURL(object): else: hmac_vals = self._get_hmacs(env, temp_url_expires, keys) - # While it's true that any() will short-circuit, this doesn't affect - # the timing-attack resistance since the only way this will - # short-circuit is when a valid signature is passed in. - is_valid_hmac = any(streq_const_time(temp_url_sig, hmac) - for hmac in hmac_vals) + is_valid_hmac = False + hmac_scope = None + for hmac, scope in hmac_vals: + # While it's true that we short-circuit, this doesn't affect the + # timing-attack resistance since the only way this will + # short-circuit is when a valid signature is passed in. 
+ if streq_const_time(temp_url_sig, hmac): + is_valid_hmac = True + hmac_scope = scope + break if not is_valid_hmac: return self._invalid(env, start_response) # disallowed headers prevent accidently allowing upload of a pointer @@ -337,7 +378,12 @@ class TempURL(object): if resp: return resp self._clean_incoming_headers(env) - env['swift.authorize'] = lambda req: None + + if hmac_scope == ACCOUNT_SCOPE: + env['swift.authorize'] = authorize_same_account(account) + else: + env['swift.authorize'] = authorize_same_container(account, + container) env['swift.authorize_override'] = True env['REMOTE_USER'] = '.wsgi.tempurl' qs = {'temp_url_sig': temp_url_sig, @@ -378,22 +424,23 @@ class TempURL(object): return self.app(env, _start_response) - def _get_account(self, env): + def _get_account_and_container(self, env): """ - Returns just the account for the request, if it's an object - request and one of the configured methods; otherwise, None is + Returns just the account and container for the request, if it's an + object request and one of the configured methods; otherwise, None is returned. :param env: The WSGI environment for the request. - :returns: Account str or None. + :returns: (Account str, container str) or (None, None). """ if env['REQUEST_METHOD'] in self.methods: try: ver, acc, cont, obj = split_path(env['PATH_INFO'], 4, 4, True) except ValueError: - return None + return (None, None) if ver == 'v1' and obj.strip('/'): - return acc + return (acc, cont) + return (None, None) def _get_temp_url_info(self, env): """ @@ -423,18 +470,23 @@ class TempURL(object): inline = True return temp_url_sig, temp_url_expires, filename, inline - def _get_keys(self, env, account): + def _get_keys(self, env): """ Returns the X-[Account|Container]-Meta-Temp-URL-Key[-2] header values - for the account or container, or an empty list if none are set. + for the account or container, or an empty list if none are set. Each + value comes as a 2-tuple (key, scope), where scope is either + CONTAINER_SCOPE or ACCOUNT_SCOPE. Returns 0-4 elements depending on how many keys are set in the account's or container's metadata. :param env: The WSGI environment for the request. - :param account: Account str. - :returns: [X-Account-Meta-Temp-URL-Key str value if set, - X-Account-Meta-Temp-URL-Key-2 str value if set] + :returns: [ + (X-Account-Meta-Temp-URL-Key str value, ACCOUNT_SCOPE) if set, + (X-Account-Meta-Temp-URL-Key-2 str value, ACCOUNT_SCOPE if set, + (X-Container-Meta-Temp-URL-Key str value, CONTAINER_SCOPE) if set, + (X-Container-Meta-Temp-URL-Key-2 str value, CONTAINER_SCOPE if set, + ] """ account_info = get_account_info(env, self.app, swift_source='TU') account_keys = get_tempurl_keys_from_metadata(account_info['meta']) @@ -443,25 +495,28 @@ class TempURL(object): container_keys = get_tempurl_keys_from_metadata( container_info.get('meta', [])) - return account_keys + container_keys + return ([(ak, ACCOUNT_SCOPE) for ak in account_keys] + + [(ck, CONTAINER_SCOPE) for ck in container_keys]) - def _get_hmacs(self, env, expires, keys, request_method=None): + def _get_hmacs(self, env, expires, scoped_keys, request_method=None): """ :param env: The WSGI environment for the request. :param expires: Unix timestamp as an int for when the URL expires. - :param keys: Key strings, from the X-Account-Meta-Temp-URL-Key[-2] of - the account. + :param scoped_keys: (key, scope) tuples like _get_keys() returns :param request_method: Optional override of the request in the WSGI env. 
For example, if a HEAD does not match, you may wish to override with GET to still allow the HEAD. + + :returns: a list of (hmac, scope) 2-tuples """ if not request_method: request_method = env['REQUEST_METHOD'] - return [get_hmac( - request_method, env['PATH_INFO'], expires, key) for key in keys] + return [ + (get_hmac(request_method, env['PATH_INFO'], expires, key), scope) + for (key, scope) in scoped_keys] def _invalid(self, env, start_response): """ diff --git a/swift/proxy/server.py b/swift/proxy/server.py index d55dcdab92..0401691b6e 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -381,6 +381,7 @@ class Application(object): allowed_methods = getattr(controller, 'allowed_methods', set()) return HTTPMethodNotAllowed( request=req, headers={'Allow': ', '.join(allowed_methods)}) + old_authorize = None if 'swift.authorize' in req.environ: # We call authorize before the handler, always. If authorized, # we remove the swift.authorize hook so isn't ever called @@ -391,7 +392,7 @@ class Application(object): if not resp and not req.headers.get('X-Copy-From-Account') \ and not req.headers.get('Destination-Account'): # No resp means authorized, no delayed recheck required. - del req.environ['swift.authorize'] + old_authorize = req.environ['swift.authorize'] else: # Response indicates denial, but we might delay the denial # and recheck later. If not delayed, return the error now. @@ -401,7 +402,13 @@ class Application(object): # gets mutated during handling. This way logging can display the # method the client actually sent. req.environ['swift.orig_req_method'] = req.method - return handler(req) + try: + if old_authorize: + req.environ.pop('swift.authorize', None) + return handler(req) + finally: + if old_authorize: + req.environ['swift.authorize'] = old_authorize except HTTPException as error_response: return error_response except (Exception, Timeout): diff --git a/test/functional/tests.py b/test/functional/tests.py index 0ec9a489cc..758de80802 100644 --- a/test/functional/tests.py +++ b/test/functional/tests.py @@ -3090,6 +3090,59 @@ class TestTempurl(Base): contents = self.env.obj.read(parms=parms, cfg={'no_auth_token': True}) self.assertEqual(contents, "obj contents") + def test_GET_DLO_inside_container(self): + seg1 = self.env.container.file( + "get-dlo-inside-seg1" + Utils.create_name()) + seg2 = self.env.container.file( + "get-dlo-inside-seg2" + Utils.create_name()) + seg1.write("one fish two fish ") + seg2.write("red fish blue fish") + + manifest = self.env.container.file("manifest" + Utils.create_name()) + manifest.write( + '', + hdrs={"X-Object-Manifest": "%s/get-dlo-inside-seg" % + (self.env.container.name,)}) + + expires = int(time.time()) + 86400 + sig = self.tempurl_sig( + 'GET', expires, self.env.conn.make_path(manifest.path), + self.env.tempurl_key) + parms = {'temp_url_sig': sig, + 'temp_url_expires': str(expires)} + + contents = manifest.read(parms=parms, cfg={'no_auth_token': True}) + self.assertEqual(contents, "one fish two fish red fish blue fish") + + def test_GET_DLO_outside_container(self): + seg1 = self.env.container.file( + "get-dlo-outside-seg1" + Utils.create_name()) + seg2 = self.env.container.file( + "get-dlo-outside-seg2" + Utils.create_name()) + seg1.write("one fish two fish ") + seg2.write("red fish blue fish") + + container2 = self.env.account.container(Utils.create_name()) + container2.create() + + manifest = container2.file("manifest" + Utils.create_name()) + manifest.write( + '', + hdrs={"X-Object-Manifest": "%s/get-dlo-outside-seg" % + 
(self.env.container.name,)}) + + expires = int(time.time()) + 86400 + sig = self.tempurl_sig( + 'GET', expires, self.env.conn.make_path(manifest.path), + self.env.tempurl_key) + parms = {'temp_url_sig': sig, + 'temp_url_expires': str(expires)} + + # cross container tempurl works fine for account tempurl key + contents = manifest.read(parms=parms, cfg={'no_auth_token': True}) + self.assertEqual(contents, "one fish two fish red fish blue fish") + self.assert_status([200]) + def test_PUT(self): new_obj = self.env.container.file(Utils.create_name()) @@ -3422,6 +3475,67 @@ class TestContainerTempurl(Base): 'Container TempURL key-2 found, should not be visible ' 'to readonly ACLs') + def test_GET_DLO_inside_container(self): + seg1 = self.env.container.file( + "get-dlo-inside-seg1" + Utils.create_name()) + seg2 = self.env.container.file( + "get-dlo-inside-seg2" + Utils.create_name()) + seg1.write("one fish two fish ") + seg2.write("red fish blue fish") + + manifest = self.env.container.file("manifest" + Utils.create_name()) + manifest.write( + '', + hdrs={"X-Object-Manifest": "%s/get-dlo-inside-seg" % + (self.env.container.name,)}) + + expires = int(time.time()) + 86400 + sig = self.tempurl_sig( + 'GET', expires, self.env.conn.make_path(manifest.path), + self.env.tempurl_key) + parms = {'temp_url_sig': sig, + 'temp_url_expires': str(expires)} + + contents = manifest.read(parms=parms, cfg={'no_auth_token': True}) + self.assertEqual(contents, "one fish two fish red fish blue fish") + + def test_GET_DLO_outside_container(self): + container2 = self.env.account.container(Utils.create_name()) + container2.create() + seg1 = container2.file( + "get-dlo-outside-seg1" + Utils.create_name()) + seg2 = container2.file( + "get-dlo-outside-seg2" + Utils.create_name()) + seg1.write("one fish two fish ") + seg2.write("red fish blue fish") + + manifest = self.env.container.file("manifest" + Utils.create_name()) + manifest.write( + '', + hdrs={"X-Object-Manifest": "%s/get-dlo-outside-seg" % + (container2.name,)}) + + expires = int(time.time()) + 86400 + sig = self.tempurl_sig( + 'GET', expires, self.env.conn.make_path(manifest.path), + self.env.tempurl_key) + parms = {'temp_url_sig': sig, + 'temp_url_expires': str(expires)} + + # cross container tempurl does not work for container tempurl key + try: + manifest.read(parms=parms, cfg={'no_auth_token': True}) + except ResponseError as e: + self.assertEqual(e.status, 401) + else: + self.fail('request did not error') + try: + manifest.info(parms=parms, cfg={'no_auth_token': True}) + except ResponseError as e: + self.assertEqual(e.status, 401) + else: + self.fail('request did not error') + class TestContainerTempurlUTF8(Base2, TestContainerTempurl): set_up = False diff --git a/test/unit/common/middleware/test_tempurl.py b/test/unit/common/middleware/test_tempurl.py index b5638164ee..c84063120a 100644 --- a/test/unit/common/middleware/test_tempurl.py +++ b/test/unit/common/middleware/test_tempurl.py @@ -29,6 +29,7 @@ # limitations under the License. 
import hmac +import itertools import unittest from hashlib import sha1 from time import time @@ -44,10 +45,13 @@ class FakeApp(object): self.calls = 0 self.status_headers_body_iter = status_headers_body_iter if not self.status_headers_body_iter: - self.status_headers_body_iter = iter([('404 Not Found', { - 'x-test-header-one-a': 'value1', - 'x-test-header-two-a': 'value2', - 'x-test-header-two-b': 'value3'}, '')]) + self.status_headers_body_iter = iter( + itertools.repeat(( + '404 Not Found', { + 'x-test-header-one-a': 'value1', + 'x-test-header-two-a': 'value2', + 'x-test-header-two-b': 'value3'}, + ''))) self.request = None def __call__(self, env, start_response): @@ -69,16 +73,18 @@ class TestTempURL(unittest.TestCase): self.auth = tempauth.filter_factory({'reseller_prefix': ''})(self.app) self.tempurl = tempurl.filter_factory({})(self.auth) - def _make_request(self, path, environ=None, keys=(), **kwargs): + def _make_request(self, path, environ=None, keys=(), container_keys=None, + **kwargs): if environ is None: environ = {} _junk, account, _junk, _junk = utils.split_path(path, 2, 4) - self._fake_cache_environ(environ, account, keys) + self._fake_cache_environ(environ, account, keys, + container_keys=container_keys) req = Request.blank(path, environ=environ, **kwargs) return req - def _fake_cache_environ(self, environ, account, keys): + def _fake_cache_environ(self, environ, account, keys, container_keys=None): """ Fake out the caching layer for get_account_info(). Injects account data into environ such that keys are the tempurl keys, if set. @@ -96,8 +102,13 @@ class TestTempURL(unittest.TestCase): 'bytes': '0', 'meta': meta} + meta = {} + for i, key in enumerate(container_keys or []): + meta_name = 'Temp-URL-key' + (("-%d" % (i + 1) if i else "")) + meta[meta_name] = key + container_cache_key = 'swift.container/' + account + '/c' - environ.setdefault(container_cache_key, {'meta': {}}) + environ.setdefault(container_cache_key, {'meta': meta}) def test_passthrough(self): resp = self._make_request('/v1/a/c/o').get_response(self.tempurl) @@ -581,6 +592,81 @@ class TestTempURL(unittest.TestCase): self.assertTrue('Temp URL invalid' in resp.body) self.assertTrue('Www-Authenticate' in resp.headers) + def test_authorize_limits_scope(self): + req_other_object = Request.blank("/v1/a/c/o2") + req_other_container = Request.blank("/v1/a/c2/o2") + req_other_account = Request.blank("/v1/a2/c2/o2") + + key_kwargs = { + 'keys': ['account-key', 'shared-key'], + 'container_keys': ['container-key', 'shared-key'], + } + + # A request with the account key limits the pre-authed scope to the + # account level. + method = 'GET' + expires = int(time() + 86400) + path = '/v1/a/c/o' + + hmac_body = '%s\n%s\n%s' % (method, expires, path) + sig = hmac.new('account-key', hmac_body, sha1).hexdigest() + qs = '?temp_url_sig=%s&temp_url_expires=%s' % (sig, expires) + + # make request will setup the environ cache for us + req = self._make_request(path + qs, **key_kwargs) + resp = req.get_response(self.tempurl) + self.assertEquals(resp.status_int, 404) # sanity check + + authorize = req.environ['swift.authorize'] + # Requests for other objects happen if, for example, you're + # downloading a large object or creating a large-object manifest. 
+ oo_resp = authorize(req_other_object) + self.assertEqual(oo_resp, None) + oc_resp = authorize(req_other_container) + self.assertEqual(oc_resp, None) + oa_resp = authorize(req_other_account) + self.assertEqual(oa_resp.status_int, 401) + + # A request with the container key limits the pre-authed scope to + # the container level; a different container in the same account is + # out of scope and thus forbidden. + hmac_body = '%s\n%s\n%s' % (method, expires, path) + sig = hmac.new('container-key', hmac_body, sha1).hexdigest() + qs = '?temp_url_sig=%s&temp_url_expires=%s' % (sig, expires) + + req = self._make_request(path + qs, **key_kwargs) + resp = req.get_response(self.tempurl) + self.assertEquals(resp.status_int, 404) # sanity check + + authorize = req.environ['swift.authorize'] + oo_resp = authorize(req_other_object) + self.assertEqual(oo_resp, None) + oc_resp = authorize(req_other_container) + self.assertEqual(oc_resp.status_int, 401) + oa_resp = authorize(req_other_account) + self.assertEqual(oa_resp.status_int, 401) + + # If account and container share a key (users set these, so this can + # happen by accident, stupidity, *or* malice!), limit the scope to + # account level. This prevents someone from shrinking the scope of + # account-level tempurls by reusing one of the account's keys on a + # container. + hmac_body = '%s\n%s\n%s' % (method, expires, path) + sig = hmac.new('shared-key', hmac_body, sha1).hexdigest() + qs = '?temp_url_sig=%s&temp_url_expires=%s' % (sig, expires) + + req = self._make_request(path + qs, **key_kwargs) + resp = req.get_response(self.tempurl) + self.assertEquals(resp.status_int, 404) # sanity check + + authorize = req.environ['swift.authorize'] + oo_resp = authorize(req_other_object) + self.assertEqual(oo_resp, None) + oc_resp = authorize(req_other_container) + self.assertEqual(oc_resp, None) + oa_resp = authorize(req_other_account) + self.assertEqual(oa_resp.status_int, 401) + def test_changed_path_invalid(self): method = 'GET' expires = int(time() + 86400) @@ -828,35 +914,38 @@ class TestTempURL(unittest.TestCase): self.assertTrue('x-conflict-header-test' in resp.headers) self.assertEqual(resp.headers['x-conflict-header-test'], 'value') - def test_get_account(self): - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'HEAD', 'PATH_INFO': '/v1/a/c/o'}), 'a') - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c/o'}), 'a') - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'PUT', 'PATH_INFO': '/v1/a/c/o'}), 'a') - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'POST', 'PATH_INFO': '/v1/a/c/o'}), 'a') - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'DELETE', 'PATH_INFO': '/v1/a/c/o'}), 'a') - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'UNKNOWN', 'PATH_INFO': '/v1/a/c/o'}), None) - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c/'}), None) - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c//////'}), None) - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c///o///'}), 'a') - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c'}), None) - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a//o'}), None) - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1//c/o'}), None) 
- self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'GET', 'PATH_INFO': '//a/c/o'}), None) - self.assertEquals(self.tempurl._get_account({ - 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v2/a/c/o'}), None) + def test_get_account_and_container(self): + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'HEAD', 'PATH_INFO': '/v1/a/c/o'}), ('a', 'c')) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c/o'}), ('a', 'c')) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'PUT', 'PATH_INFO': '/v1/a/c/o'}), ('a', 'c')) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'POST', 'PATH_INFO': '/v1/a/c/o'}), ('a', 'c')) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'DELETE', 'PATH_INFO': '/v1/a/c/o'}), ('a', 'c')) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'UNKNOWN', 'PATH_INFO': '/v1/a/c/o'}), + (None, None)) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c/'}), (None, None)) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c//////'}), + (None, None)) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c///o///'}), + ('a', 'c')) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c'}), (None, None)) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a//o'}), (None, None)) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1//c/o'}), (None, None)) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'GET', 'PATH_INFO': '//a/c/o'}), (None, None)) + self.assertEquals(self.tempurl._get_account_and_container({ + 'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v2/a/c/o'}), (None, None)) def test_get_temp_url_info(self): s = 'f5d5051bddf5df7e27c628818738334f' @@ -908,13 +997,13 @@ class TestTempURL(unittest.TestCase): self.assertEquals( self.tempurl._get_hmacs( {'REQUEST_METHOD': 'GET', 'PATH_INFO': '/v1/a/c/o'}, - 1, ['abc']), - ['026d7f7cc25256450423c7ad03fc9f5ffc1dab6d']) + 1, [('abc', 'account')]), + [('026d7f7cc25256450423c7ad03fc9f5ffc1dab6d', 'account')]) self.assertEquals( self.tempurl._get_hmacs( {'REQUEST_METHOD': 'HEAD', 'PATH_INFO': '/v1/a/c/o'}, - 1, ['abc'], request_method='GET'), - ['026d7f7cc25256450423c7ad03fc9f5ffc1dab6d']) + 1, [('abc', 'account')], request_method='GET'), + [('026d7f7cc25256450423c7ad03fc9f5ffc1dab6d', 'account')]) def test_invalid(self): From 893f30c61d280804e417790dd34ba7bc3fb4f6fc Mon Sep 17 00:00:00 2001 From: paul luse Date: Wed, 12 Aug 2015 13:32:50 -0700 Subject: [PATCH 65/70] EC GET path: require fragments to be of same set And if they are not, exhaust the node iter to go get more. The problem without this implementation is a simple overwrite where a GET follows before the handoff has put the newer obj back on the 'alive again' node such that the proxy gets n-1 fragments of the newest set and 1 of the older. This patch bucketizes the fragments by etag and if it doesn't have enough continues to exhaust the node iterator until it has a large enough matching set. 
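A simplified sketch of the bucketing idea (not the actual ECObjectController code, which keeps the backend GETs concurrent on a GreenAsyncPile and keys its buckets on X-Object-Sysmeta-Ec-Etag): group fragment responses by etag and keep pulling nodes until one group is large enough to decode:

    from collections import defaultdict

    def collect_matching_fragments(frag_getters, ec_ndata):
        # frag_getters: iterable of callables, each performing one backend
        # GET and returning (etag, fragment). Returns ec_ndata fragments
        # that share an etag, or None if the nodes run out first.
        buckets = defaultdict(list)
        for get_frag in frag_getters:
            etag, fragment = get_frag()
            buckets[etag].append(fragment)
            best = max(buckets.values(), key=len)
            if len(best) >= ec_ndata:
                return best   # a consistent set from one object version
        return None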
Change-Id: Ib710a133ce1be278365067fd0d6610d80f1f7372 Co-Authored-By: Clay Gerrard Co-Authored-By: Alistair Coles Closes-Bug: 1457691 --- swift/common/utils.py | 9 +- swift/proxy/controllers/base.py | 94 ++++ swift/proxy/controllers/obj.py | 56 ++- swift/proxy/server.py | 58 +-- test/probe/common.py | 45 ++ test/probe/test_object_handoff.py | 90 +++- test/probe/test_reconstructor_revert.py | 28 +- test/unit/common/test_utils.py | 16 + test/unit/proxy/controllers/test_obj.py | 548 ++++++++++++++++++++++++ 9 files changed, 828 insertions(+), 116 deletions(-) diff --git a/swift/common/utils.py b/swift/common/utils.py index 093db76ac8..8923246c54 100644 --- a/swift/common/utils.py +++ b/swift/common/utils.py @@ -2268,6 +2268,7 @@ class GreenAsyncPile(object): size = size_or_pool self._responses = eventlet.queue.LightQueue(size) self._inflight = 0 + self._pending = 0 def _run_func(self, func, args, kwargs): try: @@ -2279,6 +2280,7 @@ class GreenAsyncPile(object): """ Spawn a job in a green thread on the pile. """ + self._pending += 1 self._inflight += 1 self._pool.spawn(self._run_func, func, args, kwargs) @@ -2303,12 +2305,13 @@ class GreenAsyncPile(object): def next(self): try: - return self._responses.get_nowait() + rv = self._responses.get_nowait() except Empty: if self._inflight == 0: raise StopIteration() - else: - return self._responses.get() + rv = self._responses.get() + self._pending -= 1 + return rv class ModifiedParseResult(ParseResult): diff --git a/swift/proxy/controllers/base.py b/swift/proxy/controllers/base.py index 70940f9c16..65d9acdc0f 100644 --- a/swift/proxy/controllers/base.py +++ b/swift/proxy/controllers/base.py @@ -28,6 +28,7 @@ import os import time import functools import inspect +import itertools import operator from sys import exc_info from swift import gettext_ as _ @@ -1125,6 +1126,99 @@ class GetOrHeadHandler(ResumingGetter): return res +class NodeIter(object): + """ + Yields nodes for a ring partition, skipping over error + limited nodes and stopping at the configurable number of nodes. If a + node yielded subsequently gets error limited, an extra node will be + yielded to take its place. + + Note that if you're going to iterate over this concurrently from + multiple greenthreads, you'll want to use a + swift.common.utils.GreenthreadSafeIterator to serialize access. + Otherwise, you may get ValueErrors from concurrent access. (You also + may not, depending on how logging is configured, the vagaries of + socket IO and eventlet, and the phase of the moon.) + + :param app: a proxy app + :param ring: ring to get yield nodes from + :param partition: ring partition to yield nodes for + :param node_iter: optional iterable of nodes to try. Useful if you + want to filter or reorder the nodes. + """ + + def __init__(self, app, ring, partition, node_iter=None): + self.app = app + self.ring = ring + self.partition = partition + + part_nodes = ring.get_part_nodes(partition) + if node_iter is None: + node_iter = itertools.chain( + part_nodes, ring.get_more_nodes(partition)) + num_primary_nodes = len(part_nodes) + self.nodes_left = self.app.request_node_count(num_primary_nodes) + self.expected_handoffs = self.nodes_left - num_primary_nodes + + # Use of list() here forcibly yanks the first N nodes (the primary + # nodes) from node_iter, so the rest of its values are handoffs. 
+ self.primary_nodes = self.app.sort_nodes( + list(itertools.islice(node_iter, num_primary_nodes))) + self.handoff_iter = node_iter + + def __iter__(self): + self._node_iter = self._node_gen() + return self + + def log_handoffs(self, handoffs): + """ + Log handoff requests if handoff logging is enabled and the + handoff was not expected. + + We only log handoffs when we've pushed the handoff count further + than we would normally have expected under normal circumstances, + that is (request_node_count - num_primaries), when handoffs goes + higher than that it means one of the primaries must have been + skipped because of error limiting before we consumed all of our + nodes_left. + """ + if not self.app.log_handoffs: + return + extra_handoffs = handoffs - self.expected_handoffs + if extra_handoffs > 0: + self.app.logger.increment('handoff_count') + self.app.logger.warning( + 'Handoff requested (%d)' % handoffs) + if (extra_handoffs == len(self.primary_nodes)): + # all the primaries were skipped, and handoffs didn't help + self.app.logger.increment('handoff_all_count') + + def _node_gen(self): + for node in self.primary_nodes: + if not self.app.error_limited(node): + yield node + if not self.app.error_limited(node): + self.nodes_left -= 1 + if self.nodes_left <= 0: + return + handoffs = 0 + for node in self.handoff_iter: + if not self.app.error_limited(node): + handoffs += 1 + self.log_handoffs(handoffs) + yield node + if not self.app.error_limited(node): + self.nodes_left -= 1 + if self.nodes_left <= 0: + return + + def next(self): + return next(self._node_iter) + + def __next__(self): + return self.next() + + class Controller(object): """Base WSGI controller class for the proxy""" server_type = 'Base' diff --git a/swift/proxy/controllers/obj.py b/swift/proxy/controllers/obj.py index c82c83150d..22a4a4eb30 100644 --- a/swift/proxy/controllers/obj.py +++ b/swift/proxy/controllers/obj.py @@ -1951,44 +1951,43 @@ class ECObjectController(BaseObjectController): orig_range = req.range range_specs = self._convert_range(req, policy) - node_iter = GreenthreadSafeIterator(node_iter) - num_gets = policy.ec_ndata - with ContextPool(num_gets) as pool: + safe_iter = GreenthreadSafeIterator(node_iter) + with ContextPool(policy.ec_ndata) as pool: pile = GreenAsyncPile(pool) - for _junk in range(num_gets): + for _junk in range(policy.ec_ndata): pile.spawn(self._fragment_GET_request, - req, node_iter, partition, + req, safe_iter, partition, policy) - gets = list(pile) - good_gets = [] bad_gets = [] - for get, parts_iter in gets: + etag_buckets = collections.defaultdict(list) + best_etag = None + for get, parts_iter in pile: if is_success(get.last_status): - good_gets.append((get, parts_iter)) + etag = HeaderKeyDict( + get.last_headers)['X-Object-Sysmeta-Ec-Etag'] + etag_buckets[etag].append((get, parts_iter)) + if etag != best_etag and ( + len(etag_buckets[etag]) > + len(etag_buckets[best_etag])): + best_etag = etag else: bad_gets.append((get, parts_iter)) + matching_response_count = max( + len(etag_buckets[best_etag]), len(bad_gets)) + if (policy.ec_ndata - matching_response_count > + pile._pending) and node_iter.nodes_left > 0: + # we need more matching responses to reach ec_ndata + # than we have pending gets, as long as we still have + # nodes in node_iter we can spawn another + pile.spawn(self._fragment_GET_request, req, + safe_iter, partition, policy) req.range = orig_range - if len(good_gets) == num_gets: - # If these aren't all for the same object, then error out so - # at least the client doesn't get 
garbage. We can do a lot - # better here with more work, but this'll work for now. - found_obj_etags = set( - HeaderKeyDict( - getter.last_headers)['X-Object-Sysmeta-Ec-Etag'] - for getter, _junk in good_gets) - if len(found_obj_etags) > 1: - self.app.logger.debug( - "Returning 503 for %s; found too many etags (%s)", - req.path, - ", ".join(found_obj_etags)) - return HTTPServiceUnavailable(request=req) - - # we found enough pieces to decode the object, so now let's - # decode the object + if len(etag_buckets[best_etag]) >= policy.ec_ndata: + # headers can come from any of the getters resp_headers = HeaderKeyDict( - good_gets[0][0].source_headers[-1]) + etag_buckets[best_etag][0][0].source_headers[-1]) resp_headers.pop('Content-Range', None) eccl = resp_headers.get('X-Object-Sysmeta-Ec-Content-Length') obj_length = int(eccl) if eccl is not None else None @@ -1996,11 +1995,10 @@ class ECObjectController(BaseObjectController): # This is only true if we didn't get a 206 response, but # that's the only time this is used anyway. fa_length = int(resp_headers['Content-Length']) - app_iter = ECAppIter( req.swift_entity_path, policy, - [iterator for getter, iterator in good_gets], + [iterator for getter, iterator in etag_buckets[best_etag]], range_specs, fa_length, obj_length, self.app.logger) resp = Response( diff --git a/swift/proxy/server.py b/swift/proxy/server.py index 0401691b6e..b49181dc37 100644 --- a/swift/proxy/server.py +++ b/swift/proxy/server.py @@ -19,7 +19,6 @@ import socket from swift import gettext_ as _ from random import shuffle from time import time -import itertools import functools import sys @@ -36,7 +35,7 @@ from swift.common.utils import cache_from_env, get_logger, \ from swift.common.constraints import check_utf8, valid_api_version from swift.proxy.controllers import AccountController, ContainerController, \ ObjectControllerRouter, InfoController -from swift.proxy.controllers.base import get_container_info +from swift.proxy.controllers.base import get_container_info, NodeIter from swift.common.swob import HTTPBadRequest, HTTPForbidden, \ HTTPMethodNotAllowed, HTTPNotFound, HTTPPreconditionFailed, \ HTTPServerError, HTTPException, Request, HTTPServiceUnavailable @@ -507,60 +506,7 @@ class Application(object): 'port': node['port'], 'device': node['device']}) def iter_nodes(self, ring, partition, node_iter=None): - """ - Yields nodes for a ring partition, skipping over error - limited nodes and stopping at the configurable number of nodes. If a - node yielded subsequently gets error limited, an extra node will be - yielded to take its place. - - Note that if you're going to iterate over this concurrently from - multiple greenthreads, you'll want to use a - swift.common.utils.GreenthreadSafeIterator to serialize access. - Otherwise, you may get ValueErrors from concurrent access. (You also - may not, depending on how logging is configured, the vagaries of - socket IO and eventlet, and the phase of the moon.) - - :param ring: ring to get yield nodes from - :param partition: ring partition to yield nodes for - :param node_iter: optional iterable of nodes to try. Useful if you - want to filter or reorder the nodes. - """ - part_nodes = ring.get_part_nodes(partition) - if node_iter is None: - node_iter = itertools.chain(part_nodes, - ring.get_more_nodes(partition)) - num_primary_nodes = len(part_nodes) - - # Use of list() here forcibly yanks the first N nodes (the primary - # nodes) from node_iter, so the rest of its values are handoffs. 
- primary_nodes = self.sort_nodes( - list(itertools.islice(node_iter, num_primary_nodes))) - handoff_nodes = node_iter - nodes_left = self.request_node_count(len(primary_nodes)) - - log_handoffs_threshold = nodes_left - len(primary_nodes) - for node in primary_nodes: - if not self.error_limited(node): - yield node - if not self.error_limited(node): - nodes_left -= 1 - if nodes_left <= 0: - return - handoffs = 0 - for node in handoff_nodes: - if not self.error_limited(node): - handoffs += 1 - if self.log_handoffs and handoffs > log_handoffs_threshold: - self.logger.increment('handoff_count') - self.logger.warning( - 'Handoff requested (%d)' % handoffs) - if handoffs - log_handoffs_threshold == len(primary_nodes): - self.logger.increment('handoff_all_count') - yield node - if not self.error_limited(node): - nodes_left -= 1 - if nodes_left <= 0: - return + return NodeIter(self, ring, partition, node_iter=node_iter) def exception_occurred(self, node, typ, additional_info, **kwargs): diff --git a/test/probe/common.py b/test/probe/common.py index 1479ba9ddc..45a907444d 100644 --- a/test/probe/common.py +++ b/test/probe/common.py @@ -20,6 +20,8 @@ import sys from time import sleep, time from collections import defaultdict import unittest +from hashlib import md5 +from uuid import uuid4 from nose import SkipTest from six.moves.http_client import HTTPConnection @@ -262,6 +264,49 @@ def resetswift(): Manager(['all']).stop() +class Body(object): + + def __init__(self, total=3.5 * 2 ** 20): + self.length = total + self.hasher = md5() + self.read_amount = 0 + self.chunk = uuid4().hex * 2 ** 10 + self.buff = '' + + @property + def etag(self): + return self.hasher.hexdigest() + + def __len__(self): + return self.length + + def read(self, amount): + if len(self.buff) < amount: + try: + self.buff += next(self) + except StopIteration: + pass + rv, self.buff = self.buff[:amount], self.buff[amount:] + return rv + + def __iter__(self): + return self + + def next(self): + if self.buff: + rv, self.buff = self.buff, '' + return rv + if self.read_amount >= self.length: + raise StopIteration() + rv = self.chunk[:int(self.length - self.read_amount)] + self.read_amount += len(rv) + self.hasher.update(rv) + return rv + + def __next__(self): + return next(self) + + class ProbeTest(unittest.TestCase): """ Don't instantiate this directly, use a child class instead. 
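The Body helper added above gives probe tests a streaming request body whose
md5 is computed as the body is consumed, so a test can compare end-to-end
etags without buffering the whole payload up front. A minimal sketch of its
behavior, assuming it is imported from test.probe.common as introduced here:

    from hashlib import md5
    from test.probe.common import Body

    body = Body(total=1024)                    # small synthetic payload
    data = ''.join(chunk for chunk in body)    # iterating consumes the payload
    assert len(data) == 1024
    assert body.etag == md5(data).hexdigest()  # etag tracks what was read

The EC handoff tests below pass Body instances straight to client.put_object
and later compare the recorded etag against what the proxy serves back.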
diff --git a/test/probe/test_object_handoff.py b/test/probe/test_object_handoff.py index c7df4b9e07..f3b02c53cd 100755 --- a/test/probe/test_object_handoff.py +++ b/test/probe/test_object_handoff.py @@ -16,13 +16,17 @@ from unittest import main from uuid import uuid4 +import random +from hashlib import md5 +from collections import defaultdict from swiftclient import client from swift.common import direct_client from swift.common.exceptions import ClientException from swift.common.manager import Manager -from test.probe.common import kill_server, ReplProbeTest, start_server +from test.probe.common import (kill_server, start_server, ReplProbeTest, + ECProbeTest, Body) class TestObjectHandoff(ReplProbeTest): @@ -211,5 +215,89 @@ class TestObjectHandoff(ReplProbeTest): self.fail("Expected ClientException but didn't get it") +class TestECObjectHandoffOverwrite(ECProbeTest): + + def get_object(self, container_name, object_name): + headers, body = client.get_object(self.url, self.token, + container_name, + object_name, + resp_chunk_size=64 * 2 ** 10) + resp_checksum = md5() + for chunk in body: + resp_checksum.update(chunk) + return resp_checksum.hexdigest() + + def test_ec_handoff_overwrite(self): + container_name = 'container-%s' % uuid4() + object_name = 'object-%s' % uuid4() + + # create EC container + headers = {'X-Storage-Policy': self.policy.name} + client.put_container(self.url, self.token, container_name, + headers=headers) + + # PUT object + old_contents = Body() + client.put_object(self.url, self.token, container_name, + object_name, contents=old_contents) + + # get our node lists + opart, onodes = self.object_ring.get_nodes( + self.account, container_name, object_name) + + # shutdown one of the primary data nodes + failed_primary = random.choice(onodes) + failed_primary_device_path = self.device_dir('object', failed_primary) + self.kill_drive(failed_primary_device_path) + + # overwrite our object with some new data + new_contents = Body() + client.put_object(self.url, self.token, container_name, + object_name, contents=new_contents) + self.assertNotEqual(new_contents.etag, old_contents.etag) + + # restore failed primary device + self.revive_drive(failed_primary_device_path) + + # sanity - failed node has old contents + req_headers = {'X-Backend-Storage-Policy-Index': int(self.policy)} + headers = direct_client.direct_head_object( + failed_primary, opart, self.account, container_name, + object_name, headers=req_headers) + self.assertEqual(headers['X-Object-Sysmeta-EC-Etag'], + old_contents.etag) + + # we have 1 primary with wrong old etag, and we should have 5 with + # new etag plus a handoff with the new etag, so killing 2 other + # primaries forces proxy to try to GET from all primaries plus handoff. 
+ other_nodes = [n for n in onodes if n != failed_primary] + random.shuffle(other_nodes) + for node in other_nodes[:2]: + self.kill_drive(self.device_dir('object', node)) + + # sanity, after taking out two primaries we should be down to + # only four primaries, one of which has the old etag - but we + # also have a handoff with the new etag out there + found_frags = defaultdict(int) + req_headers = {'X-Backend-Storage-Policy-Index': int(self.policy)} + for node in onodes + list(self.object_ring.get_more_nodes(opart)): + try: + headers = direct_client.direct_head_object( + node, opart, self.account, container_name, + object_name, headers=req_headers) + except Exception: + continue + found_frags[headers['X-Object-Sysmeta-EC-Etag']] += 1 + self.assertEqual(found_frags, { + new_contents.etag: 4, # this should be enough to rebuild! + old_contents.etag: 1, + }) + + # clear node error limiting + Manager(['proxy']).restart() + + resp_etag = self.get_object(container_name, object_name) + self.assertEqual(resp_etag, new_contents.etag) + if __name__ == '__main__': main() diff --git a/test/probe/test_reconstructor_revert.py b/test/probe/test_reconstructor_revert.py index 5e10c1337e..df4dc8beac 100755 --- a/test/probe/test_reconstructor_revert.py +++ b/test/probe/test_reconstructor_revert.py @@ -21,7 +21,7 @@ import random import shutil from collections import defaultdict -from test.probe.common import ECProbeTest +from test.probe.common import ECProbeTest, Body from swift.common import direct_client from swift.common.storage_policy import EC_POLICY @@ -31,32 +31,6 @@ from swift.obj import reconstructor from swiftclient import client -class Body(object): - - def __init__(self, total=3.5 * 2 ** 20): - self.total = total - self.hasher = md5() - self.size = 0 - self.chunk = 'test' * 16 * 2 ** 10 - - @property - def etag(self): - return self.hasher.hexdigest() - - def __iter__(self): - return self - - def next(self): - if self.size > self.total: - raise StopIteration() - self.size += len(self.chunk) - self.hasher.update(self.chunk) - return self.chunk - - def __next__(self): - return next(self) - - class TestReconstructorRevert(ECProbeTest): def setUp(self): diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index 5402ab1de8..653d939cce 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -4530,6 +4530,22 @@ class TestGreenAsyncPile(unittest.TestCase): self.assertEqual(pile.waitall(0.5), [0.1, 0.1]) self.assertEqual(completed[0], 2) + def test_pending(self): + pile = utils.GreenAsyncPile(3) + self.assertEqual(0, pile._pending) + for repeats in range(2): + # repeat to verify that pending will go again up after going down + for i in range(4): + pile.spawn(lambda: i) + self.assertEqual(4, pile._pending) + for i in range(3, -1, -1): + pile.next() + self.assertEqual(i, pile._pending) + # sanity check - the pile is empty + self.assertRaises(StopIteration, pile.next) + # pending remains 0 + self.assertEqual(0, pile._pending) + class TestLRUCache(unittest.TestCase): diff --git a/test/unit/proxy/controllers/test_obj.py b/test/unit/proxy/controllers/test_obj.py index ea4b165c70..af695ef23f 100755 --- a/test/unit/proxy/controllers/test_obj.py +++ b/test/unit/proxy/controllers/test_obj.py @@ -26,6 +26,7 @@ from hashlib import md5 import mock from eventlet import Timeout +from six import BytesIO from six.moves import range import swift @@ -913,6 +914,76 @@ class TestObjControllerLegacyCache(TestReplicatedObjController): self.assertEqual(resp.status_int, 503) 
+class StubResponse(object): + + def __init__(self, status, body='', headers=None): + self.status = status + self.body = body + self.readable = BytesIO(body) + self.headers = swob.HeaderKeyDict(headers) + fake_reason = ('Fake', 'This response is a lie.') + self.reason = swob.RESPONSE_REASONS.get(status, fake_reason)[0] + + def getheader(self, header_name, default=None): + return self.headers.get(header_name, default) + + def getheaders(self): + if 'Content-Length' not in self.headers: + self.headers['Content-Length'] = len(self.body) + return self.headers.items() + + def read(self, amt=0): + return self.readable.read(amt) + + +@contextmanager +def capture_http_requests(get_response): + + class FakeConn(object): + + def __init__(self, req): + self.req = req + self.resp = None + + def getresponse(self): + self.resp = get_response(self.req) + return self.resp + + class ConnectionLog(object): + + def __init__(self): + self.connections = [] + + def __len__(self): + return len(self.connections) + + def __getitem__(self, i): + return self.connections[i] + + def __iter__(self): + return iter(self.connections) + + def __call__(self, ip, port, method, path, headers, qs, ssl): + req = { + 'ip': ip, + 'port': port, + 'method': method, + 'path': path, + 'headers': headers, + 'qs': qs, + 'ssl': ssl, + } + conn = FakeConn(req) + self.connections.append(conn) + return conn + + fake_conn = ConnectionLog() + + with mock.patch('swift.common.bufferedhttp.http_connect_raw', + new=fake_conn): + yield fake_conn + + @patch_policies(with_ec_default=True) class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): container_info = { @@ -1344,6 +1415,483 @@ class TestECObjController(BaseObjectControllerMixin, unittest.TestCase): for fragments in zip(*fragment_payloads)] return ec_archive_bodies + def _make_ec_object_stub(self, test_body=None, policy=None): + policy = policy or self.policy + segment_size = policy.ec_segment_size + test_body = test_body or ( + 'test' * segment_size)[:-random.randint(0, 1000)] + etag = md5(test_body).hexdigest() + ec_archive_bodies = self._make_ec_archive_bodies(test_body, + policy=policy) + return { + 'body': test_body, + 'etag': etag, + 'frags': ec_archive_bodies, + } + + def _fake_ec_node_response(self, node_frags): + """ + Given a list of entries for each node in ring order, where the + entries are a dict (or list of dicts) which describe all of the + fragment(s); create a function suitable for use with + capture_http_requests that will accept a req object and return a + response that will suitably fake the behavior of an object + server who had the given fragments on disk at the time. + """ + node_map = {} + all_nodes = [] + + def _build_node_map(req): + node_key = lambda n: (n['ip'], n['port']) + part = utils.split_path(req['path'], 5, 5, True)[1] + policy = POLICIES[int( + req['headers']['X-Backend-Storage-Policy-Index'])] + all_nodes.extend(policy.object_ring.get_part_nodes(part)) + all_nodes.extend(policy.object_ring.get_more_nodes(part)) + for i, node in enumerate(all_nodes): + node_map[node_key(node)] = i + + # normalize node_frags to a list of fragments for each node even + # if there's only one fragment in the dataset provided. 
+ for i, frags in enumerate(node_frags): + if isinstance(frags, dict): + node_frags[i] = [frags] + + def get_response(req): + if not node_map: + _build_node_map(req) + + try: + node_index = node_map[(req['ip'], req['port'])] + except KeyError: + raise Exception("Couldn't find node %s:%s in %r" % ( + req['ip'], req['port'], all_nodes)) + + try: + frags = node_frags[node_index] + except KeyError: + raise Exception('Found node %r:%r at index %s - ' + 'but only got %s stub response nodes' % ( + req['ip'], req['port'], node_index, + len(node_frags))) + + try: + stub = random.choice(frags) + except IndexError: + stub = None + if stub: + body = stub['obj']['frags'][stub['frag']] + headers = { + 'X-Object-Sysmeta-Ec-Content-Length': len( + stub['obj']['body']), + 'X-Object-Sysmeta-Ec-Etag': stub['obj']['etag'], + 'X-Object-Sysmeta-Ec-Frag-Index': stub['frag'], + } + resp = StubResponse(200, body, headers) + else: + resp = StubResponse(404) + return resp + + return get_response + + def test_GET_with_frags_swapped_around(self): + segment_size = self.policy.ec_segment_size + test_data = ('test' * segment_size)[:-657] + etag = md5(test_data).hexdigest() + ec_archive_bodies = self._make_ec_archive_bodies(test_data) + + _part, primary_nodes = self.obj_ring.get_nodes('a', 'c', 'o') + + node_key = lambda n: (n['ip'], n['port']) + response_map = { + node_key(n): StubResponse(200, ec_archive_bodies[i], { + 'X-Object-Sysmeta-Ec-Content-Length': len(test_data), + 'X-Object-Sysmeta-Ec-Etag': etag, + 'X-Object-Sysmeta-Ec-Frag-Index': i, + }) for i, n in enumerate(primary_nodes) + } + + # swap a parity response into a data node + data_node = random.choice(primary_nodes[:self.policy.ec_ndata]) + parity_node = random.choice(primary_nodes[self.policy.ec_ndata:]) + (response_map[node_key(data_node)], + response_map[node_key(parity_node)]) = \ + (response_map[node_key(parity_node)], + response_map[node_key(data_node)]) + + def get_response(req): + req_key = (req['ip'], req['port']) + return response_map.pop(req_key) + + req = swob.Request.blank('/v1/a/c/o') + with capture_http_requests(get_response) as log: + resp = req.get_response(self.app) + + self.assertEqual(resp.status_int, 200) + self.assertEqual(len(log), self.policy.ec_ndata) + self.assertEqual(len(response_map), + len(primary_nodes) - self.policy.ec_ndata) + + def test_GET_with_single_missed_overwrite_does_not_need_handoff(self): + obj1 = self._make_ec_object_stub() + obj2 = self._make_ec_object_stub() + + node_frags = [ + {'obj': obj2, 'frag': 0}, + {'obj': obj2, 'frag': 1}, + {'obj': obj1, 'frag': 2}, # missed over write + {'obj': obj2, 'frag': 3}, + {'obj': obj2, 'frag': 4}, + {'obj': obj2, 'frag': 5}, + {'obj': obj2, 'frag': 6}, + {'obj': obj2, 'frag': 7}, + {'obj': obj2, 'frag': 8}, + {'obj': obj2, 'frag': 9}, + {'obj': obj2, 'frag': 10}, # parity + {'obj': obj2, 'frag': 11}, # parity + {'obj': obj2, 'frag': 12}, # parity + {'obj': obj2, 'frag': 13}, # parity + # {'obj': obj2, 'frag': 2}, # handoff (not used in this test) + ] + + fake_response = self._fake_ec_node_response(node_frags) + + req = swob.Request.blank('/v1/a/c/o') + with capture_http_requests(fake_response) as log: + resp = req.get_response(self.app) + + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.headers['etag'], obj2['etag']) + self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) + + collected_responses = defaultdict(set) + for conn in log: + etag = conn.resp.headers['X-Object-Sysmeta-Ec-Etag'] + index = conn.resp.headers['X-Object-Sysmeta-Ec-Frag-Index'] + 
collected_responses[etag].add(index) + + # because the primary nodes are shuffled, it's possible the proxy + # didn't even notice the missed overwrite frag - but it might have + self.assertLessEqual(len(log), self.policy.ec_ndata + 1) + self.assertLessEqual(len(collected_responses), 2) + + # ... regardless we should never need to fetch more than ec_ndata + # frags for any given etag + for etag, frags in collected_responses.items(): + self.assertTrue(len(frags) <= self.policy.ec_ndata, + 'collected %s frags for etag %s' % ( + len(frags), etag)) + + def test_GET_with_many_missed_overwrite_will_need_handoff(self): + obj1 = self._make_ec_object_stub() + obj2 = self._make_ec_object_stub() + + node_frags = [ + {'obj': obj2, 'frag': 0}, + {'obj': obj2, 'frag': 1}, + {'obj': obj1, 'frag': 2}, # missed + {'obj': obj2, 'frag': 3}, + {'obj': obj2, 'frag': 4}, + {'obj': obj2, 'frag': 5}, + {'obj': obj1, 'frag': 6}, # missed + {'obj': obj2, 'frag': 7}, + {'obj': obj2, 'frag': 8}, + {'obj': obj1, 'frag': 9}, # missed + {'obj': obj1, 'frag': 10}, # missed + {'obj': obj1, 'frag': 11}, # missed + {'obj': obj2, 'frag': 12}, + {'obj': obj2, 'frag': 13}, + {'obj': obj2, 'frag': 6}, # handoff + ] + + fake_response = self._fake_ec_node_response(node_frags) + + req = swob.Request.blank('/v1/a/c/o') + with capture_http_requests(fake_response) as log: + resp = req.get_response(self.app) + + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.headers['etag'], obj2['etag']) + self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) + + collected_responses = defaultdict(set) + for conn in log: + etag = conn.resp.headers['X-Object-Sysmeta-Ec-Etag'] + index = conn.resp.headers['X-Object-Sysmeta-Ec-Frag-Index'] + collected_responses[etag].add(index) + + # there's not enough of the obj2 etag on the primaries, we would + # have collected responses for both etags, and would have made + # one more request to the handoff node + self.assertEqual(len(log), self.replicas() + 1) + self.assertEqual(len(collected_responses), 2) + + # ... 
regardless we should never need to fetch more than ec_ndata + # frags for any given etag + for etag, frags in collected_responses.items(): + self.assertTrue(len(frags) <= self.policy.ec_ndata, + 'collected %s frags for etag %s' % ( + len(frags), etag)) + + def test_GET_with_missing_and_mixed_frags_will_dig_deep_but_succeed(self): + obj1 = self._make_ec_object_stub() + obj2 = self._make_ec_object_stub() + + node_frags = [ + {'obj': obj1, 'frag': 0}, + {'obj': obj2, 'frag': 0}, + {}, + {'obj': obj1, 'frag': 1}, + {'obj': obj2, 'frag': 1}, + {}, + {'obj': obj1, 'frag': 2}, + {'obj': obj2, 'frag': 2}, + {}, + {'obj': obj1, 'frag': 3}, + {'obj': obj2, 'frag': 3}, + {}, + {'obj': obj1, 'frag': 4}, + {'obj': obj2, 'frag': 4}, + {}, + {'obj': obj1, 'frag': 5}, + {'obj': obj2, 'frag': 5}, + {}, + {'obj': obj1, 'frag': 6}, + {'obj': obj2, 'frag': 6}, + {}, + {'obj': obj1, 'frag': 7}, + {'obj': obj2, 'frag': 7}, + {}, + {'obj': obj1, 'frag': 8}, + {'obj': obj2, 'frag': 8}, + {}, + {'obj': obj2, 'frag': 9}, + ] + + fake_response = self._fake_ec_node_response(node_frags) + + req = swob.Request.blank('/v1/a/c/o') + with capture_http_requests(fake_response) as log: + resp = req.get_response(self.app) + + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.headers['etag'], obj2['etag']) + self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) + + collected_responses = defaultdict(set) + for conn in log: + etag = conn.resp.headers['X-Object-Sysmeta-Ec-Etag'] + index = conn.resp.headers['X-Object-Sysmeta-Ec-Frag-Index'] + collected_responses[etag].add(index) + + # we go exactly as long as we have to, finding two different + # etags and some 404's (i.e. collected_responses[None]) + self.assertEqual(len(log), len(node_frags)) + self.assertEqual(len(collected_responses), 3) + + # ... regardless we should never need to fetch more than ec_ndata + # frags for any given etag + for etag, frags in collected_responses.items(): + self.assertTrue(len(frags) <= self.policy.ec_ndata, + 'collected %s frags for etag %s' % ( + len(frags), etag)) + + def test_GET_with_missing_and_mixed_frags_will_dig_deep_but_stop(self): + obj1 = self._make_ec_object_stub() + obj2 = self._make_ec_object_stub() + + node_frags = [ + {'obj': obj1, 'frag': 0}, + {'obj': obj2, 'frag': 0}, + {}, + {'obj': obj1, 'frag': 1}, + {'obj': obj2, 'frag': 1}, + {}, + {'obj': obj1, 'frag': 2}, + {'obj': obj2, 'frag': 2}, + {}, + {'obj': obj1, 'frag': 3}, + {'obj': obj2, 'frag': 3}, + {}, + {'obj': obj1, 'frag': 4}, + {'obj': obj2, 'frag': 4}, + {}, + {'obj': obj1, 'frag': 5}, + {'obj': obj2, 'frag': 5}, + {}, + {'obj': obj1, 'frag': 6}, + {'obj': obj2, 'frag': 6}, + {}, + {'obj': obj1, 'frag': 7}, + {'obj': obj2, 'frag': 7}, + {}, + {'obj': obj1, 'frag': 8}, + {'obj': obj2, 'frag': 8}, + {}, + {}, + ] + + fake_response = self._fake_ec_node_response(node_frags) + + req = swob.Request.blank('/v1/a/c/o') + with capture_http_requests(fake_response) as log: + resp = req.get_response(self.app) + + self.assertEqual(resp.status_int, 404) + + collected_responses = defaultdict(set) + for conn in log: + etag = conn.resp.headers['X-Object-Sysmeta-Ec-Etag'] + index = conn.resp.headers['X-Object-Sysmeta-Ec-Frag-Index'] + collected_responses[etag].add(index) + + # default node_iter will exhaust at 2 * replicas + self.assertEqual(len(log), 2 * self.replicas()) + self.assertEqual(len(collected_responses), 3) + + # ... 
regardless we should never need to fetch more than ec_ndata + # frags for any given etag + for etag, frags in collected_responses.items(): + self.assertTrue(len(frags) <= self.policy.ec_ndata, + 'collected %s frags for etag %s' % ( + len(frags), etag)) + + def test_GET_mixed_success_with_range(self): + fragment_size = self.policy.fragment_size + + ec_stub = self._make_ec_object_stub() + frag_archives = ec_stub['frags'] + frag_archive_size = len(ec_stub['frags'][0]) + + headers = { + 'Content-Type': 'text/plain', + 'Content-Length': fragment_size, + 'Content-Range': 'bytes 0-%s/%s' % (fragment_size - 1, + frag_archive_size), + 'X-Object-Sysmeta-Ec-Content-Length': len(ec_stub['body']), + 'X-Object-Sysmeta-Ec-Etag': ec_stub['etag'], + } + responses = [ + StubResponse(206, frag_archives[0][:fragment_size], headers), + StubResponse(206, frag_archives[1][:fragment_size], headers), + StubResponse(206, frag_archives[2][:fragment_size], headers), + StubResponse(206, frag_archives[3][:fragment_size], headers), + StubResponse(206, frag_archives[4][:fragment_size], headers), + # data nodes with old frag + StubResponse(416), + StubResponse(416), + StubResponse(206, frag_archives[7][:fragment_size], headers), + StubResponse(206, frag_archives[8][:fragment_size], headers), + StubResponse(206, frag_archives[9][:fragment_size], headers), + # hopefully we ask for two more + StubResponse(206, frag_archives[10][:fragment_size], headers), + StubResponse(206, frag_archives[11][:fragment_size], headers), + ] + + def get_response(req): + return responses.pop(0) if responses else StubResponse(404) + + req = swob.Request.blank('/v1/a/c/o', headers={'Range': 'bytes=0-3'}) + with capture_http_requests(get_response) as log: + resp = req.get_response(self.app) + + self.assertEqual(resp.status_int, 206) + self.assertEqual(resp.body, 'test') + self.assertEqual(len(log), self.policy.ec_ndata + 2) + + def test_GET_with_range_unsatisfiable_mixed_success(self): + responses = [ + StubResponse(416), + StubResponse(416), + StubResponse(416), + StubResponse(416), + StubResponse(416), + StubResponse(416), + StubResponse(416), + # sneak in bogus extra responses + StubResponse(404), + StubResponse(206), + # and then just "enough" more 416's + StubResponse(416), + StubResponse(416), + StubResponse(416), + ] + + def get_response(req): + return responses.pop(0) if responses else StubResponse(404) + + req = swob.Request.blank('/v1/a/c/o', headers={ + 'Range': 'bytes=%s-' % 100000000000000}) + with capture_http_requests(get_response) as log: + resp = req.get_response(self.app) + + self.assertEqual(resp.status_int, 416) + # ec_ndata responses that must agree, plus the bogus extras + self.assertEqual(len(log), self.policy.ec_ndata + 2) + + def test_GET_mixed_ranged_responses_success(self): + segment_size = self.policy.ec_segment_size + fragment_size = self.policy.fragment_size + new_data = ('test' * segment_size)[:-492] + new_etag = md5(new_data).hexdigest() + new_archives = self._make_ec_archive_bodies(new_data) + old_data = ('junk' * segment_size)[:-492] + old_etag = md5(old_data).hexdigest() + old_archives = self._make_ec_archive_bodies(old_data) + frag_archive_size = len(new_archives[0]) + + new_headers = { + 'Content-Type': 'text/plain', + 'Content-Length': fragment_size, + 'Content-Range': 'bytes 0-%s/%s' % (fragment_size - 1, + frag_archive_size), + 'X-Object-Sysmeta-Ec-Content-Length': len(new_data), + 'X-Object-Sysmeta-Ec-Etag': new_etag, + } + old_headers = { + 'Content-Type': 'text/plain', + 'Content-Length': fragment_size, + 
'Content-Range': 'bytes 0-%s/%s' % (fragment_size - 1, + frag_archive_size), + 'X-Object-Sysmeta-Ec-Content-Length': len(old_data), + 'X-Object-Sysmeta-Ec-Etag': old_etag, + } + # 7 primaries with stale frags, 3 handoffs failed to get new frags + responses = [ + StubResponse(206, old_archives[0][:fragment_size], old_headers), + StubResponse(206, new_archives[1][:fragment_size], new_headers), + StubResponse(206, old_archives[2][:fragment_size], old_headers), + StubResponse(206, new_archives[3][:fragment_size], new_headers), + StubResponse(206, old_archives[4][:fragment_size], old_headers), + StubResponse(206, new_archives[5][:fragment_size], new_headers), + StubResponse(206, old_archives[6][:fragment_size], old_headers), + StubResponse(206, new_archives[7][:fragment_size], new_headers), + StubResponse(206, old_archives[8][:fragment_size], old_headers), + StubResponse(206, new_archives[9][:fragment_size], new_headers), + StubResponse(206, old_archives[10][:fragment_size], old_headers), + StubResponse(206, new_archives[11][:fragment_size], new_headers), + StubResponse(206, old_archives[12][:fragment_size], old_headers), + StubResponse(206, new_archives[13][:fragment_size], new_headers), + StubResponse(206, new_archives[0][:fragment_size], new_headers), + StubResponse(404), + StubResponse(404), + StubResponse(206, new_archives[6][:fragment_size], new_headers), + StubResponse(404), + StubResponse(206, new_archives[10][:fragment_size], new_headers), + StubResponse(206, new_archives[12][:fragment_size], new_headers), + ] + + def get_response(req): + return responses.pop(0) if responses else StubResponse(404) + + req = swob.Request.blank('/v1/a/c/o') + with capture_http_requests(get_response) as log: + resp = req.get_response(self.app) + + self.assertEqual(resp.status_int, 200) + self.assertEqual(resp.body, new_data[:segment_size]) + self.assertEqual(len(log), self.policy.ec_ndata + 10) + def test_GET_mismatched_fragment_archives(self): segment_size = self.policy.ec_segment_size test_data1 = ('test' * segment_size)[:-333] From 993ee4e37af1961adba2047d5aa2eb210e423eb3 Mon Sep 17 00:00:00 2001 From: nakagawamsa Date: Fri, 28 Aug 2015 11:49:43 +0900 Subject: [PATCH 66/70] Remove duplicate X-Backend-Storage-Policy-Index key There is duplicate 'X-Backend-Storage-Policy-Index' dictionary key in unit.obj.test_server.py. One key has fixed policy index value, and another has random value. Unittest should done with random policy index, so remove key which is set fixed value. 
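For reference, Python keeps only the last occurrence of a repeated key in a
dict literal, so the fixed '37' entry was already dead once the int(policy)
entry followed it; removing it just makes that explicit. A toy illustration:

    policy_index = 2                                     # stands in for int(policy)
    headers = {
        'X-Backend-Storage-Policy-Index': '37',          # silently overridden
        'X-Backend-Storage-Policy-Index': policy_index,  # last key wins
    }
    assert headers == {'X-Backend-Storage-Policy-Index': 2}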
Change-Id: Ic91fcf44d48297d0feee33c928ca682def9790a3 --- test/unit/obj/test_server.py | 1 - 1 file changed, 1 deletion(-) diff --git a/test/unit/obj/test_server.py b/test/unit/obj/test_server.py index 3a5119369b..937d9f4106 100755 --- a/test/unit/obj/test_server.py +++ b/test/unit/obj/test_server.py @@ -3147,7 +3147,6 @@ class TestObjectController(unittest.TestCase): 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e', 'x-size': '0', 'x-timestamp': utils.Timestamp('12345').internal, - 'X-Backend-Storage-Policy-Index': '37', 'referer': 'PUT http://localhost/sda1/p/a/c/o', 'user-agent': 'object-server %d' % os.getpid(), 'X-Backend-Storage-Policy-Index': int(policy), From 05de1305a903ee4ce9c8c50fde53c552d5b90d51 Mon Sep 17 00:00:00 2001 From: Clay Gerrard Date: Thu, 27 Aug 2015 18:35:09 -0700 Subject: [PATCH 67/70] Make ssync_sender send valid chunked requests The connect method of ssync_sender tells the remote connection that it's going to send a valid HTTP chunked request, but if the remote end needs to respond with an error of any kind sender throws HTTP right out the window, picks up his ball, and closes the socket down hard - much to the surprise of the eventlet.wsgi server who up to this point had been playing along quite nicely with this 'SSYNC' nonsense assuming that everyone here is consenting mature adults. If you're going to make a "Transfer-Encoding: chunked" request have the good decency to finish the job with a proper '0\r\n\r\n'. [1] N.B. It might be possible to handle an error status during the initialize_request phase with some sort of 100-continue support, but honestly it's not entirely clear to me when the server isn't going to close the connection if the client is still expected to send the body [2] - further if the error comes later during missing_check or updates we'll for sure want to send the chunk transfer termination line before we close down the socket and this way we cover both. 1. Really, eventlet.wsgi shouldn't be so blasted brittle about this [3] 2. https://lists.w3.org/Archives/Public/ietf-http-wg/2005AprJun/0007.html 3. https://github.com/eventlet/eventlet/commit/c3ce3eef0b4d0dfdbfb1ec0186d4bb204fb8ecd5 Closes-Bug #1489587 Change-Id: Ic17c6c3075553f8cf6ef6213e62a00282f0d01cf --- swift/obj/ssync_sender.py | 5 ++++- test/unit/obj/test_ssync_receiver.py | 31 +++++++++++++++++++++++++++- test/unit/obj/test_ssync_sender.py | 3 +++ 3 files changed, 37 insertions(+), 2 deletions(-) diff --git a/swift/obj/ssync_sender.py b/swift/obj/ssync_sender.py index c768bbfc82..cf6fcad6a4 100644 --- a/swift/obj/ssync_sender.py +++ b/swift/obj/ssync_sender.py @@ -82,7 +82,6 @@ class Sender(object): set(self.send_list)) can_delete_obj = dict((hash_, self.available_map[hash_]) for hash_ in in_sync_hashes) - self.disconnect() if not self.failures: return True, can_delete_obj else: @@ -103,6 +102,8 @@ class Sender(object): self.node.get('replication_ip'), self.node.get('replication_port'), self.node.get('device'), self.job.get('partition')) + finally: + self.disconnect() except Exception: # We don't want any exceptions to escape our code and possibly # mess up the original replicator code that called us since it @@ -351,6 +352,8 @@ class Sender(object): Closes down the connection to the object server once done with the SSYNC request. 
""" + if not self.connection: + return try: with exceptions.MessageTimeout( self.daemon.node_timeout, 'disconnect'): diff --git a/test/unit/obj/test_ssync_receiver.py b/test/unit/obj/test_ssync_receiver.py index 9c757e0ae1..38654ffd61 100644 --- a/test/unit/obj/test_ssync_receiver.py +++ b/test/unit/obj/test_ssync_receiver.py @@ -31,7 +31,7 @@ from swift.common import utils from swift.common.swob import HTTPException from swift.obj import diskfile from swift.obj import server -from swift.obj import ssync_receiver +from swift.obj import ssync_receiver, ssync_sender from swift.obj.reconstructor import ObjectReconstructor from test import unit @@ -1705,6 +1705,35 @@ class TestSsyncRxServer(unittest.TestCase): def tearDown(self): shutil.rmtree(self.tmpdir) + def test_SSYNC_disconnect(self): + node = { + 'replication_ip': '127.0.0.1', + 'replication_port': self.rx_port, + 'device': 'sdb1', + } + job = { + 'partition': 0, + 'policy': POLICIES[0], + 'device': 'sdb1', + } + sender = ssync_sender.Sender(self.daemon, node, job, ['abc']) + + # kick off the sender and let the error trigger failure + with mock.patch('swift.obj.ssync_receiver.Receiver.initialize_request')\ + as mock_initialize_request: + mock_initialize_request.side_effect = \ + swob.HTTPInternalServerError() + success, _ = sender() + self.assertFalse(success) + stderr = six.StringIO() + with mock.patch('sys.stderr', stderr): + # let gc and eventlet spin a bit + del sender + for i in range(3): + eventlet.sleep(0) + self.assertNotIn('ValueError: invalid literal for int() with base 16', + stderr.getvalue()) + def test_SSYNC_device_not_available(self): with mock.patch('swift.obj.ssync_receiver.Receiver.missing_check')\ as mock_missing_check: diff --git a/test/unit/obj/test_ssync_sender.py b/test/unit/obj/test_ssync_sender.py index 53f40c757a..211ab39c46 100644 --- a/test/unit/obj/test_ssync_sender.py +++ b/test/unit/obj/test_ssync_sender.py @@ -70,6 +70,9 @@ class NullBufferedHTTPConnection(object): def getresponse(*args, **kwargs): pass + def close(*args, **kwargs): + pass + class FakeResponse(object): From 524c89b7eeff037b8a6b421888771e15f98c2da2 Mon Sep 17 00:00:00 2001 From: John Dickinson Date: Fri, 21 Aug 2015 13:39:41 -0700 Subject: [PATCH 68/70] Updated CHANGELOG, AUTHORS, and .mailmap for 2.4.0 release. Change-Id: Ic6301146b839c9921bb85c4f4c1e585c9ab66661 --- .mailmap | 3 ++ AUTHORS | 18 +++++++ CHANGELOG | 141 ++++++++++++++++++++++++++++++++++++++++++++++++++++-- 3 files changed, 158 insertions(+), 4 deletions(-) diff --git a/.mailmap b/.mailmap index 4a6368f24a..717b0ec369 100644 --- a/.mailmap +++ b/.mailmap @@ -78,3 +78,6 @@ Jaivish Kothari Kazuhiro Miyahara Alexandra Settle +Kenichiro Matsuda +Atsushi Sakai +Takashi Natsume diff --git a/AUTHORS b/AUTHORS index 1f628d9c3b..29bc166604 100644 --- a/AUTHORS +++ b/AUTHORS @@ -26,6 +26,7 @@ Chuck Thier (cthier@gmail.com) Contributors ------------ Mehdi Abaakouk (mehdi.abaakouk@enovance.com) +Timur Alperovich (timur.alperovich@gmail.com) Jesse Andrews (anotherjesse@gmail.com) Joe Arnold (joe@swiftstack.com) Ionuț Arțăriși (iartarisi@suse.cz) @@ -47,6 +48,7 @@ Tim Burke (tim.burke@gmail.com) Brian D. 
Burns (iosctr@gmail.com) Devin Carlen (devin.carlen@gmail.com) Thierry Carrez (thierry@openstack.org) +Carlos Cavanna (ccavanna@ca.ibm.com) Emmanuel Cazenave (contact@emcaz.fr) Mahati Chamarthy (mahati.chamarthy@gmail.com) Zap Chang (zapchang@gmail.com) @@ -55,6 +57,7 @@ Ray Chen (oldsharp@163.com) Harshit Chitalia (harshit@acelio.com) Brian Cline (bcline@softlayer.com) Alistair Coles (alistair.coles@hp.com) +Clément Contini (ccontini@cloudops.com) Brian Curtin (brian.curtin@rackspace.com) Thiago da Silva (thiago@redhat.com) Julien Danjou (julien@danjou.info) @@ -64,6 +67,7 @@ Cedric Dos Santos (cedric.dos.sant@gmail.com) Gerry Drudy (gerry.drudy@hp.com) Morgan Fainberg (morgan.fainberg@gmail.com) ZhiQiang Fan (aji.zqfan@gmail.com) +Oshrit Feder (oshritf@il.ibm.com) Mike Fedosin (mfedosin@mirantis.com) Ricardo Ferreira (ricardo.sff@gmail.com) Flaper Fesp (flaper87@gmail.com) @@ -91,8 +95,10 @@ Dan Hersam (dan.hersam@hp.com) Derek Higgins (derekh@redhat.com) Alex Holden (alex@alexjonasholden.com) Edward Hope-Morley (opentastic@gmail.com) +Charles Hsu (charles0126@gmail.com) Joanna H. Huang (joanna.huitzu.huang@gmail.com) Kun Huang (gareth@unitedstack.com) +Bill Huber (wbhuber@us.ibm.com) Matthieu Huin (mhu@enovance.com) Hodong Hwang (hodong.hwang@kt.com) Motonobu Ichimura (motonobu@gmail.com) @@ -126,6 +132,7 @@ John Leach (john@johnleach.co.uk) Ed Leafe (ed.leafe@rackspace.com) Thomas Leaman (thomas.leaman@hp.com) Eohyung Lee (liquidnuker@gmail.com) +Zhao Lei (zhaolei@cn.fujitsu.com) Jamie Lennox (jlennox@redhat.com) Tong Li (litong01@us.ibm.com) Changbin Liu (changbin.liu@gmail.com) @@ -136,10 +143,12 @@ Zhongyue Luo (zhongyue.nah@intel.com) Paul Luse (paul.e.luse@intel.com) Christopher MacGown (chris@pistoncloud.com) Dragos Manolescu (dragosm@hp.com) +Ben Martin (blmartin@us.ibm.com) Steve Martinelli (stevemar@ca.ibm.com) Juan J. 
Martinez (juan@memset.com) Marcelo Martins (btorch@gmail.com) Dolph Mathews (dolph.mathews@gmail.com) +Kenichiro Matsuda (matsuda_kenichi@jp.fujitsu.com) Michael Matur (michael.matur@gmail.com) Donagh McCabe (donagh.mccabe@hp.com) Andy McCrae (andy.mccrae@gmail.com) @@ -151,6 +160,7 @@ Jola Mirecka (jola.mirecka@hp.com) Kazuhiro Miyahara (miyahara.kazuhiro@lab.ntt.co.jp) Daisuke Morita (morita.daisuke@lab.ntt.co.jp) Dirk Mueller (dirk@dmllr.de) +Takashi Natsume (natsume.takashi@lab.ntt.co.jp) Russ Nelson (russ@crynwr.com) Maru Newby (mnewby@internap.com) Newptone (xingchao@unitedstack.com) @@ -170,18 +180,24 @@ Constantine Peresypkin (constantine.peresypk@rackspace.com) Dieter Plaetinck (dieter@vimeo.com) Dan Prince (dprince@redhat.com) Sarvesh Ranjan (saranjan@cisco.com) +Falk Reimann (falk.reimann@sap.com) +Brian Reitz (brian.reitz@oracle.com) Felipe Reyes (freyes@tty.cl) Janie Richling (jrichli@us.ibm.com) Matt Riedemann (mriedem@us.ibm.com) Li Riqiang (lrqrun@gmail.com) Rafael Rivero (rafael@cloudscaling.com) Victor Rodionov (victor.rodionov@nexenta.com) +Eran Rom (eranr@il.ibm.com) Aaron Rosen (arosen@nicira.com) Brent Roskos (broskos@internap.com) +Hamdi Roumani (roumani@ca.ibm.com) Shilla Saebi (shilla.saebi@gmail.com) +Atsushi Sakai (sakaia@jp.fujitsu.com) Cristian A Sanchez (cristian.a.sanchez@intel.com) Christian Schwede (cschwede@redhat.com) Mark Seger (Mark.Seger@hp.com) +Azhagu Selvan SP (tamizhgeek@gmail.com) Alexandra Settle (alexandra.settle@rackspace.com) Andrew Clay Shafer (acs@parvuscaptus.com) Mitsuhiro SHIGEMATSU (shigematsu.mitsuhiro@lab.ntt.co.jp) @@ -199,6 +215,7 @@ Jeremy Stanley (fungi@yuggoth.org) Mauro Stettler (mauro.stettler@gmail.com) Tobias Stevenson (tstevenson@vbridges.com) Victor Stinner (vstinner@redhat.com) +Akihito Takai (takaiak@nttdata.co.jp) Pearl Yajing Tan (pearl.y.tan@seagate.com) Yuriy Taraday (yorik.sar@gmail.com) Monty Taylor (mordred@inaugust.com) @@ -232,5 +249,6 @@ Guang Yee (guang.yee@hp.com) Pete Zaitcev (zaitcev@kotori.zaitcev.us) Hua Zhang (zhuadl@cn.ibm.com) Jian Zhang (jian.zhang@intel.com) +Kai Zhang (zakir.exe@gmail.com) Ning Zhang (ning@zmanda.com) Yuan Zhou (yuan.zhou@intel.com) diff --git a/CHANGELOG b/CHANGELOG index 3625a077ed..f4ce9fcf35 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,3 +1,132 @@ +swift (2.4.0) + + * Dependency changes + + - Added six requirement. This is part of an ongoing effort to add + support for Python 3. + + - Dropped support for Python 2.6. + + * Config changes + + - Recent versions of Python restrict the number of headers allowed in a + request to 100. This number may be too low for custom middleware. The + new "extra_header_count" config value in swift.conf can be used to + increase the number of headers allowed. + + - Renamed "run_pause" setting to "interval" (current configs with + run_pause still work). Future versions of Swift may remove the + run_pause setting. + + * Versioned writes middleware + + The versioned writes feature has been refactored and reimplemented as + middleware. You should explicitly add the versioned_writes middleware to + your proxy pipeline, but do not remove or disable the existing container + server config setting ("allow_versions"), if it is currently enabled. + The existing container server config setting enables existing + containers to continue being versioned. Please see + http://swift.openstack.org/middleware.html#how-to-enable-object-versioning-in-a-swift-cluster + for further upgrade notes. 
+ + * Allow 1+ object-servers-per-disk deployment + + Enabled by a new > 0 integer config value, "servers_per_port" in the + [DEFAULT] config section for object-server and/or replication server + configs. The setting's integer value determines how many different + object-server workers handle requests for any single unique local port + in the ring. In this mode, the parent swift-object-server process + continues to run as the original user (i.e. root if low-port binding + is required), binds to all ports as defined in the ring, and forks off + the specified number of workers per listen socket. The child, per-port + servers drop privileges and behave pretty much how object-server workers + always have, except that because the ring has unique ports per disk, the + object-servers will only be handling requests for a single disk. The + parent process detects dead servers and restarts them (with the correct + listen socket), starts missing servers when an updated ring file is + found with a device on the server with a new port, and kills extraneous + servers when their port is found to no longer be in the ring. The ring + files are stat'ed at most every "ring_check_interval" seconds, as + configured in the object-server config (same default of 15s). + + In testing, this deployment configuration (with a value of 3) lowers + request latency, improves requests per second, and isolates slow disk + IO as compared to the existing "workers" setting. To use this, each + device must be added to the ring using a different port. + + * Do container listing updates in another (green)thread + + The object server has learned the "container_update_timeout" setting + (with a default of 1 second). This value is the number of seconds that + the object server will wait for the container server to update the + listing before returning the status of the object PUT operation. + + Previously, the object server would wait up to 3 seconds for the + container server response. The new behavior dramatically lowers object + PUT latency when container servers in the cluster are busy (e.g. when + the container is very large). Setting the value too low may result in a + client PUT'ing an object and not being able to immediately find it in + listings. Setting it too high will increase latency for clients when + container servers are busy. + + * TempURL fixes (closes CVE-2015-5223) + + Do not allow PUT tempurls to create pointers to other data. + Specifically, disallow the creation of DLO object manifests via a PUT + tempurl. This prevents discoverability attacks which can use any PUT + tempurl to probe for private data by creating a DLO object manifest and + then using the PUT tempurl to head the object. + + * Ring changes + + - Partition placement no longer uses the port number to place + partitions. This improves dispersion in small clusters running one + object server per drive, and it does not affect dispersion in + clusters running one object server per server. + + - Added ring-builder-analyzer tool to more easily test and analyze a + series of ring management operations. + + - Stop moving partitions unnecessarily when overload is on. + + * Significant improvements and bug fixes have been made to erasure code + support. This feature is suitable for beta testing, but it is not yet + ready for broad production usage. + + * Bulk upload now treats user xattrs on files in the given archive as + object metadata on the resulting created objects. 
+ + * Emit warning log in object replicator if "handoffs_first" or + "handoff_delete" is set. + + * Enable object replicator's failure count in swift-recon. + + * Added storage policy support to dispersion tools. + + * Support keystone v3 domains in swift-dispersion. + + * Added domain_remap information to the /info endpoint. + + * Added support for a "default_reseller_prefix" in domain_remap + middleware config. + + * Allow SLO PUTs to forgo per-segment integrity checks. Previously, each + segment referenced in the manifest also needed the correct etag and + bytes setting. These fields now allow the "null" value to skip those + particular checks on the given segment. + + * Allow rsync to use compression via a "rsync_compress" config. If set to + true, compression is only enabled for an rsync to a device in a + different region. In some cases, this can speed up cross-region + replication data transfer. + + * Added time synchronization check in swift-recon (the --time option). + + * The account reaper now runs faster on large accounts. + + * Various other minor bug fixes and improvements. + + swift (2.3.0, OpenStack Kilo) * Erasure Code support (beta) @@ -58,6 +187,7 @@ swift (2.3.0, OpenStack Kilo) * Various other minor bug fixes and improvements. + swift (2.2.2) * Data placement changes @@ -117,6 +247,7 @@ swift (2.2.2) * Various other minor bug fixes and improvements. + swift (2.2.1) * Swift now rejects object names with Unicode surrogates. @@ -962,14 +1093,14 @@ swift (1.7.0) Serialize RingData in a versioned, custom format which is a combination of a JSON-encoded header and .tostring() dumps of the - replica2part2dev_id arrays. This format deserializes hundreds of times + replica2part2dev_id arrays. This format deserializes hundreds of times faster than rings serialized with Python 2.7's pickle (a significant performance regression for ring loading between Python 2.6 and Python - 2.7). Fixes bug 1031954. + 2.7). Fixes bug 1031954. The new implementation is backward-compatible; if a ring does not begin with a new-style magic string, it is assumed to be an - old-style pickle-dumped ring and is handled as before. So new Swift + old-style pickle-dumped ring and is handled as before. So new Swift code can read old rings, but old Swift code will not be able to read newly-serialized rings. @@ -1430,4 +1561,6 @@ swift (1.3.0, OpenStack Cactus) swift (1.2.0, OpenStack Bexar) -swift (1.0.0, OpenStack Austin) +swift (1.1.0, OpenStack Austin) + +swift (1.0.0, Initial Release) From d06d4ad0fd2dfe69da8008e729651264522c6c06 Mon Sep 17 00:00:00 2001 From: Minwoo Bae Date: Tue, 1 Sep 2015 15:08:44 -0500 Subject: [PATCH 69/70] Included reference in swift.obj.diskfile to enumerate the string used for data file paths. Change-Id: Ie22caa678bc00dfc43fabec7efbbb9f34490f1b5 --- swift/obj/diskfile.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/swift/obj/diskfile.py b/swift/obj/diskfile.py index 51158a0fe4..d3937cf9a5 100644 --- a/swift/obj/diskfile.py +++ b/swift/obj/diskfile.py @@ -1364,6 +1364,10 @@ class BaseDiskFile(object): The arguments to the constructor are considered implementation specific. The API does not define the constructor arguments. + The following path format is used for data file locations: + ///// + . 
+ :param mgr: associated DiskFileManager instance :param device_path: path to the target device or drive :param threadpool: thread pool to use for blocking operations From e02609c66a804845672413b06830b87395afef31 Mon Sep 17 00:00:00 2001 From: Samuel Merritt Date: Tue, 1 Sep 2015 15:19:50 -0700 Subject: [PATCH 70/70] Preserve traceback in swift-dispersion-report Commit c690bcb fixed a bug in the dispersion report, but changed this from a bare "raise" to "raise err", which loses the traceback. Not a big deal, but worth putting back IMO. Change-Id: Id5b72153a4b8df8e3faaf1fa3fb2040e28ba85cc --- bin/swift-dispersion-report | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bin/swift-dispersion-report b/bin/swift-dispersion-report index 31a1741dda..5d524892d0 100755 --- a/bin/swift-dispersion-report +++ b/bin/swift-dispersion-report @@ -179,7 +179,7 @@ def object_dispersion_report(coropool, connpool, account, object_ring, container, prefix='dispersion_', full_listing=True)[1]] except urllib2.HTTPError as err: if err.getcode() != 404: - raise err + raise print >>stderr, 'No objects to query. Has ' \ 'swift-dispersion-populate been run?'
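For context on the bare raise: under Python 2 a bare "raise" inside an except
block re-raises with the original traceback intact, while "raise err" builds a
new traceback starting at the re-raise site, hiding the frame that actually
failed. A standalone illustration, not part of the patch:

    import traceback

    def fail():
        raise ValueError('boom')

    def rethrow_bare():
        try:
            fail()
        except ValueError:
            raise           # traceback still shows the fail() frame

    def rethrow_instance():
        try:
            fail()
        except ValueError as err:
            raise err       # traceback now starts here; fail() is gone

    for func in (rethrow_bare, rethrow_instance):
        try:
            func()
        except ValueError:
            print traceback.format_exc()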