2013-09-20 01:00:54 +08:00
|
|
|
# Copyright (c) 2010 OpenStack Foundation
|
2010-07-12 17:03:45 -05:00
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
|
|
# implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
2013-08-31 22:36:58 -04:00
|
|
|
"""Tests for swift.common.wsgi"""
|
2010-07-12 17:03:45 -05:00
|
|
|
|
2013-03-26 20:42:26 +00:00
|
|
|
import errno
|
2013-03-25 16:34:43 -07:00
|
|
|
import logging
|
2010-07-12 17:03:45 -05:00
|
|
|
import mimetools
|
|
|
|
import socket
|
|
|
|
import unittest
|
2013-03-25 16:34:43 -07:00
|
|
|
import os
|
|
|
|
from textwrap import dedent
|
Object replication ssync (an rsync alternative)
For this commit, ssync is just a direct replacement for how
we use rsync. Assuming we switch over to ssync completely
someday and drop rsync, we will then be able to improve the
algorithms even further (removing local objects as we
successfully transfer each one rather than waiting for whole
partitions, using an index.db with hash-trees, etc., etc.)
For easier review, this commit can be thought of in distinct
parts:
1) New global_conf_callback functionality for allowing
services to perform setup code before workers, etc. are
launched. (This is then used by ssync in the object
server to create a cross-worker semaphore to restrict
concurrent incoming replication.)
2) A bit of shifting of items up from object server and
replicator to diskfile or DEFAULT conf sections for
better sharing of the same settings. conn_timeout,
node_timeout, client_timeout, network_chunk_size,
disk_chunk_size.
3) Modifications to the object server and replicator to
optionally use ssync in place of rsync. This is done in
a generic enough way that switching to FutureSync should
be easy someday.
4) The biggest part, and (at least for now) completely
optional part, are the new ssync_sender and
ssync_receiver files. Nice and isolated for easier
testing and visibility into test coverage, etc.
All the usual logging, statsd, recon, etc. instrumentation
is still there when using ssync, just as it is when using
rsync.
Beyond the essential error and exceptional condition
logging, I have not added any additional instrumentation at
this time. Unless there is something someone finds super
pressing to have added to the logging, I think such
additions would be better as separate change reviews.
FOR NOW, IT IS NOT RECOMMENDED TO USE SSYNC ON PRODUCTION
CLUSTERS. Some of us will be in a limited fashion to look
for any subtle issues, tuning, etc. but generally ssync is
an experimental feature. In its current implementation it is
probably going to be a bit slower than rsync, but if all
goes according to plan it will end up much faster.
There are no comparisions yet between ssync and rsync other
than some raw virtual machine testing I've done to show it
should compete well enough once we can put it in use in the
real world.
If you Tweet, Google+, or whatever, be sure to indicate it's
experimental. It'd be best to keep it out of deployment
guides, howtos, etc. until we all figure out if we like it,
find it to be stable, etc.
Change-Id: If003dcc6f4109e2d2a42f4873a0779110fff16d6
2013-08-28 16:10:43 +00:00
|
|
|
from contextlib import nested
|
2010-07-12 17:03:45 -05:00
|
|
|
from StringIO import StringIO
|
2010-11-19 12:15:41 -06:00
|
|
|
from collections import defaultdict
|
2012-05-07 13:46:41 -07:00
|
|
|
from urllib import quote
|
2010-07-12 17:03:45 -05:00
|
|
|
|
2013-03-25 16:34:43 -07:00
|
|
|
from eventlet import listen
|
|
|
|
|
Object replication ssync (an rsync alternative)
For this commit, ssync is just a direct replacement for how
we use rsync. Assuming we switch over to ssync completely
someday and drop rsync, we will then be able to improve the
algorithms even further (removing local objects as we
successfully transfer each one rather than waiting for whole
partitions, using an index.db with hash-trees, etc., etc.)
For easier review, this commit can be thought of in distinct
parts:
1) New global_conf_callback functionality for allowing
services to perform setup code before workers, etc. are
launched. (This is then used by ssync in the object
server to create a cross-worker semaphore to restrict
concurrent incoming replication.)
2) A bit of shifting of items up from object server and
replicator to diskfile or DEFAULT conf sections for
better sharing of the same settings. conn_timeout,
node_timeout, client_timeout, network_chunk_size,
disk_chunk_size.
3) Modifications to the object server and replicator to
optionally use ssync in place of rsync. This is done in
a generic enough way that switching to FutureSync should
be easy someday.
4) The biggest part, and (at least for now) completely
optional part, are the new ssync_sender and
ssync_receiver files. Nice and isolated for easier
testing and visibility into test coverage, etc.
All the usual logging, statsd, recon, etc. instrumentation
is still there when using ssync, just as it is when using
rsync.
Beyond the essential error and exceptional condition
logging, I have not added any additional instrumentation at
this time. Unless there is something someone finds super
pressing to have added to the logging, I think such
additions would be better as separate change reviews.
FOR NOW, IT IS NOT RECOMMENDED TO USE SSYNC ON PRODUCTION
CLUSTERS. Some of us will be in a limited fashion to look
for any subtle issues, tuning, etc. but generally ssync is
an experimental feature. In its current implementation it is
probably going to be a bit slower than rsync, but if all
goes according to plan it will end up much faster.
There are no comparisions yet between ssync and rsync other
than some raw virtual machine testing I've done to show it
should compete well enough once we can put it in use in the
real world.
If you Tweet, Google+, or whatever, be sure to indicate it's
experimental. It'd be best to keep it out of deployment
guides, howtos, etc. until we all figure out if we like it,
find it to be stable, etc.
Change-Id: If003dcc6f4109e2d2a42f4873a0779110fff16d6
2013-08-28 16:10:43 +00:00
|
|
|
import mock
|
|
|
|
|
2013-11-14 13:56:47 -05:00
|
|
|
import swift.common.middleware.catch_errors
|
2013-12-03 22:02:39 +00:00
|
|
|
import swift.common.middleware.gatekeeper
|
2013-11-14 13:56:47 -05:00
|
|
|
import swift.proxy.server
|
|
|
|
|
2014-04-16 17:16:57 -07:00
|
|
|
import swift.obj.server as obj_server
|
|
|
|
import swift.container.server as container_server
|
|
|
|
import swift.account.server as account_server
|
2012-09-04 14:02:19 -07:00
|
|
|
from swift.common.swob import Request
|
2014-04-28 19:22:51 -07:00
|
|
|
from swift.common import wsgi, utils
|
2014-05-27 01:17:13 -07:00
|
|
|
from swift.common.storage_policy import StoragePolicy, \
|
|
|
|
StoragePolicyCollection
|
2013-03-25 16:34:43 -07:00
|
|
|
|
2014-04-16 17:16:57 -07:00
|
|
|
from test.unit import temptree, with_tempdir, write_fake_ring, patch_policies
|
2013-03-25 16:34:43 -07:00
|
|
|
|
2013-12-13 13:11:01 -08:00
|
|
|
from paste.deploy import loadwsgi
|
2013-03-25 16:34:43 -07:00
|
|
|
|
|
|
|
|
|
|
|
def _fake_rings(tmpdir):
    """Write stub account/container/object rings into *tmpdir*.

    Includes one object ring per storage policy so tests that load a full
    proxy pipeline find every ring file they expect.
    """
    for server_type in ('account', 'container'):
        write_fake_ring(os.path.join(tmpdir, '%s.ring.gz' % server_type))
    # Some storage-policy-specific fake rings.
    policies = StoragePolicyCollection([
        StoragePolicy(0, 'zero'),
        StoragePolicy(1, 'one', is_default=True),
    ])
    for pol in policies:
        write_fake_ring(
            os.path.join(tmpdir, pol.ring_name + '.ring.gz'))
|
2010-07-12 17:03:45 -05:00
|
|
|
|
2012-10-08 15:45:40 -07:00
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
class TestWSGI(unittest.TestCase):
|
2013-08-31 22:36:58 -04:00
|
|
|
"""Tests for swift.common.wsgi"""
|
2010-07-12 17:03:45 -05:00
|
|
|
|
2013-03-25 16:34:43 -07:00
|
|
|
def setUp(self):
|
|
|
|
utils.HASH_PATH_PREFIX = 'startcap'
|
|
|
|
self._orig_parsetype = mimetools.Message.parsetype
|
|
|
|
|
|
|
|
def tearDown(self):
|
|
|
|
mimetools.Message.parsetype = self._orig_parsetype
|
|
|
|
|
2010-07-12 17:03:45 -05:00
|
|
|
def test_monkey_patch_mimetools(self):
|
|
|
|
sio = StringIO('blah')
|
|
|
|
self.assertEquals(mimetools.Message(sio).type, 'text/plain')
|
|
|
|
sio = StringIO('blah')
|
|
|
|
self.assertEquals(mimetools.Message(sio).plisttext, '')
|
|
|
|
sio = StringIO('blah')
|
|
|
|
self.assertEquals(mimetools.Message(sio).maintype, 'text')
|
|
|
|
sio = StringIO('blah')
|
|
|
|
self.assertEquals(mimetools.Message(sio).subtype, 'plain')
|
|
|
|
sio = StringIO('Content-Type: text/html; charset=ISO-8859-4')
|
|
|
|
self.assertEquals(mimetools.Message(sio).type, 'text/html')
|
|
|
|
sio = StringIO('Content-Type: text/html; charset=ISO-8859-4')
|
|
|
|
self.assertEquals(mimetools.Message(sio).plisttext,
|
|
|
|
'; charset=ISO-8859-4')
|
|
|
|
sio = StringIO('Content-Type: text/html; charset=ISO-8859-4')
|
|
|
|
self.assertEquals(mimetools.Message(sio).maintype, 'text')
|
|
|
|
sio = StringIO('Content-Type: text/html; charset=ISO-8859-4')
|
|
|
|
self.assertEquals(mimetools.Message(sio).subtype, 'html')
|
|
|
|
|
|
|
|
wsgi.monkey_patch_mimetools()
|
|
|
|
sio = StringIO('blah')
|
|
|
|
self.assertEquals(mimetools.Message(sio).type, None)
|
|
|
|
sio = StringIO('blah')
|
|
|
|
self.assertEquals(mimetools.Message(sio).plisttext, '')
|
|
|
|
sio = StringIO('blah')
|
|
|
|
self.assertEquals(mimetools.Message(sio).maintype, None)
|
|
|
|
sio = StringIO('blah')
|
|
|
|
self.assertEquals(mimetools.Message(sio).subtype, None)
|
|
|
|
sio = StringIO('Content-Type: text/html; charset=ISO-8859-4')
|
|
|
|
self.assertEquals(mimetools.Message(sio).type, 'text/html')
|
|
|
|
sio = StringIO('Content-Type: text/html; charset=ISO-8859-4')
|
|
|
|
self.assertEquals(mimetools.Message(sio).plisttext,
|
|
|
|
'; charset=ISO-8859-4')
|
|
|
|
sio = StringIO('Content-Type: text/html; charset=ISO-8859-4')
|
|
|
|
self.assertEquals(mimetools.Message(sio).maintype, 'text')
|
|
|
|
sio = StringIO('Content-Type: text/html; charset=ISO-8859-4')
|
|
|
|
self.assertEquals(mimetools.Message(sio).subtype, 'html')
|
|
|
|
|
2013-03-25 16:34:43 -07:00
|
|
|
def test_init_request_processor(self):
|
|
|
|
config = """
|
|
|
|
[DEFAULT]
|
|
|
|
swift_dir = TEMPDIR
|
|
|
|
|
|
|
|
[pipeline:main]
|
2013-12-13 13:11:01 -08:00
|
|
|
pipeline = proxy-server
|
2013-03-25 16:34:43 -07:00
|
|
|
|
|
|
|
[app:proxy-server]
|
|
|
|
use = egg:swift#proxy
|
|
|
|
conn_timeout = 0.2
|
|
|
|
"""
|
|
|
|
contents = dedent(config)
|
|
|
|
with temptree(['proxy-server.conf']) as t:
|
|
|
|
conf_file = os.path.join(t, 'proxy-server.conf')
|
|
|
|
with open(conf_file, 'w') as f:
|
|
|
|
f.write(contents.replace('TEMPDIR', t))
|
|
|
|
_fake_rings(t)
|
|
|
|
app, conf, logger, log_name = wsgi.init_request_processor(
|
|
|
|
conf_file, 'proxy-server')
|
2013-11-21 17:31:16 -08:00
|
|
|
# verify pipeline is catch_errors -> dlo -> proxy-server
|
2013-03-25 16:34:43 -07:00
|
|
|
expected = swift.common.middleware.catch_errors.CatchErrorMiddleware
|
|
|
|
self.assert_(isinstance(app, expected))
|
2013-11-21 17:31:16 -08:00
|
|
|
|
2013-12-03 22:02:39 +00:00
|
|
|
app = app.app
|
|
|
|
expected = swift.common.middleware.gatekeeper.GatekeeperMiddleware
|
|
|
|
self.assert_(isinstance(app, expected))
|
2013-11-21 17:31:16 -08:00
|
|
|
|
|
|
|
app = app.app
|
|
|
|
expected = swift.common.middleware.dlo.DynamicLargeObject
|
|
|
|
self.assert_(isinstance(app, expected))
|
|
|
|
|
|
|
|
app = app.app
|
|
|
|
expected = swift.proxy.server.Application
|
|
|
|
self.assert_(isinstance(app, expected))
|
2013-03-25 16:34:43 -07:00
|
|
|
# config settings applied to app instance
|
2013-11-21 17:31:16 -08:00
|
|
|
self.assertEquals(0.2, app.conn_timeout)
|
2013-03-25 16:34:43 -07:00
|
|
|
# appconfig returns values from 'proxy-server' section
|
|
|
|
expected = {
|
|
|
|
'__file__': conf_file,
|
|
|
|
'here': os.path.dirname(conf_file),
|
|
|
|
'conn_timeout': '0.2',
|
|
|
|
'swift_dir': t,
|
|
|
|
}
|
|
|
|
self.assertEquals(expected, conf)
|
|
|
|
# logger works
|
|
|
|
logger.info('testing')
|
|
|
|
self.assertEquals('proxy-server', log_name)
|
|
|
|
|
|
|
|
def test_init_request_processor_from_conf_dir(self):
|
|
|
|
config_dir = {
|
|
|
|
'proxy-server.conf.d/pipeline.conf': """
|
|
|
|
[pipeline:main]
|
|
|
|
pipeline = catch_errors proxy-server
|
|
|
|
""",
|
|
|
|
'proxy-server.conf.d/app.conf': """
|
|
|
|
[app:proxy-server]
|
|
|
|
use = egg:swift#proxy
|
|
|
|
conn_timeout = 0.2
|
|
|
|
""",
|
|
|
|
'proxy-server.conf.d/catch-errors.conf': """
|
|
|
|
[filter:catch_errors]
|
|
|
|
use = egg:swift#catch_errors
|
|
|
|
"""
|
|
|
|
}
|
|
|
|
# strip indent from test config contents
|
|
|
|
config_dir = dict((f, dedent(c)) for (f, c) in config_dir.items())
|
2013-12-13 13:11:01 -08:00
|
|
|
with mock.patch('swift.proxy.server.Application.modify_wsgi_pipeline'):
|
|
|
|
with temptree(*zip(*config_dir.items())) as conf_root:
|
|
|
|
conf_dir = os.path.join(conf_root, 'proxy-server.conf.d')
|
|
|
|
with open(os.path.join(conf_dir, 'swift.conf'), 'w') as f:
|
|
|
|
f.write('[DEFAULT]\nswift_dir = %s' % conf_root)
|
|
|
|
_fake_rings(conf_root)
|
|
|
|
app, conf, logger, log_name = wsgi.init_request_processor(
|
|
|
|
conf_dir, 'proxy-server')
|
|
|
|
# verify pipeline is catch_errors -> proxy-server
|
2013-03-25 16:34:43 -07:00
|
|
|
expected = swift.common.middleware.catch_errors.CatchErrorMiddleware
|
|
|
|
self.assert_(isinstance(app, expected))
|
|
|
|
self.assert_(isinstance(app.app, swift.proxy.server.Application))
|
|
|
|
# config settings applied to app instance
|
|
|
|
self.assertEquals(0.2, app.app.conn_timeout)
|
|
|
|
# appconfig returns values from 'proxy-server' section
|
|
|
|
expected = {
|
|
|
|
'__file__': conf_dir,
|
|
|
|
'here': conf_dir,
|
|
|
|
'conn_timeout': '0.2',
|
|
|
|
'swift_dir': conf_root,
|
|
|
|
}
|
|
|
|
self.assertEquals(expected, conf)
|
|
|
|
# logger works
|
|
|
|
logger.info('testing')
|
|
|
|
self.assertEquals('proxy-server', log_name)
|
|
|
|
|
2010-11-19 12:15:41 -06:00
|
|
|
def test_get_socket(self):
|
|
|
|
# stubs
|
|
|
|
conf = {}
|
|
|
|
ssl_conf = {
|
|
|
|
'cert_file': '',
|
|
|
|
'key_file': '',
|
|
|
|
}
|
|
|
|
|
|
|
|
# mocks
|
2014-03-03 17:28:48 -08:00
|
|
|
class MockSocket(object):
|
2010-11-19 12:15:41 -06:00
|
|
|
def __init__(self):
|
|
|
|
self.opts = defaultdict(dict)
|
|
|
|
|
|
|
|
def setsockopt(self, level, optname, value):
|
|
|
|
self.opts[level][optname] = value
|
|
|
|
|
|
|
|
def mock_listen(*args, **kwargs):
|
|
|
|
return MockSocket()
|
|
|
|
|
2014-03-03 17:28:48 -08:00
|
|
|
class MockSsl(object):
|
2010-11-19 12:15:41 -06:00
|
|
|
def __init__(self):
|
|
|
|
self.wrap_socket_called = []
|
|
|
|
|
|
|
|
def wrap_socket(self, sock, **kwargs):
|
|
|
|
self.wrap_socket_called.append(kwargs)
|
|
|
|
return sock
|
|
|
|
|
|
|
|
# patch
|
|
|
|
old_listen = wsgi.listen
|
|
|
|
old_ssl = wsgi.ssl
|
|
|
|
try:
|
|
|
|
wsgi.listen = mock_listen
|
|
|
|
wsgi.ssl = MockSsl()
|
|
|
|
# test
|
|
|
|
sock = wsgi.get_socket(conf)
|
|
|
|
# assert
|
|
|
|
self.assert_(isinstance(sock, MockSocket))
|
|
|
|
expected_socket_opts = {
|
|
|
|
socket.SOL_SOCKET: {
|
|
|
|
socket.SO_REUSEADDR: 1,
|
|
|
|
socket.SO_KEEPALIVE: 1,
|
|
|
|
},
|
2013-11-25 13:30:41 -05:00
|
|
|
socket.IPPROTO_TCP: {
|
|
|
|
socket.TCP_NODELAY: 1,
|
|
|
|
}
|
2010-11-19 12:15:41 -06:00
|
|
|
}
|
2013-06-18 18:05:28 +04:00
|
|
|
if hasattr(socket, 'TCP_KEEPIDLE'):
|
2013-11-25 13:30:41 -05:00
|
|
|
expected_socket_opts[socket.IPPROTO_TCP][
|
|
|
|
socket.TCP_KEEPIDLE] = 600
|
2010-11-19 12:15:41 -06:00
|
|
|
self.assertEquals(sock.opts, expected_socket_opts)
|
|
|
|
# test ssl
|
|
|
|
sock = wsgi.get_socket(ssl_conf)
|
|
|
|
expected_kwargs = {
|
|
|
|
'certfile': '',
|
|
|
|
'keyfile': '',
|
|
|
|
}
|
|
|
|
self.assertEquals(wsgi.ssl.wrap_socket_called, [expected_kwargs])
|
|
|
|
finally:
|
|
|
|
wsgi.listen = old_listen
|
|
|
|
wsgi.ssl = old_ssl
|
|
|
|
|
|
|
|
def test_address_in_use(self):
|
|
|
|
# stubs
|
|
|
|
conf = {}
|
|
|
|
|
|
|
|
# mocks
|
|
|
|
def mock_listen(*args, **kwargs):
|
|
|
|
raise socket.error(errno.EADDRINUSE)
|
|
|
|
|
|
|
|
def value_error_listen(*args, **kwargs):
|
|
|
|
raise ValueError('fake')
|
|
|
|
|
|
|
|
def mock_sleep(*args):
|
|
|
|
pass
|
|
|
|
|
2014-03-03 17:28:48 -08:00
|
|
|
class MockTime(object):
|
2010-11-19 12:15:41 -06:00
|
|
|
"""Fast clock advances 10 seconds after every call to time
|
|
|
|
"""
|
|
|
|
def __init__(self):
|
|
|
|
self.current_time = old_time.time()
|
|
|
|
|
|
|
|
def time(self, *args, **kwargs):
|
|
|
|
rv = self.current_time
|
|
|
|
# advance for next call
|
|
|
|
self.current_time += 10
|
|
|
|
return rv
|
|
|
|
|
|
|
|
old_listen = wsgi.listen
|
|
|
|
old_sleep = wsgi.sleep
|
|
|
|
old_time = wsgi.time
|
|
|
|
try:
|
|
|
|
wsgi.listen = mock_listen
|
|
|
|
wsgi.sleep = mock_sleep
|
|
|
|
wsgi.time = MockTime()
|
|
|
|
# test error
|
|
|
|
self.assertRaises(Exception, wsgi.get_socket, conf)
|
|
|
|
# different error
|
|
|
|
wsgi.listen = value_error_listen
|
|
|
|
self.assertRaises(ValueError, wsgi.get_socket, conf)
|
|
|
|
finally:
|
|
|
|
wsgi.listen = old_listen
|
|
|
|
wsgi.sleep = old_sleep
|
|
|
|
wsgi.time = old_time
|
|
|
|
|
2013-03-25 16:34:43 -07:00
|
|
|
def test_run_server(self):
|
|
|
|
config = """
|
|
|
|
[DEFAULT]
|
|
|
|
eventlet_debug = yes
|
|
|
|
client_timeout = 30
|
2013-07-11 17:00:57 -07:00
|
|
|
max_clients = 1000
|
2013-03-25 16:34:43 -07:00
|
|
|
swift_dir = TEMPDIR
|
|
|
|
|
|
|
|
[pipeline:main]
|
|
|
|
pipeline = proxy-server
|
|
|
|
|
|
|
|
[app:proxy-server]
|
|
|
|
use = egg:swift#proxy
|
2013-07-11 17:00:57 -07:00
|
|
|
# while "set" values normally override default
|
|
|
|
set client_timeout = 20
|
|
|
|
# this section is not in conf during run_server
|
|
|
|
set max_clients = 10
|
2013-03-25 16:34:43 -07:00
|
|
|
"""
|
|
|
|
|
|
|
|
contents = dedent(config)
|
|
|
|
with temptree(['proxy-server.conf']) as t:
|
|
|
|
conf_file = os.path.join(t, 'proxy-server.conf')
|
|
|
|
with open(conf_file, 'w') as f:
|
|
|
|
f.write(contents.replace('TEMPDIR', t))
|
|
|
|
_fake_rings(t)
|
2013-12-13 13:11:01 -08:00
|
|
|
with mock.patch('swift.proxy.server.Application.'
|
|
|
|
'modify_wsgi_pipeline'):
|
|
|
|
with mock.patch('swift.common.wsgi.wsgi') as _wsgi:
|
|
|
|
with mock.patch('swift.common.wsgi.eventlet') as _eventlet:
|
|
|
|
conf = wsgi.appconfig(conf_file)
|
|
|
|
logger = logging.getLogger('test')
|
|
|
|
sock = listen(('localhost', 0))
|
|
|
|
wsgi.run_server(conf, logger, sock)
|
2013-03-25 16:34:43 -07:00
|
|
|
self.assertEquals('HTTP/1.0',
|
|
|
|
_wsgi.HttpProtocol.default_request_version)
|
|
|
|
self.assertEquals(30, _wsgi.WRITE_TIMEOUT)
|
|
|
|
_eventlet.hubs.use_hub.assert_called_with(utils.get_hub())
|
|
|
|
_eventlet.patcher.monkey_patch.assert_called_with(all=False,
|
|
|
|
socket=True)
|
|
|
|
_eventlet.debug.hub_exceptions.assert_called_with(True)
|
|
|
|
_wsgi.server.assert_called()
|
|
|
|
args, kwargs = _wsgi.server.call_args
|
|
|
|
server_sock, server_app, server_logger = args
|
|
|
|
self.assertEquals(sock, server_sock)
|
|
|
|
self.assert_(isinstance(server_app, swift.proxy.server.Application))
|
2013-07-11 17:00:57 -07:00
|
|
|
self.assertEquals(20, server_app.client_timeout)
|
2013-03-25 16:34:43 -07:00
|
|
|
self.assert_(isinstance(server_logger, wsgi.NullLogger))
|
|
|
|
self.assert_('custom_pool' in kwargs)
|
2013-07-11 17:00:57 -07:00
|
|
|
self.assertEquals(1000, kwargs['custom_pool'].size)
|
2013-03-25 16:34:43 -07:00
|
|
|
|
|
|
|
def test_run_server_conf_dir(self):
|
|
|
|
config_dir = {
|
|
|
|
'proxy-server.conf.d/pipeline.conf': """
|
|
|
|
[pipeline:main]
|
|
|
|
pipeline = proxy-server
|
|
|
|
""",
|
|
|
|
'proxy-server.conf.d/app.conf': """
|
|
|
|
[app:proxy-server]
|
|
|
|
use = egg:swift#proxy
|
|
|
|
""",
|
|
|
|
'proxy-server.conf.d/default.conf': """
|
|
|
|
[DEFAULT]
|
|
|
|
eventlet_debug = yes
|
|
|
|
client_timeout = 30
|
|
|
|
"""
|
|
|
|
}
|
|
|
|
# strip indent from test config contents
|
|
|
|
config_dir = dict((f, dedent(c)) for (f, c) in config_dir.items())
|
|
|
|
with temptree(*zip(*config_dir.items())) as conf_root:
|
|
|
|
conf_dir = os.path.join(conf_root, 'proxy-server.conf.d')
|
|
|
|
with open(os.path.join(conf_dir, 'swift.conf'), 'w') as f:
|
|
|
|
f.write('[DEFAULT]\nswift_dir = %s' % conf_root)
|
|
|
|
_fake_rings(conf_root)
|
2013-12-13 13:11:01 -08:00
|
|
|
with mock.patch('swift.proxy.server.Application.'
|
|
|
|
'modify_wsgi_pipeline'):
|
|
|
|
with mock.patch('swift.common.wsgi.wsgi') as _wsgi:
|
|
|
|
with mock.patch('swift.common.wsgi.eventlet') as _eventlet:
|
|
|
|
with mock.patch.dict('os.environ', {'TZ': ''}):
|
|
|
|
conf = wsgi.appconfig(conf_dir)
|
|
|
|
logger = logging.getLogger('test')
|
|
|
|
sock = listen(('localhost', 0))
|
|
|
|
wsgi.run_server(conf, logger, sock)
|
|
|
|
self.assert_(os.environ['TZ'] is not '')
|
2013-03-25 16:34:43 -07:00
|
|
|
|
|
|
|
self.assertEquals('HTTP/1.0',
|
|
|
|
_wsgi.HttpProtocol.default_request_version)
|
|
|
|
self.assertEquals(30, _wsgi.WRITE_TIMEOUT)
|
|
|
|
_eventlet.hubs.use_hub.assert_called_with(utils.get_hub())
|
|
|
|
_eventlet.patcher.monkey_patch.assert_called_with(all=False,
|
|
|
|
socket=True)
|
|
|
|
_eventlet.debug.hub_exceptions.assert_called_with(True)
|
|
|
|
_wsgi.server.assert_called()
|
|
|
|
args, kwargs = _wsgi.server.call_args
|
|
|
|
server_sock, server_app, server_logger = args
|
|
|
|
self.assertEquals(sock, server_sock)
|
|
|
|
self.assert_(isinstance(server_app, swift.proxy.server.Application))
|
|
|
|
self.assert_(isinstance(server_logger, wsgi.NullLogger))
|
|
|
|
self.assert_('custom_pool' in kwargs)
|
|
|
|
|
|
|
|
def test_appconfig_dir_ignores_hidden_files(self):
|
|
|
|
config_dir = {
|
|
|
|
'server.conf.d/01.conf': """
|
|
|
|
[app:main]
|
|
|
|
use = egg:swift#proxy
|
|
|
|
port = 8080
|
|
|
|
""",
|
|
|
|
'server.conf.d/.01.conf.swp': """
|
|
|
|
[app:main]
|
|
|
|
use = egg:swift#proxy
|
|
|
|
port = 8081
|
|
|
|
""",
|
|
|
|
}
|
|
|
|
# strip indent from test config contents
|
|
|
|
config_dir = dict((f, dedent(c)) for (f, c) in config_dir.items())
|
|
|
|
with temptree(*zip(*config_dir.items())) as path:
|
|
|
|
conf_dir = os.path.join(path, 'server.conf.d')
|
|
|
|
conf = wsgi.appconfig(conf_dir)
|
|
|
|
expected = {
|
|
|
|
'__file__': os.path.join(path, 'server.conf.d'),
|
|
|
|
'here': os.path.join(path, 'server.conf.d'),
|
|
|
|
'port': '8080',
|
|
|
|
}
|
|
|
|
self.assertEquals(conf, expected)
|
|
|
|
|
2012-05-31 18:33:12 +00:00
|
|
|
def test_pre_auth_wsgi_input(self):
|
|
|
|
oldenv = {}
|
|
|
|
newenv = wsgi.make_pre_authed_env(oldenv)
|
|
|
|
self.assertTrue('wsgi.input' in newenv)
|
|
|
|
self.assertEquals(newenv['wsgi.input'].read(), '')
|
|
|
|
|
|
|
|
oldenv = {'wsgi.input': StringIO('original wsgi.input')}
|
|
|
|
newenv = wsgi.make_pre_authed_env(oldenv)
|
|
|
|
self.assertTrue('wsgi.input' in newenv)
|
|
|
|
self.assertEquals(newenv['wsgi.input'].read(), '')
|
|
|
|
|
2013-02-06 10:57:17 -08:00
|
|
|
oldenv = {'swift.source': 'UT'}
|
|
|
|
newenv = wsgi.make_pre_authed_env(oldenv)
|
|
|
|
self.assertEquals(newenv['swift.source'], 'UT')
|
|
|
|
|
|
|
|
oldenv = {'swift.source': 'UT'}
|
|
|
|
newenv = wsgi.make_pre_authed_env(oldenv, swift_source='SA')
|
|
|
|
self.assertEquals(newenv['swift.source'], 'SA')
|
|
|
|
|
2011-08-30 12:07:32 -07:00
|
|
|
def test_pre_auth_req(self):
|
|
|
|
class FakeReq(object):
|
|
|
|
@classmethod
|
2014-05-10 05:15:12 -05:00
|
|
|
def fake_blank(cls, path, environ=None, body='', headers=None):
|
|
|
|
if environ is None:
|
|
|
|
environ = {}
|
|
|
|
if headers is None:
|
|
|
|
headers = {}
|
2011-08-30 12:07:32 -07:00
|
|
|
self.assertEquals(environ['swift.authorize']('test'), None)
|
2011-10-13 09:36:17 -07:00
|
|
|
self.assertFalse('HTTP_X_TRANS_ID' in environ)
|
2011-08-30 12:07:32 -07:00
|
|
|
was_blank = Request.blank
|
|
|
|
Request.blank = FakeReq.fake_blank
|
|
|
|
wsgi.make_pre_authed_request({'HTTP_X_TRANS_ID': '1234'},
|
|
|
|
'PUT', '/', body='tester', headers={})
|
|
|
|
wsgi.make_pre_authed_request({'HTTP_X_TRANS_ID': '1234'},
|
|
|
|
'PUT', '/', headers={})
|
|
|
|
Request.blank = was_blank
|
2010-11-19 12:15:41 -06:00
|
|
|
|
2012-05-07 13:46:41 -07:00
|
|
|
def test_pre_auth_req_with_quoted_path(self):
|
|
|
|
r = wsgi.make_pre_authed_request(
|
|
|
|
{'HTTP_X_TRANS_ID': '1234'}, 'PUT', path=quote('/a space'),
|
|
|
|
body='tester', headers={})
|
|
|
|
self.assertEquals(r.path, quote('/a space'))
|
|
|
|
|
2012-05-27 21:48:13 +00:00
|
|
|
def test_pre_auth_req_drops_query(self):
|
|
|
|
r = wsgi.make_pre_authed_request(
|
|
|
|
{'QUERY_STRING': 'original'}, 'GET', 'path')
|
|
|
|
self.assertEquals(r.query_string, 'original')
|
|
|
|
r = wsgi.make_pre_authed_request(
|
|
|
|
{'QUERY_STRING': 'original'}, 'GET', 'path?replacement')
|
|
|
|
self.assertEquals(r.query_string, 'replacement')
|
|
|
|
r = wsgi.make_pre_authed_request(
|
|
|
|
{'QUERY_STRING': 'original'}, 'GET', 'path?')
|
|
|
|
self.assertEquals(r.query_string, '')
|
|
|
|
|
2012-10-08 15:45:40 -07:00
|
|
|
def test_pre_auth_req_with_body(self):
|
|
|
|
r = wsgi.make_pre_authed_request(
|
|
|
|
{'QUERY_STRING': 'original'}, 'GET', 'path', 'the body')
|
|
|
|
self.assertEquals(r.body, 'the body')
|
2012-05-27 21:48:13 +00:00
|
|
|
|
2012-10-30 15:40:41 +00:00
|
|
|
def test_pre_auth_creates_script_name(self):
|
|
|
|
e = wsgi.make_pre_authed_env({})
|
|
|
|
self.assertTrue('SCRIPT_NAME' in e)
|
|
|
|
|
|
|
|
def test_pre_auth_copies_script_name(self):
|
|
|
|
e = wsgi.make_pre_authed_env({'SCRIPT_NAME': '/script_name'})
|
|
|
|
self.assertEquals(e['SCRIPT_NAME'], '/script_name')
|
|
|
|
|
|
|
|
def test_pre_auth_copies_script_name_unless_path_overridden(self):
|
|
|
|
e = wsgi.make_pre_authed_env({'SCRIPT_NAME': '/script_name'},
|
|
|
|
path='/override')
|
|
|
|
self.assertEquals(e['SCRIPT_NAME'], '')
|
|
|
|
self.assertEquals(e['PATH_INFO'], '/override')
|
|
|
|
|
2013-02-06 10:57:17 -08:00
|
|
|
def test_pre_auth_req_swift_source(self):
|
|
|
|
r = wsgi.make_pre_authed_request(
|
|
|
|
{'QUERY_STRING': 'original'}, 'GET', 'path', 'the body',
|
|
|
|
swift_source='UT')
|
|
|
|
self.assertEquals(r.body, 'the body')
|
|
|
|
self.assertEquals(r.environ['swift.source'], 'UT')
|
2012-10-30 15:40:41 +00:00
|
|
|
|
Object replication ssync (an rsync alternative)
For this commit, ssync is just a direct replacement for how
we use rsync. Assuming we switch over to ssync completely
someday and drop rsync, we will then be able to improve the
algorithms even further (removing local objects as we
successfully transfer each one rather than waiting for whole
partitions, using an index.db with hash-trees, etc., etc.)
For easier review, this commit can be thought of in distinct
parts:
1) New global_conf_callback functionality for allowing
services to perform setup code before workers, etc. are
launched. (This is then used by ssync in the object
server to create a cross-worker semaphore to restrict
concurrent incoming replication.)
2) A bit of shifting of items up from object server and
replicator to diskfile or DEFAULT conf sections for
better sharing of the same settings. conn_timeout,
node_timeout, client_timeout, network_chunk_size,
disk_chunk_size.
3) Modifications to the object server and replicator to
optionally use ssync in place of rsync. This is done in
a generic enough way that switching to FutureSync should
be easy someday.
4) The biggest part, and (at least for now) completely
optional part, are the new ssync_sender and
ssync_receiver files. Nice and isolated for easier
testing and visibility into test coverage, etc.
All the usual logging, statsd, recon, etc. instrumentation
is still there when using ssync, just as it is when using
rsync.
Beyond the essential error and exceptional condition
logging, I have not added any additional instrumentation at
this time. Unless there is something someone finds super
pressing to have added to the logging, I think such
additions would be better as separate change reviews.
FOR NOW, IT IS NOT RECOMMENDED TO USE SSYNC ON PRODUCTION
CLUSTERS. Some of us will be in a limited fashion to look
for any subtle issues, tuning, etc. but generally ssync is
an experimental feature. In its current implementation it is
probably going to be a bit slower than rsync, but if all
goes according to plan it will end up much faster.
There are no comparisions yet between ssync and rsync other
than some raw virtual machine testing I've done to show it
should compete well enough once we can put it in use in the
real world.
If you Tweet, Google+, or whatever, be sure to indicate it's
experimental. It'd be best to keep it out of deployment
guides, howtos, etc. until we all figure out if we like it,
find it to be stable, etc.
Change-Id: If003dcc6f4109e2d2a42f4873a0779110fff16d6
2013-08-28 16:10:43 +00:00
|
|
|
def test_run_server_global_conf_callback(self):
|
|
|
|
calls = defaultdict(lambda: 0)
|
|
|
|
|
|
|
|
def _initrp(conf_file, app_section, *args, **kwargs):
|
|
|
|
return (
|
|
|
|
{'__file__': 'test', 'workers': 0},
|
|
|
|
'logger',
|
|
|
|
'log_name')
|
|
|
|
|
|
|
|
def _global_conf_callback(preloaded_app_conf, global_conf):
|
|
|
|
calls['_global_conf_callback'] += 1
|
|
|
|
self.assertEqual(
|
|
|
|
preloaded_app_conf, {'__file__': 'test', 'workers': 0})
|
|
|
|
self.assertEqual(global_conf, {'log_name': 'log_name'})
|
|
|
|
global_conf['test1'] = 'one'
|
|
|
|
|
|
|
|
def _loadapp(uri, name=None, **kwargs):
|
|
|
|
calls['_loadapp'] += 1
|
|
|
|
self.assertTrue('global_conf' in kwargs)
|
|
|
|
self.assertEqual(kwargs['global_conf'],
|
|
|
|
{'log_name': 'log_name', 'test1': 'one'})
|
|
|
|
|
|
|
|
with nested(
|
|
|
|
mock.patch.object(wsgi, '_initrp', _initrp),
|
|
|
|
mock.patch.object(wsgi, 'get_socket'),
|
|
|
|
mock.patch.object(wsgi, 'drop_privileges'),
|
|
|
|
mock.patch.object(wsgi, 'loadapp', _loadapp),
|
|
|
|
mock.patch.object(wsgi, 'capture_stdio'),
|
|
|
|
mock.patch.object(wsgi, 'run_server')):
|
|
|
|
wsgi.run_wsgi('conf_file', 'app_section',
|
|
|
|
global_conf_callback=_global_conf_callback)
|
|
|
|
self.assertEqual(calls['_global_conf_callback'], 1)
|
|
|
|
self.assertEqual(calls['_loadapp'], 1)
|
|
|
|
|
2013-11-22 18:57:44 -07:00
|
|
|
def test_run_server_success(self):
|
|
|
|
calls = defaultdict(lambda: 0)
|
|
|
|
|
|
|
|
def _initrp(conf_file, app_section, *args, **kwargs):
|
|
|
|
calls['_initrp'] += 1
|
|
|
|
return (
|
|
|
|
{'__file__': 'test', 'workers': 0},
|
|
|
|
'logger',
|
|
|
|
'log_name')
|
|
|
|
|
|
|
|
def _loadapp(uri, name=None, **kwargs):
|
|
|
|
calls['_loadapp'] += 1
|
|
|
|
|
|
|
|
with nested(
|
|
|
|
mock.patch.object(wsgi, '_initrp', _initrp),
|
|
|
|
mock.patch.object(wsgi, 'get_socket'),
|
|
|
|
mock.patch.object(wsgi, 'drop_privileges'),
|
|
|
|
mock.patch.object(wsgi, 'loadapp', _loadapp),
|
|
|
|
mock.patch.object(wsgi, 'capture_stdio'),
|
|
|
|
mock.patch.object(wsgi, 'run_server')):
|
|
|
|
rc = wsgi.run_wsgi('conf_file', 'app_section')
|
|
|
|
self.assertEqual(calls['_initrp'], 1)
|
|
|
|
self.assertEqual(calls['_loadapp'], 1)
|
|
|
|
self.assertEqual(rc, 0)
|
|
|
|
|
|
|
|
def test_run_server_failure1(self):
|
|
|
|
calls = defaultdict(lambda: 0)
|
|
|
|
|
|
|
|
def _initrp(conf_file, app_section, *args, **kwargs):
|
|
|
|
calls['_initrp'] += 1
|
|
|
|
raise wsgi.ConfigFileError('test exception')
|
|
|
|
|
|
|
|
def _loadapp(uri, name=None, **kwargs):
|
|
|
|
calls['_loadapp'] += 1
|
|
|
|
|
|
|
|
with nested(
|
|
|
|
mock.patch.object(wsgi, '_initrp', _initrp),
|
|
|
|
mock.patch.object(wsgi, 'get_socket'),
|
|
|
|
mock.patch.object(wsgi, 'drop_privileges'),
|
|
|
|
mock.patch.object(wsgi, 'loadapp', _loadapp),
|
|
|
|
mock.patch.object(wsgi, 'capture_stdio'),
|
|
|
|
mock.patch.object(wsgi, 'run_server')):
|
|
|
|
rc = wsgi.run_wsgi('conf_file', 'app_section')
|
|
|
|
self.assertEqual(calls['_initrp'], 1)
|
|
|
|
self.assertEqual(calls['_loadapp'], 0)
|
|
|
|
self.assertEqual(rc, 1)
|
|
|
|
|
2013-07-05 13:18:16 +01:00
|
|
|
def test_pre_auth_req_with_empty_env_no_path(self):
|
|
|
|
r = wsgi.make_pre_authed_request(
|
|
|
|
{}, 'GET')
|
|
|
|
self.assertEquals(r.path, quote(''))
|
|
|
|
self.assertTrue('SCRIPT_NAME' in r.environ)
|
|
|
|
self.assertTrue('PATH_INFO' in r.environ)
|
|
|
|
|
|
|
|
def test_pre_auth_req_with_env_path(self):
|
|
|
|
r = wsgi.make_pre_authed_request(
|
|
|
|
{'PATH_INFO': '/unquoted path with %20'}, 'GET')
|
|
|
|
self.assertEquals(r.path, quote('/unquoted path with %20'))
|
|
|
|
self.assertEquals(r.environ['SCRIPT_NAME'], '')
|
|
|
|
|
|
|
|
def test_pre_auth_req_with_env_script(self):
|
|
|
|
r = wsgi.make_pre_authed_request({'SCRIPT_NAME': '/hello'}, 'GET')
|
|
|
|
self.assertEquals(r.path, quote('/hello'))
|
|
|
|
|
|
|
|
def test_pre_auth_req_with_env_path_and_script(self):
|
|
|
|
env = {'PATH_INFO': '/unquoted path with %20',
|
|
|
|
'SCRIPT_NAME': '/script'}
|
|
|
|
r = wsgi.make_pre_authed_request(env, 'GET')
|
|
|
|
expected_path = quote(env['SCRIPT_NAME'] + env['PATH_INFO'])
|
|
|
|
self.assertEquals(r.path, expected_path)
|
|
|
|
env = {'PATH_INFO': '', 'SCRIPT_NAME': '/script'}
|
|
|
|
r = wsgi.make_pre_authed_request(env, 'GET')
|
|
|
|
self.assertEquals(r.path, '/script')
|
|
|
|
env = {'PATH_INFO': '/path', 'SCRIPT_NAME': ''}
|
|
|
|
r = wsgi.make_pre_authed_request(env, 'GET')
|
|
|
|
self.assertEquals(r.path, '/path')
|
|
|
|
env = {'PATH_INFO': '', 'SCRIPT_NAME': ''}
|
|
|
|
r = wsgi.make_pre_authed_request(env, 'GET')
|
|
|
|
self.assertEquals(r.path, '')
|
|
|
|
|
|
|
|
def test_pre_auth_req_path_overrides_env(self):
|
|
|
|
env = {'PATH_INFO': '/path', 'SCRIPT_NAME': '/script'}
|
|
|
|
r = wsgi.make_pre_authed_request(env, 'GET', '/override')
|
|
|
|
self.assertEquals(r.path, '/override')
|
|
|
|
self.assertEquals(r.environ['SCRIPT_NAME'], '')
|
|
|
|
self.assertEquals(r.environ['PATH_INFO'], '/override')
|
2013-03-25 16:34:43 -07:00
|
|
|
|
2013-08-31 23:13:15 -04:00
|
|
|
|
2012-12-21 19:24:16 +00:00
|
|
|
class TestWSGIContext(unittest.TestCase):
    """Tests for wsgi.WSGIContext's handling of a wrapped WSGI app.

    Uses assertEqual throughout instead of the deprecated assertEquals
    alias, consistent with the rest of this module.
    """

    def test_app_call(self):
        # _app_call must capture the status passed to start_response and
        # refresh it on each new call.
        statuses = ['200 Ok', '404 Not Found']

        def app(env, start_response):
            start_response(statuses.pop(0), [('Content-Length', '3')])
            yield 'Ok\n'

        wc = wsgi.WSGIContext(app)
        r = Request.blank('/')
        it = wc._app_call(r.environ)
        self.assertEqual(wc._response_status, '200 Ok')
        self.assertEqual(''.join(it), 'Ok\n')
        r = Request.blank('/')
        it = wc._app_call(r.environ)
        self.assertEqual(wc._response_status, '404 Not Found')
        self.assertEqual(''.join(it), 'Ok\n')

    def test_app_iter_is_closable(self):
        # Closing the iterable returned by _app_call must terminate
        # iteration even though the app has more chunks to yield.

        def app(env, start_response):
            start_response('200 OK', [('Content-Length', '25')])
            yield 'aaaaa'
            yield 'bbbbb'
            yield 'ccccc'
            yield 'ddddd'
            yield 'eeeee'

        wc = wsgi.WSGIContext(app)
        r = Request.blank('/')
        iterable = wc._app_call(r.environ)
        self.assertEqual(wc._response_status, '200 OK')

        iterator = iter(iterable)
        self.assertEqual('aaaaa', iterator.next())
        self.assertEqual('bbbbb', iterator.next())
        iterable.close()
        self.assertRaises(StopIteration, iterator.next)
class TestPipelineWrapper(unittest.TestCase):
    """Tests for wsgi.PipelineWrapper.

    The config-loading boilerplate previously duplicated between setUp and
    test_startswith_no_filters is factored into _pipeline_from_config.
    """

    def setUp(self):
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = healthcheck catch_errors tempurl proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:catch_errors]
        use = egg:swift#catch_errors

        [filter:healthcheck]
        use = egg:swift#healthcheck

        [filter:tempurl]
        paste.filter_factory = swift.common.middleware.tempurl:filter_factory
        """
        self.pipe = self._pipeline_from_config(config)

    @staticmethod
    def _pipeline_from_config(config):
        # Write the (dedented) config into a temp swift_dir, load the paste
        # context for it, and return a PipelineWrapper around that context.
        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            ctx = wsgi.loadcontext(loadwsgi.APP, conf_file, global_conf={})
            return wsgi.PipelineWrapper(ctx)

    def _entry_point_names(self):
        # Helper method to return a list of the entry point names for the
        # filters in the pipeline.
        return [c.entry_point_name for c in self.pipe.context.filter_contexts]

    def test_startswith(self):
        self.assertTrue(self.pipe.startswith("healthcheck"))
        self.assertFalse(self.pipe.startswith("tempurl"))

    def test_startswith_no_filters(self):
        # A pipeline with only the app should match on the app itself.
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2
        """
        pipe = self._pipeline_from_config(config)
        self.assertTrue(pipe.startswith('proxy'))

    def test_insert_filter(self):
        # The app at the end of the pipeline has no entry point name (None).
        original_modules = ['healthcheck', 'catch_errors', None]
        self.assertEqual(self._entry_point_names(), original_modules)

        self.pipe.insert_filter(self.pipe.create_filter('catch_errors'))
        expected_modules = ['catch_errors', 'healthcheck',
                            'catch_errors', None]
        self.assertEqual(self._entry_point_names(), expected_modules)

    def test_str(self):
        self.assertEqual(
            str(self.pipe),
            "healthcheck catch_errors tempurl proxy-server")

    def test_str_unknown_filter(self):
        # Remove the cached name so str() has to fall back to <unknown>.
        del self.pipe.context.filter_contexts[0].__dict__['name']
        self.pipe.context.filter_contexts[0].object = 'mysterious'
        self.assertEqual(
            str(self.pipe),
            "<unknown> catch_errors tempurl proxy-server")
@mock.patch('swift.common.utils.HASH_PATH_SUFFIX', new='endcap')
class TestPipelineModification(unittest.TestCase):
    """Tests for automatic proxy pipeline modification in wsgi.loadapp.

    Fixes the deprecated ``self.assert_`` alias (now ``assertTrue``) in
    test_pipeline_property; otherwise behavior is unchanged.
    """

    def pipeline_modules(self, app):
        # This is rather brittle; it'll break if a middleware stores its app
        # anywhere other than an attribute named "app", but it works for now.
        pipe = []
        for _ in xrange(1000):
            pipe.append(app.__class__.__module__)
            if not hasattr(app, 'app'):
                break
            app = app.app
        return pipe

    def test_load_app(self):
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = healthcheck proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:catch_errors]
        use = egg:swift#catch_errors

        [filter:healthcheck]
        use = egg:swift#healthcheck
        """

        def modify_func(app, pipe):
            new = pipe.create_filter('catch_errors')
            pipe.insert_filter(new)

        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            _fake_rings(t)
            with mock.patch(
                    'swift.proxy.server.Application.modify_wsgi_pipeline',
                    modify_func):
                app = wsgi.loadapp(conf_file, global_conf={})
            # modify_func should have prepended catch_errors.
            exp = swift.common.middleware.catch_errors.CatchErrorMiddleware
            self.assertTrue(isinstance(app, exp), app)
            exp = swift.common.middleware.healthcheck.HealthCheckMiddleware
            self.assertTrue(isinstance(app.app, exp), app.app)
            exp = swift.proxy.server.Application
            self.assertTrue(isinstance(app.app.app, exp), app.app.app)

            # make sure you can turn off the pipeline modification if you want
            def blow_up(*_, **__):
                raise self.fail("needs more struts")

            with mock.patch(
                    'swift.proxy.server.Application.modify_wsgi_pipeline',
                    blow_up):
                app = wsgi.loadapp(conf_file, global_conf={},
                                   allow_modify_pipeline=False)

            # the pipeline was untouched
            exp = swift.common.middleware.healthcheck.HealthCheckMiddleware
            self.assertTrue(isinstance(app, exp), app)
            exp = swift.proxy.server.Application
            self.assertTrue(isinstance(app.app, exp), app.app)

    def test_proxy_unmodified_wsgi_pipeline(self):
        # Make sure things are sane even when we modify nothing
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = catch_errors gatekeeper proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:catch_errors]
        use = egg:swift#catch_errors

        [filter:gatekeeper]
        use = egg:swift#gatekeeper
        """

        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            _fake_rings(t)
            app = wsgi.loadapp(conf_file, global_conf={})

        self.assertEqual(self.pipeline_modules(app),
                         ['swift.common.middleware.catch_errors',
                          'swift.common.middleware.gatekeeper',
                          'swift.common.middleware.dlo',
                          'swift.proxy.server'])

    def test_proxy_modify_wsgi_pipeline(self):
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = healthcheck proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:healthcheck]
        use = egg:swift#healthcheck
        """

        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            _fake_rings(t)
            app = wsgi.loadapp(conf_file, global_conf={})

        # The required filters should have been inserted automatically.
        self.assertEqual(self.pipeline_modules(app),
                         ['swift.common.middleware.catch_errors',
                          'swift.common.middleware.gatekeeper',
                          'swift.common.middleware.dlo',
                          'swift.common.middleware.healthcheck',
                          'swift.proxy.server'])

    def test_proxy_modify_wsgi_pipeline_ordering(self):
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = healthcheck proxy-logging bulk tempurl proxy-server

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:healthcheck]
        use = egg:swift#healthcheck

        [filter:proxy-logging]
        use = egg:swift#proxy_logging

        [filter:bulk]
        use = egg:swift#bulk

        [filter:tempurl]
        use = egg:swift#tempurl
        """

        new_req_filters = [
            # not in pipeline, no afters
            {'name': 'catch_errors'},
            # already in pipeline
            {'name': 'proxy_logging',
             'after_fn': lambda _: ['catch_errors']},
            # not in pipeline, comes after more than one thing
            {'name': 'container_quotas',
             'after_fn': lambda _: ['catch_errors', 'bulk']}]

        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            _fake_rings(t)
            with mock.patch.object(swift.proxy.server, 'required_filters',
                                   new_req_filters):
                app = wsgi.loadapp(conf_file, global_conf={})

        self.assertEqual(self.pipeline_modules(app), [
            'swift.common.middleware.catch_errors',
            'swift.common.middleware.healthcheck',
            'swift.common.middleware.proxy_logging',
            'swift.common.middleware.bulk',
            'swift.common.middleware.container_quotas',
            'swift.common.middleware.tempurl',
            'swift.proxy.server'])

    def _proxy_modify_wsgi_pipeline(self, pipe):
        # Load a proxy app whose [pipeline:main] is the given pipe string
        # and return the (possibly auto-modified) app.
        config = """
        [DEFAULT]
        swift_dir = TEMPDIR

        [pipeline:main]
        pipeline = %s

        [app:proxy-server]
        use = egg:swift#proxy
        conn_timeout = 0.2

        [filter:healthcheck]
        use = egg:swift#healthcheck

        [filter:catch_errors]
        use = egg:swift#catch_errors

        [filter:gatekeeper]
        use = egg:swift#gatekeeper
        """
        config = config % (pipe,)
        contents = dedent(config)
        with temptree(['proxy-server.conf']) as t:
            conf_file = os.path.join(t, 'proxy-server.conf')
            with open(conf_file, 'w') as f:
                f.write(contents.replace('TEMPDIR', t))
            _fake_rings(t)
            app = wsgi.loadapp(conf_file, global_conf={})
        return app

    def test_gatekeeper_insertion_catch_errors_configured_at_start(self):
        # catch_errors is configured at start, gatekeeper is not configured,
        # so gatekeeper should be inserted just after catch_errors
        pipe = 'catch_errors healthcheck proxy-server'
        app = self._proxy_modify_wsgi_pipeline(pipe)
        self.assertEqual(self.pipeline_modules(app), [
            'swift.common.middleware.catch_errors',
            'swift.common.middleware.gatekeeper',
            'swift.common.middleware.dlo',
            'swift.common.middleware.healthcheck',
            'swift.proxy.server'])

    def test_gatekeeper_insertion_catch_errors_configured_not_at_start(self):
        # catch_errors is configured, gatekeeper is not configured, so
        # gatekeeper should be inserted at start of pipeline
        pipe = 'healthcheck catch_errors proxy-server'
        app = self._proxy_modify_wsgi_pipeline(pipe)
        self.assertEqual(self.pipeline_modules(app), [
            'swift.common.middleware.gatekeeper',
            'swift.common.middleware.healthcheck',
            'swift.common.middleware.catch_errors',
            'swift.common.middleware.dlo',
            'swift.proxy.server'])

    def test_catch_errors_gatekeeper_configured_not_at_start(self):
        # catch_errors is configured, gatekeeper is configured, so
        # no change should be made to pipeline
        pipe = 'healthcheck catch_errors gatekeeper proxy-server'
        app = self._proxy_modify_wsgi_pipeline(pipe)
        self.assertEqual(self.pipeline_modules(app), [
            'swift.common.middleware.healthcheck',
            'swift.common.middleware.catch_errors',
            'swift.common.middleware.gatekeeper',
            'swift.common.middleware.dlo',
            'swift.proxy.server'])

    @patch_policies
    @with_tempdir
    def test_loadapp_proxy(self, tempdir):
        # loadapp should hand each configured ring path to the proxy app.
        conf_path = os.path.join(tempdir, 'proxy-server.conf')
        conf_body = """
        [DEFAULT]
        swift_dir = %s

        [pipeline:main]
        pipeline = catch_errors cache proxy-server

        [app:proxy-server]
        use = egg:swift#proxy

        [filter:cache]
        use = egg:swift#memcache

        [filter:catch_errors]
        use = egg:swift#catch_errors
        """ % tempdir
        with open(conf_path, 'w') as f:
            f.write(dedent(conf_body))
        account_ring_path = os.path.join(tempdir, 'account.ring.gz')
        write_fake_ring(account_ring_path)
        container_ring_path = os.path.join(tempdir, 'container.ring.gz')
        write_fake_ring(container_ring_path)
        object_ring_path = os.path.join(tempdir, 'object.ring.gz')
        write_fake_ring(object_ring_path)
        object_1_ring_path = os.path.join(tempdir, 'object-1.ring.gz')
        write_fake_ring(object_1_ring_path)
        app = wsgi.loadapp(conf_path)
        # Walk past the auto-inserted middlewares down to the proxy app.
        proxy_app = app.app.app.app.app
        self.assertEqual(proxy_app.account_ring.serialized_path,
                         account_ring_path)
        self.assertEqual(proxy_app.container_ring.serialized_path,
                         container_ring_path)
        self.assertEqual(proxy_app.get_object_ring(0).serialized_path,
                         object_ring_path)
        self.assertEqual(proxy_app.get_object_ring(1).serialized_path,
                         object_1_ring_path)

    @with_tempdir
    def test_loadapp_storage(self, tempdir):
        # loadapp should produce the right controller for each server type.
        expectations = {
            'object': obj_server.ObjectController,
            'container': container_server.ContainerController,
            'account': account_server.AccountController,
        }

        for server_type, controller in expectations.items():
            conf_path = os.path.join(
                tempdir, '%s-server.conf' % server_type)
            conf_body = """
            [DEFAULT]
            swift_dir = %s

            [app:main]
            use = egg:swift#%s
            """ % (tempdir, server_type)
            with open(conf_path, 'w') as f:
                f.write(dedent(conf_body))
            app = wsgi.loadapp(conf_path)
            self.assertTrue(isinstance(app, controller))

    def test_pipeline_property(self):
        depth = 3

        class FakeApp(object):
            pass

        class AppFilter(object):

            def __init__(self, app):
                self.app = app

        # make a pipeline
        app = FakeApp()
        filtered_app = app
        for i in range(depth):
            filtered_app = AppFilter(filtered_app)

        # AttributeError if no apps in the pipeline have attribute
        wsgi._add_pipeline_properties(filtered_app, 'foo')
        self.assertRaises(AttributeError, getattr, filtered_app, 'foo')

        # set the attribute
        self.assertTrue(isinstance(app, FakeApp))
        app.foo = 'bar'
        self.assertEqual(filtered_app.foo, 'bar')

        # attribute is cached
        app.foo = 'baz'
        self.assertEqual(filtered_app.foo, 'bar')
# Allow running this test module directly (python test_wsgi.py).
if __name__ == '__main__':
    unittest.main()