commit b544570da5
merge to trunk conflicts
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
 # Copyright (c) 2010-2011 OpenStack, LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -128,3 +128,9 @@ Swift3
     :members:
     :show-inheritance:
 
+StaticWeb
+=========
+
+.. automodule:: swift.common.middleware.staticweb
+    :members:
+    :show-inheritance:
@@ -30,6 +30,10 @@ use = egg:swift#object
 # slow = 1
 # on PUTs, sync data every n MB
 # mb_per_sync = 512
+# Comma separated list of headers that can be set in metadata on an object.
+# This list is in addition to X-Object-Meta-* headers and cannot include
+# Content-Type, etag, Content-Length, or deleted
+# allowed_headers = Content-Encoding, Content-Disposition, X-Object-Manifest
 
 [object-replicator]
 # You can override the default log routing for this app here (don't use set!):
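For context, a small sketch (not part of the commit) of how the new allowed_headers value above gets turned into the lowercase set the object server checks on PUT/POST; DISALLOWED_HEADERS mirrors the constant added to swift/obj/server.py later in this diff.

# Illustrative only: parse the sample allowed_headers value the same way the
# object server does, skipping header names that may never be set in metadata.
DISALLOWED_HEADERS = set('content-length content-type deleted etag'.split())

raw = 'Content-Encoding, Content-Disposition, X-Object-Manifest'
allowed_headers = set(i.strip().lower() for i in raw.split(',')
                      if i.strip() and
                      i.strip().lower() not in DISALLOWED_HEADERS)
print(sorted(allowed_headers))
# ['content-disposition', 'content-encoding', 'x-object-manifest']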
@@ -150,3 +150,17 @@ use = egg:swift#cname_lookup
 # set log_headers = False
 # storage_domain = example.com
 # lookup_depth = 1
+
+# Note: Put staticweb just after your auth filter(s) in the pipeline
+[filter:staticweb]
+use = egg:swift#staticweb
+# Seconds to cache container x-container-meta-web-* header values.
+# cache_timeout = 300
+# You can override the default log routing for this filter here:
+# set log_name = staticweb
+# set log_facility = LOG_LOCAL0
+# set log_level = INFO
+# set access_log_name = staticweb
+# set access_log_facility = LOG_LOCAL0
+# set access_log_level = INFO
+# set log_headers = False
setup.py
@@ -115,6 +115,7 @@ setup(
         'catch_errors=swift.common.middleware.catch_errors:filter_factory',
         'domain_remap=swift.common.middleware.domain_remap:filter_factory',
         'swift3=swift.common.middleware.swift3:filter_factory',
+        'staticweb=swift.common.middleware.staticweb:filter_factory',
         ],
     },
)
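A hedged note on what that entry point buys you: paste.deploy resolves ``use = egg:swift#staticweb`` through the ``paste.filter_factory`` entry-point group, roughly equivalent to the lookup below (assumes the swift package is installed).

# Rough equivalent of what `use = egg:swift#staticweb` resolves to at load time.
import pkg_resources

factory = pkg_resources.load_entry_point('swift', 'paste.filter_factory',
                                          'staticweb')
# factory is swift.common.middleware.staticweb:filter_factory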
@@ -58,6 +58,10 @@ def clean_acl(name, value):
     .r:
     .r:-
 
+    By default, allowing read access via .r will not allow listing objects in
+    the container -- just retrieving objects from the container. To turn on
+    listings, use the .rlistings directive.
+
     Also, .r designations aren't allowed in headers whose names include the
     word 'write'.
 
@@ -71,6 +75,7 @@ def clean_acl(name, value):
     ``bob,,,sue``            ``bob,sue``
     ``.referrer : *``        ``.r:*``
     ``.ref:*.example.com``   ``.r:.example.com``
+    ``.r:*, .rlistings``     ``.r:*,.rlistings``
     ======================   ======================
 
     :param name: The name of the header being cleaned, such as X-Container-Read
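A short illustration (not from the commit) of the normalization the table above describes, assuming clean_acl is importable from swift.common.middleware.acl:

from swift.common.middleware.acl import clean_acl

# Whitespace and empty entries are stripped; .r:*,.rlistings round-trips cleanly.
print(clean_acl('X-Container-Read', '.r:*, .rlistings'))  # .r:*,.rlistings
print(clean_acl('X-Container-Read', 'bob,,,sue'))         # bob,sue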
swift/common/middleware/staticweb.py (new file, 547 lines)
@@ -0,0 +1,547 @@
|
||||
# Copyright (c) 2010-2011 OpenStack, LLC.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""
|
||||
This StaticWeb WSGI middleware will serve container data as a static web site
|
||||
with index file and error file resolution and optional file listings. This mode
|
||||
is normally only active for anonymous requests. If you want to use it with
|
||||
authenticated requests, set the ``X-Web-Mode: true`` header on the request.
|
||||
|
||||
The ``staticweb`` filter should be added to the pipeline in your
|
||||
``/etc/swift/proxy-server.conf`` file just after any auth middleware. Also, the
|
||||
configuration section for the ``staticweb`` middleware itself needs to be
|
||||
added. For example::
|
||||
|
||||
[DEFAULT]
|
||||
...
|
||||
|
||||
[pipeline:main]
|
||||
pipeline = healthcheck cache swauth staticweb proxy-server
|
||||
|
||||
...
|
||||
|
||||
[filter:staticweb]
|
||||
use = egg:swift#staticweb
|
||||
# Seconds to cache container x-container-meta-web-* header values.
|
||||
# cache_timeout = 300
|
||||
# You can override the default log routing for this filter here:
|
||||
# set log_name = staticweb
|
||||
# set log_facility = LOG_LOCAL0
|
||||
# set log_level = INFO
|
||||
# set access_log_name = staticweb
|
||||
# set access_log_facility = LOG_LOCAL0
|
||||
# set access_log_level = INFO
|
||||
# set log_headers = False
|
||||
|
||||
Any publicly readable containers (for example, ``X-Container-Read: .r:*``, see
|
||||
`acls`_ for more information on this) will be checked for
|
||||
X-Container-Meta-Web-Index and X-Container-Meta-Web-Error header values::
|
||||
|
||||
X-Container-Meta-Web-Index <index.name>
|
||||
X-Container-Meta-Web-Error <error.name.suffix>
|
||||
|
||||
If X-Container-Meta-Web-Index is set, any <index.name> files will be served
|
||||
without having to specify the <index.name> part. For instance, setting
|
||||
``X-Container-Meta-Web-Index: index.html`` will be able to serve the object
|
||||
.../pseudo/path/index.html with just .../pseudo/path or .../pseudo/path/
|
||||
|
||||
If X-Container-Meta-Web-Error is set, any errors (currently just 401
|
||||
Unauthorized and 404 Not Found) will instead serve the
|
||||
.../<status.code><error.name.suffix> object. For instance, setting
|
||||
``X-Container-Meta-Web-Error: error.html`` will serve .../404error.html for
|
||||
requests for paths not found.
|
||||
|
||||
For pseudo paths that have no <index.name>, this middleware can serve HTML file
|
||||
listings if you set the ``X-Container-Meta-Web-Listings: true`` metadata item
|
||||
on the container.
|
||||
|
||||
If listings are enabled, the listings can have a custom style sheet by setting
|
||||
the X-Container-Meta-Web-Listings-CSS header. For instance, setting
|
||||
``X-Container-Meta-Web-Listings-CSS: listing.css`` will make listings link to
|
||||
the .../listing.css style sheet. If you "view source" in your browser on a
|
||||
listing page, you will see the well defined document structure that can be
|
||||
styled.
|
||||
|
||||
Example usage of this middleware via ``st``:
|
||||
|
||||
Make the container publicly readable::
|
||||
|
||||
st post -r '.r:*' container
|
||||
|
||||
You should be able to get objects directly, but no index.html resolution or
|
||||
listings.
|
||||
|
||||
Set an index file directive::
|
||||
|
||||
st post -m 'web-index:index.html' container
|
||||
|
||||
You should be able to hit paths that have an index.html without needing to
|
||||
type the index.html part.
|
||||
|
||||
Turn on listings::
|
||||
|
||||
st post -m 'web-listings: true' container
|
||||
|
||||
Now you should see object listings for paths and pseudo paths that have no
|
||||
index.html.
|
||||
|
||||
Enable a custom listings style sheet::
|
||||
|
||||
st post -m 'web-listings-css:listings.css' container
|
||||
|
||||
Set an error file::
|
||||
|
||||
st post -m 'web-error:error.html' container
|
||||
|
||||
Now 401's should load 401error.html, 404's should load 404error.html, etc.
|
||||
"""
|
||||
|
||||
|
||||
try:
|
||||
import simplejson as json
|
||||
except ImportError:
|
||||
import json
|
||||
|
||||
import cgi
|
||||
import time
|
||||
from urllib import unquote, quote
|
||||
|
||||
from webob import Response, Request
|
||||
from webob.exc import HTTPMovedPermanently, HTTPNotFound
|
||||
|
||||
from swift.common.utils import cache_from_env, get_logger, human_readable, \
|
||||
split_path, TRUE_VALUES
|
||||
|
||||
|
||||
class StaticWeb(object):
|
||||
"""
|
||||
The Static Web WSGI middleware filter; serves container data as a static
|
||||
web site. See `staticweb`_ for an overview.
|
||||
|
||||
:param app: The next WSGI application/filter in the paste.deploy pipeline.
|
||||
:param conf: The filter configuration dict.
|
||||
"""
|
||||
|
||||
def __init__(self, app, conf):
|
||||
#: The next WSGI application/filter in the paste.deploy pipeline.
|
||||
self.app = app
|
||||
#: The filter configuration dict.
|
||||
self.conf = conf
|
||||
#: The seconds to cache the x-container-meta-web-* headers.
|
||||
self.cache_timeout = int(conf.get('cache_timeout', 300))
|
||||
#: Logger for this filter.
|
||||
self.logger = get_logger(conf, log_route='staticweb')
|
||||
access_log_conf = {}
|
||||
for key in ('log_facility', 'log_name', 'log_level'):
|
||||
value = conf.get('access_' + key, conf.get(key, None))
|
||||
if value:
|
||||
access_log_conf[key] = value
|
||||
#: Web access logger for this filter.
|
||||
self.access_logger = get_logger(access_log_conf,
|
||||
log_route='staticweb-access')
|
||||
#: Indicates whether full HTTP headers should be logged or not.
|
||||
self.log_headers = conf.get('log_headers') == 'True'
|
||||
# Results from the last call to self._start_response.
|
||||
self._response_status = None
|
||||
self._response_headers = None
|
||||
self._response_exc_info = None
|
||||
# Results from the last call to self._get_container_info.
|
||||
self._index = self._error = self._listings = self._listings_css = None
|
||||
|
||||
def _start_response(self, status, headers, exc_info=None):
|
||||
"""
|
||||
Saves response info without sending it to the remote client.
|
||||
Uses the same semantics as the usual WSGI start_response.
|
||||
"""
|
||||
self._response_status = status
|
||||
self._response_headers = headers
|
||||
self._response_exc_info = exc_info
|
||||
|
||||
def _error_response(self, response, env, start_response):
|
||||
"""
|
||||
Sends the error response to the remote client, possibly resolving a
|
||||
custom error response body based on x-container-meta-web-error.
|
||||
|
||||
:param response: The error response we should default to sending.
|
||||
:param env: The original request WSGI environment.
|
||||
:param start_response: The WSGI start_response hook.
|
||||
"""
|
||||
self._log_response(env, self._get_status_int())
|
||||
if not self._error:
|
||||
start_response(self._response_status, self._response_headers,
|
||||
self._response_exc_info)
|
||||
return response
|
||||
save_response_status = self._response_status
|
||||
save_response_headers = self._response_headers
|
||||
save_response_exc_info = self._response_exc_info
|
||||
tmp_env = self._get_escalated_env(env)
|
||||
tmp_env['REQUEST_METHOD'] = 'GET'
|
||||
tmp_env['PATH_INFO'] = '/%s/%s/%s/%s%s' % (self.version, self.account,
|
||||
self.container, self._get_status_int(), self._error)
|
||||
resp = self.app(tmp_env, self._start_response)
|
||||
if self._get_status_int() // 100 == 2:
|
||||
start_response(save_response_status, self._response_headers,
|
||||
self._response_exc_info)
|
||||
return resp
|
||||
start_response(save_response_status, save_response_headers,
|
||||
save_response_exc_info)
|
||||
return response
|
||||
|
||||
def _get_status_int(self):
|
||||
"""
|
||||
Returns the HTTP status int from the last called self._start_response
|
||||
result.
|
||||
"""
|
||||
return int(self._response_status.split(' ', 1)[0])
|
||||
|
||||
def _get_escalated_env(self, env):
|
||||
"""
|
||||
Returns a new fresh WSGI environment with escalated privileges to do
|
||||
backend checks, listings, etc. that the remote user wouldn't be able to
|
||||
accomplish directly.
|
||||
"""
|
||||
new_env = {'REQUEST_METHOD': 'GET',
|
||||
'HTTP_USER_AGENT': '%s StaticWeb' % env.get('HTTP_USER_AGENT')}
|
||||
for name in ('eventlet.posthooks', 'HTTP_X_CF_TRANS_ID', 'REMOTE_USER',
|
||||
'SCRIPT_NAME', 'SERVER_NAME', 'SERVER_PORT',
|
||||
'SERVER_PROTOCOL', 'swift.cache'):
|
||||
if name in env:
|
||||
new_env[name] = env[name]
|
||||
return new_env
|
||||
|
||||
def _get_container_info(self, env, start_response):
|
||||
"""
|
||||
Retrieves x-container-meta-web-index, x-container-meta-web-error,
|
||||
x-container-meta-web-listings, and x-container-meta-web-listings-css
|
||||
from memcache or from the cluster and stores the result in memcache and
|
||||
in self._index, self._error, self._listings, and self._listings_css.
|
||||
|
||||
:param env: The WSGI environment dict.
|
||||
:param start_response: The WSGI start_response hook.
|
||||
"""
|
||||
self._index = self._error = self._listings = self._listings_css = None
|
||||
memcache_client = cache_from_env(env)
|
||||
if memcache_client:
|
||||
memcache_key = '/staticweb/%s/%s/%s' % (self.version, self.account,
|
||||
self.container)
|
||||
cached_data = memcache_client.get(memcache_key)
|
||||
if cached_data:
|
||||
(self._index, self._error, self._listings,
|
||||
self._listings_css) = cached_data
|
||||
return
|
||||
tmp_env = self._get_escalated_env(env)
|
||||
tmp_env['REQUEST_METHOD'] = 'HEAD'
|
||||
req = Request.blank('/%s/%s/%s' % (self.version, self.account,
|
||||
self.container), environ=tmp_env)
|
||||
resp = req.get_response(self.app)
|
||||
if resp.status_int // 100 == 2:
|
||||
self._index = \
|
||||
resp.headers.get('x-container-meta-web-index', '').strip()
|
||||
self._error = \
|
||||
resp.headers.get('x-container-meta-web-error', '').strip()
|
||||
self._listings = \
|
||||
resp.headers.get('x-container-meta-web-listings', '').strip()
|
||||
self._listings_css = \
|
||||
resp.headers.get('x-container-meta-web-listings-css',
|
||||
'').strip()
|
||||
if memcache_client:
|
||||
memcache_client.set(memcache_key,
|
||||
(self._index, self._error, self._listings,
|
||||
self._listings_css),
|
||||
timeout=self.cache_timeout)
|
||||
|
||||
def _listing(self, env, start_response, prefix=None):
|
||||
"""
|
||||
Sends an HTML object listing to the remote client.
|
||||
|
||||
:param env: The original WSGI environment dict.
|
||||
:param start_response: The original WSGI start_response hook.
|
||||
:param prefix: Any prefix desired for the container listing.
|
||||
"""
|
||||
if self._listings not in TRUE_VALUES:
|
||||
resp = HTTPNotFound()(env, self._start_response)
|
||||
return self._error_response(resp, env, start_response)
|
||||
tmp_env = self._get_escalated_env(env)
|
||||
tmp_env['REQUEST_METHOD'] = 'GET'
|
||||
tmp_env['PATH_INFO'] = \
|
||||
'/%s/%s/%s' % (self.version, self.account, self.container)
|
||||
tmp_env['QUERY_STRING'] = 'delimiter=/&format=json'
|
||||
if prefix:
|
||||
tmp_env['QUERY_STRING'] += '&prefix=%s' % quote(prefix)
|
||||
else:
|
||||
prefix = ''
|
||||
resp = self.app(tmp_env, self._start_response)
|
||||
if self._get_status_int() // 100 != 2:
|
||||
return self._error_response(resp, env, start_response)
|
||||
listing = json.loads(''.join(resp))
|
||||
if not listing:
|
||||
resp = HTTPNotFound()(env, self._start_response)
|
||||
return self._error_response(resp, env, start_response)
|
||||
headers = {'Content-Type': 'text/html'}
|
||||
body = '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 ' \
|
||||
'Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">\n' \
|
||||
'<html>\n' \
|
||||
' <head>\n' \
|
||||
' <title>Listing of %s</title>\n' % \
|
||||
cgi.escape(env['PATH_INFO'])
|
||||
if self._listings_css:
|
||||
body += ' <link rel="stylesheet" type="text/css" ' \
|
||||
'href="%s%s" />\n' % \
|
||||
('../' * prefix.count('/'), quote(self._listings_css))
|
||||
else:
|
||||
body += ' <style type="text/css">\n' \
|
||||
' h1 {font-size: 1em; font-weight: bold;}\n' \
|
||||
' th {text-align: left; padding: 0px 1em 0px 1em;}\n' \
|
||||
' td {padding: 0px 1em 0px 1em;}\n' \
|
||||
' a {text-decoration: none;}\n' \
|
||||
' </style>\n'
|
||||
body += ' </head>\n' \
|
||||
' <body>\n' \
|
||||
' <h1 id="title">Listing of %s</h1>\n' \
|
||||
' <table id="listing">\n' \
|
||||
' <tr id="heading">\n' \
|
||||
' <th class="colname">Name</th>\n' \
|
||||
' <th class="colsize">Size</th>\n' \
|
||||
' <th class="coldate">Date</th>\n' \
|
||||
' </tr>\n' % \
|
||||
cgi.escape(env['PATH_INFO'])
|
||||
if prefix:
|
||||
body += ' <tr id="parent" class="item">\n' \
|
||||
' <td class="colname"><a href="../">../</a></td>\n' \
|
||||
' <td class="colsize"> </td>\n' \
|
||||
' <td class="coldate"> </td>\n' \
|
||||
' </tr>\n'
|
||||
for item in listing:
|
||||
if 'subdir' in item:
|
||||
subdir = item['subdir']
|
||||
if prefix:
|
||||
subdir = subdir[len(prefix):]
|
||||
body += ' <tr class="item subdir">\n' \
|
||||
' <td class="colname"><a href="%s">%s</a></td>\n' \
|
||||
' <td class="colsize"> </td>\n' \
|
||||
' <td class="coldate"> </td>\n' \
|
||||
' </tr>\n' % \
|
||||
(quote(subdir), cgi.escape(subdir))
|
||||
for item in listing:
|
||||
if 'name' in item:
|
||||
name = item['name']
|
||||
if prefix:
|
||||
name = name[len(prefix):]
|
||||
body += ' <tr class="item %s">\n' \
|
||||
' <td class="colname"><a href="%s">%s</a></td>\n' \
|
||||
' <td class="colsize">%s</td>\n' \
|
||||
' <td class="coldate">%s</td>\n' \
|
||||
' </tr>\n' % \
|
||||
(' '.join('type-' + cgi.escape(t.lower(), quote=True)
|
||||
for t in item['content_type'].split('/')),
|
||||
quote(name), cgi.escape(name),
|
||||
human_readable(item['bytes']),
|
||||
cgi.escape(item['last_modified']).split('.')[0].
|
||||
replace('T', ' '))
|
||||
body += ' </table>\n' \
|
||||
' </body>\n' \
|
||||
'</html>\n'
|
||||
resp = Response(headers=headers, body=body)
|
||||
self._log_response(env, resp.status_int)
|
||||
return resp(env, start_response)
|
||||
|
||||
def _handle_container(self, env, start_response):
|
||||
"""
|
||||
Handles a possible static web request for a container.
|
||||
|
||||
:param env: The original WSGI environment dict.
|
||||
:param start_response: The original WSGI start_response hook.
|
||||
"""
|
||||
self._get_container_info(env, start_response)
|
||||
if not self._listings and not self._index:
|
||||
return self.app(env, start_response)
|
||||
if env['PATH_INFO'][-1] != '/':
|
||||
resp = HTTPMovedPermanently(
|
||||
location=(env['PATH_INFO'] + '/'))
|
||||
self._log_response(env, resp.status_int)
|
||||
return resp(env, start_response)
|
||||
if not self._index:
|
||||
return self._listing(env, start_response)
|
||||
tmp_env = dict(env)
|
||||
tmp_env['HTTP_USER_AGENT'] = \
|
||||
'%s StaticWeb' % env.get('HTTP_USER_AGENT')
|
||||
tmp_env['PATH_INFO'] += self._index
|
||||
resp = self.app(tmp_env, self._start_response)
|
||||
status_int = self._get_status_int()
|
||||
if status_int == 404:
|
||||
return self._listing(env, start_response)
|
||||
elif self._get_status_int() // 100 not in (2, 3):
|
||||
return self._error_response(resp, env, start_response)
|
||||
start_response(self._response_status, self._response_headers,
|
||||
self._response_exc_info)
|
||||
return resp
|
||||
|
||||
def _handle_object(self, env, start_response):
|
||||
"""
|
||||
Handles a possible static web request for an object. This object could
|
||||
resolve into an index or listing request.
|
||||
|
||||
:param env: The original WSGI environment dict.
|
||||
:param start_response: The original WSGI start_response hook.
|
||||
"""
|
||||
tmp_env = dict(env)
|
||||
tmp_env['HTTP_USER_AGENT'] = \
|
||||
'%s StaticWeb' % env.get('HTTP_USER_AGENT')
|
||||
resp = self.app(tmp_env, self._start_response)
|
||||
status_int = self._get_status_int()
|
||||
if status_int // 100 in (2, 3):
|
||||
return self.app(env, start_response)
|
||||
if status_int != 404:
|
||||
return self._error_response(resp, env, start_response)
|
||||
self._get_container_info(env, start_response)
|
||||
if not self._listings and not self._index:
|
||||
return self.app(env, start_response)
|
||||
status_int = 404
|
||||
if self._index:
|
||||
tmp_env = dict(env)
|
||||
tmp_env['HTTP_USER_AGENT'] = \
|
||||
'%s StaticWeb' % env.get('HTTP_USER_AGENT')
|
||||
if tmp_env['PATH_INFO'][-1] != '/':
|
||||
tmp_env['PATH_INFO'] += '/'
|
||||
tmp_env['PATH_INFO'] += self._index
|
||||
resp = self.app(tmp_env, self._start_response)
|
||||
status_int = self._get_status_int()
|
||||
if status_int // 100 in (2, 3):
|
||||
if env['PATH_INFO'][-1] != '/':
|
||||
resp = HTTPMovedPermanently(
|
||||
location=env['PATH_INFO'] + '/')
|
||||
self._log_response(env, resp.status_int)
|
||||
return resp(env, start_response)
|
||||
start_response(self._response_status, self._response_headers,
|
||||
self._response_exc_info)
|
||||
return resp
|
||||
if status_int == 404:
|
||||
if env['PATH_INFO'][-1] != '/':
|
||||
tmp_env = self._get_escalated_env(env)
|
||||
tmp_env['REQUEST_METHOD'] = 'GET'
|
||||
tmp_env['PATH_INFO'] = '/%s/%s/%s' % (self.version,
|
||||
self.account, self.container)
|
||||
tmp_env['QUERY_STRING'] = 'limit=1&format=json&delimiter' \
|
||||
'=/&limit=1&prefix=%s' % quote(self.obj + '/')
|
||||
resp = self.app(tmp_env, self._start_response)
|
||||
if self._get_status_int() // 100 != 2 or \
|
||||
not json.loads(''.join(resp)):
|
||||
resp = HTTPNotFound()(env, self._start_response)
|
||||
return self._error_response(resp, env, start_response)
|
||||
resp = HTTPMovedPermanently(location=env['PATH_INFO'] +
|
||||
'/')
|
||||
self._log_response(env, resp.status_int)
|
||||
return resp(env, start_response)
|
||||
return self._listing(env, start_response, self.obj)
|
||||
|
||||
def __call__(self, env, start_response):
|
||||
"""
|
||||
Main hook into the WSGI paste.deploy filter/app pipeline.
|
||||
|
||||
:param env: The WSGI environment dict.
|
||||
:param start_response: The WSGI start_response hook.
|
||||
"""
|
||||
env['staticweb.start_time'] = time.time()
|
||||
try:
|
||||
(self.version, self.account, self.container, self.obj) = \
|
||||
split_path(env['PATH_INFO'], 2, 4, True)
|
||||
except ValueError:
|
||||
return self.app(env, start_response)
|
||||
memcache_client = cache_from_env(env)
|
||||
if memcache_client:
|
||||
if env['REQUEST_METHOD'] in ('PUT', 'POST'):
|
||||
if not self.obj and self.container:
|
||||
memcache_key = '/staticweb/%s/%s/%s' % \
|
||||
(self.version, self.account, self.container)
|
||||
memcache_client.delete(memcache_key)
|
||||
return self.app(env, start_response)
|
||||
if (env['REQUEST_METHOD'] not in ('HEAD', 'GET') or
|
||||
(env.get('REMOTE_USER') and
|
||||
env.get('HTTP_X_WEB_MODE', 'f').lower() not in TRUE_VALUES) or
|
||||
(not env.get('REMOTE_USER') and
|
||||
env.get('HTTP_X_WEB_MODE', 't').lower() not in TRUE_VALUES)):
|
||||
return self.app(env, start_response)
|
||||
if self.obj:
|
||||
return self._handle_object(env, start_response)
|
||||
elif self.container:
|
||||
return self._handle_container(env, start_response)
|
||||
return self.app(env, start_response)
|
||||
|
||||
def _log_response(self, env, status_int):
|
||||
"""
|
||||
Logs an access line for StaticWeb responses; use when the next app in
|
||||
the pipeline will not be handling the final response to the remote
|
||||
user.
|
||||
|
||||
Assumes that the request and response bodies are 0 bytes or very near 0
|
||||
so no bytes transferred are tracked or logged.
|
||||
|
||||
This does mean that the listings responses that actually do transfer
|
||||
content will not be logged with any bytes transferred, but in counter
|
||||
to that the full bytes for the underlying listing will be logged by the
|
||||
proxy even if the remote client disconnects early for the StaticWeb
|
||||
listing.
|
||||
|
||||
I didn't think the extra complexity of getting the bytes transferred
|
||||
exactly correct for these requests was worth it, but perhaps someone
|
||||
else will think it is.
|
||||
|
||||
To get things exact, this filter would need to use an
|
||||
eventlet.posthooks logger like the proxy does and any log processing
|
||||
systems would need to ignore some (but not all) proxy requests made by
|
||||
StaticWeb if they were just interested in the bytes transferred to the
|
||||
remote client.
|
||||
"""
|
||||
trans_time = '%.4f' % (time.time() -
|
||||
env.get('staticweb.start_time', time.time()))
|
||||
the_request = quote(unquote(env['PATH_INFO']))
|
||||
if env.get('QUERY_STRING'):
|
||||
the_request = the_request + '?' + env['QUERY_STRING']
|
||||
# remote user for zeus
|
||||
client = env.get('HTTP_X_CLUSTER_CLIENT_IP')
|
||||
if not client and 'HTTP_X_FORWARDED_FOR' in env:
|
||||
# remote user for other lbs
|
||||
client = env['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
|
||||
logged_headers = None
|
||||
if self.log_headers:
|
||||
logged_headers = '\n'.join('%s: %s' % (k, v)
|
||||
for k, v in req.headers.items())
|
||||
self.access_logger.info(' '.join(quote(str(x)) for x in (
|
||||
client or '-',
|
||||
env.get('REMOTE_ADDR', '-'),
|
||||
time.strftime('%d/%b/%Y/%H/%M/%S', time.gmtime()),
|
||||
env['REQUEST_METHOD'],
|
||||
the_request,
|
||||
env['SERVER_PROTOCOL'],
|
||||
status_int,
|
||||
env.get('HTTP_REFERER', '-'),
|
||||
env.get('HTTP_USER_AGENT', '-'),
|
||||
env.get('HTTP_X_AUTH_TOKEN', '-'),
|
||||
'-',
|
||||
'-',
|
||||
env.get('HTTP_ETAG', '-'),
|
||||
env.get('HTTP_X_CF_TRANS_ID', '-'),
|
||||
logged_headers or '-',
|
||||
trans_time)))
|
||||
|
||||
|
||||
def filter_factory(global_conf, **local_conf):
|
||||
""" Returns a Static Web WSGI filter for use with paste.deploy. """
|
||||
conf = global_conf.copy()
|
||||
conf.update(local_conf)
|
||||
|
||||
def staticweb_filter(app):
|
||||
return StaticWeb(app, conf)
|
||||
return staticweb_filter
|
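To make the wiring concrete, a minimal sketch (not from the commit; the downstream app here is a stand-in) of how paste.deploy uses the filter_factory defined above, mirroring the [filter:staticweb] section from the sample proxy config:

from swift.common.middleware import staticweb

def proxy_app(env, start_response):
    # Stand-in for the real proxy-server app downstream in the pipeline.
    start_response('404 Not Found', [('Content-Type', 'text/plain')])
    return ['not found']

# Equivalent of `use = egg:swift#staticweb` with `cache_timeout = 60`.
app = staticweb.filter_factory({}, cache_timeout='60')(proxy_app)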
@@ -277,7 +277,9 @@ class Swauth(object):
             return None
         referrers, groups = parse_acl(getattr(req, 'acl', None))
         if referrer_allowed(req.referer, referrers):
-            return None
+            if obj or '.rlistings' in groups:
+                return None
+            return self.denied_response(req)
         if not req.remote_user:
             return self.denied_response(req)
         for user_group in user_groups:
@@ -1179,7 +1181,7 @@ class Swauth(object):
 
         :returns: webob.Request object
         """
-        newenv = {'REQUEST_METHOD': method}
+        newenv = {'REQUEST_METHOD': method, 'HTTP_USER_AGENT': 'Swauth'}
         for name in ('swift.cache', 'HTTP_X_CF_TRANS_ID'):
             if name in env:
                 newenv[name] = env[name]
@@ -72,7 +72,7 @@ if hash_conf.read('/etc/swift/swift.conf'):
     pass
 
 # Used when reading config values
-TRUE_VALUES = set(('true', '1', 'yes', 'True', 'Yes', 'on', 'On'))
+TRUE_VALUES = set(('true', '1', 'yes', 'True', 'Yes', 'on', 'On', 't', 'y'))
 
 
 def validate_configuration():
@@ -969,3 +969,18 @@ def urlparse(url):
     :param url: URL to parse.
     """
     return ModifiedParseResult(*stdlib_urlparse(url))
+
+
+def human_readable(value):
+    """
+    Returns the number in a human readable format; for example 1048576 = "1Mi".
+    """
+    value = float(value)
+    index = -1
+    suffixes = 'KMGTPEZY'
+    while value >= 1024 and index + 1 < len(suffixes):
+        index += 1
+        value = round(value / 1024)
+    if index == -1:
+        return '%d' % value
+    return '%d%si' % (round(value), suffixes[index])
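A few sample values for the helper added above (illustrative, run against swift.common.utils after this change):

from swift.common.utils import human_readable

print(human_readable(1023))         # 1023
print(human_readable(1048576))      # 1Mi
print(human_readable(5368709120))   # 5Gi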
@ -85,37 +85,6 @@ def hash_suffix(path, reclaim_age):
|
||||
return md5.hexdigest()
|
||||
|
||||
|
||||
def recalculate_hashes(partition_dir, suffixes, reclaim_age=ONE_WEEK):
|
||||
"""
|
||||
Recalculates hashes for the given suffixes in the partition and updates
|
||||
them in the partition's hashes file.
|
||||
|
||||
:param partition_dir: directory of the partition in which to recalculate
|
||||
:param suffixes: list of suffixes to recalculate
|
||||
:param reclaim_age: age in seconds at which tombstones should be removed
|
||||
"""
|
||||
|
||||
def tpool_listdir(partition_dir):
|
||||
return dict(((suff, None) for suff in os.listdir(partition_dir)
|
||||
if len(suff) == 3 and isdir(join(partition_dir, suff))))
|
||||
hashes_file = join(partition_dir, HASH_FILE)
|
||||
with lock_path(partition_dir):
|
||||
try:
|
||||
with open(hashes_file, 'rb') as fp:
|
||||
hashes = pickle.load(fp)
|
||||
except Exception:
|
||||
hashes = tpool.execute(tpool_listdir, partition_dir)
|
||||
for suffix in suffixes:
|
||||
suffix_dir = join(partition_dir, suffix)
|
||||
if os.path.exists(suffix_dir):
|
||||
hashes[suffix] = hash_suffix(suffix_dir, reclaim_age)
|
||||
elif suffix in hashes:
|
||||
del hashes[suffix]
|
||||
with open(hashes_file + '.tmp', 'wb') as fp:
|
||||
pickle.dump(hashes, fp, PICKLE_PROTOCOL)
|
||||
renamer(hashes_file + '.tmp', hashes_file)
|
||||
|
||||
|
||||
def invalidate_hash(suffix_dir):
|
||||
"""
|
||||
Invalidates the hash for a suffix_dir in the partition's hashes file.
|
||||
@ -141,23 +110,21 @@ def invalidate_hash(suffix_dir):
|
||||
renamer(hashes_file + '.tmp', hashes_file)
|
||||
|
||||
|
||||
def get_hashes(partition_dir, do_listdir=True, reclaim_age=ONE_WEEK):
|
||||
def get_hashes(partition_dir, recalculate=[], do_listdir=False,
|
||||
reclaim_age=ONE_WEEK):
|
||||
"""
|
||||
Get a list of hashes for the suffix dir. do_listdir causes it to mistrust
|
||||
the hash cache for suffix existence at the (unexpectedly high) cost of a
|
||||
listdir. reclaim_age is just passed on to hash_suffix.
|
||||
|
||||
:param partition_dir: absolute path of partition to get hashes for
|
||||
:param recalculate: list of suffixes which should be recalculated when got
|
||||
:param do_listdir: force existence check for all hashes in the partition
|
||||
:param reclaim_age: age at which to remove tombstones
|
||||
|
||||
:returns: tuple of (number of suffix dirs hashed, dictionary of hashes)
|
||||
"""
|
||||
|
||||
def tpool_listdir(hashes, partition_dir):
|
||||
return dict(((suff, hashes.get(suff, None))
|
||||
for suff in os.listdir(partition_dir)
|
||||
if len(suff) == 3 and isdir(join(partition_dir, suff))))
|
||||
hashed = 0
|
||||
hashes_file = join(partition_dir, HASH_FILE)
|
||||
with lock_path(partition_dir):
|
||||
@ -169,8 +136,12 @@ def get_hashes(partition_dir, do_listdir=True, reclaim_age=ONE_WEEK):
|
||||
except Exception:
|
||||
do_listdir = True
|
||||
if do_listdir:
|
||||
hashes = tpool.execute(tpool_listdir, hashes, partition_dir)
|
||||
hashes = dict(((suff, hashes.get(suff, None))
|
||||
for suff in os.listdir(partition_dir)
|
||||
if len(suff) == 3 and isdir(join(partition_dir, suff))))
|
||||
modified = True
|
||||
for hash_ in recalculate:
|
||||
hashes[hash_] = None
|
||||
for suffix, hash_ in hashes.items():
|
||||
if not hash_:
|
||||
suffix_dir = join(partition_dir, suffix)
|
||||
@ -342,8 +313,7 @@ class ObjectReplicator(Daemon):
|
||||
success = self.rsync(node, job, suffixes)
|
||||
if success:
|
||||
with Timeout(self.http_timeout):
|
||||
http_connect(node['ip'],
|
||||
node['port'],
|
||||
http_connect(node['ip'], node['port'],
|
||||
node['device'], job['partition'], 'REPLICATE',
|
||||
'/' + '-'.join(suffixes),
|
||||
headers={'Content-Length': '0'}).getresponse().read()
|
||||
@ -366,7 +336,7 @@ class ObjectReplicator(Daemon):
|
||||
self.replication_count += 1
|
||||
begin = time.time()
|
||||
try:
|
||||
hashed, local_hash = get_hashes(job['path'],
|
||||
hashed, local_hash = tpool.execute(get_hashes, job['path'],
|
||||
do_listdir=(self.replication_count % 10) == 0,
|
||||
reclaim_age=self.reclaim_age)
|
||||
self.suffix_hash += hashed
|
||||
@ -394,14 +364,15 @@ class ObjectReplicator(Daemon):
|
||||
continue
|
||||
remote_hash = pickle.loads(resp.read())
|
||||
del resp
|
||||
suffixes = [suffix for suffix in local_hash
|
||||
if local_hash[suffix] !=
|
||||
remote_hash.get(suffix, -1)]
|
||||
suffixes = [suffix for suffix in local_hash if
|
||||
local_hash[suffix] != remote_hash.get(suffix, -1)]
|
||||
if not suffixes:
|
||||
continue
|
||||
hashed, local_hash = tpool.execute(get_hashes, job['path'],
|
||||
recalculate=suffixes, reclaim_age=self.reclaim_age)
|
||||
suffixes = [suffix for suffix in local_hash if
|
||||
local_hash[suffix] != remote_hash.get(suffix, -1)]
|
||||
self.rsync(node, job, suffixes)
|
||||
recalculate_hashes(job['path'], suffixes,
|
||||
reclaim_age=self.reclaim_age)
|
||||
with Timeout(self.http_timeout):
|
||||
conn = http_connect(node['ip'], node['port'],
|
||||
node['device'], job['partition'], 'REPLICATE',
|
||||
@ -556,7 +527,7 @@ class ObjectReplicator(Daemon):
|
||||
_("Object replication complete. (%.02f minutes)"), total)
|
||||
|
||||
def run_forever(self, *args, **kwargs):
|
||||
self.logger.info("Starting object replicator in daemon mode.")
|
||||
self.logger.info(_("Starting object replicator in daemon mode."))
|
||||
# Run the replicator continually
|
||||
while True:
|
||||
start = time.time()
|
||||
|
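For reference, a hedged sketch (hypothetical partition path and suffixes) of the reworked get_hashes() interface that replaces the removed recalculate_hashes() helper; the replicator now runs this call through tpool.execute as shown above.

from swift.obj.replicator import get_hashes

# Recalculate two specific suffix hashes in one pass instead of calling the
# old recalculate_hashes(); do_listdir=False trusts the cached hashes file.
hashed, hashes = get_hashes('/srv/node/sdb1/objects/1234',
                            recalculate=['a83', '0f1'],
                            do_listdir=False)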
@ -44,8 +44,7 @@ from swift.common.constraints import check_object_creation, check_mount, \
|
||||
check_float, check_utf8
|
||||
from swift.common.exceptions import ConnectionTimeout, DiskFileError, \
|
||||
DiskFileNotExist
|
||||
from swift.obj.replicator import get_hashes, invalidate_hash, \
|
||||
recalculate_hashes
|
||||
from swift.obj.replicator import get_hashes, invalidate_hash
|
||||
|
||||
|
||||
DATADIR = 'objects'
|
||||
@ -54,6 +53,8 @@ PICKLE_PROTOCOL = 2
|
||||
METADATA_KEY = 'user.swift.metadata'
|
||||
MAX_OBJECT_NAME_LENGTH = 1024
|
||||
KEEP_CACHE_SIZE = (5 * 1024 * 1024)
|
||||
# keep these lower-case
|
||||
DISALLOWED_HEADERS = set('content-length content-type deleted etag'.split())
|
||||
|
||||
|
||||
def read_metadata(fd):
|
||||
@ -171,8 +172,7 @@ class DiskFile(object):
|
||||
if self.meta_file:
|
||||
with open(self.meta_file) as mfp:
|
||||
for key in self.metadata.keys():
|
||||
if key.lower() not in ('content-type', 'content-encoding',
|
||||
'deleted', 'content-length', 'etag'):
|
||||
if key.lower() not in DISALLOWED_HEADERS:
|
||||
del self.metadata[key]
|
||||
self.metadata.update(read_metadata(mfp))
|
||||
|
||||
@ -388,6 +388,12 @@ class ObjectController(object):
|
||||
self.max_upload_time = int(conf.get('max_upload_time', 86400))
|
||||
self.slow = int(conf.get('slow', 0))
|
||||
self.bytes_per_sync = int(conf.get('mb_per_sync', 512)) * 1024 * 1024
|
||||
default_allowed_headers = 'content-encoding, x-object-manifest, ' \
|
||||
'content-disposition'
|
||||
self.allowed_headers = set(i.strip().lower() for i in \
|
||||
conf.get('allowed_headers', \
|
||||
default_allowed_headers).split(',') if i.strip() and \
|
||||
i.strip().lower() not in DISALLOWED_HEADERS)
|
||||
|
||||
def container_update(self, op, account, container, obj, headers_in,
|
||||
headers_out, objdevice):
|
||||
@ -467,6 +473,10 @@ class ObjectController(object):
|
||||
metadata = {'X-Timestamp': request.headers['x-timestamp']}
|
||||
metadata.update(val for val in request.headers.iteritems()
|
||||
if val[0].lower().startswith('x-object-meta-'))
|
||||
for header_key in self.allowed_headers:
|
||||
if header_key in request.headers:
|
||||
header_caps = header_key.title()
|
||||
metadata[header_caps] = request.headers[header_key]
|
||||
with file.mkstemp() as (fd, tmppath):
|
||||
file.put(fd, tmppath, metadata, extension='.meta')
|
||||
return response_class(request=request)
|
||||
@ -525,15 +535,13 @@ class ObjectController(object):
|
||||
'ETag': etag,
|
||||
'Content-Length': str(os.fstat(fd).st_size),
|
||||
}
|
||||
if 'x-object-manifest' in request.headers:
|
||||
metadata['X-Object-Manifest'] = \
|
||||
request.headers['x-object-manifest']
|
||||
metadata.update(val for val in request.headers.iteritems()
|
||||
if val[0].lower().startswith('x-object-meta-') and
|
||||
len(val[0]) > 14)
|
||||
if 'content-encoding' in request.headers:
|
||||
metadata['Content-Encoding'] = \
|
||||
request.headers['Content-Encoding']
|
||||
for header_key in self.allowed_headers:
|
||||
if header_key in request.headers:
|
||||
header_caps = header_key.title()
|
||||
metadata[header_caps] = request.headers[header_key]
|
||||
file.put(fd, tmppath, metadata)
|
||||
file.unlinkold(metadata['X-Timestamp'])
|
||||
self.container_update('PUT', account, container, obj, request.headers,
|
||||
@ -603,8 +611,8 @@ class ObjectController(object):
|
||||
'application/octet-stream'), app_iter=file,
|
||||
request=request, conditional_response=True)
|
||||
for key, value in file.metadata.iteritems():
|
||||
if key == 'X-Object-Manifest' or \
|
||||
key.lower().startswith('x-object-meta-'):
|
||||
if key.lower().startswith('x-object-meta-') or \
|
||||
key.lower() in self.allowed_headers:
|
||||
response.headers[key] = value
|
||||
response.etag = file.metadata['ETag']
|
||||
response.last_modified = float(file.metadata['X-Timestamp'])
|
||||
@ -699,10 +707,8 @@ class ObjectController(object):
|
||||
path = os.path.join(self.devices, device, DATADIR, partition)
|
||||
if not os.path.exists(path):
|
||||
mkdirs(path)
|
||||
if suffix:
|
||||
recalculate_hashes(path, suffix.split('-'))
|
||||
return Response()
|
||||
_junk, hashes = get_hashes(path, do_listdir=False)
|
||||
suffixes = suffix.split('-') if suffix else []
|
||||
_junk, hashes = tpool.execute(get_hashes, path, recalculate=suffixes)
|
||||
return Response(body=pickle.dumps(hashes))
|
||||
|
||||
def __call__(self, env, start_response):
|
||||
|
@@ -560,7 +560,7 @@ class Controller(object):
             status_index = statuses.index(status)
             resp.status = '%s %s' % (status, reasons[status_index])
             resp.body = bodies[status_index]
-            resp.content_type = 'text/plain'
+            resp.content_type = 'text/html'
             if etag:
                 resp.headers['etag'] = etag.strip('"')
             return resp
@@ -338,7 +338,7 @@ class TestContainer(unittest.TestCase):
         def post(url, token, parsed, conn):
             conn.request('POST', parsed.path + '/' + self.name, '',
                          {'X-Auth-Token': token,
-                          'X-Container-Read': '.r:*'})
+                          'X-Container-Read': '.r:*,.rlistings'})
             return check_response(conn)
         resp = retry(post)
         resp.read()
@@ -430,7 +430,7 @@ class TestContainer(unittest.TestCase):
         def post(url, token, parsed, conn):
             conn.request('POST', parsed.path + '/' + self.name, '',
                          {'X-Auth-Token': token,
-                          'X-Container-Read': '.r:*'})
+                          'X-Container-Read': '.r:*,.rlistings'})
             return check_response(conn)
         resp = retry(post)
         resp.read()
test/unit/common/middleware/test_staticweb.py (new file, 510 lines)
@@ -0,0 +1,510 @@
|
||||
# Copyright (c) 2010 OpenStack, LLC.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
try:
|
||||
import simplejson as json
|
||||
except ImportError:
|
||||
import json
|
||||
import unittest
|
||||
from contextlib import contextmanager
|
||||
|
||||
from webob import Request, Response
|
||||
|
||||
from swift.common.middleware import staticweb
|
||||
|
||||
|
||||
class FakeMemcache(object):
|
||||
|
||||
def __init__(self):
|
||||
self.store = {}
|
||||
|
||||
def get(self, key):
|
||||
return self.store.get(key)
|
||||
|
||||
def set(self, key, value, timeout=0):
|
||||
self.store[key] = value
|
||||
return True
|
||||
|
||||
def incr(self, key, timeout=0):
|
||||
self.store[key] = self.store.setdefault(key, 0) + 1
|
||||
return self.store[key]
|
||||
|
||||
@contextmanager
|
||||
def soft_lock(self, key, timeout=0, retries=5):
|
||||
yield True
|
||||
|
||||
def delete(self, key):
|
||||
try:
|
||||
del self.store[key]
|
||||
except Exception:
|
||||
pass
|
||||
return True
|
||||
|
||||
|
||||
class FakeApp(object):
|
||||
|
||||
def __init__(self, status_headers_body_iter=None):
|
||||
self.get_c4_called = False
|
||||
|
||||
def __call__(self, env, start_response):
|
||||
if env['PATH_INFO'] == '/':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1':
|
||||
return Response(
|
||||
status='412 Precondition Failed')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a':
|
||||
return Response(status='401 Unauthorized')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c1':
|
||||
return Response(status='401 Unauthorized')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c2':
|
||||
return self.listing(env, start_response,
|
||||
{'x-container-read': '.r:*'})
|
||||
elif env['PATH_INFO'] == '/v1/a/c2/one.txt':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3':
|
||||
return self.listing(env, start_response,
|
||||
{'x-container-read': '.r:*',
|
||||
'x-container-meta-web-index': 'index.html',
|
||||
'x-container-meta-web-listings': 't'})
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/index.html':
|
||||
return Response(status='200 Ok', body='''
|
||||
<html>
|
||||
<body>
|
||||
<h1>Test main index.html file.</h1>
|
||||
<p>Visit <a href="subdir">subdir</a>.</p>
|
||||
<p>Don't visit <a href="subdir2/">subdir2</a> because it doesn't really
|
||||
exist.</p>
|
||||
<p>Visit <a href="subdir3">subdir3</a>.</p>
|
||||
<p>Visit <a href="subdir3/subsubdir">subdir3/subsubdir</a>.</p>
|
||||
</body>
|
||||
</html>
|
||||
''')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/subdir':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/subdir/':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/subdir/index.html':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/subdir3/subsubdir':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/subdir3/subsubdir/':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/subdir3/subsubdir/index.html':
|
||||
return Response(status='200 Ok', body='index file')(env,
|
||||
start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/subdirx/':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/subdirx/index.html':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/subdiry/':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/subdiry/index.html':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/subdirz':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/subdirz/index.html':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/unknown':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c3/unknown/index.html':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c4':
|
||||
self.get_c4_called = True
|
||||
return self.listing(env, start_response,
|
||||
{'x-container-read': '.r:*',
|
||||
'x-container-meta-web-index': 'index.html',
|
||||
'x-container-meta-web-error': 'error.html',
|
||||
'x-container-meta-web-listings': 't',
|
||||
'x-container-meta-web-listings-css': 'listing.css'})
|
||||
elif env['PATH_INFO'] == '/v1/a/c4/one.txt':
|
||||
return Response(status='200 Ok', body='1')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c4/two.txt':
|
||||
return Response(status='503 Service Unavailable')(env,
|
||||
start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c4/index.html':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c4/subdir/':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c4/subdir/index.html':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c4/unknown':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c4/unknown/index.html':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c4/404error.html':
|
||||
return Response(status='200 Ok', body='''
|
||||
<html>
|
||||
<body style="background: #000000; color: #ffaaaa">
|
||||
<p>Chrome's 404 fancy-page sucks.</p>
|
||||
<body>
|
||||
<html>
|
||||
'''.strip())(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c5':
|
||||
return self.listing(env, start_response,
|
||||
{'x-container-read': '.r:*',
|
||||
'x-container-meta-web-index': 'index.html',
|
||||
'x-container-meta-listings': 't',
|
||||
'x-container-meta-web-error': 'error.html'})
|
||||
elif env['PATH_INFO'] == '/v1/a/c5/index.html':
|
||||
return Response(status='503 Service Unavailable')(env,
|
||||
start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c5/503error.html':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c5/unknown':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c5/unknown/index.html':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c5/404error.html':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] == '/v1/a/c6':
|
||||
return self.listing(env, start_response,
|
||||
{'x-container-read': '.r:*',
|
||||
'x-container-meta-web-listings': 't'})
|
||||
elif env['PATH_INFO'] == '/v1/a/c6/subdir':
|
||||
return Response(status='404 Not Found')(env, start_response)
|
||||
elif env['PATH_INFO'] in ('/v1/a/c7', '/v1/a/c7/'):
|
||||
return self.listing(env, start_response,
|
||||
{'x-container-read': '.r:*',
|
||||
'x-container-meta-web-listings': 'f'})
|
||||
else:
|
||||
raise Exception('Unknown path %r' % env['PATH_INFO'])
|
||||
|
||||
def listing(self, env, start_response, headers):
|
||||
if env['PATH_INFO'] in ('/v1/a/c3', '/v1/a/c4') and \
|
||||
env['QUERY_STRING'] == 'delimiter=/&format=json&prefix=subdir/':
|
||||
headers.update({'X-Container-Object-Count': '11',
|
||||
'X-Container-Bytes-Used': '73741',
|
||||
'X-Container-Read': '.r:*',
|
||||
'Content-Type': 'application/json; charset=utf8'})
|
||||
body = '''
|
||||
[{"name":"subdir/1.txt",
|
||||
"hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
|
||||
"content_type":"text/plain",
|
||||
"last_modified":"2011-03-24T04:27:52.709100"},
|
||||
{"name":"subdir/2.txt",
|
||||
"hash":"c85c1dcd19cf5cbac84e6043c31bb63e", "bytes":20,
|
||||
"content_type":"text/plain",
|
||||
"last_modified":"2011-03-24T04:27:52.734140"},
|
||||
{"subdir":"subdir3/subsubdir/"}]
|
||||
'''.strip()
|
||||
elif env['PATH_INFO'] == '/v1/a/c3' and env['QUERY_STRING'] == \
|
||||
'delimiter=/&format=json&prefix=subdiry/':
|
||||
headers.update({'X-Container-Object-Count': '11',
|
||||
'X-Container-Bytes-Used': '73741',
|
||||
'X-Container-Read': '.r:*',
|
||||
'Content-Type': 'application/json; charset=utf8'})
|
||||
body = '[]'
|
||||
elif env['PATH_INFO'] == '/v1/a/c3' and env['QUERY_STRING'] == \
|
||||
'limit=1&format=json&delimiter=/&limit=1&prefix=subdirz/':
|
||||
headers.update({'X-Container-Object-Count': '11',
|
||||
'X-Container-Bytes-Used': '73741',
|
||||
'X-Container-Read': '.r:*',
|
||||
'Content-Type': 'application/json; charset=utf8'})
|
||||
body = '''
|
||||
[{"name":"subdirz/1.txt",
|
||||
"hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
|
||||
"content_type":"text/plain",
|
||||
"last_modified":"2011-03-24T04:27:52.709100"}]
|
||||
'''.strip()
|
||||
elif env['PATH_INFO'] == '/v1/a/c6' and env['QUERY_STRING'] == \
|
||||
'limit=1&format=json&delimiter=/&limit=1&prefix=subdir/':
|
||||
headers.update({'X-Container-Object-Count': '11',
|
||||
'X-Container-Bytes-Used': '73741',
|
||||
'X-Container-Read': '.r:*',
|
||||
'X-Container-Web-Listings': 't',
|
||||
'Content-Type': 'application/json; charset=utf8'})
|
||||
body = '''
|
||||
[{"name":"subdir/1.txt",
|
||||
"hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
|
||||
"content_type":"text/plain",
|
||||
"last_modified":"2011-03-24T04:27:52.709100"}]
|
||||
'''.strip()
|
||||
elif 'prefix=' in env['QUERY_STRING']:
|
||||
return Response(status='204 No Content')
|
||||
elif 'format=json' in env['QUERY_STRING']:
|
||||
headers.update({'X-Container-Object-Count': '11',
|
||||
'X-Container-Bytes-Used': '73741',
|
||||
'Content-Type': 'application/json; charset=utf8'})
|
||||
body = '''
|
||||
[{"name":"401error.html",
|
||||
"hash":"893f8d80692a4d3875b45be8f152ad18", "bytes":110,
|
||||
"content_type":"text/html",
|
||||
"last_modified":"2011-03-24T04:27:52.713710"},
|
||||
{"name":"404error.html",
|
||||
"hash":"62dcec9c34ed2b347d94e6ca707aff8c", "bytes":130,
|
||||
"content_type":"text/html",
|
||||
"last_modified":"2011-03-24T04:27:52.720850"},
|
||||
{"name":"index.html",
|
||||
"hash":"8b469f2ca117668a5131fe9ee0815421", "bytes":347,
|
||||
"content_type":"text/html",
|
||||
"last_modified":"2011-03-24T04:27:52.683590"},
|
||||
{"name":"listing.css",
|
||||
"hash":"7eab5d169f3fcd06a08c130fa10c5236", "bytes":17,
|
||||
"content_type":"text/css",
|
||||
"last_modified":"2011-03-24T04:27:52.721610"},
|
||||
{"name":"one.txt", "hash":"73f1dd69bacbf0847cc9cffa3c6b23a1",
|
||||
"bytes":22, "content_type":"text/plain",
|
||||
"last_modified":"2011-03-24T04:27:52.722270"},
|
||||
{"name":"subdir/1.txt",
|
||||
"hash":"5f595114a4b3077edfac792c61ca4fe4", "bytes":20,
|
||||
"content_type":"text/plain",
|
||||
"last_modified":"2011-03-24T04:27:52.709100"},
|
||||
{"name":"subdir/2.txt",
|
||||
"hash":"c85c1dcd19cf5cbac84e6043c31bb63e", "bytes":20,
|
||||
"content_type":"text/plain",
|
||||
"last_modified":"2011-03-24T04:27:52.734140"},
|
||||
{"name":"subdir/omgomg.txt",
|
||||
"hash":"7337d028c093130898d937c319cc9865", "bytes":72981,
|
||||
"content_type":"text/plain",
|
||||
"last_modified":"2011-03-24T04:27:52.735460"},
|
||||
{"name":"subdir2", "hash":"d41d8cd98f00b204e9800998ecf8427e",
|
||||
"bytes":0, "content_type":"text/directory",
|
||||
"last_modified":"2011-03-24T04:27:52.676690"},
|
||||
{"name":"subdir3/subsubdir/index.html",
|
||||
"hash":"04eea67110f883b1a5c97eb44ccad08c", "bytes":72,
|
||||
"content_type":"text/html",
|
||||
"last_modified":"2011-03-24T04:27:52.751260"},
|
||||
{"name":"two.txt", "hash":"10abb84c63a5cff379fdfd6385918833",
|
||||
"bytes":22, "content_type":"text/plain",
|
||||
"last_modified":"2011-03-24T04:27:52.825110"}]
|
||||
'''.strip()
|
||||
else:
|
||||
headers.update({'X-Container-Object-Count': '11',
|
||||
'X-Container-Bytes-Used': '73741',
|
||||
'Content-Type': 'text/plain; charset=utf8'})
|
||||
body = '\n'.join(['401error.html', '404error.html', 'index.html',
|
||||
'listing.css', 'one.txt', 'subdir/1.txt',
|
||||
'subdir/2.txt', 'subdir/omgomg.txt', 'subdir2',
|
||||
'subdir3/subsubdir/index.html', 'two.txt'])
|
||||
return Response(status='200 Ok', headers=headers,
|
||||
body=body)(env, start_response)
|
||||
|
||||
|
||||
class TestStaticWeb(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.test_staticweb = staticweb.filter_factory({})(FakeApp())
|
||||
|
||||
def test_app_set(self):
|
||||
app = FakeApp()
|
||||
sw = staticweb.filter_factory({})(app)
|
||||
self.assertEquals(sw.app, app)
|
||||
|
||||
def test_conf_set(self):
|
||||
conf = {'blah': 1}
|
||||
sw = staticweb.filter_factory(conf)(FakeApp())
|
||||
self.assertEquals(sw.conf, conf)
|
||||
|
||||
def test_cache_timeout_unset(self):
|
||||
sw = staticweb.filter_factory({})(FakeApp())
|
||||
self.assertEquals(sw.cache_timeout, 300)
|
||||
|
||||
def test_cache_timeout_set(self):
|
||||
sw = staticweb.filter_factory({'cache_timeout': '1'})(FakeApp())
|
||||
self.assertEquals(sw.cache_timeout, 1)
|
||||
|
||||
def test_root(self):
|
||||
resp = Request.blank('/').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 404)
|
||||
|
||||
def test_version(self):
|
||||
resp = Request.blank('/v1').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 412)
|
||||
|
||||
def test_account(self):
|
||||
resp = Request.blank('/v1/a').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 401)
|
||||
|
||||
def test_container1(self):
|
||||
resp = Request.blank('/v1/a/c1').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 401)
|
||||
|
||||
def test_container2(self):
|
||||
resp = Request.blank('/v1/a/c2').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 200)
|
||||
self.assertEquals(resp.content_type, 'text/plain')
|
||||
self.assertEquals(len(resp.body.split('\n')),
|
||||
int(resp.headers['x-container-object-count']))
|
||||
|
||||
def test_container2onetxt(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c2/one.txt').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 404)
|
||||
|
||||
def test_container2json(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c2?format=json').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 200)
|
||||
self.assertEquals(resp.content_type, 'application/json')
|
||||
self.assertEquals(len(json.loads(resp.body)),
|
||||
int(resp.headers['x-container-object-count']))
|
||||
|
||||
def test_container3(self):
|
||||
resp = Request.blank('/v1/a/c3').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 301)
|
||||
self.assertEquals(resp.headers['location'],
|
||||
'http://localhost/v1/a/c3/')
|
||||
|
||||
def test_container3indexhtml(self):
|
||||
resp = Request.blank('/v1/a/c3/').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 200)
|
||||
self.assert_('Test main index.html file.' in resp.body)
|
||||
|
||||
def test_container3subdir(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c3/subdir').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 301)
|
||||
|
||||
def test_container3subsubdir(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c3/subdir3/subsubdir').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 301)
|
||||
|
||||
def test_container3subsubdircontents(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c3/subdir3/subsubdir/').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 200)
|
||||
self.assertEquals(resp.body, 'index file')
|
||||
|
||||
def test_container3subdir(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c3/subdir/').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 200)
|
||||
self.assert_('Listing of /v1/a/c3/subdir/' in resp.body)
|
||||
self.assert_('</style>' in resp.body)
|
||||
self.assert_('<link' not in resp.body)
|
||||
self.assert_('listing.css' not in resp.body)
|
||||
|
||||
def test_container3subdirx(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c3/subdirx/').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 404)
|
||||
|
||||
def test_container3subdiry(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c3/subdiry/').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 404)
|
||||
|
||||
def test_container3subdirz(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c3/subdirz').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 301)
|
||||
|
||||
def test_container3unknown(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c3/unknown').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 404)
|
||||
self.assert_("Chrome's 404 fancy-page sucks." not in resp.body)
|
||||
|
||||
def test_container4indexhtml(self):
|
||||
resp = Request.blank('/v1/a/c4/').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 200)
|
||||
self.assert_('Listing of /v1/a/c4/' in resp.body)
|
||||
|
||||
def test_container4indexhtmlauthed(self):
|
||||
resp = Request.blank('/v1/a/c4').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 301)
|
||||
resp = Request.blank('/v1/a/c4',
|
||||
environ={'REMOTE_USER': 'authed'}).get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 200)
|
||||
resp = Request.blank('/v1/a/c4', headers={'x-web-mode': 't'},
|
||||
environ={'REMOTE_USER': 'authed'}).get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 301)
|
||||
|
||||
def test_container4unknown(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c4/unknown').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 404)
|
||||
self.assert_("Chrome's 404 fancy-page sucks." in resp.body)
|
||||
|
||||
def test_container4unknown_memcache(self):
|
||||
fake_memcache = FakeMemcache()
|
||||
self.assertEquals(fake_memcache.store, {})
|
||||
resp = Request.blank('/v1/a/c4',
|
||||
environ={'swift.cache': fake_memcache}
|
||||
).get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 301)
|
||||
self.assertEquals(fake_memcache.store,
|
||||
{'/staticweb/v1/a/c4':
|
||||
('index.html', 'error.html', 't', 'listing.css')})
|
||||
self.assert_(self.test_staticweb.app.get_c4_called)
|
||||
self.test_staticweb.app.get_c4_called = False
|
||||
resp = Request.blank('/v1/a/c4',
|
||||
environ={'swift.cache': fake_memcache}
|
||||
).get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 301)
|
||||
self.assert_(not self.test_staticweb.app.get_c4_called)
|
||||
self.assertEquals(fake_memcache.store,
|
||||
{'/staticweb/v1/a/c4':
|
||||
('index.html', 'error.html', 't', 'listing.css')})
|
||||
resp = Request.blank('/v1/a/c4',
|
||||
environ={'swift.cache': fake_memcache, 'REQUEST_METHOD': 'PUT'}
|
||||
).get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 200)
|
||||
self.assertEquals(fake_memcache.store, {})
|
||||
resp = Request.blank('/v1/a/c4',
|
||||
environ={'swift.cache': fake_memcache}
|
||||
).get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 301)
|
||||
self.assertEquals(fake_memcache.store,
|
||||
{'/staticweb/v1/a/c4':
|
||||
('index.html', 'error.html', 't', 'listing.css')})
|
||||
resp = Request.blank('/v1/a/c4',
|
||||
environ={'swift.cache': fake_memcache, 'REQUEST_METHOD': 'POST'}
|
||||
).get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 200)
|
||||
self.assertEquals(fake_memcache.store, {})
|
||||
|
||||
def test_container4subdir(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c4/subdir/').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 200)
|
||||
self.assert_('Listing of /v1/a/c4/subdir/' in resp.body)
|
||||
self.assert_('</style>' not in resp.body)
|
||||
self.assert_('<link' in resp.body)
|
||||
self.assert_('listing.css' in resp.body)
|
||||
|
||||
def test_container4onetxt(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c4/one.txt').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 200)
|
||||
|
||||
def test_container4twotxt(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c4/two.txt').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 503)
|
||||
|
||||
def test_container5indexhtml(self):
|
||||
resp = Request.blank('/v1/a/c5/').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 503)
|
||||
|
||||
def test_container5unknown(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c5/unknown').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 404)
|
||||
self.assert_("Chrome's 404 fancy-page sucks." not in resp.body)
|
||||
|
||||
def test_container6subdir(self):
|
||||
resp = Request.blank(
|
||||
'/v1/a/c6/subdir').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 301)
|
||||
|
||||
def test_container7listing(self):
|
||||
resp = Request.blank('/v1/a/c7/').get_response(self.test_staticweb)
|
||||
self.assertEquals(resp.status_int, 404)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
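
The tests above only ever exercise staticweb as a WSGI filter: filter_factory(conf) returns a wrapper that is applied to a WSGI app, and webob's Request.blank drives it without a running proxy. A minimal sketch of that same pattern, reusing the FakeApp stand-in defined earlier in this test file:

    from webob import Request
    from swift.common.middleware import staticweb

    # Wrap the fake proxy app, exactly as setUp() does above.
    app = staticweb.filter_factory({})(FakeApp())
    # In the fake listings, c3 is publicly readable and has an index.html,
    # so requesting the container root serves that index.
    resp = Request.blank('/v1/a/c3/').get_response(app)
    assert resp.status_int == 200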
@ -400,37 +400,46 @@ class TestAuth(unittest.TestCase):
        self.assertEquals(resp.status_int, 403)

    def test_authorize_acl_referrer_access(self):
        req = Request.blank('/v1/AUTH_cfa')
        req = Request.blank('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        req = Request.blank('/v1/AUTH_cfa')
        req = Request.blank('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        req.acl = '.r:*'
        req.acl = '.r:*,.rlistings'
        self.assertEquals(self.test_auth.authorize(req), None)
        req = Request.blank('/v1/AUTH_cfa')
        req = Request.blank('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        req.acl = '.r:.example.com'
        req.acl = '.r:*' # No listings allowed
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        req = Request.blank('/v1/AUTH_cfa')
        req = Request.blank('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        req.acl = '.r:.example.com,.rlistings'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 403)
        req = Request.blank('/v1/AUTH_cfa/c')
        req.remote_user = 'act:usr,act'
        req.referer = 'http://www.example.com/index.html'
        req.acl = '.r:.example.com'
        req.acl = '.r:.example.com,.rlistings'
        self.assertEquals(self.test_auth.authorize(req), None)
        req = Request.blank('/v1/AUTH_cfa')
        req = Request.blank('/v1/AUTH_cfa/c')
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 401)
        req = Request.blank('/v1/AUTH_cfa')
        req.acl = '.r:*'
        req = Request.blank('/v1/AUTH_cfa/c')
        req.acl = '.r:*,.rlistings'
        self.assertEquals(self.test_auth.authorize(req), None)
        req = Request.blank('/v1/AUTH_cfa')
        req.acl = '.r:.example.com'
        req = Request.blank('/v1/AUTH_cfa/c')
        req.acl = '.r:*' # No listings allowed
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 401)
        req = Request.blank('/v1/AUTH_cfa')
        req = Request.blank('/v1/AUTH_cfa/c')
        req.acl = '.r:.example.com,.rlistings'
        resp = self.test_auth.authorize(req)
        self.assertEquals(resp.status_int, 401)
        req = Request.blank('/v1/AUTH_cfa/c')
        req.referer = 'http://www.example.com/index.html'
        req.acl = '.r:.example.com'
        req.acl = '.r:.example.com,.rlistings'
        self.assertEquals(self.test_auth.authorize(req), None)

    def test_account_put_permissions(self):
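
The ACL strings used above follow the referrer syntax these assertions encode: '.r:*' or '.r:<domain>' grants read access based on the request's Referer header, and '.rlistings' must also be present before container listings are authorized. A rough, hypothetical illustration of just the referrer-matching step (not Swift's actual acl module):

    def referrer_matches(referrer, hosts):
        # hosts come from the '.r:' entries of an ACL, e.g. ['*', '.example.com'].
        host = (referrer or '').split('//', 1)[-1]
        host = host.split('/', 1)[0].split(':', 1)[0].lower()
        for entry in hosts:
            if entry == '*':
                return True
            if entry.startswith('.'):
                # '.example.com' matches the bare domain and any subdomain.
                if host == entry[1:] or host.endswith(entry):
                    return True
            elif host == entry:
                return True
        return False

    assert referrer_matches('http://www.example.com/index.html', ['.example.com'])
    assert not referrer_matches('http://evil.example.org/', ['.example.com'])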
@ -737,6 +737,29 @@ log_name = yarr'''
        self.assertEquals(utils.remove_file(file_name), None)
        self.assertFalse(os.path.exists(file_name))

    def test_human_readable(self):
        self.assertEquals(utils.human_readable(0), '0')
        self.assertEquals(utils.human_readable(1), '1')
        self.assertEquals(utils.human_readable(10), '10')
        self.assertEquals(utils.human_readable(100), '100')
        self.assertEquals(utils.human_readable(999), '999')
        self.assertEquals(utils.human_readable(1024), '1Ki')
        self.assertEquals(utils.human_readable(1535), '1Ki')
        self.assertEquals(utils.human_readable(1536), '2Ki')
        self.assertEquals(utils.human_readable(1047552), '1023Ki')
        self.assertEquals(utils.human_readable(1048063), '1023Ki')
        self.assertEquals(utils.human_readable(1048064), '1Mi')
        self.assertEquals(utils.human_readable(1048576), '1Mi')
        self.assertEquals(utils.human_readable(1073741824), '1Gi')
        self.assertEquals(utils.human_readable(1099511627776), '1Ti')
        self.assertEquals(utils.human_readable(1125899906842624), '1Pi')
        self.assertEquals(utils.human_readable(1152921504606846976), '1Ei')
        self.assertEquals(utils.human_readable(1180591620717411303424), '1Zi')
        self.assertEquals(utils.human_readable(1208925819614629174706176),
                          '1Yi')
        self.assertEquals(utils.human_readable(1237940039285380274899124224),
                          '1024Yi')


if __name__ == '__main__':
    unittest.main()
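
The expected values above pin the behaviour down fairly tightly: divide by 1024, rounding at each step, and append the matching binary prefix, falling back to the bare number below 1Ki. A minimal sketch consistent with those assertions; the real swift.common.utils.human_readable may be written differently:

    def human_readable_sketch(value):
        value = float(value)
        suffixes = 'KMGTPEZY'
        index = -1
        # Divide by 1024 until the value fits, or we run out of prefixes
        # (which is why 1024 ** 9 still prints as '1024Yi' above).
        while value >= 1024 and index + 1 < len(suffixes):
            index += 1
            value = round(value / 1024)
        if index == -1:
            return '%d' % value
        return '%d%si' % (value, suffixes[index])

    assert human_readable_sketch(1535) == '1Ki'
    assert human_readable_sketch(1048064) == '1Mi'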
@ -188,6 +188,23 @@ class TestObjectReplicator(unittest.TestCase):

        object_replicator.http_connect = was_connector

    def test_get_hashes(self):
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
        with open(os.path.join(df.datadir, normalize_timestamp(
                time.time()) + '.ts'), 'wb') as f:
            f.write('1234567890')
        part = os.path.join(self.objects, '0')
        hashed, hashes = object_replicator.get_hashes(part)
        self.assertEquals(hashed, 1)
        self.assert_('a83' in hashes)
        hashed, hashes = object_replicator.get_hashes(part, do_listdir=True)
        self.assertEquals(hashed, 0)
        self.assert_('a83' in hashes)
        hashed, hashes = object_replicator.get_hashes(part, recalculate=['a83'])
        self.assertEquals(hashed, 1)
        self.assert_('a83' in hashes)

    def test_hash_suffix_one_file(self):
        df = DiskFile(self.devices, 'sda', '0', 'a', 'c', 'o', FakeLogger())
        mkdirs(df.datadir)
@ -303,10 +303,15 @@ class TestObjectController(unittest.TestCase):

    def test_POST_update_meta(self):
        """ Test swift.object_server.ObjectController.POST """
        original_headers = self.object_controller.allowed_headers
        test_headers = 'content-encoding foo bar'.split()
        self.object_controller.allowed_headers = set(test_headers)
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test',
                                     'Foo': 'fooheader',
                                     'Baz': 'bazheader',
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Meta-Two': 'Two'})
        req.body = 'VERIFY'
@ -319,13 +324,81 @@
                            headers={'X-Timestamp': timestamp,
                                     'X-Object-Meta-3': 'Three',
                                     'X-Object-Meta-4': 'Four',
                                     'Content-Encoding': 'gzip',
                                     'Foo': 'fooheader',
                                     'Bar': 'barheader',
                                     'Content-Type': 'application/x-test'})
        resp = self.object_controller.POST(req)
        self.assertEquals(resp.status_int, 202)

        req = Request.blank('/sda1/p/a/c/o')
        resp = self.object_controller.GET(req)
        self.assert_("X-Object-Meta-1" not in resp.headers and \
        self.assert_("X-Object-Meta-1" not in resp.headers and
                     "X-Object-Meta-Two" not in resp.headers and
                     "X-Object-Meta-3" in resp.headers and
                     "X-Object-Meta-4" in resp.headers and
                     "Foo" in resp.headers and
                     "Bar" in resp.headers and
                     "Baz" not in resp.headers and
                     "Content-Encoding" in resp.headers)
        self.assertEquals(resp.headers['Content-Type'], 'application/x-test')

        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test'})
        resp = self.object_controller.POST(req)
        self.assertEquals(resp.status_int, 202)
        req = Request.blank('/sda1/p/a/c/o')
        resp = self.object_controller.GET(req)
        self.assert_("X-Object-Meta-3" not in resp.headers and
                     "X-Object-Meta-4" not in resp.headers and
                     "Foo" not in resp.headers and
                     "Bar" not in resp.headers and
                     "Content-Encoding" not in resp.headers)
        self.assertEquals(resp.headers['Content-Type'], 'application/x-test')

        # test defaults
        self.object_controller.allowed_headers = original_headers
        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Timestamp': timestamp,
                                     'Content-Type': 'application/x-test',
                                     'Foo': 'fooheader',
                                     'X-Object-Meta-1': 'One',
                                     'X-Object-Manifest': 'c/bar',
                                     'Content-Encoding': 'gzip',
                                     'Content-Disposition': 'bar',
                                     })
        req.body = 'VERIFY'
        resp = self.object_controller.PUT(req)
        self.assertEquals(resp.status_int, 201)
        req = Request.blank('/sda1/p/a/c/o')
        resp = self.object_controller.GET(req)
        self.assert_("X-Object-Meta-1" in resp.headers and
                     "Foo" not in resp.headers and
                     "Content-Encoding" in resp.headers and
                     "X-Object-Manifest" in resp.headers and
                     "Content-Disposition" in resp.headers)
        self.assertEquals(resp.headers['Content-Type'], 'application/x-test')

        timestamp = normalize_timestamp(time())
        req = Request.blank('/sda1/p/a/c/o',
                            environ={'REQUEST_METHOD': 'POST'},
                            headers={'X-Timestamp': timestamp,
                                     'X-Object-Meta-3': 'Three',
                                     'Foo': 'fooheader',
                                     'Content-Type': 'application/x-test'})
        resp = self.object_controller.POST(req)
        self.assertEquals(resp.status_int, 202)
        req = Request.blank('/sda1/p/a/c/o')
        resp = self.object_controller.GET(req)
        self.assert_("X-Object-Meta-1" not in resp.headers and
                     "Foo" not in resp.headers and
                     "Content-Encoding" not in resp.headers and
                     "X-Object-Manifest" not in resp.headers and
                     "Content-Disposition" not in resp.headers and
                     "X-Object-Meta-3" in resp.headers)
        self.assertEquals(resp.headers['Content-Type'], 'application/x-test')
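
Every assertion in this hunk follows one rule: a POST replaces the object's user metadata (X-Object-Meta-*) and any configured allowed headers wholesale, rather than merging them with what the previous PUT or POST stored. A toy model of that rule, not the object server code:

    def post_replaces_metadata(stored, posted, allowed_headers):
        # Keep only headers that are neither user metadata nor in the allowed
        # set (e.g. Content-Type), then overlay everything the POST supplies.
        kept = dict((k, v) for k, v in stored.items()
                    if not k.lower().startswith('x-object-meta-')
                    and k.lower() not in allowed_headers)
        kept.update(posted)
        return kept

    stored = {'Content-Type': 'application/x-test',
              'X-Object-Meta-1': 'One', 'Content-Encoding': 'gzip'}
    posted = {'X-Object-Meta-3': 'Three'}
    allowed = set(['content-encoding', 'x-object-manifest',
                   'content-disposition'])
    result = post_replaces_metadata(stored, posted, allowed)
    assert 'X-Object-Meta-1' not in result and 'Content-Encoding' not in result
    assert result['X-Object-Meta-3'] == 'Three'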