Merge branch 'master' into feature/losf

Change-Id: I4150fd0152a6a3fd3f31b86315f212316f27ec39
Tim Burke 2019-08-21 15:13:12 -07:00
commit 6204a2bc57
88 changed files with 3225 additions and 880 deletions


@@ -127,3 +127,6 @@ zhangdebo1987 <zhangdebo@inspur.com> zhangdebo
Thomas Goirand <thomas@goirand.fr> <zigo@debian.org>
Thiago da Silva <thiagodasilva@gmail.com> <thiago@redhat.com>
Kota Tsuyuzaki <kota.tsuyuzaki.pc@hco.ntt.co.jp> <tsuyuzaki.kota@lab.ntt.co.jp>
Ehud Kaldor <ehud@unfairfunction.org> <ehud@UnfairFunction.org>
Takashi Kajinami <tkajinam@redhat.com> <kajinamit@nttdata.co.jp>
Yuxin Wang <wang.yuxin@ostorage.com.cn> Wang Yuxin


@@ -96,6 +96,30 @@
bindep_profile: test py37
python_version: 3.7
- job:
name: swift-tox-func-encryption-py37
parent: swift-tox-func-py37
description: |
Run functional tests for swift under cPython version 3.7.
Uses tox with the ``func-encryption-py3`` environment.
It sets TMPDIR to an XFS mount point created via
tools/test-setup.sh.
vars:
tox_envlist: func-encryption-py3
- job:
name: swift-tox-func-ec-py37
parent: swift-tox-func-py37
description: |
Run functional tests for swift under cPython version 3.7.
Uses tox with the ``func-ec-py3`` environment.
It sets TMPDIR to an XFS mount point created via
tools/test-setup.sh.
vars:
tox_envlist: func-ec-py3
- job:
name: swift-tox-func-domain-remap-staticweb-py37
parent: swift-tox-func-py37
@@ -108,6 +132,18 @@
vars:
tox_envlist: func-domain-remap-staticweb-py3
- job:
name: swift-tox-func-s3api-py37
parent: swift-tox-func-py37
description: |
Run functional tests for swift under cPython version 3.7.
Uses tox with the ``func-s3api`` environment.
It sets TMPDIR to an XFS mount point created via
tools/test-setup.sh.
vars:
tox_envlist: func-s3api-py3
- job:
name: swift-tox-func-centos-7
parent: swift-tox-func
@@ -244,8 +280,7 @@
vars:
devstack_localrc:
SERVICE_IP_VERSION: 6
-HOST_IPV6: '[::1]'
-SERVICE_HOST: '[::1]'
+SERVICE_HOST: ""
- job:
name: swift-tox-func-s3api-ceph-s3tests-tempauth
@@ -408,12 +443,15 @@
- periodic-stable-jobs
- check-requirements
- release-notes-jobs-python3
- integrated-gate-object-storage
check:
jobs:
- swift-build-image:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/(functional|probe)/.*$
# Unit tests
- swift-tox-py27:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
@@ -422,21 +460,17 @@
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/(functional|probe)/.*$
voting: false
- swift-tox-py37:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/(functional|probe)/.*$
# Functional tests
- swift-tox-func:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/probe/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
- swift-tox-func-py37:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/probe/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
- swift-tox-func-encryption:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
@@ -462,11 +496,35 @@
- ^(api-ref|doc|releasenotes)/.*$
- ^test/probe/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
# py3 functional tests
- swift-tox-func-py37:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/probe/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
- swift-tox-func-encryption-py37:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/probe/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
- swift-tox-func-domain-remap-staticweb-py37:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/probe/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
- swift-tox-func-ec-py37:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/probe/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
- swift-tox-func-s3api-py37:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/probe/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
# Other tests
- swift-tox-func-s3api-ceph-s3tests-tempauth:
irrelevant-files:
- ^(api-ref|releasenotes)/.*$
@@ -511,7 +569,16 @@
- ^(api-ref|doc|releasenotes)/.*$
- ^test/probe/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
-voting: false
+- tempest-integrated-object-storage:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
- grenade-py3:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
gate:
jobs:
# For gate jobs, err towards running more jobs (so, generally avoid
@@ -520,14 +587,18 @@
# in-tree definitions) dsvm jobs.
- swift-upload-image
- swift-tox-py27
- swift-tox-py36
- swift-tox-py37
- swift-tox-func
- swift-tox-func-py37
- swift-tox-func-encryption
- swift-tox-func-domain-remap-staticweb
- swift-tox-func-ec
- swift-tox-func-s3api
- swift-tox-func-py37
- swift-tox-func-encryption
- swift-tox-func-domain-remap-staticweb-py37
- swift-tox-func-ec-py37
- swift-tox-func-s3api-py37
- swift-probetests-centos-7:
irrelevant-files:
- ^(api-ref|releasenotes)/.*$
@@ -559,6 +630,16 @@
- ^(api-ref|etc|examples|releasenotes)/.*$
# Keep doc/manpages -- we want to syntax check them
- ^doc/(requirements.txt|(saio|s3api|source)/.*)$
- tempest-integrated-object-storage:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
- grenade-py3:
irrelevant-files:
- ^(api-ref|doc|releasenotes)/.*$
- ^test/.*$
- ^(.gitreview|.mailmap|AUTHORS|CHANGELOG)$
experimental:
jobs:
- swift-tox-py27-centos-7

AUTHORS

@@ -143,7 +143,7 @@ Drew Balfour (andrew.balfour@oracle.com)
Eamonn O'Toole (eamonn.otoole@hpe.com)
Ed Leafe (ed.leafe@rackspace.com)
Edward Hope-Morley (opentastic@gmail.com)
-Ehud Kaldor (ehud@UnfairFunction.org)
+Ehud Kaldor (ehud@unfairfunction.org)
Ellen Leahy (ellen.mar.leahy@hpe.com)
Emett Speer (speer.emett@gmail.com)
Emile Snyder (emile.snyder@gmail.com)
@@ -174,6 +174,7 @@ gengchc2 (geng.changcai2@zte.com.cn)
Gerard Gine (ggine@swiftstack.com)
Gerry Drudy (gerry.drudy@hpe.com)
Gil Vernik (gilv@il.ibm.com)
Gilles Biannic (gilles.biannic@corp.ovh.com)
Gleb Samsonov (sams-gleb@yandex.ru)
Gonéri Le Bouder (goneri.lebouder@enovance.com)
Graham Hayes (graham.hayes@hpe.com)
@@ -216,6 +217,7 @@ Jing Liuqing (jing.liuqing@99cloud.net)
Joanna H. Huang (joanna.huitzu.huang@gmail.com)
Joe Arnold (joe@swiftstack.com)
Joe Gordon (jogo@cloudscaling.com)
Joe Yang (jyang@swiftstack.com)
Joel Wright (joel.wright@sohonet.com)
John Leach (john@johnleach.co.uk)
Jola Mirecka (jola.mirecka@hp.com)
@@ -238,6 +240,7 @@ Kiyoung Jung (kiyoung.jung@kt.com)
Koert van der Veer (koert@cloudvps.com)
Kota Tsuyuzaki (kota.tsuyuzaki.pc@hco.ntt.co.jp)
Ksenia Demina (kdemina@mirantis.com)
Kuan-Lin Chen (kuanlinchen@synology.com)
Kun Huang (gareth@unitedstack.com)
Larry Rensing (lr699s@att.com)
Leah Klearman (lklrmn@gmail.com)
@@ -259,6 +262,7 @@ M V P Nitesh (m.nitesh@nectechnologies.in)
Madhuri Kumari (madhuri.rai07@gmail.com)
Mahati Chamarthy (mahati.chamarthy@gmail.com)
malei (malei@maleideMacBook-Pro.local)
Mandell Degerness (mdegerness@swiftstack.com)
maoshuai (fwsakura@163.com)
Marcelo Martins (btorch@gmail.com)
Maria Malyarova (savoreux69@gmail.com)
@@ -348,6 +352,7 @@ Sarvesh Ranjan (saranjan@cisco.com)
Sascha Peilicke (saschpe@gmx.de)
Saverio Proto (saverio.proto@switch.ch)
Scott Simpson (sasimpson@gmail.com)
Sean McGinnis (sean.mcginnis@gmail.com)
Sergey Kraynev (skraynev@mirantis.com)
Sergey Lukjanov (slukjanov@mirantis.com)
Shane Wang (shane.wang@intel.com)
@@ -357,6 +362,7 @@ Shashank Kumar Shankar (shashank.kumar.shankar@intel.com)
Shashirekha Gundur (shashirekha.j.gundur@intel.com)
Shilla Saebi (shilla.saebi@gmail.com)
Shri Javadekar (shrinand@maginatics.com)
Simeon Gourlin (simeon.gourlin@infomaniak.com)
Sivasathurappan Radhakrishnan (siva.radhakrishnan@intel.com)
Soren Hansen (soren@linux2go.dk)
Stefan Majewsky (stefan.majewsky@sap.com)
@@ -365,7 +371,7 @@ Steve Kowalik (steven@wedontsleep.org)
Steve Martinelli (stevemar@ca.ibm.com)
Steven Lang (Steven.Lang@hgst.com)
Sushil Kumar (sushil.kumar2@globallogic.com)
-Takashi Kajinami (kajinamit@nttdata.co.jp)
+Takashi Kajinami (tkajinam@redhat.com)
Takashi Natsume (natsume.takashi@lab.ntt.co.jp)
TheSriram (sriram@klusterkloud.com)
Thiago da Silva (thiagodasilva@gmail.com)
@@ -420,6 +426,7 @@ Yushiro FURUKAWA (y.furukawa_2@jp.fujitsu.com)
Yuxin Wang (wang.yuxin@ostorage.com.cn)
Zack M. Davis (zdavis@swiftstack.com)
Zap Chang (zapchang@gmail.com)
zengjia (zengjia@awcloud.com)
Zhang Guoqing (zhang.guoqing@99cloud.net)
Zhang Jinnan (ben.os@99cloud.net)
zhang.lei (zhang.lei@99cloud.net)
@@ -432,6 +439,8 @@ Zhenguo Niu (zhenguo@unitedstack.com)
zhengwei6082 (zhengwei6082@fiberhome.com)
ZhijunWei (wzj334965317@outlook.com)
ZhiQiang Fan (aji.zqfan@gmail.com)
ZhongShengping (chdzsp@163.com)
Zhongyue Luo (zhongyue.nah@intel.com)
zhufl (zhu.fanglei@zte.com.cn)
zhulingjie (easyzlj@gmail.com)
翟小君 (zhaixiaojun@gohighsec.com)


@@ -1,3 +1,78 @@
swift (2.22.0)
* Experimental support for Python 3.6 and 3.7 is now available.
Note that this requires eventlet>=0.25.0. All unit tests pass,
and running functional tests under Python 2 will pass against
services running under Python 3. Expect full support in the
next minor release.
* Log formats are now more configurable and include support for
anonymization. See the log_msg_template option in proxy-server.conf
and https://docs.openstack.org/swift/latest/logs.html#proxy-logs
for more information.
* Added an operator tool, swift-container-deleter, to asynchronously
delete some or all objects in a container using the object expirers.
* Swift-all-in-one Docker images are now built and published to
https://hub.docker.com/r/openstackswift/saio. These are intended
for use as development targets, but will hopefully be useful as a
starting point for other work involving containerizing Swift.
* The object-expirer may now be configured in object-server.conf.
This is in anticipation of a future change to allow the
object-expirer to be deployed on all nodes that run object-servers.
* Correctness improvements
* The proxy-server now ignores 404 responses from handoffs without
databases when deciding on the correct response for account and
container requests.
* Object writes to a container whose existence cannot be verified
now 503 instead of 404.
* Sharding improvements
* The container-replicator now only attempts to fetch shard ranges if
the remote indicates that it has shard ranges. Further, it does so
with a timeout to prevent the process from hanging in certain cases.
* The proxy-server now caches 'updating' shards, improving write
performance for sharded containers. A new config option,
`recheck_updating_shard_ranges`, controls the cache time; set it to
0 to disable caching.
* The container-replicator now correctly enqueues container-reconciler
work for sharded containers.
* S3 API improvements
* Unsigned payloads work with v4 signatures once more.
* Multipart upload parts may now be copied from other multipart uploads.
* CompleteMultipartUpload requests with a Content-MD5 now work.
* Content-Type can now be updated when copying an object.
* Fixed v1 listings that end with a non-ASCII object name.
* Background corruption-detection improvements
* Detect and remove invalid entries from hashes.pkl
* When object path is not a directory, just quarantine it,
rather than the whole suffix.
* Dependency updates: we've increased our minimum supported version
of cryptography to 2.0.2 and netifaces to 0.8. This is largely due
to the difficulty of continuing to test with the old versions.
If running Swift under Python 3, eventlet must be at least 0.25.0.
* Various other minor bug fixes and improvements.
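For illustration only (not part of the upstream changelog), a minimal
proxy-server.conf sketch exercising the two options mentioned above might
look like the following; the section placement is standard, but the template
fields and values shown here are assumptions:

    [app:proxy-server]
    use = egg:swift#proxy
    # cache 'updating' shard ranges for an hour; 0 disables the cache
    recheck_updating_shard_ranges = 3600

    [filter:proxy-logging]
    use = egg:swift#proxy_logging
    # anonymize client addresses in the access log (illustrative template)
    log_msg_template = {client_ip.anonymized} {remote_addr.anonymized} {end_time.datetime} {method} {path} {status_int}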
swift (2.21.0, OpenStack Stein release)
* Change the behavior of the EC reconstructor to perform a
@@ -162,7 +237,7 @@ swift (2.20.0)
* Various other minor bug fixes and improvements.
-2.19.1 (rocky stable backports)
+swift (2.19.1, rocky stable backports)
* Prevent PyKMIP's kmip_protocol logger from logging at DEBUG.
Previously, some versions of PyKMIP would include all wire
@@ -339,7 +414,7 @@ swift (2.18.0)
* Various other minor bug fixes and improvements.
-2.17.1 (queens stable backports)
+swift (2.17.1, queens stable backports)
* Fix SLO delete for accounts with non-ASCII names.
@@ -541,7 +616,7 @@ swift (2.16.0)
* Various other minor bug fixes and improvements.
-2.15.2 (pike stable backports)
+swift (2.15.2, pike stable backports)
* Fixed a cache invalidation issue related to GET and PUT requests to
containers that would occasionally cause object PUTs to a container to
@@ -979,7 +1054,7 @@ swift (2.10.0, OpenStack Newton)
improved in clusters that are not completely healthy.
* Significant improvements to the api-ref doc available at
-https://developer.openstack.org/api-ref/object-storage/.
+https://docs.openstack.org/api-ref/object-store/.
* A PUT or POST to a container will now update the container's
Last-Modified time, and that value will be included in a


@@ -1,10 +1,10 @@
################################################
#
-# Alpine 3.7 Swift-All-In-One
+# Alpine 3.10.1 Swift-All-In-One
#
################################################
-FROM alpine:3.7
+FROM alpine:3.10.1
MAINTAINER Openstack Swift
ENV S6_LOGGING 1
@@ -35,6 +35,11 @@ RUN mkdir /etc/swift && \
echo && \
echo && \
echo && \
echo "================ starting apk_install_py2 ===================" && \
/opt/swift/docker/install_scripts/20_apk_install_py2.sh && \
echo && \
echo && \
echo && \
echo "================ starting swift_install ===================" && \
/opt/swift/docker/install_scripts/50_swift_install.sh && \
echo && \

Dockerfile-py3 (new file)

@@ -0,0 +1,76 @@
################################################
#
# Alpine 3.10.1 Swift-All-In-One
#
################################################
FROM alpine:3.10.1
MAINTAINER Openstack Swift
ENV S6_LOGGING 1
ENV S6_VERSION 1.21.4.0
ENV SOCKLOG_VERSION 3.0.1-1
ENV ARCH amd64
ENV BUILD_DIR "/tmp"
ENV ENV="/etc/profile"
#COPY docker/install_scripts /install_scripts
COPY . /opt/swift
ADD https://github.com/just-containers/s6-overlay/releases/download/v$S6_VERSION/s6-overlay-$ARCH.tar.gz /tmp/
ADD https://github.com/just-containers/s6-overlay/releases/download/v$S6_VERSION/s6-overlay-$ARCH.tar.gz.sig /tmp/
ADD https://github.com/just-containers/socklog-overlay/releases/download/v$SOCKLOG_VERSION/socklog-overlay-$ARCH.tar.gz /tmp/
RUN mkdir /etc/swift && \
echo && \
echo && \
echo && \
echo "================ starting swift_needs ===================" && \
/opt/swift/docker/install_scripts/00_swift_needs.sh && \
echo && \
echo && \
echo && \
echo "================ starting apk_install_prereqs ===================" && \
/opt/swift/docker/install_scripts/10_apk_install_prereqs.sh && \
echo && \
echo && \
echo && \
echo "================ starting apk_install_py3 ===================" && \
/opt/swift/docker/install_scripts/21_apk_install_py3.sh && \
echo && \
echo && \
echo && \
echo "================ starting swift_install ===================" && \
/opt/swift/docker/install_scripts/50_swift_install.sh && \
echo && \
echo && \
echo && \
echo "================ installing s6-overlay ===================" && \
curl https://keybase.io/justcontainers/key.asc | gpg --import && \
gpg --verify /tmp/s6-overlay-$ARCH.tar.gz.sig /tmp/s6-overlay-$ARCH.tar.gz && \
gunzip -c /tmp/s6-overlay-$ARCH.tar.gz | tar -xf - -C / && \
gunzip -c /tmp/socklog-overlay-amd64.tar.gz | tar -xf - -C / && \
rm -rf /tmp/s6-overlay* && \
rm -rf /tmp/socklog-overlay* && \
echo && \
echo && \
echo && \
echo "================ starting pip_uninstall_dev ===================" && \
/opt/swift/docker/install_scripts/60_pip_uninstall_dev.sh && \
echo && \
echo && \
echo && \
echo "================ starting apk_uninstall_dev ===================" && \
/opt/swift/docker/install_scripts/99_apk_uninstall_dev.sh && \
echo && \
echo && \
echo && \
echo "================ clean up ===================" && \
echo "TODO: cleanup"
#rm -rf /opt/swift
# Add Swift required configuration files
COPY docker/rootfs /
ENTRYPOINT ["/init"]


@@ -147,7 +147,7 @@ For client applications, official Python language bindings are provided
at https://github.com/openstack/python-swiftclient.
Complete API documentation at
-https://developer.openstack.org/api-ref/object-store/
+https://docs.openstack.org/api-ref/object-store/
There is a large ecosystem of applications and libraries that support and
work with OpenStack Swift. Several are listed on the


@@ -15,6 +15,7 @@
from __future__ import print_function
import optparse
import os
import re
import signal
import subprocess
import sys
@@ -56,27 +57,30 @@ Example (sends SIGTERM to all orphaned Swift processes older than two hours):
pids.append(open(os.path.join(root, name)).read().strip())
pids.extend(subprocess.Popen(
['ps', '--ppid', pids[-1], '-o', 'pid', '--no-headers'],
-stdout=subprocess.PIPE).communicate()[0].split())
+stdout=subprocess.PIPE).communicate()[0].decode().split())
listing = []
swift_cmd_re = re.compile(
'^/usr/bin/python[23]? /usr(?:/local)?/bin/swift-')
for line in subprocess.Popen(
['ps', '-eo', 'etime,pid,args', '--no-headers'],
-stdout=subprocess.PIPE).communicate()[0].split('\n'):
+stdout=subprocess.PIPE).communicate()[0].split(b'\n'):
if not line:
continue
hours = 0
try:
-etime, pid, args = line.split(None, 2)
+etime, pid, args = line.decode('ascii').split(None, 2)
except ValueError:
sys.exit('Could not process ps line %r' % line)
if pid in pids:
continue
-if (not args.startswith('/usr/bin/python /usr/bin/swift-') and
-not args.startswith('/usr/bin/python /usr/local/bin/swift-')) or \
-'swift-orphans' in args or \
-'once' in args.split():
+if any([
+not swift_cmd_re.match(args),
+'swift-orphans' in args,
+'once' in args.split(),
+]):
continue
-args = args.split('-', 1)[1]
+args = args.split('swift-', 1)[1]
etime = etime.split('-')
if len(etime) == 2:
hours = int(etime[0]) * 24
@@ -105,11 +109,11 @@ Example (sends SIGTERM to all orphaned Swift processes older than two hours):
args_len = max(args_len, len(args))
args_len = min(args_len, 78 - hours_len - pid_len)
-print(('%%%ds %%%ds %%s' % (hours_len, pid_len)) %
-('Hours', 'PID', 'Command'))
+print('%*s %*s %s' %
+(hours_len, 'Hours', pid_len, 'PID', 'Command'))
for hours, pid, args in listing:
-print(('%%%ds %%%ds %%s' % (hours_len, pid_len)) %
-(hours, pid, args[:args_len]))
+print('%*s %*s %s' %
+(hours_len, hours, pid_len, pid, args[:args_len]))
if options.signal:
try:
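A quick, illustrative check of the new command-matching regex introduced
above (the sample ps args strings are made up):

    import re

    swift_cmd_re = re.compile(
        '^/usr/bin/python[23]? /usr(?:/local)?/bin/swift-')

    # matches py2 or py3 interpreters, /usr/bin or /usr/local/bin installs
    assert swift_cmd_re.match(
        '/usr/bin/python3 /usr/local/bin/swift-object-server /etc/swift/object-server.conf')
    # non-Swift processes are ignored
    assert not swift_cmd_re.match('/usr/bin/python /usr/bin/rsync --daemon')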


@@ -113,7 +113,7 @@ Swift StatsD logging
~~~~~~~~~~~~~~~~~~~~
StatsD (see `Measure Anything, Measure Everything
-<http://codeascraft.etsy.com/2011/02/15/measure-anything-measure-everything/>`_)
+<https://codeascraft.com/2011/02/15/measure-anything-measure-everything/>`_)
was designed for application code to be deeply instrumented. Meters are
sent in real-time by the code that just noticed or did something. The
overhead of sending a meter is extremely low: a ``sendto`` of one UDP
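As a rough sketch of how cheap such a meter is, this is essentially all a
StatsD counter update amounts to (plain UDP using StatsD's name:value|type
text format; the host, port and metric name here are placeholders):

    import socket

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # fire-and-forget: one small UDP datagram, no reply expected
    sock.sendto(b'swift.proxy-server.errors:1|c', ('127.0.0.1', 8125))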


@@ -833,7 +833,7 @@ this is unnecessary since the port is specified separately). If a hostname
resolves to an IPv4 address, an IPv4 socket will be used to send StatsD UDP
packets, even if the hostname would also resolve to an IPv6 address.
-.. _StatsD: http://codeascraft.etsy.com/2011/02/15/measure-anything-measure-everything/
+.. _StatsD: https://codeascraft.com/2011/02/15/measure-anything-measure-everything/
.. _Graphite: http://graphiteapp.org/
.. _Ganglia: http://ganglia.sourceforge.net/
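For reference, a minimal sketch of the related StatsD options in a Swift
server config (the hostname and values are illustrative):

    [DEFAULT]
    log_statsd_host = statsd.example.com
    log_statsd_port = 8125
    log_statsd_default_sample_rate = 1.0
    log_statsd_metric_prefix = proxy01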


@@ -171,14 +171,14 @@ The API Reference describes the operations that you can perform with the
Object Storage API:
- `Storage
-accounts <https://developer.openstack.org/api-ref/object-store/index.html#accounts>`__:
+accounts <https://docs.openstack.org/api-ref/object-store/index.html#accounts>`__:
Use to perform account-level tasks.
Lists containers for a specified account. Creates, updates, and
deletes account metadata. Shows account metadata.
- `Storage
-containers <https://developer.openstack.org/api-ref/object-store/index.html#containers>`__:
+containers <https://docs.openstack.org/api-ref/object-store/index.html#containers>`__:
Use to perform container-level tasks.
Lists objects in a specified container. Creates, shows details for,
@@ -186,7 +186,7 @@ Object Storage API:
container metadata.
- `Storage
-objects <https://developer.openstack.org/api-ref/object-store/index.html#objects>`__:
+objects <https://docs.openstack.org/api-ref/object-store/index.html#objects>`__:
Use to perform object-level tasks.
Creates, replaces, shows details for, and deletes objects. Copies


@@ -21,7 +21,7 @@ A source tarball for the latest release of Swift is available on the
Prebuilt packages for Ubuntu and RHEL variants are available.
* `Swift Ubuntu Packages <https://launchpad.net/ubuntu/+source/swift>`_
-* `Swift RDO Packages <https://www.rdoproject.org/Repositories>`_
+* `Swift RDO Packages <https://www.rdoproject.org/documentation/repositories/>`_
--------------------
Source Control Setup


@@ -99,7 +99,7 @@ Administrator Documentation
Object Storage v1 REST API Documentation
========================================
-See `Complete Reference for the Object Storage REST API <http://developer.openstack.org/api-ref/object-storage/>`_
+See `Complete Reference for the Object Storage REST API <https://docs.openstack.org/api-ref/object-store/>`_
The following provides supporting information for the REST API:


@@ -318,7 +318,7 @@ in Barbican::
--mode ctr --secret-type symmetric --payload <base64_encoded_root_secret>
Alternatively, the existing root secret can also be stored in Barbican using
-`curl <http://developer.openstack.org/api-guide/key-manager/secrets.html>`__.
+`curl <https://docs.openstack.org/api-guide/key-manager/secrets.html>`__.
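A hedged sketch of such a request (the endpoint, token and field values are
placeholders; see the linked guide for the authoritative request format):

    curl -X POST "$BARBICAN_ENDPOINT/v1/secrets" \
         -H "X-Auth-Token: $TOKEN" \
         -H "Content-Type: application/json" \
         -d '{"name": "swift_root_secret",
              "secret_type": "symmetric", "algorithm": "aes",
              "bit_length": 256, "mode": "ctr",
              "payload": "<base64_encoded_root_secret>",
              "payload_content_type": "application/octet-stream",
              "payload_content_encoding": "base64"}'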
.. note::


@@ -646,16 +646,16 @@ multi-phase conversation, that the other nodes have landed enough for a quorum.
The basic flow looks like this:
-* The Proxy Server erasure codes and streams the object fragments
+#. The Proxy Server erasure codes and streams the object fragments
(ec_ndata + ec_nparity) to the storage nodes.
-* The storage nodes store objects as EC archives and upon finishing object
+#. The storage nodes store objects as EC archives and upon finishing object
data/metadata write, send a 1st-phase response to proxy.
-* Upon quorum of storage nodes responses, the proxy initiates 2nd-phase by
+#. Upon quorum of storage nodes responses, the proxy initiates 2nd-phase by
sending commit confirmations to object servers.
-* Upon receipt of commit message, object servers rename ``.data`` files to
+#. Upon receipt of commit message, object servers rename ``.data`` files to
include the ``#d`` substring, indicating successful PUT, and send a final
response to the proxy server.
-* The proxy waits for `ec_ndata + 1` object servers to respond with a
+#. The proxy waits for `ec_ndata + 1` object servers to respond with a
success (2xx) status before responding to the client with a successful
status.
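The decision logic described by those steps can be simulated with a short,
illustrative Python sketch (not Swift's implementation; the helper names and
the example numbers are made up):

    def is_success(status):
        return 200 <= status < 300

    def ec_put_outcome(first_phase, commit_phase, ec_ndata, quorum):
        # phase 1: need a quorum of 1st-phase responses before committing
        if sum(map(is_success, first_phase)) < quorum:
            return 503
        # phase 2: wait for ec_ndata + 1 durable (committed) responses
        if sum(map(is_success, commit_phase)) >= ec_ndata + 1:
            return 201
        return 503

    # e.g. an 8+4 policy: 12 good 1st-phase responses, 9 durable commits
    print(ec_put_outcome([201] * 12, [201] * 9, ec_ndata=8, quorum=9))  # 201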


@@ -25,6 +25,3 @@ apk add --update \
libxslt-dev \
libxml2 \
libxml2-dev \
python \
python-dev \
py-pip


@@ -1,57 +0,0 @@
#!/bin/sh
set -e
echo
echo
echo
echo "building python and pip"
# export PATH=$PATH:/usr/include
mkdir $BUILD_DIR/python27
mkdir $BUILD_DIR/python36
echo
echo
echo
echo "building python 2.7.15"
cd $BUILD_DIR/python27
wget https://www.python.org/ftp/python/2.7.15/Python-2.7.15.tgz
tar -zxvf Python-2.7.15.tgz
cd Python-2.7.15
./configure --enable-optimizations
make
make DESTDIR=/opt/python27 install
echo
echo
echo
echo "building python 3.6.5"
cd $BUILD_DIR/python36
wget https://www.python.org/ftp/python/3.6.5/Python-3.6.5.tgz
tar -zxvf Python-3.6.5.tgz
cd Python-3.6.5
./configure --enable-optimizations
make
make DESTDIR=/opt/python36 install
export PATH=$PATH:/opt/python27/usr/local/bin:/opt/python36/usr/local/bin
echo "export PATH=$PATH:/opt/python27/usr/local/bin:/opt/python36/usr/local/bin" >> /etc/profile
echo
echo
echo
echo "building pip"
wget https://bootstrap.pypa.io/get-pip.py
python ./get-pip.py
echo
echo
echo
echo "deleting python internal test dirs"
for f in `cat /opt/swift/docker/install_scripts/python_test_dirs` ; do rm -rf $f; done
rm -rf $BUILD_DIR/python27
rm -rf $BUILD_DIR/python36


@@ -0,0 +1,7 @@
#!/bin/sh
set -e
apk add --update \
python \
python-dev \
py-pip


@@ -1,32 +0,0 @@
#!/bin/sh
set -e
echo "+ + + + + + + + + + upgrading pip" && \
pip install -U pip && \
cd /opt/swift && \
pip install -r requirements.txt
#echo "+ + + + + + + + + + installing pastedeploy" && \
#pip install pastedeploy && \
#echo "+ + + + + + + + + + installing eventlet" && \
#pip install eventlet && \
#echo "+ + + + + + + + + + installing greenlet" && \
#pip install greenlet && \
#echo "+ + + + + + + + + + installing netifaces" && \
#pip install netifaces && \
#echo "+ + + + + + + + + + installing setuptools" && \
#pip install setuptools && \
#echo "+ + + + + + + + + + installing requests" && \
#pip install requests && \
#echo "+ + + + + + + + + + installing six" && \
#pip install six && \
#echo "+ + + + + + + + + + installing cryptography" && \
#pip install cryptography && \
#echo "+ + + + + + + + + + installing dnspython" && \
#pip install dnspython
#echo "+ + + + + + + + + + installing xattr" && \
#pip install xattr
#echo "+ + + + + + + + + + installing pyeclib" && \
#pip install pyeclib
#echo "+ + + + + + + + + + installing lxml" && \
#pip install lxml


@@ -0,0 +1,10 @@
#!/bin/sh
set -e
apk add --update \
python3 \
python3-dev \
py3-pip
if [ ! -e /usr/bin/pip ]; then ln -s pip3 /usr/bin/pip ; fi


@@ -14,18 +14,18 @@ cmd2==0.8.1
coverage==3.6
cryptography==2.0.2
debtcollector==1.19.0
-dnspython==1.14.0
+dnspython==1.15.0
docutils==0.11
dulwich==0.19.0
enum-compat==0.0.2
-eventlet==0.17.4
+eventlet==0.25.0
extras==1.0.0
fixtures==3.0.0
flake8==2.5.5
future==0.16.0
gitdb2==2.0.3
GitPython==2.1.8
-greenlet==0.3.1
+greenlet==0.3.2
hacking==0.11.0
idna==2.6
imagesize==1.0.0
@@ -76,7 +76,7 @@ reno==1.8.0
requests==2.14.2
requests-mock==1.2.0
rfc3986==1.1.0
-six==1.9.0
+six==1.10.0
smmap2==2.0.3
snowballstemmer==1.2.1
Sphinx==1.6.2


@@ -0,0 +1,87 @@
---
features:
- |
Experimental support for Python 3.6 and 3.7 is now available.
Note that this requires ``eventlet>=0.25.0``. All unit tests pass,
and running functional tests under Python 2 will pass against
services running under Python 3. Expect full support in the
next minor release.
- |
Log formats are now more configurable and include support for
anonymization. See the ``log_msg_template`` option in ``proxy-server.conf``
and `the Swift documentation <https://docs.openstack.org/swift/latest/logs.html#proxy-logs>`__
for more information.
- |
Added an operator tool, ``swift-container-deleter``, to asynchronously
delete some or all objects in a container using the object expirers.
- |
Swift-all-in-one Docker images are now built and published to
https://hub.docker.com/r/openstackswift/saio. These are intended
for use as development targets, but will hopefully be useful as a
starting point for other work involving containerizing Swift.
upgrade:
- |
The ``object-expirer`` may now be configured in ``object-server.conf``.
This is in anticipation of a future change to allow the ``object-expirer``
to be deployed on all nodes that run the ``object-server``.
- |
**Dependency updates**: we've increased our minimum supported version
of ``cryptography`` to 2.0.2 and ``netifaces`` to 0.8. This is largely due
to the difficulty of continuing to test with the old versions.
If running Swift under Python 3, ``eventlet`` must be at least 0.25.0.
fixes:
- |
Correctness improvements
* The ``proxy-server`` now ignores 404 responses from handoffs without
databases when deciding on the correct response for account and
container requests.
* Object writes to a container whose existence cannot be verified
now 503 instead of 404.
- |
Sharding improvements
* The ``container-replicator`` now only attempts to fetch shard ranges if
the remote indicates that it has shard ranges. Further, it does so
with a timeout to prevent the process from hanging in certain cases.
* The ``proxy-server`` now caches 'updating' shards, improving write
performance for sharded containers. A new config option,
``recheck_updating_shard_ranges``, controls the cache time; set it to
0 to disable caching.
* The ``container-replicator`` now correctly enqueues
``container-reconciler`` work for sharded containers.
- |
S3 API improvements
* Unsigned payloads work with v4 signatures once more.
* Multipart upload parts may now be copied from other multipart uploads.
* CompleteMultipartUpload requests with a ``Content-MD5`` now work.
* ``Content-Type`` can now be updated when copying an object.
* Fixed v1 listings that end with a non-ASCII object name.
- |
Background corruption-detection improvements
* Detect and remove invalid entries from ``hashes.pkl``
* When object path is not a directory, just quarantine it,
rather than the whole suffix.
- |
Various other minor bug fixes and improvements.
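A minimal invocation sketch for the new operator tool (the account and
container names below are placeholders, and the available options may differ
by release):

    swift-container-deleter AUTH_test my-big-container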


@@ -2,14 +2,14 @@
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.
-dnspython>=1.14.0;python_version=='2.7' # http://www.dnspython.org/LICENSE
+dnspython>=1.15.0;python_version=='2.7' # http://www.dnspython.org/LICENSE
-eventlet>=0.17.4,!=0.23.0 # MIT
+eventlet>=0.25.0 # MIT
-greenlet>=0.3.1
+greenlet>=0.3.2
netifaces>=0.8,!=0.10.0,!=0.10.1
PasteDeploy>=1.3.3
lxml>=3.4.1
requests>=2.14.2 # Apache-2.0
-six>=1.9.0
+six>=1.10.0
xattr>=0.4;sys_platform!='win32' # MIT
PyECLib>=1.3.1 # BSD
cryptography>=2.0.2 # BSD/Apache-2.0


@@ -16,6 +16,9 @@ classifier =
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
[pbr]
skip_authors = True


@@ -21,8 +21,6 @@ from swift import gettext_ as _
from eventlet import Timeout
-import six
import swift.common.db
from swift.account.backend import AccountBroker, DATADIR
from swift.account.utils import account_listing_response, get_response_headers
@@ -110,6 +108,14 @@ class AccountController(BaseStorageServer):
broker.delete_db(req_timestamp.internal)
return self._deleted_response(broker, req, HTTPNoContent)
def _update_metadata(self, req, broker, req_timestamp):
metadata = {
wsgi_to_str(key): (wsgi_to_str(value), req_timestamp.internal)
for key, value in req.headers.items()
if is_sys_or_user_meta('account', key)}
if metadata:
broker.update_metadata(metadata, validate_metadata=True)
@public
@timing_stats()
def PUT(self, req):
@@ -169,24 +175,7 @@
broker.update_put_timestamp(timestamp.internal)
if broker.is_deleted():
return HTTPConflict(request=req)
-metadata = {}
-if six.PY2:
-metadata.update((key, (value, timestamp.internal))
-for key, value in req.headers.items()
-if is_sys_or_user_meta('account', key))
-else:
-for key, value in req.headers.items():
-if is_sys_or_user_meta('account', key):
-# Cast to native strings, so that json inside
-# updata_metadata eats the data.
-try:
-value = value.encode('latin-1').decode('utf-8')
-except UnicodeDecodeError:
-raise HTTPBadRequest(
-'Metadata must be valid UTF-8')
-metadata[key] = (value, timestamp.internal)
-if metadata:
-broker.update_metadata(metadata, validate_metadata=True)
+self._update_metadata(req, broker, timestamp)
if created:
return HTTPCreated(request=req)
else:
@@ -287,12 +276,7 @@
broker = self._get_account_broker(drive, part, account)
if broker.is_deleted():
return self._deleted_response(broker, req, HTTPNotFound)
-metadata = {}
-metadata.update((key, (value, req_timestamp.internal))
-for key, value in req.headers.items()
-if is_sys_or_user_meta('account', key))
-if metadata:
-broker.update_metadata(metadata, validate_metadata=True)
+self._update_metadata(req, broker, req_timestamp)
return HTTPNoContent(request=req)
def __call__(self, env, start_response):


@@ -18,7 +18,7 @@ import json
import six
from swift.common.middleware import listing_formats
-from swift.common.swob import HTTPOk, HTTPNoContent
+from swift.common.swob import HTTPOk, HTTPNoContent, str_to_wsgi
from swift.common.utils import Timestamp
from swift.common.storage_policy import POLICIES
@@ -64,8 +64,8 @@ def get_response_headers(broker):
for key, value in stats.items():
header_name = header_prefix % key.replace('_', '-')
resp_headers[header_name] = value
-resp_headers.update((key, value)
-for key, (value, timestamp) in
+resp_headers.update((str_to_wsgi(key), str_to_wsgi(value))
+for key, (value, _timestamp) in
broker.metadata.items() if value != '')
return resp_headers


@@ -100,10 +100,6 @@ def reload_constraints():
reload_constraints()
-# Maximum slo segments in buffer
-MAX_BUFFERED_SLO_SEGMENTS = 10000
# By default the maximum number of allowed headers depends on the number of max
# allowed metadata settings plus a default value of 36 for swift internally
# generated headers and regular http headers. If for some reason this is not


@@ -16,10 +16,20 @@
import six
def _title(s):
if six.PY2:
return s.title()
else:
return s.encode('latin1').title().decode('latin1')
class HeaderKeyDict(dict):
"""
A dict that title-cases all keys on the way in, so as to be
case-insensitive.
Note that all keys and values are expected to be wsgi strings,
though some allowances are made when setting values.
"""
def __init__(self, base_headers=None, **kwargs):
if base_headers:
@@ -29,32 +39,32 @@ class HeaderKeyDict(dict):
def update(self, other):
if hasattr(other, 'keys'):
for key in other.keys():
-self[key.title()] = other[key]
+self[_title(key)] = other[key]
else:
for key, value in other:
-self[key.title()] = value
+self[_title(key)] = value
def __getitem__(self, key):
-return dict.get(self, key.title())
+return dict.get(self, _title(key))
def __setitem__(self, key, value):
if value is None:
-self.pop(key.title(), None)
+self.pop(_title(key), None)
elif six.PY2 and isinstance(value, six.text_type):
-return dict.__setitem__(self, key.title(), value.encode('utf-8'))
+return dict.__setitem__(self, _title(key), value.encode('utf-8'))
elif six.PY3 and isinstance(value, six.binary_type):
-return dict.__setitem__(self, key.title(), value.decode('latin-1'))
+return dict.__setitem__(self, _title(key), value.decode('latin-1'))
else:
-return dict.__setitem__(self, key.title(), str(value))
+return dict.__setitem__(self, _title(key), str(value))
def __contains__(self, key):
-return dict.__contains__(self, key.title())
+return dict.__contains__(self, _title(key))
def __delitem__(self, key):
-return dict.__delitem__(self, key.title())
+return dict.__delitem__(self, _title(key))
def get(self, key, default=None):
-return dict.get(self, key.title(), default)
+return dict.get(self, _title(key), default)
def setdefault(self, key, value=None):
if key not in self:
@@ -62,4 +72,4 @@ class HeaderKeyDict(dict):
return self[key]
def pop(self, key, default=None):
-return dict.pop(self, key.title(), default)
+return dict.pop(self, _title(key), default)
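A small illustration of the case-insensitive behaviour that HeaderKeyDict
(now via the _title() helper) provides; the import path matches the file
being patched:

    from swift.common.header_key_dict import HeaderKeyDict

    headers = HeaderKeyDict({'x-object-meta-color': 'blue'})
    # keys are title-cased on the way in, so lookups are case-insensitive
    assert headers['X-Object-Meta-Color'] == 'blue'
    assert 'X-OBJECT-META-COLOR' in headers
    headers['Content-Length'] = 42   # values are stringified on the way in
    assert headers['content-length'] == '42'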


@@ -229,7 +229,7 @@ class InternalClient(object):
def _get_metadata(
self, path, metadata_prefix='', acceptable_statuses=(2,),
-headers=None):
+headers=None, params=None):
"""
Gets metadata by doing a HEAD on a path and using the metadata_prefix
to get values from the headers returned.
@@ -252,7 +252,8 @@
"""
headers = headers or {}
-resp = self.make_request('HEAD', path, headers, acceptable_statuses)
+resp = self.make_request('HEAD', path, headers, acceptable_statuses,
+params=params)
metadata_prefix = metadata_prefix.lower()
metadata = {}
for k, v in resp.headers.items():
@@ -406,7 +407,8 @@ class InternalClient(object):
int(resp.headers.get('x-account-object-count', 0)))
def get_account_metadata(
-self, account, metadata_prefix='', acceptable_statuses=(2,)):
+self, account, metadata_prefix='', acceptable_statuses=(2,),
+params=None):
"""Gets account metadata.
:param account: Account on which to get the metadata.
@@ -425,7 +427,8 @@
"""
path = self.make_path(account)
-return self._get_metadata(path, metadata_prefix, acceptable_statuses)
+return self._get_metadata(path, metadata_prefix, acceptable_statuses,
+headers=None, params=params)
def set_account_metadata(
self, account, metadata, metadata_prefix='',
@@ -516,7 +519,7 @@
def get_container_metadata(
self, account, container, metadata_prefix='',
-acceptable_statuses=(2,)):
+acceptable_statuses=(2,), params=None):
"""Gets container metadata.
:param account: The container's account.
@@ -536,7 +539,8 @@
"""
path = self.make_path(account, container)
-return self._get_metadata(path, metadata_prefix, acceptable_statuses)
+return self._get_metadata(path, metadata_prefix, acceptable_statuses,
+params=params)
def iter_objects(
self, account, container, marker='', end_marker='', prefix='',
@@ -618,7 +622,7 @@
def get_object_metadata(
self, account, container, obj, metadata_prefix='',
-acceptable_statuses=(2,), headers=None):
+acceptable_statuses=(2,), headers=None, params=None):
"""Gets object metadata.
:param account: The object's account.
@@ -641,7 +645,7 @@
path = self.make_path(account, container, obj)
return self._get_metadata(path, metadata_prefix, acceptable_statuses,
-headers=headers)
+headers=headers, params=params)
def get_object(self, account, container, obj, headers,
acceptable_statuses=(2,), params=None):
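For illustration, the new params argument is simply forwarded to the
underlying HEAD request as query parameters; a hypothetical call (the query
parameter shown is only an example, not something implied by this patch, and
internal_client is assumed to be an existing InternalClient instance) might
look like:

    meta = internal_client.get_object_metadata(
        'AUTH_test', 'container', 'obj',
        params={'symlink': 'get'})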


@@ -123,7 +123,7 @@ from swift.common.http import HTTP_MULTIPLE_CHOICES, is_success, HTTP_OK
from swift.common.constraints import check_account_format, MAX_FILE_SIZE
from swift.common.request_helpers import copy_header_subset, remove_items, \
is_sys_meta, is_sys_or_user_meta, is_object_transient_sysmeta, \
-check_path_header
+check_path_header, OBJECT_SYSMETA_CONTAINER_UPDATE_OVERRIDE_PREFIX
from swift.common.wsgi import WSGIContext, make_subrequest
@@ -405,7 +405,7 @@ class ServerSideCopyMiddleware(object):
# since we're not copying the source etag, make sure that any
# container update override values are not copied.
remove_items(sink_req.headers, lambda k: k.startswith(
-'X-Object-Sysmeta-Container-Update-Override-'))
+OBJECT_SYSMETA_CONTAINER_UPDATE_OVERRIDE_PREFIX.title()))
# We no longer need these headers
sink_req.headers.pop('X-Copy-From', None)


@@ -23,7 +23,8 @@ from swift.common.middleware.crypto.crypto_utils import CryptoWSGIContext, \
load_crypto_meta, extract_crypto_meta, Crypto
from swift.common.exceptions import EncryptionException, UnknownSecretIdError
from swift.common.request_helpers import get_object_transient_sysmeta, \
-get_sys_meta_prefix, get_user_meta_prefix
+get_sys_meta_prefix, get_user_meta_prefix, \
+get_container_update_override_key
from swift.common.swob import Request, HTTPException, \
HTTPInternalServerError, wsgi_to_bytes, bytes_to_wsgi
from swift.common.utils import get_logger, config_true_value, \
@@ -220,7 +221,7 @@ class DecrypterObjContext(BaseDecrypterContext):
required=True)
mod_hdr_pairs.append(('Etag', decrypted_etag))
-etag_header = 'X-Object-Sysmeta-Container-Update-Override-Etag'
+etag_header = get_container_update_override_key('etag')
encrypted_etag = self._response_header_value(etag_header)
if encrypted_etag:
decrypted_etag = self._decrypt_header(


@@ -22,7 +22,8 @@ from swift.common.http import is_success
from swift.common.middleware.crypto.crypto_utils import CryptoWSGIContext, \
dump_crypto_meta, append_crypto_meta, Crypto
from swift.common.request_helpers import get_object_transient_sysmeta, \
-strip_user_meta_prefix, is_user_meta, update_etag_is_at_header
+strip_user_meta_prefix, is_user_meta, update_etag_is_at_header, \
+get_container_update_override_key
from swift.common.swob import Request, Match, HTTPException, \
HTTPUnprocessableEntity, wsgi_to_bytes, bytes_to_wsgi
from swift.common.utils import get_logger, config_true_value, \
@@ -100,8 +101,8 @@ class EncInputWrapper(object):
# remove any Etag from headers, it won't be valid for ciphertext and
# we'll send the ciphertext Etag later in footer metadata
client_etag = req.headers.pop('etag', None)
-container_listing_etag_header = req.headers.get(
-'X-Object-Sysmeta-Container-Update-Override-Etag')
+override_header = get_container_update_override_key('etag')
+container_listing_etag_header = req.headers.get(override_header)
def footers_callback(footers):
if inner_callback:
@@ -152,8 +153,7 @@
# This may be None if no override was set and no data was read. An
# override value of '' will be passed on.
container_listing_etag = footers.get(
-'X-Object-Sysmeta-Container-Update-Override-Etag',
-container_listing_etag_header)
+override_header, container_listing_etag_header)
if container_listing_etag is None:
container_listing_etag = plaintext_etag
@@ -174,7 +174,7 @@
self.crypto, container_listing_etag,
self.keys['container'])
crypto_meta['key_id'] = self.keys['id']
-footers['X-Object-Sysmeta-Container-Update-Override-Etag'] = \
+footers[override_header] = \
append_crypto_meta(val, crypto_meta)
# else: no override was set and no data was read


@@ -304,6 +304,8 @@ class GetContext(WSGIContext):
actual_content_length = None
content_length_for_swob_range = None
req.range = None
else:
req.range = None
response_headers = [
(h, v) for h, v in response_headers


@@ -70,6 +70,7 @@ import six
from swift.common.swob import Range, bytes_to_wsgi
from swift.common.utils import json, public, reiterate
from swift.common.db import utf8encode
from swift.common.request_helpers import get_container_update_override_key
from six.moves.urllib.parse import quote, urlparse
@@ -182,7 +183,7 @@ class PartController(Controller):
'X-Object-Sysmeta-Swift3-Etag': '', # for legacy data
'X-Object-Sysmeta-Slo-Etag': '',
'X-Object-Sysmeta-Slo-Size': '',
-'X-Object-Sysmeta-Container-Update-Override-Etag': '',
+get_container_update_override_key('etag'): '',
})
resp = req.get_response(self.app)
@@ -634,7 +635,7 @@ class UploadController(Controller):
headers[sysmeta_header('object', 'etag')] = s3_etag
# Leave base header value blank; SLO will populate
c_etag = '; s3_etag=%s' % s3_etag
-headers['X-Object-Sysmeta-Container-Update-Override-Etag'] = c_etag
+headers[get_container_update_override_key('etag')] = c_etag
too_small_message = ('s3api requires that each segment be at least '
'%d bytes' % self.conf.min_segment_size)
@ -133,7 +133,7 @@ class ObjectController(Controller):
# delete object metadata from response # delete object metadata from response
for key in list(resp.headers.keys()): for key in list(resp.headers.keys()):
if key.startswith('x-amz-meta-'): if key.lower().startswith('x-amz-meta-'):
del resp.headers[key] del resp.headers[key]
resp.status = HTTP_OK resp.status = HTTP_OK
@ -33,7 +33,8 @@ from swift.common.http import HTTP_OK, HTTP_CREATED, HTTP_ACCEPTED, \
HTTP_CONFLICT, HTTP_UNPROCESSABLE_ENTITY, HTTP_REQUEST_ENTITY_TOO_LARGE, \ HTTP_CONFLICT, HTTP_UNPROCESSABLE_ENTITY, HTTP_REQUEST_ENTITY_TOO_LARGE, \
HTTP_PARTIAL_CONTENT, HTTP_NOT_MODIFIED, HTTP_PRECONDITION_FAILED, \ HTTP_PARTIAL_CONTENT, HTTP_NOT_MODIFIED, HTTP_PRECONDITION_FAILED, \
HTTP_REQUESTED_RANGE_NOT_SATISFIABLE, HTTP_LENGTH_REQUIRED, \ HTTP_REQUESTED_RANGE_NOT_SATISFIABLE, HTTP_LENGTH_REQUIRED, \
HTTP_BAD_REQUEST, HTTP_REQUEST_TIMEOUT, is_success HTTP_BAD_REQUEST, HTTP_REQUEST_TIMEOUT, HTTP_SERVICE_UNAVAILABLE, \
is_success
from swift.common.constraints import check_utf8 from swift.common.constraints import check_utf8
from swift.proxy.controllers.base import get_container_info, \ from swift.proxy.controllers.base import get_container_info, \
@ -52,7 +53,8 @@ from swift.common.middleware.s3api.s3response import AccessDenied, \
InternalError, NoSuchBucket, NoSuchKey, PreconditionFailed, InvalidRange, \ InternalError, NoSuchBucket, NoSuchKey, PreconditionFailed, InvalidRange, \
MissingContentLength, InvalidStorageClass, S3NotImplemented, InvalidURI, \ MissingContentLength, InvalidStorageClass, S3NotImplemented, InvalidURI, \
MalformedXML, InvalidRequest, RequestTimeout, InvalidBucketName, \ MalformedXML, InvalidRequest, RequestTimeout, InvalidBucketName, \
BadDigest, AuthorizationHeaderMalformed, AuthorizationQueryParametersError BadDigest, AuthorizationHeaderMalformed, \
AuthorizationQueryParametersError, ServiceUnavailable
from swift.common.middleware.s3api.exception import NotS3Request, \ from swift.common.middleware.s3api.exception import NotS3Request, \
BadSwiftRequest BadSwiftRequest
from swift.common.middleware.s3api.utils import utf8encode, \ from swift.common.middleware.s3api.utils import utf8encode, \
@ -1369,6 +1371,8 @@ class S3Request(swob.Request):
**self.signature_does_not_match_kwargs()) **self.signature_does_not_match_kwargs())
if status == HTTP_FORBIDDEN: if status == HTTP_FORBIDDEN:
raise AccessDenied() raise AccessDenied()
if status == HTTP_SERVICE_UNAVAILABLE:
raise ServiceUnavailable()
raise InternalError('unexpected status code %d' % status) raise InternalError('unexpected status code %d' % status)
@ -330,15 +330,18 @@ from swift.common.middleware.listing_formats import \
from swift.common.swob import Request, HTTPBadRequest, HTTPServerError, \ from swift.common.swob import Request, HTTPBadRequest, HTTPServerError, \
HTTPMethodNotAllowed, HTTPRequestEntityTooLarge, HTTPLengthRequired, \ HTTPMethodNotAllowed, HTTPRequestEntityTooLarge, HTTPLengthRequired, \
HTTPOk, HTTPPreconditionFailed, HTTPException, HTTPNotFound, \ HTTPOk, HTTPPreconditionFailed, HTTPException, HTTPNotFound, \
HTTPUnauthorized, HTTPConflict, HTTPUnprocessableEntity, Response, Range, \ HTTPUnauthorized, HTTPConflict, HTTPUnprocessableEntity, \
HTTPServiceUnavailable, Response, Range, \
RESPONSE_REASONS, str_to_wsgi, wsgi_to_str, wsgi_quote RESPONSE_REASONS, str_to_wsgi, wsgi_to_str, wsgi_quote
from swift.common.utils import get_logger, config_true_value, \ from swift.common.utils import get_logger, config_true_value, \
get_valid_utf8_str, override_bytes_from_content_type, split_path, \ get_valid_utf8_str, override_bytes_from_content_type, split_path, \
register_swift_info, RateLimitedIterator, quote, close_if_possible, \ register_swift_info, RateLimitedIterator, quote, close_if_possible, \
closing_if_possible, LRUCache, StreamingPile, strict_b64decode closing_if_possible, LRUCache, StreamingPile, strict_b64decode, \
Timestamp
from swift.common.request_helpers import SegmentedIterable, \ from swift.common.request_helpers import SegmentedIterable, \
get_sys_meta_prefix, update_etag_is_at_header, resolve_etag_is_at_header get_sys_meta_prefix, update_etag_is_at_header, resolve_etag_is_at_header, \
from swift.common.constraints import check_utf8, MAX_BUFFERED_SLO_SEGMENTS get_container_update_override_key
from swift.common.constraints import check_utf8
from swift.common.http import HTTP_NOT_FOUND, HTTP_UNAUTHORIZED, is_success from swift.common.http import HTTP_NOT_FOUND, HTTP_UNAUTHORIZED, is_success
from swift.common.wsgi import WSGIContext, make_subrequest from swift.common.wsgi import WSGIContext, make_subrequest
from swift.common.middleware.bulk import get_response_body, \ from swift.common.middleware.bulk import get_response_body, \
@ -500,9 +503,6 @@ def parse_and_validate_input(req_body, req_path):
% (seg_index,)) % (seg_index,))
continue continue
# re-encode to normalize padding # re-encode to normalize padding
if six.PY2:
seg_dict['data'] = base64.b64encode(data)
else:
seg_dict['data'] = base64.b64encode(data).decode('ascii') seg_dict['data'] = base64.b64encode(data).decode('ascii')
if parsed_data and all('data' in d for d in parsed_data): if parsed_data and all('data' in d for d in parsed_data):
@ -734,7 +734,7 @@ class SloGetContext(WSGIContext):
content_range = value content_range = value
break break
# e.g. Content-Range: bytes 0-14289/14290 # e.g. Content-Range: bytes 0-14289/14290
match = re.match('bytes (\d+)-(\d+)/(\d+)$', content_range) match = re.match(r'bytes (\d+)-(\d+)/(\d+)$', content_range)
if not match: if not match:
# Malformed or missing, so we don't know what we got. # Malformed or missing, so we don't know what we got.
return True return True
@ -767,7 +767,7 @@ class SloGetContext(WSGIContext):
resp_iter = self._app_call(req.environ) resp_iter = self._app_call(req.environ)
# make sure this response is for a static large object manifest # make sure this response is for a static large object manifest
slo_marker = slo_etag = slo_size = None slo_marker = slo_etag = slo_size = slo_timestamp = None
for header, value in self._response_headers: for header, value in self._response_headers:
header = header.lower() header = header.lower()
if header == SYSMETA_SLO_ETAG: if header == SYSMETA_SLO_ETAG:
@ -777,8 +777,10 @@ class SloGetContext(WSGIContext):
elif (header == 'x-static-large-object' and elif (header == 'x-static-large-object' and
config_true_value(value)): config_true_value(value)):
slo_marker = value slo_marker = value
elif header == 'x-backend-timestamp':
slo_timestamp = value
if slo_marker and slo_etag and slo_size: if slo_marker and slo_etag and slo_size and slo_timestamp:
break break
if not slo_marker: if not slo_marker:
@ -822,6 +824,7 @@ class SloGetContext(WSGIContext):
conditional_response=True) conditional_response=True)
resp.headers.update({ resp.headers.update({
'Etag': '"%s"' % slo_etag, 'Etag': '"%s"' % slo_etag,
'X-Manifest-Etag': self._response_header_value('etag'),
'Content-Length': slo_size, 'Content-Length': slo_size,
}) })
return resp(req.environ, start_response) return resp(req.environ, start_response)
@ -836,6 +839,35 @@ class SloGetContext(WSGIContext):
headers={'x-auth-token': req.headers.get('x-auth-token')}, headers={'x-auth-token': req.headers.get('x-auth-token')},
agent='%(orig)s SLO MultipartGET', swift_source='SLO') agent='%(orig)s SLO MultipartGET', swift_source='SLO')
resp_iter = self._app_call(get_req.environ) resp_iter = self._app_call(get_req.environ)
slo_marker = config_true_value(self._response_header_value(
'x-static-large-object'))
if not slo_marker: # will also catch non-2xx responses
got_timestamp = self._response_header_value(
'x-backend-timestamp') or '0'
if Timestamp(got_timestamp) >= Timestamp(slo_timestamp):
# We've got a newer response available, so serve that.
# Note that if there's data, it's going to be a 200 now,
# not a 206, and we're not going to drop bytes in the
# proxy on the client's behalf. Fortunately, the RFC is
# pretty forgiving for a server; there's no guarantee that
# a Range header will be respected.
resp = Response(
status=self._response_status,
headers=self._response_headers,
app_iter=resp_iter,
request=req,
conditional_etag=resolve_etag_is_at_header(
req, self._response_headers),
conditional_response=is_success(
int(self._response_status[:3])))
return resp(req.environ, start_response)
else:
# We saw newer data that indicated it's an SLO, but
# couldn't fetch the whole thing; 503 seems reasonable?
close_if_possible(resp_iter)
raise HTTPServiceUnavailable(request=req)
# NB: we might have gotten an out-of-date manifest -- that's OK;
# we'll just try to serve the old data
# Any Content-Range from a manifest is almost certainly wrong for the # Any Content-Range from a manifest is almost certainly wrong for the
# full large object. # full large object.
@ -897,7 +929,9 @@ class SloGetContext(WSGIContext):
response_headers = [] response_headers = []
for header, value in resp_headers: for header, value in resp_headers:
lheader = header.lower() lheader = header.lower()
if lheader not in ('etag', 'content-length'): if lheader == 'etag':
response_headers.append(('X-Manifest-Etag', value))
elif lheader != 'content-length':
response_headers.append((header, value)) response_headers.append((header, value))
if lheader == SYSMETA_SLO_ETAG: if lheader == SYSMETA_SLO_ETAG:
@ -926,7 +960,7 @@ class SloGetContext(WSGIContext):
r = '%s:%s;' % (seg_dict['hash'], seg_dict['range']) r = '%s:%s;' % (seg_dict['hash'], seg_dict['range'])
else: else:
r = seg_dict['hash'] r = seg_dict['hash']
calculated_etag.update(r.encode('ascii') if six.PY3 else r) calculated_etag.update(r.encode('ascii'))
if content_length is None: if content_length is None:
if config_true_value(seg_dict.get('sub_slo')): if config_true_value(seg_dict.get('sub_slo')):
@ -1062,7 +1096,10 @@ class StaticLargeObject(object):
delete_concurrency = int(self.conf.get( delete_concurrency = int(self.conf.get(
'delete_concurrency', self.concurrency)) 'delete_concurrency', self.concurrency))
self.bulk_deleter = Bulk( self.bulk_deleter = Bulk(
app, {}, delete_concurrency=delete_concurrency, logger=self.logger) app, {},
max_deletes_per_request=float('inf'),
delete_concurrency=delete_concurrency,
logger=self.logger)
def handle_multipart_get_or_head(self, req, start_response): def handle_multipart_get_or_head(self, req, start_response):
""" """
@ -1264,9 +1301,7 @@ class StaticLargeObject(object):
resp_dict = {} resp_dict = {}
if heartbeat: if heartbeat:
resp_dict['Response Status'] = err.status resp_dict['Response Status'] = err.status
err_body = err.body err_body = err.body.decode('utf-8')
if six.PY3:
err_body = err_body.decode('utf-8', errors='replace')
resp_dict['Response Body'] = err_body or '\n'.join( resp_dict['Response Body'] = err_body or '\n'.join(
RESPONSE_REASONS.get(err.status_int, [''])) RESPONSE_REASONS.get(err.status_int, ['']))
else: else:
@ -1321,7 +1356,7 @@ class StaticLargeObject(object):
# Ensure container listings have both etags. However, if any # Ensure container listings have both etags. However, if any
# middleware to the left of us touched the base value, trust them. # middleware to the left of us touched the base value, trust them.
override_header = 'X-Object-Sysmeta-Container-Update-Override-Etag' override_header = get_container_update_override_key('etag')
val, sep, params = req.headers.get( val, sep, params = req.headers.get(
override_header, '').partition(';') override_header, '').partition(';')
req.headers[override_header] = '%s; slo_etag=%s' % ( req.headers[override_header] = '%s; slo_etag=%s' % (
@ -1380,7 +1415,13 @@ class StaticLargeObject(object):
'sub_slo': True, 'sub_slo': True,
'name': obj_path}] 'name': obj_path}]
while segments: while segments:
if len(segments) > MAX_BUFFERED_SLO_SEGMENTS: # We chose not to set the limit at max_manifest_segments
# in the case this value was decreased by operators.
# Still it is important to set a limit to avoid this list
# growing too large and causing OOM failures.
# x10 is a best guess as to how much operators would change
# the value of max_manifest_segments.
if len(segments) > self.max_manifest_segments * 10:
raise HTTPBadRequest( raise HTTPBadRequest(
'Too many buffered slo segments to delete.') 'Too many buffered slo segments to delete.')
seg_data = segments.pop(0) seg_data = segments.pop(0)
@ -30,12 +30,20 @@ symlink, the header ``X-Symlink-Target-Account: <account>`` must be included.
 If omitted, it is inserted automatically with the account of the symlink
 object in the PUT request process.

-Symlinks must be zero-byte objects. Attempting to PUT a symlink
-with a non-empty request body will result in a 400-series error. Also, POST
-with X-Symlink-Target header always results in a 400-series error. The target
-object need not exist at symlink creation time. It is suggested to set the
-``Content-Type`` of symlink objects to a distinct value such as
-``application/symlink``.
+Symlinks must be zero-byte objects. Attempting to PUT a symlink with a
+non-empty request body will result in a 400-series error. Also, POST with
+``X-Symlink-Target`` header always results in a 400-series error. The target
+object need not exist at symlink creation time.
+
+Clients may optionally include an ``X-Symlink-Target-Etag: <etag>`` header
+during the PUT. If present, this will create a "static symlink" instead of a
+"dynamic symlink". Static symlinks point to a specific object rather than a
+specific name. They do this by using the value set in their
+``X-Symlink-Target-Etag`` header when created to verify it still matches the
+ETag of the object they're pointing at on a GET. In contrast to a dynamic
+symlink, the target object referenced in the ``X-Symlink-Target`` header must
+exist and its ETag must match the ``X-Symlink-Target-Etag`` or the symlink
+creation will return a client error.
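For illustration only (not part of this change): a minimal sketch of creating a
dynamic and then a static symlink with the third-party ``requests`` library;
the endpoint, token, container and object names below are hypothetical::

    import requests

    STORAGE_URL = 'http://saio:8080/v1/AUTH_test'  # hypothetical endpoint
    HEADERS = {'X-Auth-Token': 'AUTH_tk-hypothetical'}

    # Dynamic symlink: only the target *name* is recorded.
    requests.put(STORAGE_URL + '/links/dynamic-link', data=b'',
                 headers=dict(HEADERS, **{
                     'X-Symlink-Target': 'targets/obj'}))

    # Static symlink: additionally pin the target's current ETag. The PUT
    # fails with 409 Conflict if the target is missing or its ETag differs.
    target_etag = requests.head(STORAGE_URL + '/targets/obj',
                                headers=HEADERS).headers['Etag']
    requests.put(STORAGE_URL + '/links/static-link', data=b'',
                 headers=dict(HEADERS, **{
                     'X-Symlink-Target': 'targets/obj',
                     'X-Symlink-Target-Etag': target_etag}))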
 A GET/HEAD request to a symlink will result in a request to the target
 object referenced by the symlink's ``X-Symlink-Target-Account`` and
@ -45,12 +53,22 @@ GET/HEAD request to a symlink with the query parameter ``?symlink=get`` will
 result in the request targeting the symlink itself.

 A symlink can point to another symlink. Chained symlinks will be traversed
-until target is not a symlink. If the number of chained symlinks exceeds the
-limit ``symloop_max`` an error response will be produced. The value of
+until the target is not a symlink. If the number of chained symlinks exceeds
+the limit ``symloop_max`` an error response will be produced. The value of
 ``symloop_max`` can be defined in the symlink config section of
 `proxy-server.conf`. If not specified, the default ``symloop_max`` value is 2.
 If a value less than 1 is specified, the default value will be used.
+
+If a static symlink (i.e. a symlink created with an ``X-Symlink-Target-Etag``
+header) targets another static symlink, both of the ``X-Symlink-Target-Etag``
+headers must match the target object for the GET to succeed. If a static
+symlink targets a dynamic symlink (i.e. a symlink created without an
+``X-Symlink-Target-Etag`` header) then the ``X-Symlink-Target-Etag`` header of
+the static symlink must be the Etag of the zero-byte object. If a symlink with
+an ``X-Symlink-Target-Etag`` targets a large object manifest it must match the
+ETag of the manifest (e.g. the ETag as returned by ``multipart-manifest=get``
+or the value in the ``X-Manifest-Etag`` header).

 A HEAD/GET request to a symlink object behaves as a normal HEAD/GET request
 to the target object. Therefore issuing a HEAD request to the symlink will
 return the target metadata, and issuing a GET request to the symlink will
@ -58,13 +76,22 @@ return the data and metadata of the target object. To return the symlink
 metadata (with its empty body) a GET/HEAD request with the ``?symlink=get``
 query parameter must be sent to a symlink object.

-A POST request to a symlink will result in a 307 TemporaryRedirect response.
+A POST request to a symlink will result in a 307 Temporary Redirect response.
 The response will contain a ``Location`` header with the path of the target
 object as the value. The request is never redirected to the target object by
 Swift. Nevertheless, the metadata in the POST request will be applied to the
 symlink because object servers cannot know for sure if the current object is a
 symlink or not in eventual consistency.
+
+A symlink's ``Content-Type`` is completely independent from its target. As a
+convenience Swift will automatically set the ``Content-Type`` on a symlink PUT
+if not explicitly set by the client. If the client sends an
+``X-Symlink-Target-Etag`` header, Swift will set the symlink's ``Content-Type``
+to that of the target, otherwise it will be set to ``application/symlink``.
+You can review a symlink's ``Content-Type`` using the ``?symlink=get``
+interface. You can change a symlink's ``Content-Type`` using a POST request.
+The symlink's ``Content-Type`` will appear in the container listing.

 A DELETE request to a symlink will delete the symlink itself. The target
 object will not be deleted.
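As an illustration (again with a hypothetical endpoint and names, not part of
this change), the difference between a plain GET and a ``?symlink=get`` GET
might look like::

    import requests

    STORAGE_URL = 'http://saio:8080/v1/AUTH_test'  # hypothetical endpoint
    HEADERS = {'X-Auth-Token': 'AUTH_tk-hypothetical'}

    # Served from the target object: target data, metadata and Content-Type.
    requests.get(STORAGE_URL + '/links/static-link', headers=HEADERS)

    # Served from the symlink itself: zero-byte body, the X-Symlink-Target*
    # headers and the symlink's own Content-Type.
    resp = requests.get(STORAGE_URL + '/links/static-link',
                        params={'symlink': 'get'}, headers=HEADERS)
    print(resp.headers.get('X-Symlink-Target'),
          resp.headers.get('X-Symlink-Target-Etag'),
          resp.headers.get('Content-Type'))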
@ -73,7 +100,7 @@ will copy the target object. The same request to a symlink with the query
 parameter ``?symlink=get`` will copy the symlink itself.

 An OPTIONS request to a symlink will respond with the options for the symlink
-only, the request will not be redirected to the target object. Please note that
+only; the request will not be redirected to the target object. Please note that
 if the symlink's target object is in another container with CORS settings, the
 response will not reflect the settings.
@ -82,7 +109,8 @@ will result in a 400-series error. The GET/HEAD tempurls honor the scope of
 the tempurl key. Container tempurl will only work on symlinks where the target
 container is the same as the symlink. In case a symlink targets an object
 in a different container, a GET/HEAD request will result in a 401 Unauthorized
-error. The account level tempurl will allow cross container symlinks.
+error. The account level tempurl will allow cross-container symlinks, but not
+cross-account symlinks.

 If a symlink object is overwritten while it is in a versioned container, the
 symlink object itself is versioned, not the referenced object.
@ -91,8 +119,19 @@ A GET request with query parameter ``?format=json`` to a container which
 contains symlinks will respond with additional information ``symlink_path``
 for each symlink object in the container listing. The ``symlink_path`` value
 is the target path of the symlink. Clients can differentiate symlinks and
-other objects by this function. Note that responses of any other format
-(e.g.``?format=xml``) won't include ``symlink_path`` info.
+other objects by this function. Note that responses in any other format
+(e.g. ``?format=xml``) won't include ``symlink_path`` info. If an
+``X-Symlink-Target-Etag`` header was included on the symlink, JSON container
+listings will include that value in a ``symlink_etag`` key and the target
+object's ``Content-Length`` will be included in the key ``symlink_bytes``.
+
+If a static symlink targets a static large object manifest it will carry
+forward the SLO's size and slo_etag in the container listing using the
+``symlink_bytes`` and ``slo_etag`` keys. However, manifests created before
+swift v2.12.0 (released Dec 2016) do not contain enough metadata to propagate
+the extra SLO information to the listing. Clients may recreate the manifest
+(COPY w/ ``?multipart-manifest=get``) before creating a static symlink to add
+the requisite metadata.
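A sketch of what a symlink's JSON listing entry might look like (all values
below are made up for illustration and are not taken from this change)::

    import requests

    STORAGE_URL = 'http://saio:8080/v1/AUTH_test'  # hypothetical endpoint
    HEADERS = {'X-Auth-Token': 'AUTH_tk-hypothetical'}

    listing = requests.get(STORAGE_URL + '/links',
                           params={'format': 'json'}, headers=HEADERS).json()
    # One static symlink entry might look roughly like:
    # {"name": "static-link",
    #  "bytes": 0,
    #  "hash": "d41d8cd98f00b204e9800998ecf8427e",
    #  "content_type": "application/octet-stream",
    #  "last_modified": "2019-08-21T22:13:12.000000",
    #  "symlink_path": "/v1/AUTH_test/targets/obj",
    #  "symlink_etag": "0b4c12d7e0a73840c1c4f148fda3b037",
    #  "symlink_bytes": 1024}
    for entry in listing:
        if 'symlink_path' in entry:
            print(entry['name'], '->', entry['symlink_path'])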
 Errors

@ -105,7 +144,10 @@ Errors
 * GET/HEAD traversing more than ``symloop_max`` chained symlinks will
   produce a 409 Conflict error.

-* POSTs will produce a 307 TemporaryRedirect error.
+* PUT/GET/HEAD on a symlink that includes an ``X-Symlink-Target-Etag`` header
+  that does not match the target will produce a 409 Conflict error.
+
+* POSTs will produce a 307 Temporary Redirect error.

 ----------
 Deployment
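The symlink config section referenced above might look like the following
minimal sketch; the values are a guess at typical settings, and
``egg:swift#symlink`` is assumed to be the stock paste entry point::

    [filter:symlink]
    use = egg:swift#symlink
    # values below 1 fall back to the default of 2
    symloop_max = 2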
@ -160,15 +202,15 @@ import os
from cgi import parse_header from cgi import parse_header
from swift.common.utils import get_logger, register_swift_info, split_path, \ from swift.common.utils import get_logger, register_swift_info, split_path, \
MD5_OF_EMPTY_STRING, closing_if_possible MD5_OF_EMPTY_STRING, close_if_possible, closing_if_possible
from swift.common.constraints import check_account_format from swift.common.constraints import check_account_format
from swift.common.wsgi import WSGIContext, make_subrequest from swift.common.wsgi import WSGIContext, make_subrequest
from swift.common.request_helpers import get_sys_meta_prefix, \ from swift.common.request_helpers import get_sys_meta_prefix, \
check_path_header check_path_header, get_container_update_override_key
from swift.common.swob import Request, HTTPBadRequest, HTTPTemporaryRedirect, \ from swift.common.swob import Request, HTTPBadRequest, HTTPTemporaryRedirect, \
HTTPException, HTTPConflict, HTTPPreconditionFailed, wsgi_quote, \ HTTPException, HTTPConflict, HTTPPreconditionFailed, wsgi_quote, \
wsgi_unquote wsgi_unquote
from swift.common.http import is_success from swift.common.http import is_success, HTTP_NOT_FOUND
from swift.common.exceptions import LinkIterError from swift.common.exceptions import LinkIterError
from swift.common.header_key_dict import HeaderKeyDict from swift.common.header_key_dict import HeaderKeyDict
@ -176,22 +218,33 @@ DEFAULT_SYMLOOP_MAX = 2
# Header values for symlink target path strings will be quoted values. # Header values for symlink target path strings will be quoted values.
TGT_OBJ_SYMLINK_HDR = 'x-symlink-target' TGT_OBJ_SYMLINK_HDR = 'x-symlink-target'
TGT_ACCT_SYMLINK_HDR = 'x-symlink-target-account' TGT_ACCT_SYMLINK_HDR = 'x-symlink-target-account'
TGT_ETAG_SYMLINK_HDR = 'x-symlink-target-etag'
TGT_BYTES_SYMLINK_HDR = 'x-symlink-target-bytes'
TGT_OBJ_SYSMETA_SYMLINK_HDR = get_sys_meta_prefix('object') + 'symlink-target' TGT_OBJ_SYSMETA_SYMLINK_HDR = get_sys_meta_prefix('object') + 'symlink-target'
TGT_ACCT_SYSMETA_SYMLINK_HDR = \ TGT_ACCT_SYSMETA_SYMLINK_HDR = \
get_sys_meta_prefix('object') + 'symlink-target-account' get_sys_meta_prefix('object') + 'symlink-target-account'
TGT_ETAG_SYSMETA_SYMLINK_HDR = \
get_sys_meta_prefix('object') + 'symlink-target-etag'
TGT_BYTES_SYSMETA_SYMLINK_HDR = \
get_sys_meta_prefix('object') + 'symlink-target-bytes'
def _check_symlink_header(req): def _validate_and_prep_request_headers(req):
""" """
Validate that the value from x-symlink-target header is Validate that the value from x-symlink-target header is well formatted
well formatted. We assume the caller ensures that and that the x-symlink-target-etag header (if present) does not contain
problematic characters. We assume the caller ensures that
x-symlink-target header is present in req.headers. x-symlink-target header is present in req.headers.
:param req: HTTP request object :param req: HTTP request object
:returns: a tuple, the full versioned path to the object (as a WSGI string)
and the X-Symlink-Target-Etag header value which may be None
:raise: HTTPPreconditionFailed if x-symlink-target value :raise: HTTPPreconditionFailed if x-symlink-target value
is not well formatted. is not well formatted.
:raise: HTTPBadRequest if the x-symlink-target value points to the request :raise: HTTPBadRequest if the x-symlink-target value points to the request
path. path.
:raise: HTTPBadRequest if the x-symlink-target-etag value contains
a semicolon, double-quote, or backslash.
""" """
# N.B. check_path_header doesn't assert the leading slash and # N.B. check_path_header doesn't assert the leading slash and
# copy middleware may accept the format. In the symlink, API # copy middleware may accept the format. In the symlink, API
@ -228,43 +281,48 @@ def _check_symlink_header(req):
raise HTTPBadRequest( raise HTTPBadRequest(
body='Symlink cannot target itself', body='Symlink cannot target itself',
request=req, content_type='text/plain') request=req, content_type='text/plain')
etag = req.headers.get(TGT_ETAG_SYMLINK_HDR, None)
if etag and any(c in etag for c in ';"\\'):
# See cgi.parse_header for why the above chars are problematic
raise HTTPBadRequest(
body='Bad %s format' % TGT_ETAG_SYMLINK_HDR.title(),
request=req, content_type='text/plain')
if not (etag or req.headers.get('Content-Type')):
req.headers['Content-Type'] = 'application/symlink'
return '/v1/%s/%s/%s' % (account, container, obj), etag
def symlink_usermeta_to_sysmeta(headers): def symlink_usermeta_to_sysmeta(headers):
""" """
Helper function to translate from X-Symlink-Target and Helper function to translate from client-facing X-Symlink-* headers
X-Symlink-Target-Account to X-Object-Sysmeta-Symlink-Target to cluster-facing X-Object-Sysmeta-Symlink-* headers.
and X-Object-Sysmeta-Symlink-Target-Account.
:param headers: request headers dict. Note that the headers dict :param headers: request headers dict. Note that the headers dict
will be updated directly. will be updated directly.
""" """
# To preserve url-encoded value in the symlink header, use raw value for user_hdr, sysmeta_hdr in (
if TGT_OBJ_SYMLINK_HDR in headers: for user_hdr, sysmeta_hdr in (
headers[TGT_OBJ_SYSMETA_SYMLINK_HDR] = headers.pop( (TGT_OBJ_SYMLINK_HDR, TGT_OBJ_SYSMETA_SYMLINK_HDR),
TGT_OBJ_SYMLINK_HDR) (TGT_ACCT_SYMLINK_HDR, TGT_ACCT_SYSMETA_SYMLINK_HDR)):
if user_hdr in headers:
if TGT_ACCT_SYMLINK_HDR in headers: headers[sysmeta_hdr] = headers.pop(user_hdr)
headers[TGT_ACCT_SYSMETA_SYMLINK_HDR] = headers.pop(
TGT_ACCT_SYMLINK_HDR)
def symlink_sysmeta_to_usermeta(headers): def symlink_sysmeta_to_usermeta(headers):
""" """
Helper function to translate from X-Object-Sysmeta-Symlink-Target and Helper function to translate from cluster-facing
X-Object-Sysmeta-Symlink-Target-Account to X-Symlink-Target and X-Object-Sysmeta-Symlink-* headers to client-facing X-Symlink-* headers.
X-Sysmeta-Symlink-Target-Account
:param headers: request headers dict. Note that the headers dict :param headers: request headers dict. Note that the headers dict
will be updated directly. will be updated directly.
""" """
if TGT_OBJ_SYSMETA_SYMLINK_HDR in headers: for user_hdr, sysmeta_hdr in (
headers[TGT_OBJ_SYMLINK_HDR] = headers.pop( (TGT_OBJ_SYMLINK_HDR, TGT_OBJ_SYSMETA_SYMLINK_HDR),
TGT_OBJ_SYSMETA_SYMLINK_HDR) (TGT_ACCT_SYMLINK_HDR, TGT_ACCT_SYSMETA_SYMLINK_HDR),
(TGT_ETAG_SYMLINK_HDR, TGT_ETAG_SYSMETA_SYMLINK_HDR),
if TGT_ACCT_SYSMETA_SYMLINK_HDR in headers: (TGT_BYTES_SYMLINK_HDR, TGT_BYTES_SYSMETA_SYMLINK_HDR)):
headers[TGT_ACCT_SYMLINK_HDR] = headers.pop( if sysmeta_hdr in headers:
TGT_ACCT_SYSMETA_SYMLINK_HDR) headers[user_hdr] = headers.pop(sysmeta_hdr)
class SymlinkContainerContext(WSGIContext): class SymlinkContainerContext(WSGIContext):
@ -308,9 +366,10 @@ class SymlinkContainerContext(WSGIContext):
def _extract_symlink_path_json(self, obj_dict, swift_version, account): def _extract_symlink_path_json(self, obj_dict, swift_version, account):
""" """
Extract the symlink path from the hash value Extract the symlink info from the hash value
:return: object dictionary with additional key:value pair if object :return: object dictionary with additional key:value pairs when object
is a symlink. The new key is symlink_path. is a symlink. i.e. new symlink_path, symlink_etag and
symlink_bytes keys
""" """
if 'hash' in obj_dict: if 'hash' in obj_dict:
hash_value, meta = parse_header(obj_dict['hash']) hash_value, meta = parse_header(obj_dict['hash'])
@ -321,6 +380,10 @@ class SymlinkContainerContext(WSGIContext):
target = meta[key] target = meta[key]
elif key == 'symlink_target_account': elif key == 'symlink_target_account':
account = meta[key] account = meta[key]
elif key == 'symlink_target_etag':
obj_dict['symlink_etag'] = meta[key]
elif key == 'symlink_target_bytes':
obj_dict['symlink_bytes'] = int(meta[key])
else: else:
# make sure to add all other (key, values) back in place # make sure to add all other (key, values) back in place
obj_dict['hash'] += '; %s=%s' % (key, meta[key]) obj_dict['hash'] += '; %s=%s' % (key, meta[key])
@ -370,10 +433,11 @@ class SymlinkObjectContext(WSGIContext):
except LinkIterError: except LinkIterError:
errmsg = 'Too many levels of symbolic links, ' \ errmsg = 'Too many levels of symbolic links, ' \
'maximum allowed is %d' % self.symloop_max 'maximum allowed is %d' % self.symloop_max
raise HTTPConflict( raise HTTPConflict(body=errmsg, request=req,
body=errmsg, request=req, content_type='text/plain') content_type='text/plain')
def _recursive_get_head(self, req): def _recursive_get_head(self, req, target_etag=None,
follow_softlinks=True):
resp = self._app_call(req.environ) resp = self._app_call(req.environ)
def build_traversal_req(symlink_target): def build_traversal_req(symlink_target):
@ -396,14 +460,35 @@ class SymlinkObjectContext(WSGIContext):
symlink_target = self._response_header_value( symlink_target = self._response_header_value(
TGT_OBJ_SYSMETA_SYMLINK_HDR) TGT_OBJ_SYSMETA_SYMLINK_HDR)
if symlink_target: resp_etag = self._response_header_value(
TGT_ETAG_SYSMETA_SYMLINK_HDR)
if symlink_target and (resp_etag or follow_softlinks):
close_if_possible(resp)
found_etag = resp_etag or self._response_header_value('etag')
if target_etag and target_etag != found_etag:
raise HTTPConflict(
body='X-Symlink-Target-Etag headers do not match',
headers={
'Content-Type': 'text/plain',
'Content-Location': self._last_target_path})
if self._loop_count >= self.symloop_max: if self._loop_count >= self.symloop_max:
raise LinkIterError() raise LinkIterError()
# format: /<account name>/<container name>/<object name> # format: /<account name>/<container name>/<object name>
new_req = build_traversal_req(symlink_target) new_req = build_traversal_req(symlink_target)
self._loop_count += 1 self._loop_count += 1
return self._recursive_get_head(new_req) return self._recursive_get_head(new_req, target_etag=resp_etag)
else: else:
final_etag = self._response_header_value('etag')
if final_etag and target_etag and target_etag != final_etag:
close_if_possible(resp)
body = ('Object Etag %r does not match '
'X-Symlink-Target-Etag header %r')
raise HTTPConflict(
body=body % (final_etag, target_etag),
headers={
'Content-Type': 'text/plain',
'Content-Location': self._last_target_path})
if self._last_target_path: if self._last_target_path:
# Content-Location will be applied only when one or more # Content-Location will be applied only when one or more
# symlink recursion occurred. # symlink recursion occurred.
@ -417,6 +502,47 @@ class SymlinkObjectContext(WSGIContext):
return resp return resp
def _validate_etag_and_update_sysmeta(self, req, symlink_target_path,
etag):
# next we'll make sure the E-Tag matches a real object
new_req = make_subrequest(
req.environ, path=wsgi_quote(symlink_target_path), method='HEAD',
swift_source='SYM')
self._last_target_path = symlink_target_path
resp = self._recursive_get_head(new_req, target_etag=etag,
follow_softlinks=False)
if self._get_status_int() == HTTP_NOT_FOUND:
raise HTTPConflict(
body='X-Symlink-Target does not exist',
headers={
'Content-Type': 'text/plain',
'Content-Location': self._last_target_path})
if not is_success(self._get_status_int()):
return resp
response_headers = HeaderKeyDict(self._response_headers)
# carry forward any etag update params (e.g. "slo_etag"), we'll append
# symlink_target_* params to this header after this method returns
override_header = get_container_update_override_key('etag')
if override_header in response_headers and \
override_header not in req.headers:
sep, params = response_headers[override_header].partition(';')[1:]
req.headers[override_header] = MD5_OF_EMPTY_STRING + sep + params
# It's troublesome that there's so much leakage with SLO
if 'X-Object-Sysmeta-Slo-Etag' in response_headers and \
override_header not in req.headers:
req.headers[override_header] = '%s; slo_etag=%s' % (
MD5_OF_EMPTY_STRING,
response_headers['X-Object-Sysmeta-Slo-Etag'])
req.headers[TGT_BYTES_SYSMETA_SYMLINK_HDR] = (
response_headers.get('x-object-sysmeta-slo-size') or
response_headers['Content-Length'])
req.headers[TGT_ETAG_SYSMETA_SYMLINK_HDR] = etag
if not req.headers.get('Content-Type'):
req.headers['Content-Type'] = response_headers['Content-Type']
def handle_put(self, req): def handle_put(self, req):
""" """
Handle put request when it contains X-Symlink-Target header. Handle put request when it contains X-Symlink-Target header.
@ -425,13 +551,23 @@ class SymlinkObjectContext(WSGIContext):
:param req: HTTP PUT object request :param req: HTTP PUT object request
:returns: Response Iterator :returns: Response Iterator
""" """
if req.content_length != 0: if req.content_length is None:
has_body = (req.body_file.read(1) != b'')
else:
has_body = (req.content_length != 0)
if has_body:
raise HTTPBadRequest( raise HTTPBadRequest(
body='Symlink requests require a zero byte body', body='Symlink requests require a zero byte body',
request=req, request=req,
content_type='text/plain') content_type='text/plain')
_check_symlink_header(req) symlink_target_path, etag = _validate_and_prep_request_headers(req)
if etag:
resp = self._validate_etag_and_update_sysmeta(
req, symlink_target_path, etag)
if resp is not None:
return resp
# N.B. TGT_ETAG_SYMLINK_HDR was converted as part of verifying it
symlink_usermeta_to_sysmeta(req.headers) symlink_usermeta_to_sysmeta(req.headers)
# Store info in container update that this object is a symlink. # Store info in container update that this object is a symlink.
# We have a design decision to use etag space to store symlink info for # We have a design decision to use etag space to store symlink info for
@ -441,17 +577,31 @@ class SymlinkObjectContext(WSGIContext):
# listing result for clients. # listing result for clients.
# To create override etag easily, we have a constraint that the symlink # To create override etag easily, we have a constraint that the symlink
# must be 0 byte so we can add etag of the empty string + symlink info # must be 0 byte so we can add etag of the empty string + symlink info
# here, simply. Note that this override etag may be encrypted in the # here, simply (if no other override etag was provided). Note that this
# container db by encryption middleware. # override etag may be encrypted in the container db by encryption
# middleware.
etag_override = [ etag_override = [
MD5_OF_EMPTY_STRING, req.headers.get(get_container_update_override_key('etag'),
MD5_OF_EMPTY_STRING),
'symlink_target=%s' % req.headers[TGT_OBJ_SYSMETA_SYMLINK_HDR] 'symlink_target=%s' % req.headers[TGT_OBJ_SYSMETA_SYMLINK_HDR]
] ]
if TGT_ACCT_SYSMETA_SYMLINK_HDR in req.headers: if TGT_ACCT_SYSMETA_SYMLINK_HDR in req.headers:
etag_override.append( etag_override.append(
'symlink_target_account=%s' % 'symlink_target_account=%s' %
req.headers[TGT_ACCT_SYSMETA_SYMLINK_HDR]) req.headers[TGT_ACCT_SYSMETA_SYMLINK_HDR])
req.headers['X-Object-Sysmeta-Container-Update-Override-Etag'] = \ if TGT_ETAG_SYSMETA_SYMLINK_HDR in req.headers:
# if _validate_etag_and_update_sysmeta or a middleware sets
# TGT_ETAG_SYSMETA_SYMLINK_HDR then they need to also set
# TGT_BYTES_SYSMETA_SYMLINK_HDR. If they forget, they get a
# KeyError traceback and client gets a ServerError
etag_override.extend([
'symlink_target_etag=%s' %
req.headers[TGT_ETAG_SYSMETA_SYMLINK_HDR],
'symlink_target_bytes=%s' %
req.headers[TGT_BYTES_SYSMETA_SYMLINK_HDR],
])
req.headers[get_container_update_override_key('etag')] = \
'; '.join(etag_override) '; '.join(etag_override)
return self._app_call(req.environ) return self._app_call(req.environ)
@ -491,11 +641,16 @@ class SymlinkObjectContext(WSGIContext):
TGT_ACCT_SYSMETA_SYMLINK_HDR) or wsgi_quote(account) TGT_ACCT_SYSMETA_SYMLINK_HDR) or wsgi_quote(account)
location_hdr = os.path.join( location_hdr = os.path.join(
'/', version, target_acc, tgt_co) '/', version, target_acc, tgt_co)
headers = {'location': location_hdr}
tgt_etag = self._response_header_value(
TGT_ETAG_SYSMETA_SYMLINK_HDR)
if tgt_etag:
headers[TGT_ETAG_SYMLINK_HDR] = tgt_etag
req.environ['swift.leave_relative_location'] = True req.environ['swift.leave_relative_location'] = True
errmsg = 'The requested POST was applied to a symlink. POST ' +\ errmsg = 'The requested POST was applied to a symlink. POST ' +\
'directly to the target to apply requested metadata.' 'directly to the target to apply requested metadata.'
raise HTTPTemporaryRedirect( raise HTTPTemporaryRedirect(
body=errmsg, headers={'location': location_hdr}) body=errmsg, headers=headers)
else: else:
return resp return resp
@ -508,10 +663,7 @@ class SymlinkObjectContext(WSGIContext):
:returns: Response Iterator after start_response has been called :returns: Response Iterator after start_response has been called
""" """
if req.method in ('GET', 'HEAD'): if req.method in ('GET', 'HEAD'):
# if GET request came from versioned writes, then it should get if req.params.get('symlink') == 'get':
# the symlink only, not the referenced target
if req.params.get('symlink') == 'get' or \
req.environ.get('swift.source') == 'VW':
resp = self.handle_get_head_symlink(req) resp = self.handle_get_head_symlink(req)
else: else:
resp = self.handle_get_head(req) resp = self.handle_get_head(req)
@ -578,7 +730,7 @@ def filter_factory(global_conf, **local_conf):
symloop_max = int(conf.get('symloop_max', DEFAULT_SYMLOOP_MAX)) symloop_max = int(conf.get('symloop_max', DEFAULT_SYMLOOP_MAX))
if symloop_max < 1: if symloop_max < 1:
symloop_max = int(DEFAULT_SYMLOOP_MAX) symloop_max = int(DEFAULT_SYMLOOP_MAX)
register_swift_info('symlink', symloop_max=symloop_max) register_swift_info('symlink', symloop_max=symloop_max, static_links=True)
def symlink_mw(app): def symlink_mw(app):
return SymlinkMiddleware(app, conf, symloop_max) return SymlinkMiddleware(app, conf, symloop_max)
@ -371,7 +371,7 @@ class VersionedWritesContext(WSGIContext):
# to container, but not READ. This was allowed in previous version # to container, but not READ. This was allowed in previous version
# (i.e., before middleware) so keeping the same behavior here # (i.e., before middleware) so keeping the same behavior here
get_req = make_pre_authed_request( get_req = make_pre_authed_request(
req.environ, path=wsgi_quote(path_info), req.environ, path=wsgi_quote(path_info) + '?symlink=get',
headers={'X-Newest': 'True'}, method='GET', swift_source='VW') headers={'X-Newest': 'True'}, method='GET', swift_source='VW')
source_resp = get_req.get_response(self.app) source_resp = get_req.get_response(self.app)
@ -44,6 +44,8 @@ from swift.common.wsgi import make_subrequest
OBJECT_TRANSIENT_SYSMETA_PREFIX = 'x-object-transient-sysmeta-' OBJECT_TRANSIENT_SYSMETA_PREFIX = 'x-object-transient-sysmeta-'
OBJECT_SYSMETA_CONTAINER_UPDATE_OVERRIDE_PREFIX = \
'x-object-sysmeta-container-update-override-'
def get_param(req, name, default=None): def get_param(req, name, default=None):
@ -260,6 +262,17 @@ def get_object_transient_sysmeta(key):
return '%s%s' % (OBJECT_TRANSIENT_SYSMETA_PREFIX, key) return '%s%s' % (OBJECT_TRANSIENT_SYSMETA_PREFIX, key)
def get_container_update_override_key(key):
"""
Returns the full X-Object-Sysmeta-Container-Update-Override-* header key.
:param key: the key you want to override in the container update
:returns: the full header key
"""
header = '%s%s' % (OBJECT_SYSMETA_CONTAINER_UPDATE_OVERRIDE_PREFIX, key)
return header.title()
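# Illustrative usage (editorial sketch, not part of this change): the helper
# simply title-cases the prefixed key, so middlewares can build override
# headers without hard-coding the long sysmeta name.
#
#   >>> get_container_update_override_key('etag')
#   'X-Object-Sysmeta-Container-Update-Override-Etag'
#   >>> get_container_update_override_key('size')
#   'X-Object-Sysmeta-Container-Update-Override-Size'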
def remove_items(headers, condition): def remove_items(headers, condition):
""" """
Removes items from a dict whose keys satisfy Removes items from a dict whose keys satisfy
@ -3254,6 +3254,9 @@ class GreenAsyncPileWaitallTimeout(Timeout):
pass pass
DEAD = object()
class GreenAsyncPile(object): class GreenAsyncPile(object):
""" """
Runs jobs in a pool of green threads, and the results can be retrieved by Runs jobs in a pool of green threads, and the results can be retrieved by
@ -3282,6 +3285,8 @@ class GreenAsyncPile(object):
def _run_func(self, func, args, kwargs): def _run_func(self, func, args, kwargs):
try: try:
self._responses.put(func(*args, **kwargs)) self._responses.put(func(*args, **kwargs))
except Exception:
self._responses.put(DEAD)
finally: finally:
self._inflight -= 1 self._inflight -= 1
@ -3332,6 +3337,7 @@ class GreenAsyncPile(object):
return self return self
def next(self): def next(self):
while True:
try: try:
rv = self._responses.get_nowait() rv = self._responses.get_nowait()
except eventlet.queue.Empty: except eventlet.queue.Empty:
@ -3339,6 +3345,8 @@ class GreenAsyncPile(object):
raise StopIteration() raise StopIteration()
rv = self._responses.get() rv = self._responses.get()
self._pending -= 1 self._pending -= 1
if rv is DEAD:
continue
return rv return rv
__next__ = next __next__ = next
@ -619,7 +619,7 @@ def run_server(conf, logger, sock, global_conf=None):
try: try:
wsgi.server(sock, app, wsgi_logger, **server_kwargs) wsgi.server(sock, app, wsgi_logger, **server_kwargs)
except socket.error as err: except socket.error as err:
if err[0] != errno.EINVAL: if err.errno != errno.EINVAL:
raise raise
pool.waitall() pool.waitall()
@ -138,7 +138,7 @@ class ContainerReplicator(db_replicator.Replicator):
def _fetch_and_merge_shard_ranges(self, http, broker): def _fetch_and_merge_shard_ranges(self, http, broker):
with Timeout(self.node_timeout): with Timeout(self.node_timeout):
response = http.replicate('get_shard_ranges') response = http.replicate('get_shard_ranges')
if is_success(response.status): if response and is_success(response.status):
broker.merge_shard_ranges(json.loads( broker.merge_shard_ranges(json.loads(
response.data.decode('ascii'))) response.data.decode('ascii')))
@ -415,12 +415,11 @@ class ContainerController(BaseStorageServer):
return created return created
def _update_metadata(self, req, broker, req_timestamp, method): def _update_metadata(self, req, broker, req_timestamp, method):
metadata = {} metadata = {
metadata.update( wsgi_to_str(key): (wsgi_to_str(value), req_timestamp.internal)
(wsgi_to_str(key), (wsgi_to_str(value), req_timestamp.internal))
for key, value in req.headers.items() for key, value in req.headers.items()
if key.lower() in self.save_headers or if key.lower() in self.save_headers
is_sys_or_user_meta('container', key)) or is_sys_or_user_meta('container', key)}
if metadata: if metadata:
if 'X-Container-Sync-To' in metadata: if 'X-Container-Sync-To' in metadata:
if 'X-Container-Sync-To' not in broker.metadata or \ if 'X-Container-Sync-To' not in broker.metadata or \
@ -706,7 +705,7 @@ class ContainerController(BaseStorageServer):
def create_listing(self, req, out_content_type, info, resp_headers, def create_listing(self, req, out_content_type, info, resp_headers,
metadata, container_list, container): metadata, container_list, container):
for key, (value, timestamp) in metadata.items(): for key, (value, _timestamp) in metadata.items():
if value and (key.lower() in self.save_headers or if value and (key.lower() in self.save_headers or
is_sys_or_user_meta('container', key)): is_sys_or_user_meta('container', key)):
resp_headers[str_to_wsgi(key)] = str_to_wsgi(value) resp_headers[str_to_wsgi(key)] = str_to_wsgi(value)
@ -400,6 +400,7 @@ class ObjectReconstructor(Daemon):
path, headers, full_get_path) path, headers, full_get_path)
buckets = defaultdict(dict) buckets = defaultdict(dict)
durable_buckets = {}
etag_buckets = {} etag_buckets = {}
error_resp_count = 0 error_resp_count = 0
for resp in pile: for resp in pile:
@ -443,6 +444,10 @@ class ObjectReconstructor(Daemon):
continue continue
timestamp = Timestamp(timestamp) timestamp = Timestamp(timestamp)
durable = resp.headers.get('X-Backend-Durable-Timestamp')
if durable:
durable_buckets[Timestamp(durable)] = True
etag = resp.headers.get('X-Object-Sysmeta-Ec-Etag') etag = resp.headers.get('X-Object-Sysmeta-Ec-Etag')
if not etag: if not etag:
self.logger.warning('Invalid resp from %s, frag index %s ' self.logger.warning('Invalid resp from %s, frag index %s '
@ -468,26 +473,29 @@ class ObjectReconstructor(Daemon):
% (fi_to_rebuild, list(buckets[timestamp]))) % (fi_to_rebuild, list(buckets[timestamp])))
break break
else: else:
path = _full_path(node, job['partition'],
datafile_metadata['name'],
job['policy'])
for timestamp, resp in sorted(buckets.items()): for timestamp, resp in sorted(buckets.items()):
etag = etag_buckets[timestamp] etag = etag_buckets[timestamp]
durable = durable_buckets.get(timestamp)
self.logger.error( self.logger.error(
'Unable to get enough responses (%s/%s) ' 'Unable to get enough responses (%s/%s) to reconstruct '
'to reconstruct %s frag#%s with ETag %s' % ( '%s %s frag#%s with ETag %s and timestamp %s' % (
len(resp), job['policy'].ec_ndata, len(resp), job['policy'].ec_ndata,
_full_path(node, job['partition'], 'durable' if durable else 'non-durable',
datafile_metadata['name'], path, fi_to_rebuild, etag, timestamp.internal))
job['policy']),
fi_to_rebuild, etag))
if error_resp_count: if error_resp_count:
durable = durable_buckets.get(Timestamp(
datafile_metadata['X-Timestamp']))
self.logger.error( self.logger.error(
'Unable to get enough responses (%s error responses) ' 'Unable to get enough responses (%s error responses) '
'to reconstruct %s frag#%s' % ( 'to reconstruct %s %s frag#%s' % (
error_resp_count, error_resp_count,
_full_path(node, job['partition'], 'durable' if durable else 'non-durable',
datafile_metadata['name'], path, fi_to_rebuild))
job['policy']),
fi_to_rebuild))
raise DiskFileError('Unable to reconstruct EC archive') raise DiskFileError('Unable to reconstruct EC archive')
@ -393,7 +393,7 @@ class ObjectReplicator(Daemon):
return 1 # failure response code return 1 # failure response code
total_time = time.time() - start_time total_time = time.time() - start_time
for result in results.split('\n'): for result in results.decode('utf8').split('\n'):
if result == '': if result == '':
continue continue
if result.startswith('cd+'): if result.startswith('cd+'):
@ -43,6 +43,8 @@ from swift.common.exceptions import ConnectionTimeout, DiskFileQuarantined, \
DiskFileNotExist, DiskFileCollision, DiskFileNoSpace, DiskFileDeleted, \ DiskFileNotExist, DiskFileCollision, DiskFileNoSpace, DiskFileDeleted, \
DiskFileDeviceUnavailable, DiskFileExpired, ChunkReadTimeout, \ DiskFileDeviceUnavailable, DiskFileExpired, ChunkReadTimeout, \
ChunkReadError, DiskFileXattrNotSupported ChunkReadError, DiskFileXattrNotSupported
from swift.common.request_helpers import \
OBJECT_SYSMETA_CONTAINER_UPDATE_OVERRIDE_PREFIX
from swift.obj import ssync_receiver from swift.obj import ssync_receiver
from swift.common.http import is_success, HTTP_MOVED_PERMANENTLY from swift.common.http import is_success, HTTP_MOVED_PERMANENTLY
from swift.common.base_storage_server import BaseStorageServer from swift.common.base_storage_server import BaseStorageServer
@ -583,7 +585,7 @@ class ObjectController(BaseStorageServer):
# x-object-sysmeta-container-update-override-* headers take precedence # x-object-sysmeta-container-update-override-* headers take precedence
# over x-backend-container-update-override-* headers # over x-backend-container-update-override-* headers
override_prefixes = ['x-backend-container-update-override-', override_prefixes = ['x-backend-container-update-override-',
'x-object-sysmeta-container-update-override-'] OBJECT_SYSMETA_CONTAINER_UPDATE_OVERRIDE_PREFIX]
for override_prefix in override_prefixes: for override_prefix in override_prefixes:
for key, val in metadata.items(): for key, val in metadata.items():
if key.lower().startswith(override_prefix): if key.lower().startswith(override_prefix):
@ -130,17 +130,7 @@ class Receiver(object):
# raised during processing because otherwise the sender could send for # raised during processing because otherwise the sender could send for
# quite some time before realizing it was all in vain. # quite some time before realizing it was all in vain.
self.disconnect = True self.disconnect = True
try:
self.initialize_request() self.initialize_request()
except swob.HTTPException:
# Old (pre-0.18.0) eventlet would try to drain the request body
# in a way that's prone to blowing up when the client has
# disconnected. Trick it into skipping that so we don't trip
# ValueError: invalid literal for int() with base 16
# in tests. Note we disconnect shortly after receiving a non-200
# response in the sender code, so this is not *so* crazy to do.
request.environ['wsgi.input'].chunked_input = False
raise
def __call__(self): def __call__(self):
""" """
@ -1282,10 +1282,10 @@ class ResumingGetter(object):
if not self.newest: # one good source is enough if not self.newest: # one good source is enough
return True return True
else: else:
if self.server_type != 'Object' and 'handoff_index' in node and \ if 'handoff_index' in node and \
possible_source.status == HTTP_NOT_FOUND and \ possible_source.status == HTTP_NOT_FOUND and \
not Timestamp(src_headers.get('x-backend-timestamp', 0)): not Timestamp(src_headers.get('x-backend-timestamp', 0)):
# throw out 404s from handoff nodes unless the db is really # throw out 404s from handoff nodes unless the data is really
# on disk and had been DELETEd # on disk and had been DELETEd
return False return False
self.statuses.append(possible_source.status) self.statuses.append(possible_source.status)
@ -38,7 +38,7 @@ from hashlib import md5
from swift import gettext_ as _ from swift import gettext_ as _
from greenlet import GreenletExit from greenlet import GreenletExit
from eventlet import GreenPile from eventlet import GreenPile, sleep
from eventlet.queue import Queue from eventlet.queue import Queue
from eventlet.timeout import Timeout from eventlet.timeout import Timeout
@ -1066,6 +1066,7 @@ class ECAppIter(object):
# executing the internal_parts_iters. # executing the internal_parts_iters.
if self.stashed_iter: if self.stashed_iter:
self.stashed_iter.close() self.stashed_iter.close()
sleep() # Give the per-frag threads a chance to clean up
for it in self.internal_parts_iters: for it in self.internal_parts_iters:
close_if_possible(it) close_if_possible(it)
@ -2002,6 +2003,10 @@ class ECGetResponseBucket(object):
def set_durable(self): def set_durable(self):
self._durable = True self._durable = True
@property
def durable(self):
return self._durable
def add_response(self, getter, parts_iter): def add_response(self, getter, parts_iter):
if not self.gets: if not self.gets:
self.status = getter.last_status self.status = getter.last_status
@ -2017,7 +2022,8 @@ class ECGetResponseBucket(object):
# metadata headers for self.headers by selecting the source with # metadata headers for self.headers by selecting the source with
# the latest X-Timestamp. # the latest X-Timestamp.
self.headers = getter.last_headers self.headers = getter.last_headers
elif (getter.last_headers.get('X-Object-Sysmeta-Ec-Etag') != elif (self.timestamp_str is not None and # ie, not bad_bucket
getter.last_headers.get('X-Object-Sysmeta-Ec-Etag') !=
self.headers.get('X-Object-Sysmeta-Ec-Etag')): self.headers.get('X-Object-Sysmeta-Ec-Etag')):
# Fragments at the same timestamp with different etags are never # Fragments at the same timestamp with different etags are never
# expected. If somehow it happens then ignore those fragments # expected. If somehow it happens then ignore those fragments
@ -2054,9 +2060,8 @@ class ECGetResponseBucket(object):
@property @property
def shortfall(self): def shortfall(self):
# A non-durable bucket always has a shortfall of at least 1
result = self.policy.ec_ndata - len(self.get_responses()) result = self.policy.ec_ndata - len(self.get_responses())
return max(result, 0 if self._durable else 1) return max(result, 0)
@property @property
def shortfall_with_alts(self): def shortfall_with_alts(self):
@ -2064,7 +2069,7 @@ class ECGetResponseBucket(object):
# for frags on the alt nodes. # for frags on the alt nodes.
alts = set(self.alt_nodes.keys()).difference(set(self.gets.keys())) alts = set(self.alt_nodes.keys()).difference(set(self.gets.keys()))
result = self.policy.ec_ndata - (len(self.get_responses()) + len(alts)) result = self.policy.ec_ndata - (len(self.get_responses()) + len(alts))
return max(result, 0 if self._durable else 1) return max(result, 0)
def __str__(self): def __str__(self):
# return a string summarising bucket state, useful for debugging. # return a string summarising bucket state, useful for debugging.
@ -2141,12 +2146,14 @@ class ECGetResponseCollection(object):
def _sort_buckets(self): def _sort_buckets(self):
def key_fn(bucket): def key_fn(bucket):
# Returns a tuple to use for sort ordering: # Returns a tuple to use for sort ordering:
# buckets with no shortfall sort higher, # durable buckets with no shortfall sort higher,
# then durable buckets with no shortfall_with_alts,
# then non-durable buckets with no shortfall,
# otherwise buckets with lowest shortfall_with_alts sort higher, # otherwise buckets with lowest shortfall_with_alts sort higher,
# finally buckets with newer timestamps sort higher. # finally buckets with newer timestamps sort higher.
return (bucket.shortfall <= 0, return (bucket.durable,
(not (bucket.shortfall <= 0) and bucket.shortfall <= 0,
(-1 * bucket.shortfall_with_alts)), -1 * bucket.shortfall_with_alts,
bucket.timestamp_str) bucket.timestamp_str)
return sorted(self.buckets.values(), key=key_fn, reverse=True) return sorted(self.buckets.values(), key=key_fn, reverse=True)
@ -2196,7 +2203,7 @@ class ECGetResponseCollection(object):
return None return None
bucket = self.best_bucket bucket = self.best_bucket
if (bucket is None) or (bucket.shortfall <= 0): if (bucket is None) or (bucket.shortfall <= 0) or not bucket.durable:
return None return None
alt_frags = set(bucket.alt_nodes.keys()) alt_frags = set(bucket.alt_nodes.keys())
@ -2381,7 +2388,11 @@ class ECObjectController(BaseObjectController):
shortfall = bad_bucket.shortfall shortfall = bad_bucket.shortfall
best_bucket = buckets.best_bucket best_bucket = buckets.best_bucket
if best_bucket: if best_bucket:
shortfall = min(best_bucket.shortfall, shortfall) shortfall = best_bucket.shortfall
if not best_bucket.durable and shortfall <= 0:
# be willing to go a *little* deeper, slowly
shortfall = 1
shortfall = min(shortfall, bad_bucket.shortfall)
if (extra_requests < max_extra_requests and if (extra_requests < max_extra_requests and
shortfall > pile._pending and shortfall > pile._pending and
(node_iter.nodes_left > 0 or (node_iter.nodes_left > 0 or
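A sketch of the revised extra-request budget (illustrative, with plain values standing in for the bucket objects): a complete-looking but non-durable best bucket still earns one more backend request, bounded by the error bucket's own shortfall.

def extra_request_shortfall(bad_shortfall, best_shortfall, best_durable):
    shortfall = best_shortfall
    if not best_durable and shortfall <= 0:
        # be willing to go a *little* deeper, slowly
        shortfall = 1
    return min(shortfall, bad_shortfall)

print(extra_request_shortfall(3, 0, best_durable=False))  # 1 -> one more request
print(extra_request_shortfall(3, 0, best_durable=True))   # 0 -> nothing more needed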
@ -2395,7 +2406,7 @@ class ECObjectController(BaseObjectController):
buckets.get_extra_headers) buckets.get_extra_headers)
req.range = orig_range req.range = orig_range
if best_bucket and best_bucket.shortfall <= 0: if best_bucket and best_bucket.shortfall <= 0 and best_bucket.durable:
# headers can come from any of the getters # headers can come from any of the getters
resp_headers = best_bucket.headers resp_headers = best_bucket.headers
resp_headers.pop('Content-Range', None) resp_headers.pop('Content-Range', None)
@ -2435,10 +2446,28 @@ class ECObjectController(BaseObjectController):
bodies = [] bodies = []
headers = [] headers = []
for getter, _parts_iter in bad_bucket.get_responses(): for getter, _parts_iter in bad_bucket.get_responses():
if best_bucket and best_bucket.durable:
headers = HeaderKeyDict(getter.last_headers)
t_data_file = headers.get('X-Backend-Data-Timestamp')
t_obj = headers.get('X-Backend-Timestamp',
headers.get('X-Timestamp'))
bad_ts = Timestamp(t_data_file or t_obj or '0')
if bad_ts <= Timestamp(best_bucket.timestamp_str):
# We have reason to believe there's still good data
# out there, it's just currently unavailable
continue
statuses.extend(getter.statuses) statuses.extend(getter.statuses)
reasons.extend(getter.reasons) reasons.extend(getter.reasons)
bodies.extend(getter.bodies) bodies.extend(getter.bodies)
headers.extend(getter.source_headers) headers.extend(getter.source_headers)
if not statuses and best_bucket and not best_bucket.durable:
# pretend that non-durable bucket was 404s
statuses.append(404)
reasons.append('404 Not Found')
bodies.append(b'')
headers.append({})
resp = self.best_response( resp = self.best_response(
req, statuses, reasons, bodies, 'Object', req, statuses, reasons, bodies, 'Object',
headers=headers) headers=headers)
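A sketch of the error-summarising behaviour added above (illustrative; (timestamp, status) pairs stand in for the real getters): errors no newer than a durable best bucket are dropped as transient, and if only a non-durable bucket remains the result is summarised as a 404.

def summarize_errors(error_responses, best_ts, best_durable):
    statuses = []
    for ts, status in error_responses:
        if best_durable and ts <= best_ts:
            # Good data should still exist somewhere; don't let this error
            # drag the overall response down.
            continue
        statuses.append(status)
    if not statuses and not best_durable:
        # pretend the non-durable bucket was a 404
        statuses.append(404)
    return statuses

print(summarize_errors([(1.0, 503)], best_ts=2.0, best_durable=True))  # []
print(summarize_errors([], best_ts=2.0, best_durable=False))           # [404]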

View File

@ -16,6 +16,7 @@
from __future__ import print_function from __future__ import print_function
import mock import mock
import os import os
import six
from six.moves.urllib.parse import urlparse, urlsplit, urlunsplit from six.moves.urllib.parse import urlparse, urlsplit, urlunsplit
import sys import sys
import pickle import pickle
@ -25,6 +26,7 @@ import eventlet
import eventlet.debug import eventlet.debug
import functools import functools
import random import random
import base64
from time import time, sleep from time import time, sleep
from contextlib import closing from contextlib import closing
@ -319,7 +321,9 @@ def _load_encryption(proxy_conf_file, swift_conf_file, **kwargs):
"proxy-logging proxy-server", "proxy-logging proxy-server",
"keymaster encryption proxy-logging proxy-server") "keymaster encryption proxy-logging proxy-server")
conf.set(section, 'pipeline', pipeline) conf.set(section, 'pipeline', pipeline)
root_secret = os.urandom(32).encode("base64") root_secret = base64.b64encode(os.urandom(32))
if not six.PY2:
root_secret = root_secret.decode('ascii')
conf.set('filter:keymaster', 'encryption_root_secret', root_secret) conf.set('filter:keymaster', 'encryption_root_secret', root_secret)
except NoSectionError as err: except NoSectionError as err:
msg = 'Error problem with proxy conf file %s: %s' % \ msg = 'Error problem with proxy conf file %s: %s' % \
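The same secret-generation pattern in isolation (a sketch, runnable on both Python 2 and 3; the 32-byte length is simply what the test setup uses):

import base64
import os

import six

root_secret = base64.b64encode(os.urandom(32))
if not six.PY2:
    # ConfigParser on py3 expects text values, not bytes.
    root_secret = root_secret.decode('ascii')
print(len(root_secret))  # 44 base64 characters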

View File

@ -81,7 +81,7 @@ class Connection(object):
break break
for bucket in buckets: for bucket in buckets:
if not isinstance(bucket.name, six.binary_type): if six.PY2 and not isinstance(bucket.name, bytes):
bucket.name = bucket.name.encode('utf-8') bucket.name = bucket.name.encode('utf-8')
try: try:
@ -103,7 +103,7 @@ class Connection(object):
exceptions.insert(0, 'Too many errors to continue:') exceptions.insert(0, 'Too many errors to continue:')
raise Exception('\n========\n'.join(exceptions)) raise Exception('\n========\n'.join(exceptions))
def make_request(self, method, bucket='', obj='', headers=None, body='', def make_request(self, method, bucket='', obj='', headers=None, body=b'',
query=None): query=None):
""" """
Wrapper method of S3Connection.make_request. Wrapper method of S3Connection.make_request.
@ -123,7 +123,9 @@ class Connection(object):
query_args=query, sender=None, query_args=query, sender=None,
override_num_retries=RETRY_COUNT, override_num_retries=RETRY_COUNT,
retry_handler=None) retry_handler=None)
return response.status, dict(response.getheaders()), response.read() return (response.status,
{h.lower(): v for h, v in response.getheaders()},
response.read())
def generate_url_and_headers(self, method, bucket='', obj='', def generate_url_and_headers(self, method, bucket='', obj='',
expires_in=3600): expires_in=3600):
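A sketch of the header normalisation used above (illustrative; the casing of names from boto's getheaders() differs between py2 and py3, so the tests key everything by lower-cased header names):

def normalize_headers(header_pairs):
    # header_pairs: iterable of (name, value) tuples from an HTTP response.
    return {h.lower(): v for h, v in header_pairs}

print(normalize_headers([('Content-Type', 'application/xml'), ('ETag', 'abc')]))
# {'content-type': 'application/xml', 'etag': 'abc'}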

View File

@ -40,7 +40,8 @@ class TestS3Acl(S3ApiBase):
raise tf.SkipTest( raise tf.SkipTest(
'TestS3Acl requires s3_access_key3 and s3_secret_key3 ' 'TestS3Acl requires s3_access_key3 and s3_secret_key3 '
'configured for reduced-access user') 'configured for reduced-access user')
self.conn.make_request('PUT', self.bucket) status, headers, body = self.conn.make_request('PUT', self.bucket)
self.assertEqual(status, 200, body)
access_key3 = tf.config['s3_access_key3'] access_key3 = tf.config['s3_access_key3']
secret_key3 = tf.config['s3_secret_key3'] secret_key3 = tf.config['s3_secret_key3']
self.conn3 = Connection(access_key3, secret_key3, access_key3) self.conn3 = Connection(access_key3, secret_key3, access_key3)

View File

@ -14,6 +14,7 @@
# limitations under the License. # limitations under the License.
import base64 import base64
import binascii
import unittest2 import unittest2
import os import os
import boto import boto
@ -23,7 +24,7 @@ import boto
from distutils.version import StrictVersion from distutils.version import StrictVersion
from hashlib import md5 from hashlib import md5
from itertools import izip, izip_longest from six.moves import zip, zip_longest
import test.functional as tf import test.functional as tf
from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \ from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \
@ -67,7 +68,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
headers = [None] * len(keys) headers = [None] * len(keys)
self.conn.make_request('PUT', bucket) self.conn.make_request('PUT', bucket)
query = 'uploads' query = 'uploads'
for key, key_headers in izip_longest(keys, headers): for key, key_headers in zip_longest(keys, headers):
for i in range(trials): for i in range(trials):
status, resp_headers, body = \ status, resp_headers, body = \
self.conn.make_request('POST', bucket, key, self.conn.make_request('POST', bucket, key,
@ -76,7 +77,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
def _upload_part(self, bucket, key, upload_id, content=None, part_num=1): def _upload_part(self, bucket, key, upload_id, content=None, part_num=1):
query = 'partNumber=%s&uploadId=%s' % (part_num, upload_id) query = 'partNumber=%s&uploadId=%s' % (part_num, upload_id)
content = content if content else 'a' * self.min_segment_size content = content if content else b'a' * self.min_segment_size
status, headers, body = \ status, headers, body = \
self.conn.make_request('PUT', bucket, key, body=content, self.conn.make_request('PUT', bucket, key, body=content,
query=query) query=query)
@ -108,8 +109,9 @@ class TestS3ApiMultiUpload(S3ApiBase):
def test_object_multi_upload(self): def test_object_multi_upload(self):
bucket = 'bucket' bucket = 'bucket'
keys = ['obj1', 'obj2', 'obj3'] keys = ['obj1', 'obj2', 'obj3']
bad_content_md5 = base64.b64encode(b'a' * 16).strip().decode('ascii')
headers = [None, headers = [None,
{'Content-MD5': base64.b64encode('a' * 16).strip()}, {'Content-MD5': bad_content_md5},
{'Etag': 'nonsense'}] {'Etag': 'nonsense'}]
uploads = [] uploads = []
@ -118,20 +120,20 @@ class TestS3ApiMultiUpload(S3ApiBase):
# Initiate Multipart Upload # Initiate Multipart Upload
for expected_key, (status, headers, body) in \ for expected_key, (status, headers, body) in \
izip(keys, results_generator): zip(keys, results_generator):
self.assertEqual(status, 200) self.assertEqual(status, 200, body)
self.assertCommonResponseHeaders(headers) self.assertCommonResponseHeaders(headers)
self.assertTrue('content-type' in headers) self.assertIn('content-type', headers)
self.assertEqual(headers['content-type'], 'application/xml') self.assertEqual(headers['content-type'], 'application/xml')
self.assertTrue('content-length' in headers) self.assertIn('content-length', headers)
self.assertEqual(headers['content-length'], str(len(body))) self.assertEqual(headers['content-length'], str(len(body)))
elem = fromstring(body, 'InitiateMultipartUploadResult') elem = fromstring(body, 'InitiateMultipartUploadResult')
self.assertEqual(elem.find('Bucket').text, bucket) self.assertEqual(elem.find('Bucket').text, bucket)
key = elem.find('Key').text key = elem.find('Key').text
self.assertEqual(expected_key, key) self.assertEqual(expected_key, key)
upload_id = elem.find('UploadId').text upload_id = elem.find('UploadId').text
self.assertTrue(upload_id is not None) self.assertIsNotNone(upload_id)
self.assertTrue((key, upload_id) not in uploads) self.assertNotIn((key, upload_id), uploads)
uploads.append((key, upload_id)) uploads.append((key, upload_id))
self.assertEqual(len(uploads), len(keys)) # sanity self.assertEqual(len(uploads), len(keys)) # sanity
@ -157,7 +159,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
self.assertEqual(elem.find('IsTruncated').text, 'false') self.assertEqual(elem.find('IsTruncated').text, 'false')
self.assertEqual(len(elem.findall('Upload')), 3) self.assertEqual(len(elem.findall('Upload')), 3)
for (expected_key, expected_upload_id), u in \ for (expected_key, expected_upload_id), u in \
izip(uploads, elem.findall('Upload')): zip(uploads, elem.findall('Upload')):
key = u.find('Key').text key = u.find('Key').text
upload_id = u.find('UploadId').text upload_id = u.find('UploadId').text
self.assertEqual(expected_key, key) self.assertEqual(expected_key, key)
@ -174,7 +176,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
# Upload Part # Upload Part
key, upload_id = uploads[0] key, upload_id = uploads[0]
content = 'a' * self.min_segment_size content = b'a' * self.min_segment_size
etag = md5(content).hexdigest() etag = md5(content).hexdigest()
status, headers, body = \ status, headers, body = \
self._upload_part(bucket, key, upload_id, content) self._upload_part(bucket, key, upload_id, content)
@ -190,7 +192,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
key, upload_id = uploads[1] key, upload_id = uploads[1]
src_bucket = 'bucket2' src_bucket = 'bucket2'
src_obj = 'obj3' src_obj = 'obj3'
src_content = 'b' * self.min_segment_size src_content = b'b' * self.min_segment_size
etag = md5(src_content).hexdigest() etag = md5(src_content).hexdigest()
# prepare src obj # prepare src obj
@ -266,7 +268,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
# etags will be used to generate xml for Complete Multipart Upload # etags will be used to generate xml for Complete Multipart Upload
etags = [] etags = []
for (expected_etag, expected_date), p in \ for (expected_etag, expected_date), p in \
izip(expected_parts_list, elem.findall('Part')): zip(expected_parts_list, elem.findall('Part')):
last_modified = p.find('LastModified').text last_modified = p.find('LastModified').text
self.assertTrue(last_modified is not None) self.assertTrue(last_modified is not None)
# TODO: sanity check # TODO: sanity check
@ -295,9 +297,9 @@ class TestS3ApiMultiUpload(S3ApiBase):
else: else:
self.assertIn('transfer-encoding', headers) self.assertIn('transfer-encoding', headers)
self.assertEqual(headers['transfer-encoding'], 'chunked') self.assertEqual(headers['transfer-encoding'], 'chunked')
lines = body.split('\n') lines = body.split(b'\n')
self.assertTrue(lines[0].startswith('<?xml'), body) self.assertTrue(lines[0].startswith(b'<?xml'), body)
self.assertTrue(lines[0].endswith('?>'), body) self.assertTrue(lines[0].endswith(b'?>'), body)
elem = fromstring(body, 'CompleteMultipartUploadResult') elem = fromstring(body, 'CompleteMultipartUploadResult')
# TODO: use tf.config value # TODO: use tf.config value
self.assertEqual( self.assertEqual(
@ -305,9 +307,10 @@ class TestS3ApiMultiUpload(S3ApiBase):
elem.find('Location').text) elem.find('Location').text)
self.assertEqual(elem.find('Bucket').text, bucket) self.assertEqual(elem.find('Bucket').text, bucket)
self.assertEqual(elem.find('Key').text, key) self.assertEqual(elem.find('Key').text, key)
concatted_etags = ''.join(etag.strip('"') for etag in etags) concatted_etags = b''.join(
etag.strip('"').encode('ascii') for etag in etags)
exp_etag = '"%s-%s"' % ( exp_etag = '"%s-%s"' % (
md5(concatted_etags.decode('hex')).hexdigest(), len(etags)) md5(binascii.unhexlify(concatted_etags)).hexdigest(), len(etags))
etag = elem.find('ETag').text etag = elem.find('ETag').text
self.assertEqual(etag, exp_etag) self.assertEqual(etag, exp_etag)
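A sketch of the expected multipart ETag checked above (illustrative, not the s3api implementation): the MD5 of the concatenated binary part digests, suffixed with the part count.

import binascii
from hashlib import md5

def expected_multipart_etag(part_etags):
    # part_etags: hex ETag strings for each part, quotes already stripped.
    concatted = b''.join(etag.encode('ascii') for etag in part_etags)
    return '"%s-%d"' % (md5(binascii.unhexlify(concatted)).hexdigest(),
                        len(part_etags))

parts = [md5(b'a' * 1024).hexdigest(), md5(b'b' * 1024).hexdigest()]
print(expected_multipart_etag(parts))  # ends with "-2" for two parts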
@ -332,7 +335,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
last_modified = elem.find('LastModified').text last_modified = elem.find('LastModified').text
self.assertIsNotNone(last_modified) self.assertIsNotNone(last_modified)
exp_content = 'a' * self.min_segment_size exp_content = b'a' * self.min_segment_size
etag = md5(exp_content).hexdigest() etag = md5(exp_content).hexdigest()
self.assertEqual(resp_etag, etag) self.assertEqual(resp_etag, etag)
@ -723,7 +726,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
query = 'partNumber=%s&uploadId=%s' % (i, upload_id) query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
status, headers, body = \ status, headers, body = \
self.conn.make_request('PUT', bucket, key, query=query, self.conn.make_request('PUT', bucket, key, query=query,
body='A' * body_size[i]) body=b'A' * body_size[i])
etags.append(headers['etag']) etags.append(headers['etag'])
xml = self._gen_comp_xml(etags) xml = self._gen_comp_xml(etags)
@ -747,7 +750,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
query = 'partNumber=%s&uploadId=%s' % (i, upload_id) query = 'partNumber=%s&uploadId=%s' % (i, upload_id)
status, headers, body = \ status, headers, body = \
self.conn.make_request('PUT', bucket, key, query=query, self.conn.make_request('PUT', bucket, key, query=query,
body='A' * body_size[i]) body=b'A' * body_size[i])
etags.append(headers['etag']) etags.append(headers['etag'])
xml = self._gen_comp_xml(etags) xml = self._gen_comp_xml(etags)
@ -770,9 +773,9 @@ class TestS3ApiMultiUpload(S3ApiBase):
etags = [] etags = []
for i in range(1, 4): for i in range(1, 4):
query = 'partNumber=%s&uploadId=%s' % (2 * i - 1, upload_id) query = 'partNumber=%s&uploadId=%s' % (2 * i - 1, upload_id)
status, headers, body = \ status, headers, body = self.conn.make_request(
self.conn.make_request('PUT', bucket, key, 'PUT', bucket, key, body=b'A' * 1024 * 1024 * 5,
body='A' * 1024 * 1024 * 5, query=query) query=query)
etags.append(headers['etag']) etags.append(headers['etag'])
query = 'uploadId=%s' % upload_id query = 'uploadId=%s' % upload_id
xml = self._gen_comp_xml(etags[:-1], step=2) xml = self._gen_comp_xml(etags[:-1], step=2)
@ -791,7 +794,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
# Initiate Multipart Upload # Initiate Multipart Upload
for expected_key, (status, headers, body) in \ for expected_key, (status, headers, body) in \
izip(keys, results_generator): zip(keys, results_generator):
self.assertEqual(status, 200) self.assertEqual(status, 200)
self.assertCommonResponseHeaders(headers) self.assertCommonResponseHeaders(headers)
self.assertTrue('content-type' in headers) self.assertTrue('content-type' in headers)
@ -813,7 +816,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
key, upload_id = uploads[0] key, upload_id = uploads[0]
src_bucket = 'bucket2' src_bucket = 'bucket2'
src_obj = 'obj4' src_obj = 'obj4'
src_content = 'y' * (self.min_segment_size / 2) + 'z' * \ src_content = b'y' * (self.min_segment_size // 2) + b'z' * \
self.min_segment_size self.min_segment_size
src_range = 'bytes=0-%d' % (self.min_segment_size - 1) src_range = 'bytes=0-%d' % (self.min_segment_size - 1)
etag = md5(src_content[:self.min_segment_size]).hexdigest() etag = md5(src_content[:self.min_segment_size]).hexdigest()
@ -901,7 +904,7 @@ class TestS3ApiMultiUploadSigV4(TestS3ApiMultiUpload):
# Initiate Multipart Upload # Initiate Multipart Upload
for expected_key, (status, _, body) in \ for expected_key, (status, _, body) in \
izip(keys, results_generator): zip(keys, results_generator):
self.assertEqual(status, 200) # sanity self.assertEqual(status, 200) # sanity
elem = fromstring(body, 'InitiateMultipartUploadResult') elem = fromstring(body, 'InitiateMultipartUploadResult')
key = elem.find('Key').text key = elem.find('Key').text
@ -915,7 +918,7 @@ class TestS3ApiMultiUploadSigV4(TestS3ApiMultiUpload):
# Upload Part # Upload Part
key, upload_id = uploads[0] key, upload_id = uploads[0]
content = 'a' * self.min_segment_size content = b'a' * self.min_segment_size
status, headers, body = \ status, headers, body = \
self._upload_part(bucket, key, upload_id, content) self._upload_part(bucket, key, upload_id, content)
self.assertEqual(status, 200) self.assertEqual(status, 200)

View File

@ -25,7 +25,8 @@ import email.parser
from email.utils import formatdate, parsedate from email.utils import formatdate, parsedate
from time import mktime from time import mktime
from hashlib import md5 from hashlib import md5
from urllib import quote import six
from six.moves.urllib.parse import quote
import test.functional as tf import test.functional as tf
@ -59,7 +60,7 @@ class TestS3ApiObject(S3ApiBase):
def test_object(self): def test_object(self):
obj = 'object name with %-sign' obj = 'object name with %-sign'
content = 'abc123' content = b'abc123'
etag = md5(content).hexdigest() etag = md5(content).hexdigest()
# PUT Object # PUT Object
@ -219,19 +220,19 @@ class TestS3ApiObject(S3ApiBase):
status, headers, body = \ status, headers, body = \
auth_error_conn.make_request('HEAD', self.bucket, obj) auth_error_conn.make_request('HEAD', self.bucket, obj)
self.assertEqual(status, 403) self.assertEqual(status, 403)
self.assertEqual(body, '') # sanity self.assertEqual(body, b'') # sanity
self.assertEqual(headers['content-type'], 'application/xml') self.assertEqual(headers['content-type'], 'application/xml')
status, headers, body = \ status, headers, body = \
self.conn.make_request('HEAD', self.bucket, 'invalid') self.conn.make_request('HEAD', self.bucket, 'invalid')
self.assertEqual(status, 404) self.assertEqual(status, 404)
self.assertEqual(body, '') # sanity self.assertEqual(body, b'') # sanity
self.assertEqual(headers['content-type'], 'application/xml') self.assertEqual(headers['content-type'], 'application/xml')
status, headers, body = \ status, headers, body = \
self.conn.make_request('HEAD', 'invalid', obj) self.conn.make_request('HEAD', 'invalid', obj)
self.assertEqual(status, 404) self.assertEqual(status, 404)
self.assertEqual(body, '') # sanity self.assertEqual(body, b'') # sanity
self.assertEqual(headers['content-type'], 'application/xml') self.assertEqual(headers['content-type'], 'application/xml')
def test_delete_object_error(self): def test_delete_object_error(self):
@ -265,7 +266,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_content_md5(self): def test_put_object_content_md5(self):
obj = 'object' obj = 'object'
content = 'abcdefghij' content = b'abcdefghij'
etag = md5(content).hexdigest() etag = md5(content).hexdigest()
headers = {'Content-MD5': calculate_md5(content)} headers = {'Content-MD5': calculate_md5(content)}
status, headers, body = \ status, headers, body = \
@ -276,7 +277,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_content_type(self): def test_put_object_content_type(self):
obj = 'object' obj = 'object'
content = 'abcdefghij' content = b'abcdefghij'
etag = md5(content).hexdigest() etag = md5(content).hexdigest()
headers = {'Content-Type': 'text/plain'} headers = {'Content-Type': 'text/plain'}
status, headers, body = \ status, headers, body = \
@ -290,7 +291,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_conditional_requests(self): def test_put_object_conditional_requests(self):
obj = 'object' obj = 'object'
content = 'abcdefghij' content = b'abcdefghij'
headers = {'If-None-Match': '*'} headers = {'If-None-Match': '*'}
status, headers, body = \ status, headers, body = \
self.conn.make_request('PUT', self.bucket, obj, headers, content) self.conn.make_request('PUT', self.bucket, obj, headers, content)
@ -318,7 +319,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_expect(self): def test_put_object_expect(self):
obj = 'object' obj = 'object'
content = 'abcdefghij' content = b'abcdefghij'
etag = md5(content).hexdigest() etag = md5(content).hexdigest()
headers = {'Expect': '100-continue'} headers = {'Expect': '100-continue'}
status, headers, body = \ status, headers, body = \
@ -331,7 +332,7 @@ class TestS3ApiObject(S3ApiBase):
if expected_headers is None: if expected_headers is None:
expected_headers = req_headers expected_headers = req_headers
obj = 'object' obj = 'object'
content = 'abcdefghij' content = b'abcdefghij'
etag = md5(content).hexdigest() etag = md5(content).hexdigest()
status, headers, body = \ status, headers, body = \
self.conn.make_request('PUT', self.bucket, obj, self.conn.make_request('PUT', self.bucket, obj,
@ -387,7 +388,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_storage_class(self): def test_put_object_storage_class(self):
obj = 'object' obj = 'object'
content = 'abcdefghij' content = b'abcdefghij'
etag = md5(content).hexdigest() etag = md5(content).hexdigest()
headers = {'X-Amz-Storage-Class': 'STANDARD'} headers = {'X-Amz-Storage-Class': 'STANDARD'}
status, headers, body = \ status, headers, body = \
@ -399,7 +400,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_copy_source_params(self): def test_put_object_copy_source_params(self):
obj = 'object' obj = 'object'
src_headers = {'X-Amz-Meta-Test': 'src'} src_headers = {'X-Amz-Meta-Test': 'src'}
src_body = 'some content' src_body = b'some content'
dst_bucket = 'dst-bucket' dst_bucket = 'dst-bucket'
dst_obj = 'dst_object' dst_obj = 'dst_object'
self.conn.make_request('PUT', self.bucket, obj, src_headers, src_body) self.conn.make_request('PUT', self.bucket, obj, src_headers, src_body)
@ -433,7 +434,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_copy_source(self): def test_put_object_copy_source(self):
obj = 'object' obj = 'object'
content = 'abcdefghij' content = b'abcdefghij'
etag = md5(content).hexdigest() etag = md5(content).hexdigest()
self.conn.make_request('PUT', self.bucket, obj, body=content) self.conn.make_request('PUT', self.bucket, obj, body=content)
@ -648,7 +649,7 @@ class TestS3ApiObject(S3ApiBase):
def test_get_object_range(self): def test_get_object_range(self):
obj = 'object' obj = 'object'
content = 'abcdefghij' content = b'abcdefghij'
headers = {'x-amz-meta-test': 'swift'} headers = {'x-amz-meta-test': 'swift'}
self.conn.make_request( self.conn.make_request(
'PUT', self.bucket, obj, headers=headers, body=content) 'PUT', self.bucket, obj, headers=headers, body=content)
@ -662,7 +663,7 @@ class TestS3ApiObject(S3ApiBase):
self.assertEqual(headers['content-length'], '5') self.assertEqual(headers['content-length'], '5')
self.assertTrue('x-amz-meta-test' in headers) self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test']) self.assertEqual('swift', headers['x-amz-meta-test'])
self.assertEqual(body, 'bcdef') self.assertEqual(body, b'bcdef')
headers = {'Range': 'bytes=5-'} headers = {'Range': 'bytes=5-'}
status, headers, body = \ status, headers, body = \
@ -673,7 +674,7 @@ class TestS3ApiObject(S3ApiBase):
self.assertEqual(headers['content-length'], '5') self.assertEqual(headers['content-length'], '5')
self.assertTrue('x-amz-meta-test' in headers) self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test']) self.assertEqual('swift', headers['x-amz-meta-test'])
self.assertEqual(body, 'fghij') self.assertEqual(body, b'fghij')
headers = {'Range': 'bytes=-5'} headers = {'Range': 'bytes=-5'}
status, headers, body = \ status, headers, body = \
@ -684,7 +685,7 @@ class TestS3ApiObject(S3ApiBase):
self.assertEqual(headers['content-length'], '5') self.assertEqual(headers['content-length'], '5')
self.assertTrue('x-amz-meta-test' in headers) self.assertTrue('x-amz-meta-test' in headers)
self.assertEqual('swift', headers['x-amz-meta-test']) self.assertEqual('swift', headers['x-amz-meta-test'])
self.assertEqual(body, 'fghij') self.assertEqual(body, b'fghij')
ranges = ['1-2', '4-5'] ranges = ['1-2', '4-5']
@ -693,9 +694,9 @@ class TestS3ApiObject(S3ApiBase):
self.conn.make_request('GET', self.bucket, obj, headers=headers) self.conn.make_request('GET', self.bucket, obj, headers=headers)
self.assertEqual(status, 206) self.assertEqual(status, 206)
self.assertCommonResponseHeaders(headers) self.assertCommonResponseHeaders(headers)
self.assertTrue('content-length' in headers) self.assertIn('content-length', headers)
self.assertTrue('content-type' in headers) # sanity self.assertIn('content-type', headers) # sanity
content_type, boundary = headers['content-type'].split(';') content_type, boundary = headers['content-type'].split(';')
self.assertEqual('multipart/byteranges', content_type) self.assertEqual('multipart/byteranges', content_type)
@ -704,10 +705,13 @@ class TestS3ApiObject(S3ApiBase):
# TODO: Using swift.common.utils.multipart_byteranges_to_document_iters # TODO: Using swift.common.utils.multipart_byteranges_to_document_iters
# could be easy enough. # could be easy enough.
if six.PY2:
parser = email.parser.FeedParser() parser = email.parser.FeedParser()
else:
parser = email.parser.BytesFeedParser()
parser.feed( parser.feed(
"Content-Type: multipart/byterange; boundary=%s\r\n\r\n" % b"Content-Type: multipart/byterange; boundary=%s\r\n\r\n" %
boundary_str) boundary_str.encode('ascii'))
parser.feed(body) parser.feed(body)
message = parser.close() message = parser.close()
@ -727,7 +731,7 @@ class TestS3ApiObject(S3ApiBase):
self.assertEqual( self.assertEqual(
expected_range, part.get('Content-Range')) expected_range, part.get('Content-Range'))
# rest # rest
payload = part.get_payload().strip() payload = part.get_payload(decode=True).strip()
self.assertEqual(content[start:end + 1], payload) self.assertEqual(content[start:end + 1], payload)
def test_get_object_if_modified_since(self): def test_get_object_if_modified_since(self):
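A sketch of the parser selection used above for the multipart/byteranges body (illustrative; body is assumed to be bytes and boundary_str the boundary taken from the Content-Type header):

import email.parser

import six

def parse_byteranges(body, boundary_str):
    # py3 needs the bytes-aware feed parser; on py2 FeedParser accepts str.
    if six.PY2:
        parser = email.parser.FeedParser()
    else:
        parser = email.parser.BytesFeedParser()
    parser.feed(b"Content-Type: multipart/byterange; boundary=%s\r\n\r\n"
                % boundary_str.encode('ascii'))
    parser.feed(body)
    return parser.close()

# for part in parse_byteranges(body, boundary_str).get_payload():
#     data = part.get_payload(decode=True)  # raw bytes of each range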
@ -783,7 +787,7 @@ class TestS3ApiObject(S3ApiBase):
def test_head_object_range(self): def test_head_object_range(self):
obj = 'object' obj = 'object'
content = 'abcdefghij' content = b'abcdefghij'
self.conn.make_request('PUT', self.bucket, obj, body=content) self.conn.make_request('PUT', self.bucket, obj, body=content)
headers = {'Range': 'bytes=1-5'} headers = {'Range': 'bytes=1-5'}

View File

@ -190,7 +190,7 @@ class TestS3ApiPresignedUrls(S3ApiBase):
# PUT empty object # PUT empty object
put_url, headers = self.conn.generate_url_and_headers( put_url, headers = self.conn.generate_url_and_headers(
'PUT', bucket, obj) 'PUT', bucket, obj)
resp = requests.put(put_url, data='', headers=headers) resp = requests.put(put_url, data=b'', headers=headers)
self.assertEqual(resp.status_code, 200, self.assertEqual(resp.status_code, 200,
'Got %d %s' % (resp.status_code, resp.content)) 'Got %d %s' % (resp.status_code, resp.content))
# GET empty object # GET empty object
@ -199,10 +199,10 @@ class TestS3ApiPresignedUrls(S3ApiBase):
resp = requests.get(get_url, headers=headers) resp = requests.get(get_url, headers=headers)
self.assertEqual(resp.status_code, 200, self.assertEqual(resp.status_code, 200,
'Got %d %s' % (resp.status_code, resp.content)) 'Got %d %s' % (resp.status_code, resp.content))
self.assertEqual(resp.content, '') self.assertEqual(resp.content, b'')
# PUT over object # PUT over object
resp = requests.put(put_url, data='foobar', headers=headers) resp = requests.put(put_url, data=b'foobar', headers=headers)
self.assertEqual(resp.status_code, 200, self.assertEqual(resp.status_code, 200,
'Got %d %s' % (resp.status_code, resp.content)) 'Got %d %s' % (resp.status_code, resp.content))
@ -210,7 +210,7 @@ class TestS3ApiPresignedUrls(S3ApiBase):
resp = requests.get(get_url, headers=headers) resp = requests.get(get_url, headers=headers)
self.assertEqual(resp.status_code, 200, self.assertEqual(resp.status_code, 200,
'Got %d %s' % (resp.status_code, resp.content)) 'Got %d %s' % (resp.status_code, resp.content))
self.assertEqual(resp.content, 'foobar') self.assertEqual(resp.content, b'foobar')
# DELETE Object # DELETE Object
delete_url, headers = self.conn.generate_url_and_headers( delete_url, headers = self.conn.generate_url_and_headers(

View File

@ -80,8 +80,8 @@ class TestS3ApiService(S3ApiBase):
'GET', headers={'Date': '', 'x-amz-date': ''}) 'GET', headers={'Date': '', 'x-amz-date': ''})
self.assertEqual(status, 403) self.assertEqual(status, 403)
self.assertEqual(get_error_code(body), 'AccessDenied') self.assertEqual(get_error_code(body), 'AccessDenied')
self.assertIn('AWS authentication requires a valid Date ' self.assertIn(b'AWS authentication requires a valid Date '
'or x-amz-date header', body) b'or x-amz-date header', body)
class TestS3ApiServiceSigV4(TestS3ApiService): class TestS3ApiServiceSigV4(TestS3ApiService):

View File

@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from base64 import b64encode
from hashlib import md5 from hashlib import md5
from swift.common.middleware.s3api.etree import fromstring from swift.common.middleware.s3api.etree import fromstring
@ -28,4 +29,4 @@ def get_error_msg(body):
def calculate_md5(body): def calculate_md5(body):
return md5(body).digest().encode('base64').strip() return b64encode(md5(body).digest()).strip().decode('ascii')
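The rewritten helper in isolation (a sketch mirroring the change above): Content-MD5 is the base64 of the raw MD5 digest, returned as native text on both Python 2 and 3.

from base64 import b64encode
from hashlib import md5

def calculate_md5(body):
    # body must be bytes; the result is a 24-character base64 string.
    return b64encode(md5(body).digest()).strip().decode('ascii')

print(calculate_md5(b'abcdefghij'))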

View File

@ -15,7 +15,7 @@
# limitations under the License. # limitations under the License.
import unittest import unittest
from urlparse import urlparse, urlunparse from six.moves.urllib.parse import urlparse, urlunparse
import uuid import uuid
from random import shuffle from random import shuffle

View File

@ -20,8 +20,10 @@ import json
from uuid import uuid4 from uuid import uuid4
from string import ascii_letters from string import ascii_letters
import six
from six.moves import range from six.moves import range
from swift.common.middleware.acl import format_acl from swift.common.middleware.acl import format_acl
from swift.common.utils import distribute_evenly
from test.functional import check_response, retry, requires_acls, \ from test.functional import check_response, retry, requires_acls, \
load_constraint, SkipTest load_constraint, SkipTest
@ -57,8 +59,8 @@ class TestAccount(unittest2.TestCase):
conn.request('POST', parsed.path, '', headers) conn.request('POST', parsed.path, '', headers)
return check_response(conn) return check_response(conn)
for i in range(0, len(remove_metadata_keys), 90): buckets = (len(remove_metadata_keys) - 1) // 90 + 1
batch = remove_metadata_keys[i:i + 90] for batch in distribute_evenly(remove_metadata_keys, buckets):
resp = retry(post, batch) resp = retry(post, batch)
resp.read() resp.read()
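A rough stand-in for the batching above (illustrative; the assumed behaviour of swift.common.utils.distribute_evenly is a round-robin deal into a fixed number of buckets, so no single POST carries more than about 90 headers):

def distribute_evenly_sketch(items, num_buckets):
    # Deal items round-robin so bucket sizes differ by at most one.
    buckets = [[] for _ in range(num_buckets)]
    for i, item in enumerate(items):
        buckets[i % num_buckets].append(item)
    return buckets

keys = ['X-Account-Meta-K%d' % i for i in range(200)]
num_buckets = (len(keys) - 1) // 90 + 1  # same ceiling division as the test
print([len(b) for b in distribute_evenly_sketch(keys, num_buckets)])  # [67, 67, 66]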
@ -148,7 +150,7 @@ class TestAccount(unittest2.TestCase):
# needs to be an acceptable header size # needs to be an acceptable header size
num_keys = 8 num_keys = 8
max_key_size = load_constraint('max_header_size') / num_keys max_key_size = load_constraint('max_header_size') // num_keys
acl = {'admin': [c * max_key_size for c in ascii_letters[:num_keys]]} acl = {'admin': [c * max_key_size for c in ascii_letters[:num_keys]]}
headers = {'x-account-access-control': format_acl( headers = {'x-account-access-control': format_acl(
version=2, acl_dict=acl)} version=2, acl_dict=acl)}
@ -717,7 +719,9 @@ class TestAccount(unittest2.TestCase):
return check_response(conn) return check_response(conn)
uni_key = u'X-Account-Meta-uni\u0E12' uni_key = u'X-Account-Meta-uni\u0E12'
uni_value = u'uni\u0E12' uni_value = u'uni\u0E12'
if (tf.web_front_end == 'integral'): # Note that py3 has issues with non-ascii header names; see
# https://bugs.python.org/issue37093
if (tf.web_front_end == 'integral' and six.PY2):
resp = retry(post, uni_key, '1') resp = retry(post, uni_key, '1')
resp.read() resp.read()
self.assertIn(resp.status, (201, 204)) self.assertIn(resp.status, (201, 204))
@ -731,9 +735,14 @@ class TestAccount(unittest2.TestCase):
resp = retry(head) resp = retry(head)
resp.read() resp.read()
self.assertIn(resp.status, (200, 204)) self.assertIn(resp.status, (200, 204))
if six.PY2:
self.assertEqual(resp.getheader('X-Account-Meta-uni'), self.assertEqual(resp.getheader('X-Account-Meta-uni'),
uni_value.encode('utf-8')) uni_value.encode('utf8'))
if (tf.web_front_end == 'integral'): else:
self.assertEqual(resp.getheader('X-Account-Meta-uni'),
uni_value)
# See above note about py3 and non-ascii header names
if (tf.web_front_end == 'integral' and six.PY2):
resp = retry(post, uni_key, uni_value) resp = retry(post, uni_key, uni_value)
resp.read() resp.read()
self.assertEqual(resp.status, 204) self.assertEqual(resp.status, 204)

View File

@ -23,6 +23,7 @@ from test.functional import check_response, cluster_info, retry, \
requires_acls, load_constraint, requires_policies, SkipTest requires_acls, load_constraint, requires_policies, SkipTest
import test.functional as tf import test.functional as tf
import six
from six.moves import range from six.moves import range
@ -71,8 +72,11 @@ class TestContainer(unittest2.TestCase):
return check_response(conn) return check_response(conn)
def delete(url, token, parsed, conn, container, obj): def delete(url, token, parsed, conn, container, obj):
path = '/'.join([parsed.path, container, if six.PY2:
obj['name'].encode('utf8')]) obj_name = obj['name'].encode('utf8')
else:
obj_name = obj['name']
path = '/'.join([parsed.path, container, obj_name])
conn.request('DELETE', path, '', {'X-Auth-Token': token}) conn.request('DELETE', path, '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
@ -153,7 +157,9 @@ class TestContainer(unittest2.TestCase):
uni_key = u'X-Container-Meta-uni\u0E12' uni_key = u'X-Container-Meta-uni\u0E12'
uni_value = u'uni\u0E12' uni_value = u'uni\u0E12'
if (tf.web_front_end == 'integral'): # Note that py3 has issues with non-ascii header names; see
# https://bugs.python.org/issue37093
if (tf.web_front_end == 'integral' and six.PY2):
resp = retry(post, uni_key, '1') resp = retry(post, uni_key, '1')
resp.read() resp.read()
self.assertEqual(resp.status, 204) self.assertEqual(resp.status, 204)
@ -167,9 +173,14 @@ class TestContainer(unittest2.TestCase):
resp = retry(head) resp = retry(head)
resp.read() resp.read()
self.assertIn(resp.status, (200, 204)) self.assertIn(resp.status, (200, 204))
if six.PY2:
self.assertEqual(resp.getheader('X-Container-Meta-uni'), self.assertEqual(resp.getheader('X-Container-Meta-uni'),
uni_value.encode('utf-8')) uni_value.encode('utf-8'))
if (tf.web_front_end == 'integral'): else:
self.assertEqual(resp.getheader('X-Container-Meta-uni'),
uni_value)
# See above note about py3 and non-ascii header names
if (tf.web_front_end == 'integral' and six.PY2):
resp = retry(post, uni_key, uni_value) resp = retry(post, uni_key, uni_value)
resp.read() resp.read()
self.assertEqual(resp.status, 204) self.assertEqual(resp.status, 204)
@ -763,6 +774,8 @@ class TestContainer(unittest2.TestCase):
# read-only can list containers # read-only can list containers
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertIn(self.name, listing) self.assertIn(self.name, listing)
@ -778,6 +791,8 @@ class TestContainer(unittest2.TestCase):
self.assertEqual(resp.status, 201) self.assertEqual(resp.status, 201)
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertIn(new_container_name, listing) self.assertIn(new_container_name, listing)
@ -878,6 +893,8 @@ class TestContainer(unittest2.TestCase):
# can list containers # can list containers
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertIn(self.name, listing) self.assertIn(self.name, listing)
@ -888,6 +905,8 @@ class TestContainer(unittest2.TestCase):
self.assertIn(resp.status, (201, 202)) self.assertIn(resp.status, (201, 202))
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertIn(new_container_name, listing) self.assertIn(new_container_name, listing)
@ -897,6 +916,8 @@ class TestContainer(unittest2.TestCase):
self.assertIn(resp.status, (204, 404)) self.assertIn(resp.status, (204, 404))
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertNotIn(new_container_name, listing) self.assertNotIn(new_container_name, listing)
@ -1020,6 +1041,8 @@ class TestContainer(unittest2.TestCase):
# can list containers # can list containers
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertIn(self.name, listing) self.assertIn(self.name, listing)
@ -1030,6 +1053,8 @@ class TestContainer(unittest2.TestCase):
self.assertEqual(resp.status, 201) self.assertEqual(resp.status, 201)
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertIn(new_container_name, listing) self.assertIn(new_container_name, listing)
@ -1039,6 +1064,8 @@ class TestContainer(unittest2.TestCase):
self.assertEqual(resp.status, 204) self.assertEqual(resp.status, 204)
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertNotIn(new_container_name, listing) self.assertNotIn(new_container_name, listing)
@ -1408,7 +1435,7 @@ class TestContainer(unittest2.TestCase):
if (tf.web_front_end == 'apache2'): if (tf.web_front_end == 'apache2'):
self.assertEqual(resp.status, 404) self.assertEqual(resp.status, 404)
else: else:
self.assertEqual(resp.read(), 'Invalid UTF8 or contains NULL') self.assertEqual(resp.read(), b'Invalid UTF8 or contains NULL')
self.assertEqual(resp.status, 412) self.assertEqual(resp.status, 412)
def test_create_container_gets_default_policy_by_default(self): def test_create_container_gets_default_policy_by_default(self):
@ -1604,12 +1631,12 @@ class TestContainer(unittest2.TestCase):
return check_response(conn) return check_response(conn)
# upload 11 bytes object # upload 11 bytes object
resp = retry(put, '01234567890') resp = retry(put, b'01234567890')
resp.read() resp.read()
self.assertEqual(resp.status, 413) self.assertEqual(resp.status, 413)
# upload 10 bytes object # upload 10 bytes object
resp = retry(put, '0123456789') resp = retry(put, b'0123456789')
resp.read() resp.read()
self.assertEqual(resp.status, 201) self.assertEqual(resp.status, 201)
@ -1622,7 +1649,7 @@ class TestContainer(unittest2.TestCase):
resp = retry(get) resp = retry(get)
body = resp.read() body = resp.read()
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(body, '0123456789') self.assertEqual(body, b'0123456789')
class BaseTestContainerACLs(unittest2.TestCase): class BaseTestContainerACLs(unittest2.TestCase):

View File

@ -14,7 +14,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import unittest from swift.common.swob import str_to_wsgi
import test.functional as tf import test.functional as tf
from test.functional.tests import Utils, Base, Base2, BaseEnv from test.functional.tests import Utils, Base, Base2, BaseEnv
from test.functional.swift_test_client import Connection, ResponseError from test.functional.swift_test_client import Connection, ResponseError
@ -86,25 +86,25 @@ class TestDlo(Base):
file_contents = file_item.read() file_contents = file_item.read()
self.assertEqual( self.assertEqual(
file_contents, file_contents,
"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee") b"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee")
file_item = self.env.container.file('man2') file_item = self.env.container.file('man2')
file_contents = file_item.read() file_contents = file_item.read()
self.assertEqual( self.assertEqual(
file_contents, file_contents,
"AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE") b"AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE")
file_item = self.env.container.file('manall') file_item = self.env.container.file('manall')
file_contents = file_item.read() file_contents = file_item.read()
self.assertEqual( self.assertEqual(
file_contents, file_contents,
("aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee" + (b"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee" +
"AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE")) b"AAAAAAAAAABBBBBBBBBBCCCCCCCCCCDDDDDDDDDDEEEEEEEEEE"))
def test_get_manifest_document_itself(self): def test_get_manifest_document_itself(self):
file_item = self.env.container.file('man1') file_item = self.env.container.file('man1')
file_contents = file_item.read(parms={'multipart-manifest': 'get'}) file_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual(file_contents, "man1-contents") self.assertEqual(file_contents, b"man1-contents")
self.assertEqual(file_item.info()['x_object_manifest'], self.assertEqual(file_item.info()['x_object_manifest'],
"%s/%s/seg_lower" % "%s/%s/seg_lower" %
(self.env.container.name, self.env.segment_prefix)) (self.env.container.name, self.env.segment_prefix))
@ -112,10 +112,19 @@ class TestDlo(Base):
def test_get_range(self): def test_get_range(self):
file_item = self.env.container.file('man1') file_item = self.env.container.file('man1')
file_contents = file_item.read(size=25, offset=8) file_contents = file_item.read(size=25, offset=8)
self.assertEqual(file_contents, "aabbbbbbbbbbccccccccccddd") self.assertEqual(file_contents, b"aabbbbbbbbbbccccccccccddd")
file_contents = file_item.read(size=1, offset=47) file_contents = file_item.read(size=1, offset=47)
self.assertEqual(file_contents, "e") self.assertEqual(file_contents, b"e")
def test_get_multiple_ranges(self):
file_item = self.env.container.file('man1')
file_contents = file_item.read(
hdrs={'Range': 'bytes=0-4,10-14'})
self.assert_status(200) # *not* 206
self.assertEqual(
file_contents,
b"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee")
def test_get_range_out_of_range(self): def test_get_range_out_of_range(self):
file_item = self.env.container.file('man1') file_item = self.env.container.file('man1')
@ -130,7 +139,7 @@ class TestDlo(Base):
# segments and not just a manifest. # segments and not just a manifest.
f_segment = self.env.container.file("%s/seg_lowerf" % f_segment = self.env.container.file("%s/seg_lowerf" %
(self.env.segment_prefix)) (self.env.segment_prefix))
f_segment.write('ffffffffff') f_segment.write(b'ffffffffff')
try: try:
man1_item = self.env.container.file('man1') man1_item = self.env.container.file('man1')
man1_item.copy(self.env.container.name, "copied-man1") man1_item.copy(self.env.container.name, "copied-man1")
@ -142,7 +151,7 @@ class TestDlo(Base):
file_contents = file_item.read() file_contents = file_item.read()
self.assertEqual( self.assertEqual(
file_contents, file_contents,
"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff") b"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff")
# The copied object must not have X-Object-Manifest # The copied object must not have X-Object-Manifest
self.assertNotIn("x_object_manifest", file_item.info()) self.assertNotIn("x_object_manifest", file_item.info())
@ -154,7 +163,7 @@ class TestDlo(Base):
# segments and not just a manifest. # segments and not just a manifest.
f_segment = self.env.container.file("%s/seg_lowerf" % f_segment = self.env.container.file("%s/seg_lowerf" %
(self.env.segment_prefix)) (self.env.segment_prefix))
f_segment.write('ffffffffff') f_segment.write(b'ffffffffff')
try: try:
man1_item = self.env.container.file('man1') man1_item = self.env.container.file('man1')
man1_item.copy_account(acct, man1_item.copy_account(acct,
@ -168,7 +177,7 @@ class TestDlo(Base):
file_contents = file_item.read() file_contents = file_item.read()
self.assertEqual( self.assertEqual(
file_contents, file_contents,
"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff") b"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeeeffffffffff")
# The copied object must not have X-Object-Manifest # The copied object must not have X-Object-Manifest
self.assertNotIn("x_object_manifest", file_item.info()) self.assertNotIn("x_object_manifest", file_item.info())
@ -182,12 +191,12 @@ class TestDlo(Base):
copied = self.env.container.file("copied-man1") copied = self.env.container.file("copied-man1")
copied_contents = copied.read(parms={'multipart-manifest': 'get'}) copied_contents = copied.read(parms={'multipart-manifest': 'get'})
self.assertEqual(copied_contents, "man1-contents") self.assertEqual(copied_contents, b"man1-contents")
copied_contents = copied.read() copied_contents = copied.read()
self.assertEqual( self.assertEqual(
copied_contents, copied_contents,
"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee") b"aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee")
self.assertEqual(man1_item.info()['x_object_manifest'], self.assertEqual(man1_item.info()['x_object_manifest'],
copied.info()['x_object_manifest']) copied.info()['x_object_manifest'])
finally: finally:
@ -239,8 +248,9 @@ class TestDlo(Base):
manifest.info(hdrs={'If-None-Match': "not-%s" % etag}) manifest.info(hdrs={'If-None-Match': "not-%s" % etag})
self.assert_status(200) self.assert_status(200)
@unittest.skipIf('username3' not in tf.config, "Requires user 3")
def test_dlo_referer_on_segment_container(self): def test_dlo_referer_on_segment_container(self):
if 'username3' not in tf.config:
raise tf.SkipTest('Requires user 3')
# First the account2 (test3) should fail # First the account2 (test3) should fail
config2 = tf.config.copy() config2 = tf.config.copy()
config2['username'] = tf.config['username3'] config2['username'] = tf.config['username3']
@ -268,7 +278,7 @@ class TestDlo(Base):
contents = dlo_file.read(hdrs=headers) contents = dlo_file.read(hdrs=headers)
self.assertEqual( self.assertEqual(
contents, contents,
"ffffffffffgggggggggghhhhhhhhhhiiiiiiiiiijjjjjjjjjj") b"ffffffffffgggggggggghhhhhhhhhhiiiiiiiiiijjjjjjjjjj")
def test_dlo_post_with_manifest_header(self): def test_dlo_post_with_manifest_header(self):
# verify that performing a POST to a DLO manifest # verify that performing a POST to a DLO manifest
@ -278,12 +288,13 @@ class TestDlo(Base):
# create a new manifest for this test to avoid test coupling. # create a new manifest for this test to avoid test coupling.
x_o_m = self.env.container.file('man1').info()['x_object_manifest'] x_o_m = self.env.container.file('man1').info()['x_object_manifest']
file_item = self.env.container.file(Utils.create_name()) file_item = self.env.container.file(Utils.create_name())
file_item.write('manifest-contents', hdrs={"X-Object-Manifest": x_o_m}) file_item.write(b'manifest-contents',
hdrs={"X-Object-Manifest": x_o_m})
# sanity checks # sanity checks
manifest_contents = file_item.read(parms={'multipart-manifest': 'get'}) manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual('manifest-contents', manifest_contents) self.assertEqual(b'manifest-contents', manifest_contents)
expected_contents = ''.join([(c * 10) for c in 'abcde']) expected_contents = ''.join((c * 10) for c in 'abcde').encode('ascii')
contents = file_item.read(parms={}) contents = file_item.read(parms={})
self.assertEqual(expected_contents, contents) self.assertEqual(expected_contents, contents)
@ -294,16 +305,18 @@ class TestDlo(Base):
# verify that x-object-manifest was updated # verify that x-object-manifest was updated
file_item.info() file_item.info()
resp_headers = file_item.conn.response.getheaders() resp_headers = [(h.lower(), v)
self.assertIn(('x-object-manifest', new_x_o_m), resp_headers) for h, v in file_item.conn.response.getheaders()]
self.assertIn(('x-object-manifest', str_to_wsgi(new_x_o_m)),
resp_headers)
self.assertIn(('x-object-meta-foo', 'bar'), resp_headers) self.assertIn(('x-object-meta-foo', 'bar'), resp_headers)
# verify that manifest content was not changed # verify that manifest content was not changed
manifest_contents = file_item.read(parms={'multipart-manifest': 'get'}) manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual('manifest-contents', manifest_contents) self.assertEqual(b'manifest-contents', manifest_contents)
# verify that updated manifest points to new content # verify that updated manifest points to new content
expected_contents = ''.join([(c * 10) for c in 'ABCDE']) expected_contents = ''.join((c * 10) for c in 'ABCDE').encode('ascii')
contents = file_item.read(parms={}) contents = file_item.read(parms={})
self.assertEqual(expected_contents, contents) self.assertEqual(expected_contents, contents)
@ -319,10 +332,10 @@ class TestDlo(Base):
# verify that manifest content was not changed # verify that manifest content was not changed
manifest_contents = file_item.read(parms={'multipart-manifest': 'get'}) manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual('manifest-contents', manifest_contents) self.assertEqual(b'manifest-contents', manifest_contents)
# verify that updated manifest points new content # verify that updated manifest points new content
expected_contents = ''.join([(c * 10) for c in 'abcde']) expected_contents = ''.join((c * 10) for c in 'abcde').encode('ascii')
contents = file_item.read(parms={}) contents = file_item.read(parms={})
self.assertEqual(expected_contents, contents) self.assertEqual(expected_contents, contents)
@ -334,12 +347,13 @@ class TestDlo(Base):
# create a new manifest for this test to avoid test coupling. # create a new manifest for this test to avoid test coupling.
x_o_m = self.env.container.file('man1').info()['x_object_manifest'] x_o_m = self.env.container.file('man1').info()['x_object_manifest']
file_item = self.env.container.file(Utils.create_name()) file_item = self.env.container.file(Utils.create_name())
file_item.write('manifest-contents', hdrs={"X-Object-Manifest": x_o_m}) file_item.write(b'manifest-contents',
hdrs={"X-Object-Manifest": x_o_m})
# sanity checks # sanity checks
manifest_contents = file_item.read(parms={'multipart-manifest': 'get'}) manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual('manifest-contents', manifest_contents) self.assertEqual(b'manifest-contents', manifest_contents)
expected_contents = ''.join([(c * 10) for c in 'abcde']) expected_contents = ''.join((c * 10) for c in 'abcde').encode('ascii')
contents = file_item.read(parms={}) contents = file_item.read(parms={})
self.assertEqual(expected_contents, contents) self.assertEqual(expected_contents, contents)
@ -352,11 +366,11 @@ class TestDlo(Base):
# verify that object content was not changed # verify that object content was not changed
manifest_contents = file_item.read(parms={'multipart-manifest': 'get'}) manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual('manifest-contents', manifest_contents) self.assertEqual(b'manifest-contents', manifest_contents)
# verify that object is no longer a manifest # verify that object is no longer a manifest
contents = file_item.read(parms={}) contents = file_item.read(parms={})
self.assertEqual('manifest-contents', contents) self.assertEqual(b'manifest-contents', contents)
def test_dlo_post_with_manifest_regular_object(self): def test_dlo_post_with_manifest_regular_object(self):
# verify that performing a POST to a regular object # verify that performing a POST to a regular object
@ -364,11 +378,11 @@ class TestDlo(Base):
# Put a regular object # Put a regular object
file_item = self.env.container.file(Utils.create_name()) file_item = self.env.container.file(Utils.create_name())
file_item.write('file contents', hdrs={}) file_item.write(b'file contents', hdrs={})
# sanity checks # sanity checks
file_contents = file_item.read(parms={}) file_contents = file_item.read(parms={})
self.assertEqual('file contents', file_contents) self.assertEqual(b'file contents', file_contents)
# get the path associated with man1 # get the path associated with man1
x_o_m = self.env.container.file('man1').info()['x_object_manifest'] x_o_m = self.env.container.file('man1').info()['x_object_manifest']
@ -378,13 +392,14 @@ class TestDlo(Base):
# verify that the file is now a manifest # verify that the file is now a manifest
manifest_contents = file_item.read(parms={'multipart-manifest': 'get'}) manifest_contents = file_item.read(parms={'multipart-manifest': 'get'})
self.assertEqual('file contents', manifest_contents) self.assertEqual(b'file contents', manifest_contents)
expected_contents = ''.join([(c * 10) for c in 'abcde']) expected_contents = ''.join([(c * 10) for c in 'abcde']).encode()
contents = file_item.read(parms={}) contents = file_item.read(parms={})
self.assertEqual(expected_contents, contents) self.assertEqual(expected_contents, contents)
file_item.info() file_item.info()
resp_headers = file_item.conn.response.getheaders() resp_headers = [(h.lower(), v)
self.assertIn(('x-object-manifest', x_o_m), resp_headers) for h, v in file_item.conn.response.getheaders()]
self.assertIn(('x-object-manifest', str_to_wsgi(x_o_m)), resp_headers)
class TestDloUTF8(Base2, TestDlo): class TestDloUTF8(Base2, TestDlo):

View File

@ -22,6 +22,7 @@ from uuid import uuid4
import time import time
from xml.dom import minidom from xml.dom import minidom
import six
from six.moves import range from six.moves import range
from test.functional import check_response, retry, requires_acls, \ from test.functional import check_response, retry, requires_acls, \
@ -105,8 +106,11 @@ class TestObject(unittest2.TestCase):
# delete an object # delete an object
def delete(url, token, parsed, conn, container, obj): def delete(url, token, parsed, conn, container, obj):
path = '/'.join([parsed.path, container, if six.PY2:
obj['name'].encode('utf8')]) obj_name = obj['name'].encode('utf8')
else:
obj_name = obj['name']
path = '/'.join([parsed.path, container, obj_name])
conn.request('DELETE', path, '', {'X-Auth-Token': token}) conn.request('DELETE', path, '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
@ -176,7 +180,7 @@ class TestObject(unittest2.TestCase):
resp.read() resp.read()
self.assertEqual(resp.status, 201) self.assertEqual(resp.status, 201)
resp = retry(get) resp = retry(get)
self.assertEqual('', resp.read()) self.assertEqual(b'', resp.read())
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(metadata(resp), {}) self.assertEqual(metadata(resp), {})
# empty post # empty post
@ -184,7 +188,7 @@ class TestObject(unittest2.TestCase):
resp.read() resp.read()
self.assertEqual(resp.status, 202) self.assertEqual(resp.status, 202)
resp = retry(get) resp = retry(get)
self.assertEqual('', resp.read()) self.assertEqual(b'', resp.read())
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(metadata(resp), {}) self.assertEqual(metadata(resp), {})
@ -197,7 +201,7 @@ class TestObject(unittest2.TestCase):
resp.read() resp.read()
self.assertEqual(resp.status, 201) self.assertEqual(resp.status, 201)
resp = retry(get) resp = retry(get)
self.assertEqual('', resp.read()) self.assertEqual(b'', resp.read())
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(metadata(resp), { self.assertEqual(metadata(resp), {
'X-Object-Meta-Color': 'blUe', 'X-Object-Meta-Color': 'blUe',
@ -209,7 +213,7 @@ class TestObject(unittest2.TestCase):
resp.read() resp.read()
self.assertEqual(resp.status, 202) self.assertEqual(resp.status, 202)
resp = retry(get) resp = retry(get)
self.assertEqual('', resp.read()) self.assertEqual(b'', resp.read())
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(metadata(resp), { self.assertEqual(metadata(resp), {
'X-Object-Meta-Color': 'oraNge' 'X-Object-Meta-Color': 'oraNge'
@ -225,7 +229,7 @@ class TestObject(unittest2.TestCase):
resp.read() resp.read()
self.assertEqual(resp.status, 201) self.assertEqual(resp.status, 201)
resp = retry(get) resp = retry(get)
self.assertEqual('', resp.read()) self.assertEqual(b'', resp.read())
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(metadata(resp), { self.assertEqual(metadata(resp), {
'X-Object-Meta-Color': 'Red', 'X-Object-Meta-Color': 'Red',
@ -241,7 +245,7 @@ class TestObject(unittest2.TestCase):
resp.read() resp.read()
self.assertEqual(resp.status, 202) self.assertEqual(resp.status, 202)
resp = retry(get) resp = retry(get)
self.assertEqual('', resp.read()) self.assertEqual(b'', resp.read())
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(metadata(resp), { self.assertEqual(metadata(resp), {
'X-Object-Meta-Food': 'Burger', 'X-Object-Meta-Food': 'Burger',
@ -256,7 +260,7 @@ class TestObject(unittest2.TestCase):
resp.read() resp.read()
self.assertEqual(resp.status, 201) self.assertEqual(resp.status, 201)
resp = retry(get) resp = retry(get)
self.assertEqual('', resp.read()) self.assertEqual(b'', resp.read())
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(metadata(resp), { self.assertEqual(metadata(resp), {
'X-Object-Meta-Foo': 'B\xc3\xa2r', 'X-Object-Meta-Foo': 'B\xc3\xa2r',
@ -269,7 +273,7 @@ class TestObject(unittest2.TestCase):
resp.read() resp.read()
self.assertEqual(resp.status, 202) self.assertEqual(resp.status, 202)
resp = retry(get) resp = retry(get)
self.assertEqual('', resp.read()) self.assertEqual(b'', resp.read())
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(metadata(resp), { self.assertEqual(metadata(resp), {
'X-Object-Meta-Foo': 'B\xc3\xa5z', 'X-Object-Meta-Foo': 'B\xc3\xa5z',
@ -341,7 +345,7 @@ class TestObject(unittest2.TestCase):
'X-Timestamp should be a UNIX timestamp float value', body) 'X-Timestamp should be a UNIX timestamp float value', body)
else: else:
self.assertEqual(resp.status, 201) self.assertEqual(resp.status, 201)
self.assertEqual(body, '') self.assertEqual(body, b'')
resp = retry(head) resp = retry(head)
resp.read() resp.read()
self.assertGreater(float(resp.headers['x-timestamp']), ts_before) self.assertGreater(float(resp.headers['x-timestamp']), ts_before)
@ -374,7 +378,7 @@ class TestObject(unittest2.TestCase):
'X-Timestamp should be a UNIX timestamp float value', body) 'X-Timestamp should be a UNIX timestamp float value', body)
else: else:
self.assertEqual(resp.status, 201) self.assertEqual(resp.status, 201)
self.assertEqual(body, '') self.assertEqual(body, b'')
resp = retry(head) resp = retry(head)
resp.read() resp.read()
self.assertGreater(float(resp.headers['x-timestamp']), ts_before) self.assertGreater(float(resp.headers['x-timestamp']), ts_before)
@ -470,7 +474,7 @@ class TestObject(unittest2.TestCase):
resp = retry(put) resp = retry(put)
body = resp.read() body = resp.read()
self.assertEqual(resp.status, 400) self.assertEqual(resp.status, 400)
self.assertEqual(body, 'Non-integer X-Delete-After') self.assertEqual(body, b'Non-integer X-Delete-After')
def test_non_integer_x_delete_at(self): def test_non_integer_x_delete_at(self):
def put(url, token, parsed, conn): def put(url, token, parsed, conn):
@ -483,7 +487,7 @@ class TestObject(unittest2.TestCase):
resp = retry(put) resp = retry(put)
body = resp.read() body = resp.read()
self.assertEqual(resp.status, 400) self.assertEqual(resp.status, 400)
self.assertEqual(body, 'Non-integer X-Delete-At') self.assertEqual(body, b'Non-integer X-Delete-At')
def test_x_delete_at_in_the_past(self): def test_x_delete_at_in_the_past(self):
def put(url, token, parsed, conn): def put(url, token, parsed, conn):
@ -496,7 +500,7 @@ class TestObject(unittest2.TestCase):
resp = retry(put) resp = retry(put)
body = resp.read() body = resp.read()
self.assertEqual(resp.status, 400) self.assertEqual(resp.status, 400)
self.assertEqual(body, 'X-Delete-At in past') self.assertEqual(body, b'X-Delete-At in past')
def test_copy_object(self): def test_copy_object(self):
if tf.skip: if tf.skip:
@ -514,7 +518,7 @@ class TestObject(unittest2.TestCase):
resp = retry(get_source) resp = retry(get_source)
source_contents = resp.read() source_contents = resp.read()
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(source_contents, 'test') self.assertEqual(source_contents, b'test')
# copy source to dest with X-Copy-From # copy source to dest with X-Copy-From
def put(url, token, parsed, conn): def put(url, token, parsed, conn):
@ -605,7 +609,7 @@ class TestObject(unittest2.TestCase):
resp = retry(get_source) resp = retry(get_source)
source_contents = resp.read() source_contents = resp.read()
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(source_contents, 'test') self.assertEqual(source_contents, b'test')
acct = tf.parsed[0].path.split('/', 2)[2] acct = tf.parsed[0].path.split('/', 2)[2]
@ -964,14 +968,16 @@ class TestObject(unittest2.TestCase):
# can list objects # can list objects
resp = retry(get_listing, use_account=3) resp = retry(get_listing, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertIn(self.obj, listing) self.assertIn(self.obj, listing.split('\n'))
# can get object # can get object
resp = retry(get, self.obj, use_account=3) resp = retry(get, self.obj, use_account=3)
body = resp.read() body = resp.read()
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(body, 'test') self.assertEqual(body, b'test')
# can not put an object # can not put an object
obj_name = str(uuid4()) obj_name = str(uuid4())
@ -987,9 +993,11 @@ class TestObject(unittest2.TestCase):
# sanity with account1 # sanity with account1
resp = retry(get_listing, use_account=3) resp = retry(get_listing, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertNotIn(obj_name, listing) self.assertNotIn(obj_name, listing.split('\n'))
self.assertIn(self.obj, listing) self.assertIn(self.obj, listing.split('\n'))
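
A small standalone sketch (the listing contents are made up) of the pattern above: plain-text listings arrive as bytes and need decoding on py3, and splitting on newlines gives an exact-name membership test instead of a loose substring match.

import six

listing = b'obj-10\nother\n'
if not six.PY2:
    listing = listing.decode('utf8')
print('obj-1' in listing)              # True -- a substring check also matches 'obj-10'
print('obj-1' in listing.split('\n'))  # False -- the exact-name check used above
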
@requires_acls @requires_acls
def test_read_write(self): def test_read_write(self):
@ -1045,14 +1053,16 @@ class TestObject(unittest2.TestCase):
# can list objects # can list objects
resp = retry(get_listing, use_account=3) resp = retry(get_listing, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertIn(self.obj, listing) self.assertIn(self.obj, listing.split('\n'))
# can get object # can get object
resp = retry(get, self.obj, use_account=3) resp = retry(get, self.obj, use_account=3)
body = resp.read() body = resp.read()
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(body, 'test') self.assertEqual(body, b'test')
# can put an object # can put an object
obj_name = str(uuid4()) obj_name = str(uuid4())
@ -1068,9 +1078,11 @@ class TestObject(unittest2.TestCase):
# sanity with account1 # sanity with account1
resp = retry(get_listing, use_account=3) resp = retry(get_listing, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertIn(obj_name, listing) self.assertIn(obj_name, listing.split('\n'))
self.assertNotIn(self.obj, listing) self.assertNotIn(self.obj, listing.split('\n'))
@requires_acls @requires_acls
def test_admin(self): def test_admin(self):
@ -1126,14 +1138,16 @@ class TestObject(unittest2.TestCase):
# can list objects # can list objects
resp = retry(get_listing, use_account=3) resp = retry(get_listing, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertIn(self.obj, listing) self.assertIn(self.obj, listing.split('\n'))
# can get object # can get object
resp = retry(get, self.obj, use_account=3) resp = retry(get, self.obj, use_account=3)
body = resp.read() body = resp.read()
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(body, 'test') self.assertEqual(body, b'test')
# can put an object # can put an object
obj_name = str(uuid4()) obj_name = str(uuid4())
@ -1149,17 +1163,19 @@ class TestObject(unittest2.TestCase):
# sanity with account1 # sanity with account1
resp = retry(get_listing, use_account=3) resp = retry(get_listing, use_account=3)
listing = resp.read() listing = resp.read()
if not six.PY2:
listing = listing.decode('utf8')
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertIn(obj_name, listing) self.assertIn(obj_name, listing.split('\n'))
self.assertNotIn(self.obj, listing) self.assertNotIn(self.obj, listing)
def test_manifest(self): def test_manifest(self):
if tf.skip: if tf.skip:
raise SkipTest raise SkipTest
# Data for the object segments # Data for the object segments
segments1 = ['one', 'two', 'three', 'four', 'five'] segments1 = [b'one', b'two', b'three', b'four', b'five']
segments2 = ['six', 'seven', 'eight'] segments2 = [b'six', b'seven', b'eight']
segments3 = ['nine', 'ten', 'eleven'] segments3 = [b'nine', b'ten', b'eleven']
# Upload the first set of segments # Upload the first set of segments
def put(url, token, parsed, conn, objnum): def put(url, token, parsed, conn, objnum):
@ -1190,7 +1206,7 @@ class TestObject(unittest2.TestCase):
parsed.path, self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEqual(resp.read(), ''.join(segments1)) self.assertEqual(resp.read(), b''.join(segments1))
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
self.assertEqual(resp.getheader('content-type'), 'text/jibberish') self.assertEqual(resp.getheader('content-type'), 'text/jibberish')
@ -1201,7 +1217,7 @@ class TestObject(unittest2.TestCase):
'X-Auth-Token': token, 'Range': 'bytes=3-'}) 'X-Auth-Token': token, 'Range': 'bytes=3-'})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEqual(resp.read(), ''.join(segments1[1:])) self.assertEqual(resp.read(), b''.join(segments1[1:]))
self.assertEqual(resp.status, 206) self.assertEqual(resp.status, 206)
# Get with a range in the middle of the second segment # Get with a range in the middle of the second segment
@ -1211,7 +1227,7 @@ class TestObject(unittest2.TestCase):
'X-Auth-Token': token, 'Range': 'bytes=5-'}) 'X-Auth-Token': token, 'Range': 'bytes=5-'})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEqual(resp.read(), ''.join(segments1)[5:]) self.assertEqual(resp.read(), b''.join(segments1)[5:])
self.assertEqual(resp.status, 206) self.assertEqual(resp.status, 206)
# Get with a full start and stop range # Get with a full start and stop range
@ -1221,7 +1237,7 @@ class TestObject(unittest2.TestCase):
'X-Auth-Token': token, 'Range': 'bytes=5-10'}) 'X-Auth-Token': token, 'Range': 'bytes=5-10'})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEqual(resp.read(), ''.join(segments1)[5:11]) self.assertEqual(resp.read(), b''.join(segments1)[5:11])
self.assertEqual(resp.status, 206) self.assertEqual(resp.status, 206)
# Upload the second set of segments # Upload the second set of segments
@ -1241,7 +1257,7 @@ class TestObject(unittest2.TestCase):
parsed.path, self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEqual(resp.read(), ''.join(segments1)) self.assertEqual(resp.read(), b''.join(segments1))
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
# Update the manifest # Update the manifest
@ -1262,7 +1278,7 @@ class TestObject(unittest2.TestCase):
parsed.path, self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEqual(resp.read(), ''.join(segments2)) self.assertEqual(resp.read(), b''.join(segments2))
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
if not tf.skip3: if not tf.skip3:
@ -1292,7 +1308,7 @@ class TestObject(unittest2.TestCase):
parsed.path, self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
self.assertEqual(resp.read(), ''.join(segments2)) self.assertEqual(resp.read(), b''.join(segments2))
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
# Create another container for the third set of segments # Create another container for the third set of segments
@ -1335,7 +1351,7 @@ class TestObject(unittest2.TestCase):
parsed.path, self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get) resp = retry(get)
self.assertEqual(resp.read(), ''.join(segments3)) self.assertEqual(resp.read(), b''.join(segments3))
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
if not tf.skip3: if not tf.skip3:
@ -1368,7 +1384,7 @@ class TestObject(unittest2.TestCase):
parsed.path, self.container), '', {'X-Auth-Token': token}) parsed.path, self.container), '', {'X-Auth-Token': token})
return check_response(conn) return check_response(conn)
resp = retry(get, use_account=3) resp = retry(get, use_account=3)
self.assertEqual(resp.read(), ''.join(segments3)) self.assertEqual(resp.read(), b''.join(segments3))
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
# Delete the manifest # Delete the manifest
@ -1480,7 +1496,7 @@ class TestObject(unittest2.TestCase):
if (tf.web_front_end == 'apache2'): if (tf.web_front_end == 'apache2'):
self.assertEqual(resp.status, 404) self.assertEqual(resp.status, 404)
else: else:
self.assertEqual(resp.read(), 'Invalid UTF8 or contains NULL') self.assertEqual(resp.read(), b'Invalid UTF8 or contains NULL')
self.assertEqual(resp.status, 412) self.assertEqual(resp.status, 412)
def test_cors(self): def test_cors(self):
@ -1645,6 +1661,8 @@ class TestObject(unittest2.TestCase):
for c, o, body in validate_requests: for c, o, body in validate_requests:
resp = retry(get_obj, c, o) resp = retry(get_obj, c, o)
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200)
if not six.PY2:
body = body.encode('utf8')
self.assertEqual(body, resp.read()) self.assertEqual(body, resp.read())
@requires_bulk @requires_bulk
View File
@ -21,6 +21,8 @@ import itertools
import json import json
from copy import deepcopy from copy import deepcopy
import six
import test.functional as tf import test.functional as tf
from test.functional import cluster_info, SkipTest from test.functional import cluster_info, SkipTest
from test.functional.tests import Utils, Base, Base2, BaseEnv from test.functional.tests import Utils, Base, Base2, BaseEnv
@ -35,6 +37,16 @@ def tearDownModule():
tf.teardown_package() tf.teardown_package()
def group_file_contents(file_contents):
# This looks a little funny, but iterating through a byte string on py3
# yields a sequence of ints, not a sequence of single-byte byte strings
# as it did on py2.
byte_iter = (file_contents[i:i + 1] for i in range(len(file_contents)))
return [
(char, sum(1 for _ in grp))
for char, grp in itertools.groupby(byte_iter)]
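
A quick standalone illustration (not part of the patch) of the behaviour this helper works around: iterating a byte string on py3 yields ints, so the helper slices out length-1 byte strings that groupby can compare against byte literals.

import itertools

data = b'aaabbc'
print(list(data)[:3])  # [97, 97, 97] on py3 -- ints, not b'a'
byte_iter = (data[i:i + 1] for i in range(len(data)))  # b'a', b'a', b'a', b'b', ...
print([(char, sum(1 for _ in grp))
       for char, grp in itertools.groupby(byte_iter)])
# [(b'a', 3), (b'b', 2), (b'c', 1)] -- the shape the tests assert against
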
class TestSloEnv(BaseEnv): class TestSloEnv(BaseEnv):
slo_enabled = None # tri-state: None initially, then True/False slo_enabled = None # tri-state: None initially, then True/False
@ -250,33 +262,36 @@ class TestSlo(Base):
(self.env.slo_enabled,)) (self.env.slo_enabled,))
manifest_abcde_hash = hashlib.md5() manifest_abcde_hash = hashlib.md5()
manifest_abcde_hash.update(hashlib.md5('a' * 1024 * 1024).hexdigest()) for letter in (b'a', b'b', b'c', b'd'):
manifest_abcde_hash.update(hashlib.md5('b' * 1024 * 1024).hexdigest()) manifest_abcde_hash.update(hashlib.md5(
manifest_abcde_hash.update(hashlib.md5('c' * 1024 * 1024).hexdigest()) letter * 1024 * 1024).hexdigest().encode('ascii'))
manifest_abcde_hash.update(hashlib.md5('d' * 1024 * 1024).hexdigest()) manifest_abcde_hash.update(hashlib.md5(
manifest_abcde_hash.update(hashlib.md5('e').hexdigest()) b'e').hexdigest().encode('ascii'))
self.manifest_abcde_etag = manifest_abcde_hash.hexdigest() self.manifest_abcde_etag = manifest_abcde_hash.hexdigest()
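
The same computation as a standalone sketch (segment sizes taken from this test environment): an SLO manifest's ETag is the MD5 of the concatenated hex MD5 digests of its segments, and on py3 those hex digests have to be encoded before being fed back into md5().

import hashlib

segment_etags = [hashlib.md5(letter * 1024 * 1024).hexdigest()
                 for letter in (b'a', b'b', b'c', b'd')]
segment_etags.append(hashlib.md5(b'e').hexdigest())
slo_etag = hashlib.md5(''.join(segment_etags).encode('ascii')).hexdigest()
print(slo_etag)  # compared, quote-wrapped, against the response's Etag header
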
def test_slo_get_simple_manifest(self): def test_slo_get_simple_manifest(self):
file_item = self.env.container.file('manifest-abcde') file_item = self.env.container.file('manifest-abcde')
file_contents = file_item.read() file_contents = file_item.read()
self.assertEqual(file_item.conn.response.status, 200) self.assertEqual(file_item.conn.response.status, 200)
headers = dict(file_item.conn.response.getheaders()) headers = dict(
(h.lower(), v)
for h, v in file_item.conn.response.getheaders())
self.assertIn('etag', headers) self.assertIn('etag', headers)
self.assertEqual(headers['etag'], '"%s"' % self.manifest_abcde_etag) self.assertEqual(headers['etag'], '"%s"' % self.manifest_abcde_etag)
self.assertEqual(4 * 1024 * 1024 + 1, len(file_contents)) self.assertEqual([
self.assertEqual('a', file_contents[0]) (b'a', 1024 * 1024),
self.assertEqual('a', file_contents[1024 * 1024 - 1]) (b'b', 1024 * 1024),
self.assertEqual('b', file_contents[1024 * 1024]) (b'c', 1024 * 1024),
self.assertEqual('d', file_contents[-2]) (b'd', 1024 * 1024),
self.assertEqual('e', file_contents[-1]) (b'e', 1),
], group_file_contents(file_contents))
def test_slo_container_listing(self): def test_slo_container_listing(self):
# the listing object size should equal the sum of the size of the # the listing object size should equal the sum of the size of the
# segments, not the size of the manifest body # segments, not the size of the manifest body
file_item = self.env.container.file(Utils.create_name()) file_item = self.env.container.file(Utils.create_name())
file_item.write( file_item.write(
json.dumps([self.env.seg_info['seg_a']]), json.dumps([self.env.seg_info['seg_a']]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
# The container listing exposes BOTH the MD5 of the manifest content # The container listing exposes BOTH the MD5 of the manifest content
# and the SLO MD5-of-MD5s by splitting the latter out into a separate # and the SLO MD5-of-MD5s by splitting the latter out into a separate
@ -346,93 +361,85 @@ class TestSlo(Base):
file_item = self.env.container.file('manifest-abcde-submanifest') file_item = self.env.container.file('manifest-abcde-submanifest')
file_contents = file_item.read() file_contents = file_item.read()
self.assertEqual(4 * 1024 * 1024 + 1, len(file_contents)) self.assertEqual(4 * 1024 * 1024 + 1, len(file_contents))
self.assertEqual('a', file_contents[0]) self.assertEqual([
self.assertEqual('a', file_contents[1024 * 1024 - 1]) (b'a', 1024 * 1024),
self.assertEqual('b', file_contents[1024 * 1024]) (b'b', 1024 * 1024),
self.assertEqual('d', file_contents[-2]) (b'c', 1024 * 1024),
self.assertEqual('e', file_contents[-1]) (b'd', 1024 * 1024),
(b'e', 1),
], group_file_contents(file_item.read()))
def test_slo_get_ranged_manifest(self): def test_slo_get_ranged_manifest(self):
file_item = self.env.container.file('ranged-manifest') file_item = self.env.container.file('ranged-manifest')
grouped_file_contents = [
(char, sum(1 for _char in grp))
for char, grp in itertools.groupby(file_item.read())]
self.assertEqual([ self.assertEqual([
('c', 1), (b'c', 1),
('d', 1024 * 1024), (b'd', 1024 * 1024),
('e', 1), (b'e', 1),
('a', 512 * 1024), (b'a', 512 * 1024),
('b', 512 * 1024), (b'b', 512 * 1024),
('c', 1), (b'c', 1),
('d', 1)], grouped_file_contents) (b'd', 1),
], group_file_contents(file_item.read()))
def test_slo_get_ranged_manifest_repeated_segment(self): def test_slo_get_ranged_manifest_repeated_segment(self):
file_item = self.env.container.file('ranged-manifest-repeated-segment') file_item = self.env.container.file('ranged-manifest-repeated-segment')
grouped_file_contents = [
(char, sum(1 for _char in grp))
for char, grp in itertools.groupby(file_item.read())]
self.assertEqual( self.assertEqual(
[('a', 2097152), ('b', 1048576)], [(b'a', 2097152), (b'b', 1048576)],
grouped_file_contents) group_file_contents(file_item.read()))
def test_slo_get_ranged_submanifest(self): def test_slo_get_ranged_submanifest(self):
file_item = self.env.container.file('ranged-submanifest') file_item = self.env.container.file('ranged-submanifest')
grouped_file_contents = [
(char, sum(1 for _char in grp))
for char, grp in itertools.groupby(file_item.read())]
self.assertEqual([ self.assertEqual([
('c', 1024 * 1024 + 1), (b'c', 1024 * 1024 + 1),
('d', 1024 * 1024), (b'd', 1024 * 1024),
('e', 1), (b'e', 1),
('a', 512 * 1024), (b'a', 512 * 1024),
('b', 512 * 1024), (b'b', 512 * 1024),
('c', 1), (b'c', 1),
('d', 512 * 1024 + 1), (b'd', 512 * 1024 + 1),
('e', 1), (b'e', 1),
('a', 512 * 1024), (b'a', 512 * 1024),
('b', 1), (b'b', 1),
('c', 1), (b'c', 1),
('d', 1)], grouped_file_contents) (b'd', 1),
], group_file_contents(file_item.read()))
def test_slo_ranged_get(self): def test_slo_ranged_get(self):
file_item = self.env.container.file('manifest-abcde') file_item = self.env.container.file('manifest-abcde')
file_contents = file_item.read(size=1024 * 1024 + 2, file_contents = file_item.read(size=1024 * 1024 + 2,
offset=1024 * 1024 - 1) offset=1024 * 1024 - 1)
self.assertEqual(file_item.conn.response.status, 206) self.assertEqual(file_item.conn.response.status, 206)
headers = dict(file_item.conn.response.getheaders()) headers = dict(
(h.lower(), v)
for h, v in file_item.conn.response.getheaders())
self.assertIn('etag', headers) self.assertIn('etag', headers)
self.assertEqual(headers['etag'], '"%s"' % self.manifest_abcde_etag) self.assertEqual(headers['etag'], '"%s"' % self.manifest_abcde_etag)
self.assertEqual('a', file_contents[0]) self.assertEqual([
self.assertEqual('b', file_contents[1]) (b'a', 1),
self.assertEqual('b', file_contents[-2]) (b'b', 1048576),
self.assertEqual('c', file_contents[-1]) (b'c', 1),
], group_file_contents(file_contents))
def test_slo_ranged_get_half_open_on_right(self): def test_slo_ranged_get_half_open_on_right(self):
file_item = self.env.container.file('manifest-abcde') file_item = self.env.container.file('manifest-abcde')
file_contents = file_item.read( file_contents = file_item.read(
hdrs={"Range": "bytes=1048571-"}) hdrs={"Range": "bytes=1048571-"})
grouped_file_contents = [
(char, sum(1 for _char in grp))
for char, grp in itertools.groupby(file_contents)]
self.assertEqual([ self.assertEqual([
('a', 5), (b'a', 5),
('b', 1048576), (b'b', 1048576),
('c', 1048576), (b'c', 1048576),
('d', 1048576), (b'd', 1048576),
('e', 1) (b'e', 1)
], grouped_file_contents) ], group_file_contents(file_contents))
def test_slo_ranged_get_half_open_on_left(self): def test_slo_ranged_get_half_open_on_left(self):
file_item = self.env.container.file('manifest-abcde') file_item = self.env.container.file('manifest-abcde')
file_contents = file_item.read( file_contents = file_item.read(
hdrs={"Range": "bytes=-123456"}) hdrs={"Range": "bytes=-123456"})
grouped_file_contents = [
(char, sum(1 for _char in grp))
for char, grp in itertools.groupby(file_contents)]
self.assertEqual([ self.assertEqual([
('d', 123455), (b'd', 123455),
('e', 1), (b'e', 1),
], grouped_file_contents) ], group_file_contents(file_contents))
def test_slo_multi_ranged_get(self): def test_slo_multi_ranged_get(self):
file_item = self.env.container.file('manifest-abcde') file_item = self.env.container.file('manifest-abcde')
@ -440,8 +447,12 @@ class TestSlo(Base):
hdrs={"Range": "bytes=1048571-1048580,2097147-2097156"}) hdrs={"Range": "bytes=1048571-1048580,2097147-2097156"})
# See testMultiRangeGets for explanation # See testMultiRangeGets for explanation
if six.PY2:
parser = email.parser.FeedParser() parser = email.parser.FeedParser()
parser.feed("Content-Type: %s\r\n\r\n" % file_item.content_type) else:
parser = email.parser.BytesFeedParser()
parser.feed((
"Content-Type: %s\r\n\r\n" % file_item.content_type).encode())
parser.feed(file_contents) parser.feed(file_contents)
root_message = parser.close() root_message = parser.close()
@ -454,22 +465,23 @@ class TestSlo(Base):
"application/octet-stream") "application/octet-stream")
self.assertEqual( self.assertEqual(
byteranges[0]['Content-Range'], "bytes 1048571-1048580/4194305") byteranges[0]['Content-Range'], "bytes 1048571-1048580/4194305")
self.assertEqual(byteranges[0].get_payload(), "aaaaabbbbb") self.assertEqual(byteranges[0].get_payload(decode=True), b"aaaaabbbbb")
self.assertEqual(byteranges[1]['Content-Type'], self.assertEqual(byteranges[1]['Content-Type'],
"application/octet-stream") "application/octet-stream")
self.assertEqual( self.assertEqual(
byteranges[1]['Content-Range'], "bytes 2097147-2097156/4194305") byteranges[1]['Content-Range'], "bytes 2097147-2097156/4194305")
self.assertEqual(byteranges[1].get_payload(), "bbbbbccccc") self.assertEqual(byteranges[1].get_payload(decode=True), b"bbbbbccccc")
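
For reference, a self-contained sketch of parsing a multipart/byteranges response the way the py3 branch above does; the boundary and the single-part body below are made up for illustration.

import email.parser

content_type = 'multipart/byteranges; boundary=b0'  # assumed boundary value
body = (b'--b0\r\n'
        b'Content-Type: application/octet-stream\r\n'
        b'Content-Range: bytes 0-4/26\r\n'
        b'\r\n'
        b'abcde\r\n'
        b'--b0--\r\n')
parser = email.parser.BytesFeedParser()
parser.feed(('Content-Type: %s\r\n\r\n' % content_type).encode())
parser.feed(body)
root_message = parser.close()
part = root_message.get_payload()[0]
print(part['Content-Range'])          # bytes 0-4/26
print(part.get_payload(decode=True))  # b'abcde' -- bytes, hence the b"..." assertions
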
def test_slo_ranged_submanifest(self): def test_slo_ranged_submanifest(self):
file_item = self.env.container.file('manifest-abcde-submanifest') file_item = self.env.container.file('manifest-abcde-submanifest')
file_contents = file_item.read(size=1024 * 1024 + 2, file_contents = file_item.read(size=1024 * 1024 + 2,
offset=1024 * 1024 * 2 - 1) offset=1024 * 1024 * 2 - 1)
self.assertEqual('b', file_contents[0]) self.assertEqual([
self.assertEqual('c', file_contents[1]) (b'b', 1),
self.assertEqual('c', file_contents[-2]) (b'c', 1024 * 1024),
self.assertEqual('d', file_contents[-1]) (b'd', 1),
], group_file_contents(file_contents))
def test_slo_etag_is_quote_wrapped_hash_of_etags(self): def test_slo_etag_is_quote_wrapped_hash_of_etags(self):
# we have this check in test_slo_get_simple_manifest, too, # we have this check in test_slo_get_simple_manifest, too,
@ -481,16 +493,17 @@ class TestSlo(Base):
def test_slo_etag_is_quote_wrapped_hash_of_etags_submanifests(self): def test_slo_etag_is_quote_wrapped_hash_of_etags_submanifests(self):
def hd(x): def hd(x):
return hashlib.md5(x).hexdigest() return hashlib.md5(x).hexdigest().encode('ascii')
expected_etag = hd(hd('a' * 1024 * 1024) + expected_etag = hd(hd(b'a' * 1024 * 1024) +
hd(hd('b' * 1024 * 1024) + hd(hd(b'b' * 1024 * 1024) +
hd(hd('c' * 1024 * 1024) + hd(hd(b'c' * 1024 * 1024) +
hd('d' * 1024 * 1024))) + hd(b'd' * 1024 * 1024))) +
hd('e')) hd(b'e'))
file_item = self.env.container.file('manifest-abcde-submanifest') file_item = self.env.container.file('manifest-abcde-submanifest')
self.assertEqual('"%s"' % expected_etag, file_item.info()['etag']) self.assertEqual('"%s"' % expected_etag.decode('ascii'),
file_item.info()['etag'])
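
Restating the nesting above as a standalone sketch: when a manifest references a sub-manifest, the sub-manifest contributes its own MD5-of-MD5s, so the outer ETag is computed recursively.

import hashlib

def hd(x):
    return hashlib.md5(x).hexdigest().encode('ascii')

inner = hd(hd(b'c' * 1024 * 1024) + hd(b'd' * 1024 * 1024))  # MD5-of-MD5s of the c and d segments
middle = hd(hd(b'b' * 1024 * 1024) + inner)                  # sub-manifest covering b, c, d
outer = hd(hd(b'a' * 1024 * 1024) + middle + hd(b'e'))       # top-level manifest's expected ETag
print(outer.decode('ascii'))
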
def test_slo_etag_mismatch(self): def test_slo_etag_mismatch(self):
file_item = self.env.container.file("manifest-a-bad-etag") file_item = self.env.container.file("manifest-a-bad-etag")
@ -499,7 +512,8 @@ class TestSlo(Base):
json.dumps([{ json.dumps([{
'size_bytes': 1024 * 1024, 'size_bytes': 1024 * 1024,
'etag': 'not it', 'etag': 'not it',
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
except ResponseError as err: except ResponseError as err:
self.assertEqual(400, err.status) self.assertEqual(400, err.status)
@ -512,8 +526,9 @@ class TestSlo(Base):
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'size_bytes': 1024 * 1024 - 1, 'size_bytes': 1024 * 1024 - 1,
'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(), 'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
except ResponseError as err: except ResponseError as err:
self.assertEqual(400, err.status) self.assertEqual(400, err.status)
@ -526,8 +541,9 @@ class TestSlo(Base):
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'size_bytes': 1024 * 1024, 'size_bytes': 1024 * 1024,
'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(), 'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'}, parms={'multipart-manifest': 'put'},
hdrs={'Etag': 'NOTetagofthesegments'}) hdrs={'Etag': 'NOTetagofthesegments'})
except ResponseError as err: except ResponseError as err:
@ -535,8 +551,8 @@ class TestSlo(Base):
def test_slo_client_etag(self): def test_slo_client_etag(self):
file_item = self.env.container.file("manifest-a-b-etag") file_item = self.env.container.file("manifest-a-b-etag")
etag_a = hashlib.md5('a' * 1024 * 1024).hexdigest() etag_a = hashlib.md5(b'a' * 1024 * 1024).hexdigest()
etag_b = hashlib.md5('b' * 1024 * 1024).hexdigest() etag_b = hashlib.md5(b'b' * 1024 * 1024).hexdigest()
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'size_bytes': 1024 * 1024, 'size_bytes': 1024 * 1024,
@ -544,9 +560,10 @@ class TestSlo(Base):
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}, { 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}, {
'size_bytes': 1024 * 1024, 'size_bytes': 1024 * 1024,
'etag': etag_b, 'etag': etag_b,
'path': '/%s/%s' % (self.env.container.name, 'seg_b')}]), 'path': '/%s/%s' % (self.env.container.name, 'seg_b'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'}, parms={'multipart-manifest': 'put'},
hdrs={'Etag': hashlib.md5(etag_a + etag_b).hexdigest()}) hdrs={'Etag': hashlib.md5((etag_a + etag_b).encode()).hexdigest()})
self.assert_status(201) self.assert_status(201)
def test_slo_unspecified_etag(self): def test_slo_unspecified_etag(self):
@ -555,7 +572,8 @@ class TestSlo(Base):
json.dumps([{ json.dumps([{
'size_bytes': 1024 * 1024, 'size_bytes': 1024 * 1024,
'etag': None, 'etag': None,
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
self.assert_status(201) self.assert_status(201)
@ -564,8 +582,9 @@ class TestSlo(Base):
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'size_bytes': None, 'size_bytes': None,
'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(), 'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
self.assert_status(201) self.assert_status(201)
@ -574,18 +593,20 @@ class TestSlo(Base):
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'path': '/%s/%s' % (self.env.container.name, 'path': '/%s/%s' % (self.env.container.name,
'seg_with_%ff_funky_name')}]), 'seg_with_%ff_funky_name'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
self.assert_status(201) self.assert_status(201)
self.assertEqual('z' * 10, file_item.read()) self.assertEqual(b'z' * 10, file_item.read())
def test_slo_missing_etag(self): def test_slo_missing_etag(self):
file_item = self.env.container.file("manifest-a-missing-etag") file_item = self.env.container.file("manifest-a-missing-etag")
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'size_bytes': 1024 * 1024, 'size_bytes': 1024 * 1024,
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
self.assert_status(201) self.assert_status(201)
@ -593,8 +614,9 @@ class TestSlo(Base):
file_item = self.env.container.file("manifest-a-missing-size") file_item = self.env.container.file("manifest-a-missing-size")
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(), 'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
self.assert_status(201) self.assert_status(201)
@ -602,7 +624,8 @@ class TestSlo(Base):
file_item = self.env.container.file("manifest-a-path-only") file_item = self.env.container.file("manifest-a-path-only")
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
self.assert_status(201) self.assert_status(201)
@ -611,9 +634,10 @@ class TestSlo(Base):
try: try:
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'teag': hashlib.md5('a' * 1024 * 1024).hexdigest(), 'teag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
'size_bytes': 1024 * 1024, 'size_bytes': 1024 * 1024,
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
except ResponseError as err: except ResponseError as err:
self.assertEqual(400, err.status) self.assertEqual(400, err.status)
@ -625,9 +649,10 @@ class TestSlo(Base):
try: try:
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(), 'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
'siz_bytes': 1024 * 1024, 'siz_bytes': 1024 * 1024,
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
except ResponseError as err: except ResponseError as err:
self.assertEqual(400, err.status) self.assertEqual(400, err.status)
@ -640,14 +665,15 @@ class TestSlo(Base):
file_item.write( file_item.write(
json.dumps([ json.dumps([
{'size_bytes': 1024 * 1024, {'size_bytes': 1024 * 1024,
'etag': hashlib.md5('a' * 1024 * 1024).hexdigest(), 'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}, 'path': '/%s/%s' % (self.env.container.name, 'seg_a')},
{'size_bytes': 1024 * 1024, {'size_bytes': 1024 * 1024,
'etag': hashlib.md5('b' * 1024 * 1024).hexdigest(), 'etag': hashlib.md5(b'b' * 1024 * 1024).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_b')}, 'path': '/%s/%s' % (self.env.container.name, 'seg_b')},
{'size_bytes': 1024 * 1024, {'size_bytes': 1024 * 1024,
'etag': hashlib.md5('c' * 1024 * 1024).hexdigest(), 'etag': hashlib.md5(b'c' * 1024 * 1024).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_c')}]), 'path': '/%s/%s' % (self.env.container.name, 'seg_c')},
]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
self.assertEqual(400, catcher.exception.status) self.assertEqual(400, catcher.exception.status)
@ -752,8 +778,8 @@ class TestSlo(Base):
except ValueError: except ValueError:
self.fail("COPY didn't copy the manifest (invalid json on GET)") self.fail("COPY didn't copy the manifest (invalid json on GET)")
self.assertEqual(source_contents, copied_contents) self.assertEqual(source_contents, copied_contents)
self.assertEqual(copied_json[0], self.assertEqual(copied_json[0], {
{'data': base64.b64encode('APRE' * 8)}) 'data': base64.b64encode(b'APRE' * 8).decode('ascii')})
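
A standalone sketch (not part of the patch) of the inline 'data' segment format checked above: the raw bytes are base64-encoded, and on py3 b64encode() returns bytes that must be decoded before the manifest is JSON-serialized.

import base64
import json

data_segment = {'data': base64.b64encode(b'APRE' * 8).decode('ascii')}
manifest = json.dumps([data_segment])
print(manifest)  # [{"data": "QVBSRUFQUkVBUFJFQVBSRUFQUkVBUFJFQVBSRUFQUkU="}]
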
def test_slo_copy_the_manifest_updating_metadata(self): def test_slo_copy_the_manifest_updating_metadata(self):
source = self.env.container.file("manifest-abcde") source = self.env.container.file("manifest-abcde")
@ -847,7 +873,7 @@ class TestSlo(Base):
parms={'multipart-manifest': 'get'}) parms={'multipart-manifest': 'get'})
self.assertEqual(400, file_item.conn.response.status) self.assertEqual(400, file_item.conn.response.status)
resp_body = file_item.conn.response.read() resp_body = file_item.conn.response.read()
self.assertEqual(5, resp_body.count('403 Forbidden'), self.assertEqual(5, resp_body.count(b'403 Forbidden'),
'Unexpected response body %r' % resp_body) 'Unexpected response body %r' % resp_body)
# create segments container in account2 with read access for account1 # create segments container in account2 with read access for account1
@ -863,7 +889,7 @@ class TestSlo(Base):
parms={'multipart-manifest': 'get'}) parms={'multipart-manifest': 'get'})
self.assertEqual(400, file_item.conn.response.status) self.assertEqual(400, file_item.conn.response.status)
resp_body = file_item.conn.response.read() resp_body = file_item.conn.response.read()
self.assertEqual(5, resp_body.count('404 Not Found'), self.assertEqual(5, resp_body.count(b'404 Not Found'),
'Unexpected response body %r' % resp_body) 'Unexpected response body %r' % resp_body)
# create segments in account2 container with same name as in account1, # create segments in account2 container with same name as in account1,
@ -895,23 +921,24 @@ class TestSlo(Base):
if include_error: if include_error:
manifest_data.append({'path': 'non-existent/segment'}) manifest_data.append({'path': 'non-existent/segment'})
resp = file_item.write( resp = file_item.write(
json.dumps(manifest_data), json.dumps(manifest_data).encode('ascii'),
parms={'multipart-manifest': 'put', 'heartbeat': 'on'}, parms={'multipart-manifest': 'put', 'heartbeat': 'on'},
hdrs=headers, return_resp=True) hdrs=headers, return_resp=True)
self.assertEqual(resp.status, 202) self.assertEqual(resp.status, 202)
self.assertTrue(resp.chunked) self.assertTrue(resp.chunked)
body_lines = resp.body.split('\n', 2) body_lines = resp.body.split(b'\n', 2)
self.assertFalse(body_lines[0].strip()) # all whitespace self.assertFalse(body_lines[0].strip()) # all whitespace
self.assertEqual('\r', body_lines[1]) self.assertEqual(b'\r', body_lines[1])
return body_lines[2] return body_lines[2]
body_lines = do_put().split('\n') body_lines = do_put().decode('utf8').split('\n')
self.assertIn('Response Status: 201 Created', body_lines) self.assertIn('Response Status: 201 Created', body_lines)
self.assertIn('Etag', [line.split(':', 1)[0] for line in body_lines]) self.assertIn('Etag', [line.split(':', 1)[0] for line in body_lines])
self.assertIn('Last Modified', [line.split(':', 1)[0] self.assertIn('Last Modified', [line.split(':', 1)[0]
for line in body_lines]) for line in body_lines])
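
To make that body format concrete, a standalone sketch with an assumed sample response from a heartbeating SLO PUT: leading whitespace keeps the connection alive, then a bare carriage return, then the plain-text summary that do_put() returns and the assertions split into lines. The Etag and Last Modified values are placeholders.

body = (b'    \r\n'
        b'\r\n'
        b'Response Status: 201 Created\n'
        b'Etag: "abc123placeholder"\n'
        b'Last Modified: Fri, 02 Aug 2019 00:00:00 GMT\n')
lead, cr, summary = body.split(b'\n', 2)
assert not lead.strip() and cr == b'\r'
lines = summary.decode('utf8').split('\n')
print('Response Status: 201 Created' in lines)    # True
print([line.split(':', 1)[0] for line in lines])  # ['Response Status', 'Etag', 'Last Modified', '']
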
body_lines = do_put({'Accept': 'text/plain'}).split('\n') body_lines = do_put(
{'Accept': 'text/plain'}).decode('utf8').split('\n')
self.assertIn('Response Status: 201 Created', body_lines) self.assertIn('Response Status: 201 Created', body_lines)
self.assertIn('Etag', [line.split(':', 1)[0] for line in body_lines]) self.assertIn('Etag', [line.split(':', 1)[0] for line in body_lines])
self.assertIn('Last Modified', [line.split(':', 1)[0] self.assertIn('Last Modified', [line.split(':', 1)[0]
@ -932,7 +959,7 @@ class TestSlo(Base):
'Errors': [], 'Errors': [],
}) })
body_lines = do_put(include_error=True).split('\n') body_lines = do_put(include_error=True).decode('utf8').split('\n')
self.assertIn('Response Status: 400 Bad Request', body_lines) self.assertIn('Response Status: 400 Bad Request', body_lines)
self.assertIn('Response Body: Bad Request', body_lines) self.assertIn('Response Body: Bad Request', body_lines)
self.assertNotIn('Etag', [line.split(':', 1)[0] self.assertNotIn('Etag', [line.split(':', 1)[0]
@ -982,7 +1009,7 @@ class TestSlo(Base):
file_item.write( file_item.write(
json.dumps([seg_info['seg_a'], seg_info['seg_b'], json.dumps([seg_info['seg_a'], seg_info['seg_b'],
seg_info['seg_c'], seg_info['seg_d'], seg_info['seg_c'], seg_info['seg_d'],
seg_info['seg_e']]), seg_info['seg_e']]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
return file_item return file_item
@ -1053,15 +1080,19 @@ class TestSlo(Base):
self.assertEqual(len(value), 2) self.assertEqual(len(value), 2)
self.assertEqual(value[0]['bytes'], 1024 * 1024) self.assertEqual(value[0]['bytes'], 1024 * 1024)
self.assertEqual(value[0]['hash'], self.assertEqual(value[0]['hash'],
hashlib.md5('d' * 1024 * 1024).hexdigest()) hashlib.md5(b'd' * 1024 * 1024).hexdigest())
self.assertEqual(value[0]['name'], expected_name = '/%s/seg_d' % self.env.container.name
'/%s/seg_d' % self.env.container.name.decode("utf-8")) if six.PY2:
expected_name = expected_name.decode("utf-8")
self.assertEqual(value[0]['name'], expected_name)
self.assertEqual(value[1]['bytes'], 1024 * 1024) self.assertEqual(value[1]['bytes'], 1024 * 1024)
self.assertEqual(value[1]['hash'], self.assertEqual(value[1]['hash'],
hashlib.md5('b' * 1024 * 1024).hexdigest()) hashlib.md5(b'b' * 1024 * 1024).hexdigest())
self.assertEqual(value[1]['name'], expected_name = '/%s/seg_b' % self.env.container.name
'/%s/seg_b' % self.env.container.name.decode("utf-8")) if six.PY2:
expected_name = expected_name.decode("utf-8")
self.assertEqual(value[1]['name'], expected_name)
def test_slo_get_raw_the_manifest_with_details_from_server(self): def test_slo_get_raw_the_manifest_with_details_from_server(self):
manifest = self.env.container.file("manifest-db") manifest = self.env.container.file("manifest-db")
@ -1081,14 +1112,18 @@ class TestSlo(Base):
self.assertEqual(len(value), 2) self.assertEqual(len(value), 2)
self.assertEqual(value[0]['size_bytes'], 1024 * 1024) self.assertEqual(value[0]['size_bytes'], 1024 * 1024)
self.assertEqual(value[0]['etag'], self.assertEqual(value[0]['etag'],
hashlib.md5('d' * 1024 * 1024).hexdigest()) hashlib.md5(b'd' * 1024 * 1024).hexdigest())
self.assertEqual(value[0]['path'], expected_name = '/%s/seg_d' % self.env.container.name
'/%s/seg_d' % self.env.container.name.decode("utf-8")) if six.PY2:
expected_name = expected_name.decode("utf-8")
self.assertEqual(value[0]['path'], expected_name)
self.assertEqual(value[1]['size_bytes'], 1024 * 1024) self.assertEqual(value[1]['size_bytes'], 1024 * 1024)
self.assertEqual(value[1]['etag'], self.assertEqual(value[1]['etag'],
hashlib.md5('b' * 1024 * 1024).hexdigest()) hashlib.md5(b'b' * 1024 * 1024).hexdigest())
self.assertEqual(value[1]['path'], expected_name = '/%s/seg_b' % self.env.container.name
'/%s/seg_b' % self.env.container.name.decode("utf-8")) if six.PY2:
expected_name = expected_name.decode("utf-8")
self.assertEqual(value[1]['path'], expected_name)
file_item = self.env.container.file("manifest-from-get-raw") file_item = self.env.container.file("manifest-from-get-raw")
file_item.write(got_body, parms={'multipart-manifest': 'put'}) file_item.write(got_body, parms={'multipart-manifest': 'put'})
@ -1121,16 +1156,19 @@ class TestSlo(Base):
'etag': None, 'etag': None,
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}]) 'path': '/%s/%s' % (self.env.container.name, 'seg_a')}])
self.assertRaises(ResponseError, file_item.write, manifest, self.assertRaises(ResponseError, file_item.write,
manifest.encode('ascii'),
parms={'multipart-manifest': 'put'}, parms={'multipart-manifest': 'put'},
hdrs={'If-None-Match': '"not-star"'}) hdrs={'If-None-Match': '"not-star"'})
self.assert_status(400) self.assert_status(400)
file_item.write(manifest, parms={'multipart-manifest': 'put'}, file_item.write(manifest.encode('ascii'),
parms={'multipart-manifest': 'put'},
hdrs={'If-None-Match': '*'}) hdrs={'If-None-Match': '*'})
self.assert_status(201) self.assert_status(201)
self.assertRaises(ResponseError, file_item.write, manifest, self.assertRaises(ResponseError, file_item.write,
manifest.encode('ascii'),
parms={'multipart-manifest': 'put'}, parms={'multipart-manifest': 'put'},
hdrs={'If-None-Match': '*'}) hdrs={'If-None-Match': '*'})
self.assert_status(412) self.assert_status(412)
@ -1191,11 +1229,11 @@ class TestSlo(Base):
self.env.container.update_metadata(referer_metadata) self.env.container.update_metadata(referer_metadata)
contents = slo_file.read(hdrs=headers) contents = slo_file.read(hdrs=headers)
self.assertEqual(4 * 1024 * 1024 + 1, len(contents)) self.assertEqual(4 * 1024 * 1024 + 1, len(contents))
self.assertEqual('a', contents[0]) self.assertEqual(b'a', contents[:1])
self.assertEqual('a', contents[1024 * 1024 - 1]) self.assertEqual(b'a', contents[1024 * 1024 - 1:1024 * 1024])
self.assertEqual('b', contents[1024 * 1024]) self.assertEqual(b'b', contents[1024 * 1024:1024 * 1024 + 1])
self.assertEqual('d', contents[-2]) self.assertEqual(b'd', contents[-2:-1])
self.assertEqual('e', contents[-1]) self.assertEqual(b'e', contents[-1:])
def test_slo_data_segments(self): def test_slo_data_segments(self):
# len('APRE' * 8) == 32 # len('APRE' * 8) == 32
@ -1212,40 +1250,37 @@ class TestSlo(Base):
file_item = self.env.container.file(file_name) file_item = self.env.container.file(file_name)
file_contents = file_item.read(size=3 * 1024 ** 2 + 456, file_contents = file_item.read(size=3 * 1024 ** 2 + 456,
offset=28) offset=28)
grouped_file_contents = [
(char, sum(1 for _char in grp))
for char, grp in itertools.groupby(file_contents)]
self.assertEqual([ self.assertEqual([
('A', 1), (b'A', 1),
('P', 1), (b'P', 1),
('R', 1), (b'R', 1),
('E', 1), (b'E', 1),
('a', 1024 * 1024), (b'a', 1024 * 1024),
] + [ ] + [
('A', 1), (b'A', 1),
('P', 1), (b'P', 1),
('O', 1), (b'O', 1),
('S', 1), (b'S', 1),
] * 16 + [ ] * 16 + [
('b', 1024 * 1024), (b'b', 1024 * 1024),
] + [ ] + [
('B', 1), (b'B', 1),
('P', 1), (b'P', 1),
('O', 1), (b'O', 1),
('S', 1), (b'S', 1),
] * 32 + [ ] * 32 + [
('C', 1), (b'C', 1),
('P', 1), (b'P', 1),
('R', 1), (b'R', 1),
('E', 1), (b'E', 1),
] * 64 + [ ] * 64 + [
('c', 1024 * 1024), (b'c', 1024 * 1024),
] + [ ] + [
('C', 1), (b'C', 1),
('P', 1), (b'P', 1),
('O', 1), (b'O', 1),
('S', 1), (b'S', 1),
], grouped_file_contents) ], group_file_contents(file_contents))
class TestSloUTF8(Base2, TestSlo): class TestSloUTF8(Base2, TestSlo):
View File
@ -73,8 +73,10 @@ class TestSymlinkEnv(BaseEnv):
return (cls.link_cont, cls.tgt_cont) return (cls.link_cont, cls.tgt_cont)
@classmethod @classmethod
def target_content_location(cls): def target_content_location(cls, override_obj=None, override_account=None):
return '%s/%s' % (cls.tgt_cont, cls.tgt_obj) account = override_account or tf.parsed[0].path.split('/', 2)[2]
return '/v1/%s/%s/%s' % (account, cls.tgt_cont,
override_obj or cls.tgt_obj)
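
As a concrete illustration (account, container, and object names below are placeholders), the Content-Location these tests now expect is the full /v1 path rather than the bare container/object pair the old helper returned.

account = 'AUTH_test'  # placeholder; the tests derive it from the storage URL's path
tgt_cont = 'symlink-targets'
tgt_obj = 'target-obj'
print('/v1/%s/%s/%s' % (account, tgt_cont, tgt_obj))  # /v1/AUTH_test/symlink-targets/target-obj
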
@classmethod @classmethod
def _make_request(cls, url, token, parsed, conn, method, def _make_request(cls, url, token, parsed, conn, method,
@ -102,20 +104,21 @@ class TestSymlinkEnv(BaseEnv):
return name return name
@classmethod @classmethod
def _create_tgt_object(cls): def _create_tgt_object(cls, body=TARGET_BODY):
resp = retry(cls._make_request, method='PUT', resp = retry(cls._make_request, method='PUT',
headers={'Content-Type': 'application/target'},
container=cls.tgt_cont, obj=cls.tgt_obj, container=cls.tgt_cont, obj=cls.tgt_obj,
body=TARGET_BODY) body=body)
if resp.status != 201: if resp.status != 201:
raise ResponseError(resp) raise ResponseError(resp)
# sanity: successful put response has content-length 0 # sanity: successful put response has content-length 0
cls.tgt_length = str(len(TARGET_BODY)) cls.tgt_length = str(len(body))
cls.tgt_etag = resp.getheader('etag') cls.tgt_etag = resp.getheader('etag')
resp = retry(cls._make_request, method='GET', resp = retry(cls._make_request, method='GET',
container=cls.tgt_cont, obj=cls.tgt_obj) container=cls.tgt_cont, obj=cls.tgt_obj)
if resp.status != 200 and resp.content != TARGET_BODY: if resp.status != 200 and resp.content != body:
raise ResponseError(resp) raise ResponseError(resp)
@classmethod @classmethod
@ -176,10 +179,17 @@ class TestSymlink(Base):
yield uuid4().hex yield uuid4().hex
self.obj_name_gen = object_name_generator() self.obj_name_gen = object_name_generator()
self._account_name = None
def tearDown(self): def tearDown(self):
self.env.tearDown() self.env.tearDown()
@property
def account_name(self):
if not self._account_name:
self._account_name = tf.parsed[0].path.split('/', 2)[2]
return self._account_name
def _make_request(self, url, token, parsed, conn, method, def _make_request(self, url, token, parsed, conn, method,
container, obj='', headers=None, body=b'', container, obj='', headers=None, body=b'',
query_args=None, allow_redirects=True): query_args=None, allow_redirects=True):
@ -210,21 +220,29 @@ class TestSymlink(Base):
headers=headers) headers=headers)
self.assertEqual(resp.status, 201) self.assertEqual(resp.status, 201)
def _test_put_symlink_with_etag(self, link_cont, link_obj, tgt_cont,
tgt_obj, etag, headers=None):
headers = headers or {}
headers.update({'X-Symlink-Target': '%s/%s' % (tgt_cont, tgt_obj),
'X-Symlink-Target-Etag': etag})
resp = retry(self._make_request, method='PUT',
container=link_cont, obj=link_obj,
headers=headers)
self.assertEqual(resp.status, 201, resp.content)
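
Outside the test harness the same request looks roughly like the sketch below; the endpoint, token, and ETag values are placeholders, and requests is used purely for illustration. A static symlink is an empty-body PUT that names its target and pins the target's current ETag.

import requests  # illustrative only; the tests go through their own retry/_make_request helpers

url = 'http://127.0.0.1:8080/v1/AUTH_test/link-cont/link-obj'  # placeholder endpoint
headers = {
    'X-Auth-Token': 'AUTH_tk_placeholder',
    'X-Symlink-Target': 'tgt-cont/tgt-obj',
    'X-Symlink-Target-Etag': 'd41d8cd98f00b204e9800998ecf8427e',  # must match the target's ETag
}
resp = requests.put(url, data=b'', headers=headers)
print(resp.status_code)  # the tests above expect 201 when the ETag matches
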
def _test_get_as_target_object( def _test_get_as_target_object(
self, link_cont, link_obj, expected_content_location, self, link_cont, link_obj, expected_content_location,
use_account=1): use_account=1):
resp = retry( resp = retry(
self._make_request, method='GET', self._make_request, method='GET',
container=link_cont, obj=link_obj, use_account=use_account) container=link_cont, obj=link_obj, use_account=use_account)
self.assertEqual(resp.status, 200) self.assertEqual(resp.status, 200, resp.content)
self.assertEqual(resp.content, TARGET_BODY) self.assertEqual(resp.content, TARGET_BODY)
self.assertEqual(resp.getheader('content-length'), self.assertEqual(resp.getheader('content-length'),
str(self.env.tgt_length)) str(self.env.tgt_length))
self.assertEqual(resp.getheader('etag'), self.env.tgt_etag) self.assertEqual(resp.getheader('etag'), self.env.tgt_etag)
self.assertIn('Content-Location', resp.headers) self.assertIn('Content-Location', resp.headers)
# TODO: content-location is a full path so it's better to assert self.assertEqual(expected_content_location,
# with the value, instead of assertIn
self.assertIn(expected_content_location,
resp.getheader('content-location')) resp.getheader('content-location'))
return resp return resp
@ -299,8 +317,8 @@ class TestSymlink(Base):
# and it's normalized # and it's normalized
self._assertSymlink( self._assertSymlink(
self.env.link_cont, link_obj, self.env.link_cont, link_obj,
expected_content_location='%s/%s' % ( expected_content_location=self.env.target_content_location(
self.env.tgt_cont, normalized_quoted_obj)) normalized_quoted_obj))
# create a symlink using the normalized target path # create a symlink using the normalized target path
self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj, self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj,
@ -309,8 +327,8 @@ class TestSymlink(Base):
# and it's ALSO normalized # and it's ALSO normalized
self._assertSymlink( self._assertSymlink(
self.env.link_cont, link_obj, self.env.link_cont, link_obj,
expected_content_location='%s/%s' % ( expected_content_location=self.env.target_content_location(
self.env.tgt_cont, normalized_quoted_obj)) normalized_quoted_obj))
def test_symlink_put_head_get(self): def test_symlink_put_head_get(self):
link_obj = uuid4().hex link_obj = uuid4().hex
@ -322,6 +340,195 @@ class TestSymlink(Base):
self._assertSymlink(self.env.link_cont, link_obj) self._assertSymlink(self.env.link_cont, link_obj)
def test_symlink_with_etag_put_head_get(self):
link_obj = uuid4().hex
# PUT link_obj
self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
link_obj=link_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj,
etag=self.env.tgt_etag)
self._assertSymlink(self.env.link_cont, link_obj)
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj,
headers={'If-Match': self.env.tgt_etag})
self.assertEqual(resp.status, 200)
self.assertEqual(resp.getheader('content-location'),
self.env.target_content_location())
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj,
headers={'If-Match': 'not-the-etag'})
self.assertEqual(resp.status, 412)
self.assertEqual(resp.getheader('content-location'),
self.env.target_content_location())
def test_static_symlink_with_bad_etag_put_head_get(self):
link_obj = uuid4().hex
# PUT link_obj
self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
link_obj=link_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj,
etag=self.env.tgt_etag)
# overwrite tgt object
self.env._create_tgt_object(body='updated target body')
resp = retry(
self._make_request, method='HEAD',
container=self.env.link_cont, obj=link_obj)
self.assertEqual(resp.status, 409)
# but we still know where it points
self.assertEqual(resp.getheader('content-location'),
self.env.target_content_location())
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj)
self.assertEqual(resp.status, 409)
self.assertEqual(resp.getheader('content-location'),
self.env.target_content_location())
# uses a mechanism entirely divorced from if-match
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj,
headers={'If-Match': self.env.tgt_etag})
self.assertEqual(resp.status, 409)
self.assertEqual(resp.getheader('content-location'),
self.env.target_content_location())
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj,
headers={'If-Match': 'not-the-etag'})
self.assertEqual(resp.status, 409)
self.assertEqual(resp.getheader('content-location'),
self.env.target_content_location())
resp = retry(
self._make_request, method='DELETE',
container=self.env.tgt_cont, obj=self.env.tgt_obj)
# not-found-ness trumps if-match-ness
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj)
self.assertEqual(resp.status, 404)
self.assertEqual(resp.getheader('content-location'),
self.env.target_content_location())
def test_dynamic_link_to_static_link(self):
static_link_obj = uuid4().hex
# PUT static_link to tgt_obj
self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
link_obj=static_link_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj,
etag=self.env.tgt_etag)
symlink_obj = uuid4().hex
# PUT symlink to static_link
self._test_put_symlink(link_cont=self.env.link_cont,
link_obj=symlink_obj,
tgt_cont=self.env.link_cont,
tgt_obj=static_link_obj)
self._test_get_as_target_object(
link_cont=self.env.link_cont, link_obj=symlink_obj,
expected_content_location=self.env.target_content_location())
def test_static_link_to_dynamic_link(self):
symlink_obj = uuid4().hex
# PUT symlink to tgt_obj
self._test_put_symlink(link_cont=self.env.link_cont,
link_obj=symlink_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj)
static_link_obj = uuid4().hex
# PUT a static_link to the symlink
self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
link_obj=static_link_obj,
tgt_cont=self.env.link_cont,
tgt_obj=symlink_obj,
etag=MD5_OF_EMPTY_STRING)
self._test_get_as_target_object(
link_cont=self.env.link_cont, link_obj=static_link_obj,
expected_content_location=self.env.target_content_location())
def test_static_link_to_nowhere(self):
missing_obj = uuid4().hex
static_link_obj = uuid4().hex
# PUT a static_link to the missing name
headers = {
'X-Symlink-Target': '%s/%s' % (self.env.link_cont, missing_obj),
'X-Symlink-Target-Etag': MD5_OF_EMPTY_STRING}
resp = retry(self._make_request, method='PUT',
container=self.env.link_cont, obj=static_link_obj,
headers=headers)
self.assertEqual(resp.status, 409)
self.assertEqual(resp.content, b'X-Symlink-Target does not exist')
def test_static_link_to_broken_symlink(self):
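# Deleting the final target breaks every link in the chain for reads
# (404), but the link objects themselves remain valid and can even be
# used as targets for new static links.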
symlink_obj = uuid4().hex
# PUT symlink to tgt_obj
self._test_put_symlink(link_cont=self.env.link_cont,
link_obj=symlink_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj)
static_link_obj = uuid4().hex
# PUT a static_link to the symlink
self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
link_obj=static_link_obj,
tgt_cont=self.env.link_cont,
tgt_obj=symlink_obj,
etag=MD5_OF_EMPTY_STRING)
# break the symlink
resp = retry(
self._make_request, method='DELETE',
container=self.env.tgt_cont, obj=self.env.tgt_obj)
self.assertEqual(resp.status // 100, 2)
# sanity
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=symlink_obj)
self.assertEqual(resp.status, 404)
# static_link is broken too!
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=static_link_obj)
self.assertEqual(resp.status, 404)
# interestingly you may create a static_link to a broken symlink
broken_static_link_obj = uuid4().hex
# PUT a static_link to the broken symlink
self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
link_obj=broken_static_link_obj,
tgt_cont=self.env.link_cont,
tgt_obj=symlink_obj,
etag=MD5_OF_EMPTY_STRING)
def test_symlink_get_ranged(self):
link_obj = uuid4().hex
@ -353,8 +560,7 @@ class TestSymlink(Base):
container=self.env.link_cont, obj=link_obj, use_account=1)
self.assertEqual(resp.status, 404)
self.assertIn('Content-Location', resp.headers)
-expected_location_hdr = "%s/%s" % (self.env.tgt_cont, target_obj)
-self.assertIn(expected_location_hdr,
+self.assertEqual(self.env.target_content_location(target_obj),
resp.getheader('content-location'))
# HEAD on target object via symlink should return a 404 since target
@ -396,7 +602,7 @@ class TestSymlink(Base):
self.assertEqual(resp.getheader('content-length'), str(target_length))
self.assertEqual(resp.getheader('etag'), target_etag)
self.assertIn('Content-Location', resp.headers)
-self.assertIn(expected_location_hdr,
+self.assertEqual(self.env.target_content_location(target_obj),
resp.getheader('content-location'))
def test_symlink_chain(self):
@ -448,6 +654,66 @@ class TestSymlink(Base):
# However, HEAD/GET to the (just) link is still ok
self._assertLinkObject(container, too_many_chain_link)
def test_symlink_chain_with_etag(self):
# Testing to symlink chain like symlink -> symlink -> target.
symloop_max = cluster_info['symlink']['symloop_max']
# create symlink chain in a container. To simplify,
# use target container for all objects (symlinks and target) here
previous = self.env.tgt_obj
container = self.env.tgt_cont
for link_obj in itertools.islice(self.obj_name_gen, symloop_max):
# PUT link_obj point to tgt_obj
self._test_put_symlink_with_etag(link_cont=container,
link_obj=link_obj,
tgt_cont=container,
tgt_obj=previous,
etag=self.env.tgt_etag)
# set current link_obj to previous
previous = link_obj
# the last link is valid for symloop_max constraint
max_chain_link = link_obj
self._assertSymlink(link_cont=container, link_obj=max_chain_link)
# chained etag validation works as long as the target symlink works
headers = {'X-Symlink-Target': '%s/%s' % (container, max_chain_link),
'X-Symlink-Target-Etag': 'not-the-real-etag'}
resp = retry(self._make_request, method='PUT',
container=container, obj=uuid4().hex,
headers=headers)
self.assertEqual(resp.status, 409)
# PUT a new link_obj pointing to the max_chain_link can validate the
# ETag but will result in 409 error on the HEAD/GET.
too_many_chain_link = next(self.obj_name_gen)
self._test_put_symlink_with_etag(
link_cont=container, link_obj=too_many_chain_link,
tgt_cont=container, tgt_obj=max_chain_link,
etag=self.env.tgt_etag)
# try to HEAD to target object via too_many_chain_link
resp = retry(self._make_request, method='HEAD',
container=container,
obj=too_many_chain_link)
self.assertEqual(resp.status, 409)
self.assertEqual(resp.content, b'')
# try to GET to target object via too_many_chain_link
resp = retry(self._make_request, method='GET',
container=container,
obj=too_many_chain_link)
self.assertEqual(resp.status, 409)
self.assertEqual(
resp.content,
b'Too many levels of symbolic links, maximum allowed is %d' %
symloop_max)
# However, HEAD/GET to the (just) link is still ok
self._assertLinkObject(container, too_many_chain_link)
def test_symlink_and_slo_manifest_chain(self):
if 'slo' not in cluster_info:
raise SkipTest
@ -557,7 +823,7 @@ class TestSymlink(Base):
'%s/%s' % (self.env.tgt_cont, self.env.tgt_obj)}
resp = retry(
self._make_request, method='PUT', container=self.env.link_cont,
-obj=link_obj, body='non-zero-length', headers=headers)
+obj=link_obj, body=b'non-zero-length', headers=headers)
self.assertEqual(resp.status, 400)
self.assertEqual(resp.content,
@ -636,7 +902,6 @@ class TestSymlink(Base):
tgt_obj=self.env.tgt_obj)
copy_src = '%s/%s' % (self.env.link_cont, link_obj1)
-account_one = tf.parsed[0].path.split('/', 2)[2]
perm_two = tf.swift_test_perm[1]
# add X-Content-Read to account 1 link_cont and tgt_cont
@ -659,7 +924,7 @@ class TestSymlink(Base):
# symlink to the account 2 container that points to the
# container/object in the account 2.
# (the container/object is not prepared)
-headers = {'X-Copy-From-Account': account_one,
+headers = {'X-Copy-From-Account': self.account_name,
'X-Copy-From': copy_src}
resp = retry(self._make_request_with_symlink_get, method='PUT',
container=self.env.link_cont, obj=link_obj2,
@ -669,6 +934,7 @@ class TestSymlink(Base):
# sanity: HEAD/GET on link_obj itself
self._assertLinkObject(self.env.link_cont, link_obj2, use_account=2)
+account_two = tf.parsed[1].path.split('/', 2)[2]
# no target object in the account 2
for method in ('HEAD', 'GET'):
resp = retry(
@ -676,14 +942,15 @@ class TestSymlink(Base):
container=self.env.link_cont, obj=link_obj2, use_account=2)
self.assertEqual(resp.status, 404)
self.assertIn('content-location', resp.headers)
-self.assertIn(self.env.target_content_location(),
+self.assertEqual(
+self.env.target_content_location(override_account=account_two),
resp.getheader('content-location'))
# copy symlink itself to a different account with target account
# the target path will be in account 1
# the target path will have an object
-headers = {'X-Symlink-target-Account': account_one,
-'X-Copy-From-Account': account_one,
+headers = {'X-Symlink-target-Account': self.account_name,
+'X-Copy-From-Account': self.account_name,
'X-Copy-From': copy_src}
resp = retry(
self._make_request_with_symlink_get, method='PUT',
@ -780,7 +1047,8 @@ class TestSymlink(Base):
link_obj = uuid4().hex
value1 = uuid4().hex
-self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj,
+self._test_put_symlink(link_cont=self.env.link_cont,
+link_obj=link_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj)
@ -821,6 +1089,73 @@ class TestSymlink(Base):
# sanity: no X-Object-Meta-Alpha exists in the response header
self.assertNotIn('X-Object-Meta-Alpha', resp.headers)
def test_post_to_broken_dynamic_symlink(self):
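# POST is never applied through a symlink; the proxy answers 307 with
# the (possibly nonexistent) target in Location, and the metadata is
# applied to the link object itself.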
# create a symlink to nowhere
link_obj = '%s-the-link' % uuid4().hex
tgt_obj = '%s-no-where' % uuid4().hex
headers = {'X-Symlink-Target': '%s/%s' % (self.env.tgt_cont, tgt_obj)}
resp = retry(self._make_request, method='PUT',
container=self.env.link_cont, obj=link_obj,
headers=headers)
self.assertEqual(resp.status, 201)
# it's a real link!
self._assertLinkObject(self.env.link_cont, link_obj)
# ... it's just broken
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj)
self.assertEqual(resp.status, 404)
target_path = '/v1/%s/%s/%s' % (
self.account_name, self.env.tgt_cont, tgt_obj)
self.assertEqual(target_path, resp.headers['Content-Location'])
# we'll redirect with the Location header to the (invalid) target
headers = {'X-Object-Meta-Alpha': 'apple'}
resp = retry(
self._make_request, method='POST', container=self.env.link_cont,
obj=link_obj, headers=headers, allow_redirects=False)
self.assertEqual(resp.status, 307)
self.assertEqual(target_path, resp.headers['Location'])
# and of course metadata *is* applied to the link
resp = retry(
self._make_request_with_symlink_get, method='HEAD',
container=self.env.link_cont, obj=link_obj)
self.assertEqual(resp.status, 200)
self.assertEqual(resp.getheader('X-Object-Meta-Alpha'), 'apple')
def test_post_to_broken_static_symlink(self):
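# Even when reads through the link 409 (target ETag mismatch), POST
# still redirects with 307 and echoes the stored X-Symlink-Target-Etag.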
link_obj = uuid4().hex
# PUT link_obj
self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
link_obj=link_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj,
etag=self.env.tgt_etag)
# overwrite tgt object
old_tgt_etag = self.env.tgt_etag
self.env._create_tgt_object(body='updated target body')
# sanity
resp = retry(
self._make_request, method='HEAD',
container=self.env.link_cont, obj=link_obj)
self.assertEqual(resp.status, 409)
# but POST will still 307
headers = {'X-Object-Meta-Alpha': 'apple'}
resp = retry(
self._make_request, method='POST', container=self.env.link_cont,
obj=link_obj, headers=headers, allow_redirects=False)
self.assertEqual(resp.status, 307)
target_path = '/v1/%s/%s/%s' % (
self.account_name, self.env.tgt_cont, self.env.tgt_obj)
self.assertEqual(target_path, resp.headers['Location'])
# but we give you the Etag just like... FYI?
self.assertEqual(old_tgt_etag, resp.headers['X-Symlink-Target-Etag'])
def test_post_with_symlink_header(self):
# POSTing to a symlink is not allowed and should return a 307
# updating the symlink target with a POST should always fail
@ -878,11 +1213,9 @@ class TestSymlink(Base):
raise SkipTest
link_obj = uuid4().hex
-account_one = tf.parsed[0].path.split('/', 2)[2]
# create symlink in account 2
# pointing to account 1
-headers = {'X-Symlink-Target-Account': account_one,
+headers = {'X-Symlink-Target-Account': self.account_name,
'X-Symlink-Target':
'%s/%s' % (self.env.tgt_cont, self.env.tgt_obj)}
resp = retry(self._make_request, method='PUT',
@ -900,6 +1233,9 @@ class TestSymlink(Base):
container=self.env.link_cont, obj=link_obj, use_account=2)
self.assertEqual(resp.status, 403)
+# still know where it's pointing
+self.assertEqual(resp.getheader('content-location'),
+self.env.target_content_location())
# add X-Content-Read to account 1 tgt_cont
# permit account 2 to read account 1 tgt_cont
@ -917,11 +1253,96 @@ class TestSymlink(Base):
self.env.link_cont, link_obj,
expected_content_location=self.env.target_content_location(),
use_account=2)
-self.assertIn(account_one, resp.getheader('content-location'))
@requires_acls
def test_symlink_with_etag_put_target_account(self):
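# Creating a cross-account static link requires read access to the
# target container so the proxy can verify X-Symlink-Target-Etag at
# PUT time.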
if tf.skip or tf.skip2:
raise SkipTest
link_obj = uuid4().hex
# try to create a symlink in account 2 pointing to account 1
symlink_headers = {
'X-Symlink-Target-Account': self.account_name,
'X-Symlink-Target':
'%s/%s' % (self.env.tgt_cont, self.env.tgt_obj),
'X-Symlink-Target-Etag': self.env.tgt_etag}
resp = retry(self._make_request, method='PUT',
container=self.env.link_cont, obj=link_obj,
headers=symlink_headers, use_account=2)
# since we don't have read access to verify the object we get the
# permissions error
self.assertEqual(resp.status, 403)
perm_two = tf.swift_test_perm[1]
# add X-Content-Read to account 1 tgt_cont
# permit account 2 to read account 1 tgt_cont
# add acl to allow reading from source
acl_headers = {'X-Container-Read': perm_two}
resp = retry(self._make_request, method='POST',
container=self.env.tgt_cont, headers=acl_headers)
self.assertEqual(resp.status, 204)
# now we can create the symlink
resp = retry(self._make_request, method='PUT',
container=self.env.link_cont, obj=link_obj,
headers=symlink_headers, use_account=2)
self.assertEqual(resp.status, 201)
self._assertLinkObject(self.env.link_cont, link_obj, use_account=2)
# GET to target object via symlink
resp = self._test_get_as_target_object(
self.env.link_cont, link_obj,
expected_content_location=self.env.target_content_location(),
use_account=2)
# Overwrite target
resp = retry(self._make_request, method='PUT',
container=self.env.tgt_cont, obj=self.env.tgt_obj,
body='some other content')
self.assertEqual(resp.status, 201)
# link is now broken
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj, use_account=2)
self.assertEqual(resp.status, 409)
# but we still know where it points
self.assertEqual(resp.getheader('content-location'),
self.env.target_content_location())
# sanity test, remove permissions
headers = {'X-Remove-Container-Read': 'remove'}
resp = retry(self._make_request, method='POST',
container=self.env.tgt_cont, headers=headers)
self.assertEqual(resp.status, 204)
# it should be ok to get the symlink itself, but not the target object
# because the read acl has been revoked
self._assertLinkObject(self.env.link_cont, link_obj, use_account=2)
resp = retry(
self._make_request, method='GET',
container=self.env.link_cont, obj=link_obj, use_account=2)
self.assertEqual(resp.status, 403)
# Still know where it is, though
self.assertEqual(resp.getheader('content-location'),
self.env.target_content_location())
def test_symlink_invalid_etag(self):
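# On an ETag mismatch the 409 body reports the target object's actual
# ETag alongside the value supplied in X-Symlink-Target-Etag.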
link_obj = uuid4().hex
headers = {'X-Symlink-Target': '%s/%s' % (self.env.tgt_cont,
self.env.tgt_obj),
'X-Symlink-Target-Etag': 'not-the-real-etag'}
resp = retry(self._make_request, method='PUT',
container=self.env.link_cont, obj=link_obj,
headers=headers)
self.assertEqual(resp.status, 409)
self.assertEqual(resp.content,
b"Object Etag 'ab706c400731332bffa67ed4bc15dcac' "
b"does not match X-Symlink-Target-Etag header "
b"'not-the-real-etag'")
def test_symlink_object_listing(self):
link_obj = uuid4().hex
self._test_put_symlink(link_cont=self.env.link_cont, link_obj=link_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj)
@ -933,9 +1354,53 @@ class TestSymlink(Base):
self.assertEqual(resp.status, 200)
object_list = json.loads(resp.content)
self.assertEqual(len(object_list), 1)
obj_info = object_list[0]
self.assertIn('symlink_path', obj_info)
self.assertEqual(self.env.target_content_location(),
obj_info['symlink_path'])
self.assertNotIn('symlink_etag', obj_info)
def test_static_link_object_listing(self):
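# Static links expose symlink_etag and symlink_bytes in container
# listings in addition to symlink_path; plain symlinks only report
# symlink_path.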
link_obj = uuid4().hex
self._test_put_symlink_with_etag(link_cont=self.env.link_cont,
link_obj=link_obj,
tgt_cont=self.env.tgt_cont,
tgt_obj=self.env.tgt_obj,
etag=self.env.tgt_etag)
# sanity
self._assertSymlink(self.env.link_cont, link_obj)
resp = retry(self._make_request, method='GET',
container=self.env.link_cont,
query_args='format=json')
self.assertEqual(resp.status, 200)
object_list = json.loads(resp.content)
self.assertEqual(len(object_list), 1)
self.assertIn('symlink_path', object_list[0])
-self.assertIn(self.env.target_content_location(),
+self.assertEqual(self.env.target_content_location(),
object_list[0]['symlink_path'])
obj_info = object_list[0]
self.assertIn('symlink_etag', obj_info)
self.assertEqual(self.env.tgt_etag,
obj_info['symlink_etag'])
self.assertEqual(int(self.env.tgt_length),
obj_info['symlink_bytes'])
self.assertEqual(obj_info['content_type'], 'application/target')
# POSTing to a static_link can change the listing Content-Type
headers = {'Content-Type': 'application/foo'}
resp = retry(
self._make_request, method='POST', container=self.env.link_cont,
obj=link_obj, headers=headers, allow_redirects=False)
self.assertEqual(resp.status, 307)
resp = retry(self._make_request, method='GET',
container=self.env.link_cont,
query_args='format=json')
self.assertEqual(resp.status, 200)
object_list = json.loads(resp.content)
self.assertEqual(len(object_list), 1)
obj_info = object_list[0]
self.assertEqual(obj_info['content_type'], 'application/foo')
class TestCrossPolicySymlinkEnv(TestSymlinkEnv):
@ -1007,6 +1472,8 @@ class TestSymlinkSlo(Base):
"Expected slo_enabled to be True/False, got %r" %
(self.env.slo_enabled,))
self.file_symlink = self.env.container.file(uuid4().hex)
+self.account_name = self.env.container.conn.storage_path.rsplit(
+'/', 1)[-1]
def test_symlink_target_slo_manifest(self):
self.file_symlink.write(hdrs={'X-Symlink-Target':
@ -1020,6 +1487,142 @@ class TestSymlinkSlo(Base):
(b'e', 1),
], group_by_byte(self.file_symlink.read()))
manifest_body = self.file_symlink.read(parms={
'multipart-manifest': 'get'})
self.assertEqual(
[seg['hash'] for seg in json.loads(manifest_body)],
[self.env.seg_info['seg_%s' % c]['etag'] for c in 'abcde'])
for obj_info in self.env.container.files(parms={'format': 'json'}):
if obj_info['name'] == self.file_symlink.name:
break
else:
self.fail('Unable to find file_symlink in listing.')
obj_info.pop('last_modified')
self.assertEqual(obj_info, {
'name': self.file_symlink.name,
'content_type': 'application/octet-stream',
'hash': 'd41d8cd98f00b204e9800998ecf8427e',
'bytes': 0,
'symlink_path': '/v1/%s/%s/manifest-abcde' % (
self.account_name, self.env.container.name),
})
def test_static_link_target_slo_manifest(self):
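# For an SLO target, X-Symlink-Target-Etag must be the manifest's own
# ETag (as returned with multipart-manifest=get), not the large
# object's slo_etag.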
manifest_info = self.env.container2.file(
"manifest-abcde").info(parms={
'multipart-manifest': 'get'})
manifest_etag = manifest_info['etag']
self.file_symlink.write(hdrs={
'X-Symlink-Target': '%s/%s' % (
self.env.container2.name, 'manifest-abcde'),
'X-Symlink-Target-Etag': manifest_etag,
})
self.assertEqual([
(b'a', 1024 * 1024),
(b'b', 1024 * 1024),
(b'c', 1024 * 1024),
(b'd', 1024 * 1024),
(b'e', 1),
], group_by_byte(self.file_symlink.read()))
manifest_body = self.file_symlink.read(parms={
'multipart-manifest': 'get'})
self.assertEqual(
[seg['hash'] for seg in json.loads(manifest_body)],
[self.env.seg_info['seg_%s' % c]['etag'] for c in 'abcde'])
# check listing
for obj_info in self.env.container.files(parms={'format': 'json'}):
if obj_info['name'] == self.file_symlink.name:
break
else:
self.fail('Unable to find file_symlink in listing.')
obj_info.pop('last_modified')
self.maxDiff = None
slo_info = self.env.container2.file("manifest-abcde").info()
self.assertEqual(obj_info, {
'name': self.file_symlink.name,
'content_type': 'application/octet-stream',
'hash': u'd41d8cd98f00b204e9800998ecf8427e',
'bytes': 0,
'slo_etag': slo_info['etag'],
'symlink_path': '/v1/%s/%s/manifest-abcde' % (
self.account_name, self.env.container2.name),
'symlink_bytes': 4 * 2 ** 20 + 1,
'symlink_etag': manifest_etag,
})
def test_static_link_target_slo_manifest_wrong_etag(self):
# try the slo "etag"
slo_etag = self.env.container2.file(
"manifest-abcde").info()['etag']
self.assertRaises(ResponseError, self.file_symlink.write, hdrs={
'X-Symlink-Target': '%s/%s' % (
self.env.container2.name, 'manifest-abcde'),
'X-Symlink-Target-Etag': slo_etag,
})
self.assert_status(400) # no quotes allowed!
# try the slo etag w/o the quotes
slo_etag = slo_etag.strip('"')
self.assertRaises(ResponseError, self.file_symlink.write, hdrs={
'X-Symlink-Target': '%s/%s' % (
self.env.container2.name, 'manifest-abcde'),
'X-Symlink-Target-Etag': slo_etag,
})
self.assert_status(409) # that just doesn't match
def test_static_link_target_symlink_to_slo_manifest(self):
# write symlink
self.file_symlink.write(hdrs={'X-Symlink-Target':
'%s/%s' % (self.env.container.name,
'manifest-abcde')})
# write static_link
file_static_link = self.env.container.file(uuid4().hex)
file_static_link.write(hdrs={
'X-Symlink-Target': '%s/%s' % (
self.file_symlink.container, self.file_symlink.name),
'X-Symlink-Target-Etag': MD5_OF_EMPTY_STRING,
})
# validate reads
self.assertEqual([
(b'a', 1024 * 1024),
(b'b', 1024 * 1024),
(b'c', 1024 * 1024),
(b'd', 1024 * 1024),
(b'e', 1),
], group_by_byte(file_static_link.read()))
manifest_body = file_static_link.read(parms={
'multipart-manifest': 'get'})
self.assertEqual(
[seg['hash'] for seg in json.loads(manifest_body)],
[self.env.seg_info['seg_%s' % c]['etag'] for c in 'abcde'])
# check listing
for obj_info in self.env.container.files(parms={'format': 'json'}):
if obj_info['name'] == file_static_link.name:
break
else:
self.fail('Unable to find file_symlink in listing.')
obj_info.pop('last_modified')
self.maxDiff = None
self.assertEqual(obj_info, {
'name': file_static_link.name,
'content_type': 'application/octet-stream',
'hash': 'd41d8cd98f00b204e9800998ecf8427e',
'bytes': 0,
'symlink_path': u'/v1/%s/%s/%s' % (
self.account_name, self.file_symlink.container,
self.file_symlink.name),
# the only time bytes/etag aren't the target object are when they
# validate through another static_link
'symlink_bytes': 0,
'symlink_etag': MD5_OF_EMPTY_STRING,
})
def test_symlink_target_slo_nested_manifest(self):
self.file_symlink.write(hdrs={'X-Symlink-Target':
'%s/%s' % (self.env.container.name,

View File

@ -18,6 +18,7 @@ from copy import deepcopy
import json
import time
import unittest2
+import six
from six.moves.urllib.parse import quote, unquote
import test.functional as tf
@ -54,9 +55,12 @@ class TestObjectVersioningEnv(BaseEnv):
cls.conn2 = Connection(config2)
cls.conn2.authenticate()
+if six.PY2:
# avoid getting a prefix that stops halfway through an encoded
# character
prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
+else:
+prefix = Utils.create_name()[:10]
cls.versions_container = cls.account.container(prefix + "-versions")
if not cls.versions_container.create():
@ -143,9 +147,12 @@ class TestCrossPolicyObjectVersioningEnv(BaseEnv):
cls.conn2 = Connection(config2)
cls.conn2.authenticate()
+if six.PY2:
# avoid getting a prefix that stops halfway through an encoded
# character
prefix = Utils.create_name().decode("utf-8")[:10].encode("utf-8")
+else:
+prefix = Utils.create_name()[:10]
cls.versions_container = cls.account.container(prefix + "-versions")
if not cls.versions_container.create(
@ -254,7 +261,7 @@ class TestObjectVersioning(Base):
put_headers = {'Content-Type': 'text/jibberish01',
'Content-Encoding': 'gzip',
'Content-Disposition': 'attachment; filename=myfile'}
-versioned_obj.write("aaaaa", hdrs=put_headers)
+versioned_obj.write(b"aaaaa", hdrs=put_headers)
obj_info = versioned_obj.info()
self.assertEqual('text/jibberish01', obj_info['content_type'])
expected_content_types.append('text/jibberish01')
@ -263,14 +270,15 @@ class TestObjectVersioning(Base):
# assert that content-encoding or content-disposition get *copied* to
# the object version unless they were set on the original PUT, so
# populate expected_headers by making a HEAD on the original object
-resp_headers = dict(versioned_obj.conn.response.getheaders())
+resp_headers = {
+h.lower(): v for h, v in versioned_obj.conn.response.getheaders()}
expected_headers = {}
for k, v in put_headers.items():
if k.lower() in resp_headers:
expected_headers[k] = v
self.assertEqual(0, versions_container.info()['object_count'])
-versioned_obj.write("bbbbb", hdrs={'Content-Type': 'text/jibberish02',
+versioned_obj.write(b"bbbbb", hdrs={'Content-Type': 'text/jibberish02',
'X-Object-Meta-Foo': 'Bar'})
versioned_obj.initialize()
self.assertEqual(versioned_obj.content_type, 'text/jibberish02')
@ -282,10 +290,11 @@ class TestObjectVersioning(Base):
versioned_obj_name = versions_container.files()[0]
prev_version = versions_container.file(versioned_obj_name)
prev_version.initialize()
-self.assertEqual("aaaaa", prev_version.read())
+self.assertEqual(b"aaaaa", prev_version.read())
self.assertEqual(prev_version.content_type, 'text/jibberish01')
-resp_headers = dict(prev_version.conn.response.getheaders())
+resp_headers = {
+h.lower(): v for h, v in prev_version.conn.response.getheaders()}
for k, v in expected_headers.items():
self.assertIn(k.lower(), resp_headers)
self.assertEqual(v, resp_headers[k.lower()])
@ -298,29 +307,29 @@ class TestObjectVersioning(Base):
self.assertEqual(1, versions_container.info()['object_count'])
# if we overwrite it again, there are two versions
-versioned_obj.write("ccccc")
+versioned_obj.write(b"ccccc")
self.assertEqual(2, versions_container.info()['object_count'])
expected_content_types.append('text/jibberish02')
versioned_obj_name = versions_container.files()[1]
prev_version = versions_container.file(versioned_obj_name)
prev_version.initialize()
-self.assertEqual("bbbbb", prev_version.read())
+self.assertEqual(b"bbbbb", prev_version.read())
self.assertEqual(prev_version.content_type, 'text/jibberish02')
self.assertNotIn('foo', prev_version.metadata)
self.assertIn('fu', prev_version.metadata)
# versioned_obj keeps the newest content
-self.assertEqual("ccccc", versioned_obj.read())
+self.assertEqual(b"ccccc", versioned_obj.read())
# test copy from a different container
src_container = self.env.account.container(Utils.create_name())
self.assertTrue(src_container.create())
src_name = Utils.create_name()
src_obj = src_container.file(src_name)
-src_obj.write("ddddd", hdrs={'Content-Type': 'text/jibberish04'})
+src_obj.write(b"ddddd", hdrs={'Content-Type': 'text/jibberish04'})
src_obj.copy(container.name, obj_name)
-self.assertEqual("ddddd", versioned_obj.read())
+self.assertEqual(b"ddddd", versioned_obj.read())
versioned_obj.initialize()
self.assertEqual(versioned_obj.content_type, 'text/jibberish04')
expected_content_types.append('text/jibberish04')
@ -330,7 +339,7 @@ class TestObjectVersioning(Base):
versioned_obj_name = versions_container.files()[2]
prev_version = versions_container.file(versioned_obj_name)
prev_version.initialize()
-self.assertEqual("ccccc", prev_version.read())
+self.assertEqual(b"ccccc", prev_version.read())
# for further use in the mode-specific tests
return (versioned_obj, expected_headers, expected_content_types)
@ -348,27 +357,28 @@ class TestObjectVersioning(Base):
# test delete
versioned_obj.delete()
-self.assertEqual("ccccc", versioned_obj.read())
+self.assertEqual(b"ccccc", versioned_obj.read())
expected_content_types.pop()
self.assertEqual(expected_content_types, [
o['content_type'] for o in versions_container.files(
parms={'format': 'json'})])
versioned_obj.delete()
-self.assertEqual("bbbbb", versioned_obj.read())
+self.assertEqual(b"bbbbb", versioned_obj.read())
expected_content_types.pop()
self.assertEqual(expected_content_types, [
o['content_type'] for o in versions_container.files(
parms={'format': 'json'})])
versioned_obj.delete()
-self.assertEqual("aaaaa", versioned_obj.read())
+self.assertEqual(b"aaaaa", versioned_obj.read())
self.assertEqual(0, versions_container.info()['object_count'])
# verify that all the original object headers have been copied back
obj_info = versioned_obj.info()
self.assertEqual('text/jibberish01', obj_info['content_type'])
-resp_headers = dict(versioned_obj.conn.response.getheaders())
+resp_headers = {
+h.lower(): v for h, v in versioned_obj.conn.response.getheaders()}
for k, v in expected_headers.items():
self.assertIn(k.lower(), resp_headers)
self.assertEqual(v, resp_headers[k.lower()])
@ -390,27 +400,28 @@ class TestObjectVersioning(Base):
# test delete
versioned_obj.delete()
-self.assertEqual("ccccc", versioned_obj.read())
+self.assertEqual(b"ccccc", versioned_obj.read())
expected_content_types.pop()
self.assertEqual(expected_content_types, [
o['content_type'] for o in versions_container.files(
parms={'format': 'json'})])
versioned_obj.delete()
-self.assertEqual("bbbbb", versioned_obj.read())
+self.assertEqual(b"bbbbb", versioned_obj.read())
expected_content_types.pop()
self.assertEqual(expected_content_types, [
o['content_type'] for o in versions_container.files(
parms={'format': 'json'})])
versioned_obj.delete()
-self.assertEqual("aaaaa", versioned_obj.read())
+self.assertEqual(b"aaaaa", versioned_obj.read())
self.assertEqual(0, versions_container.info()['object_count'])
# verify that all the original object headers have been copied back
obj_info = versioned_obj.info()
self.assertEqual('text/jibberish01', obj_info['content_type'])
-resp_headers = dict(versioned_obj.conn.response.getheaders())
+resp_headers = {
+h.lower(): v for h, v in versioned_obj.conn.response.getheaders()}
for k, v in expected_headers.items():
self.assertIn(k.lower(), resp_headers)
self.assertEqual(v, resp_headers[k.lower()])
@ -420,12 +431,14 @@ class TestObjectVersioning(Base):
def assert_most_recent_version(self, obj_name, content,
should_be_dlo=False):
+name_len = len(obj_name if six.PY2 else obj_name.encode('utf8'))
archive_versions = self.env.versions_container.files(parms={
-'prefix': '%03x%s/' % (len(obj_name), obj_name),
+'prefix': '%03x%s/' % (name_len, obj_name),
'reverse': 'yes'})
archive_file = self.env.versions_container.file(archive_versions[0])
self.assertEqual(content, archive_file.read())
-resp_headers = dict(archive_file.conn.response.getheaders())
+resp_headers = {
+h.lower(): v for h, v in archive_file.conn.response.getheaders()}
if should_be_dlo:
self.assertIn('x-object-manifest', resp_headers)
else:
@ -443,34 +456,35 @@ class TestObjectVersioning(Base):
time.sleep(.01)  # guarantee that the timestamp changes
obj_name_seg = obj_name + '/' + i
versioned_obj = container.file(obj_name_seg)
-versioned_obj.write(i)
+versioned_obj.write(i.encode('ascii'))
# immediately overwrite
-versioned_obj.write(i + i)
+versioned_obj.write((i + i).encode('ascii'))
self.assertEqual(3, versions_container.info()['object_count'])
man_file = container.file(obj_name)
# write a normal file first
-man_file.write('old content')
+man_file.write(b'old content')
# guarantee that the timestamp changes
time.sleep(.01)
# overwrite with a dlo manifest
-man_file.write('', hdrs={"X-Object-Manifest": "%s/%s/" %
+man_file.write(b'', hdrs={"X-Object-Manifest": "%s/%s/" %
(self.env.container.name, obj_name)})
self.assertEqual(4, versions_container.info()['object_count'])
-self.assertEqual("112233", man_file.read())
+self.assertEqual(b"112233", man_file.read())
-self.assert_most_recent_version(obj_name, 'old content')
+self.assert_most_recent_version(obj_name, b'old content')
# overwrite the manifest with a normal file
-man_file.write('new content')
+man_file.write(b'new content')
self.assertEqual(5, versions_container.info()['object_count'])
# new most-recent archive is the dlo
-self.assert_most_recent_version(obj_name, '112233', should_be_dlo=True)
+self.assert_most_recent_version(
+obj_name, b'112233', should_be_dlo=True)
return obj_name, man_file
@ -480,15 +494,16 @@ class TestObjectVersioning(Base):
# verify that restore works properly
man_file.delete()
self.assertEqual(4, self.env.versions_container.info()['object_count'])
-self.assertEqual("112233", man_file.read())
+self.assertEqual(b"112233", man_file.read())
-resp_headers = dict(man_file.conn.response.getheaders())
+resp_headers = {
+h.lower(): v for h, v in man_file.conn.response.getheaders()}
self.assertIn('x-object-manifest', resp_headers)
-self.assert_most_recent_version(obj_name, 'old content')
+self.assert_most_recent_version(obj_name, b'old content')
man_file.delete()
self.assertEqual(3, self.env.versions_container.info()['object_count'])
-self.assertEqual("old content", man_file.read())
+self.assertEqual(b"old content", man_file.read())
def test_versioning_container_acl(self):
if tf.skip2:
@ -503,7 +518,7 @@ class TestObjectVersioning(Base):
# check account2 cannot write to versions container
fail_obj_name = Utils.create_name()
fail_obj = versions_container.file(fail_obj_name)
-self.assertRaises(ResponseError, fail_obj.write, "should fail",
+self.assertRaises(ResponseError, fail_obj.write, b"should fail",
cfg={'use_token': self.env.storage_token2})
# create container and give write access to account2
@ -528,22 +543,22 @@ class TestObjectVersioning(Base):
# write object twice to container and check version
obj_name = Utils.create_name()
versioned_obj = container.file(obj_name)
-self.assertTrue(versioned_obj.write("never argue with the data",
+self.assertTrue(versioned_obj.write(b"never argue with the data",
cfg={'use_token': self.env.storage_token2}))
-self.assertEqual(versioned_obj.read(), "never argue with the data")
+self.assertEqual(versioned_obj.read(), b"never argue with the data")
self.assertTrue(
-versioned_obj.write("we don't have no beer, just tequila",
+versioned_obj.write(b"we don't have no beer, just tequila",
cfg={'use_token': self.env.storage_token2}))
self.assertEqual(versioned_obj.read(),
-"we don't have no beer, just tequila")
+b"we don't have no beer, just tequila")
self.assertEqual(1, versions_container.info()['object_count'])
# read the original uploaded object
for filename in versions_container.files():
backup_file = versions_container.file(filename)
break
-self.assertEqual(backup_file.read(), "never argue with the data")
+self.assertEqual(backup_file.read(), b"never argue with the data")
# user3 (some random user with no access to any of account1)
# tries to read from versioned container
@ -556,13 +571,13 @@ class TestObjectVersioning(Base):
hdrs={'X-Container-Read': self.env.conn3.user_acl},
cfg={'use_token': self.env.storage_token2})
a2_obj = a2_container.file(Utils.create_name())
-self.assertTrue(a2_obj.write("unused",
+self.assertTrue(a2_obj.write(b"unused",
cfg={'use_token': self.env.storage_token2}))
# user3 cannot write, delete, or copy to/from source container either
number_of_versions = versions_container.info()['object_count']
self.assertRaises(ResponseError, versioned_obj.write,
-"some random user trying to write data",
+b"some random user trying to write data",
cfg={'use_token': self.env.storage_token3})
self.assertEqual(number_of_versions,
versions_container.info()['object_count'])
@ -610,11 +625,11 @@ class TestObjectVersioning(Base):
obj_name = Utils.create_name()
versioned_obj = container.file(obj_name)
-versioned_obj.write("aaaaa")
-self.assertEqual("aaaaa", versioned_obj.read())
-versioned_obj.write("bbbbb")
-self.assertEqual("bbbbb", versioned_obj.read())
+versioned_obj.write(b"aaaaa")
+self.assertEqual(b"aaaaa", versioned_obj.read())
+versioned_obj.write(b"bbbbb")
+self.assertEqual(b"bbbbb", versioned_obj.read())
# Use token from second account and try to delete the object
org_token = self.env.account.conn.storage_token
@ -627,7 +642,7 @@ class TestObjectVersioning(Base):
self.env.account.conn.storage_token = org_token
# Verify with token from first account
-self.assertEqual("bbbbb", versioned_obj.read())
+self.assertEqual(b"bbbbb", versioned_obj.read())
return versioned_obj
def test_versioning_check_acl(self):
@ -635,7 +650,7 @@ class TestObjectVersioning(Base):
raise SkipTest('Account2 not set')
versioned_obj = self._test_versioning_check_acl_setup()
versioned_obj.delete()
-self.assertEqual("aaaaa", versioned_obj.read())
+self.assertEqual(b"aaaaa", versioned_obj.read())
def _check_overwriting_symlink(self):
# assertions common to x-versions-location and x-history-location modes
@ -646,29 +661,29 @@ class TestObjectVersioning(Base):
tgt_b_name = Utils.create_name()
tgt_a = container.file(tgt_a_name)
-tgt_a.write("aaaaa")
+tgt_a.write(b"aaaaa")
tgt_b = container.file(tgt_b_name)
-tgt_b.write("bbbbb")
+tgt_b.write(b"bbbbb")
symlink_name = Utils.create_name()
sym_tgt_header = quote(unquote('%s/%s' % (container.name, tgt_a_name)))
sym_headers_a = {'X-Symlink-Target': sym_tgt_header}
symlink = container.file(symlink_name)
-symlink.write("", hdrs=sym_headers_a)
-self.assertEqual("aaaaa", symlink.read())
+symlink.write(b"", hdrs=sym_headers_a)
+self.assertEqual(b"aaaaa", symlink.read())
sym_headers_b = {'X-Symlink-Target': '%s/%s' % (container.name,
tgt_b_name)}
-symlink.write("", hdrs=sym_headers_b)
-self.assertEqual("bbbbb", symlink.read())
+symlink.write(b"", hdrs=sym_headers_b)
+self.assertEqual(b"bbbbb", symlink.read())
# the old version got saved off
self.assertEqual(1, versions_container.info()['object_count'])
versioned_obj_name = versions_container.files()[0]
prev_version = versions_container.file(versioned_obj_name)
prev_version_info = prev_version.info(parms={'symlink': 'get'})
-self.assertEqual("aaaaa", prev_version.read())
+self.assertEqual(b"aaaaa", prev_version.read())
self.assertEqual(MD5_OF_EMPTY_STRING, prev_version_info['etag'])
self.assertEqual(sym_tgt_header,
prev_version_info['x_symlink_target'])
@ -682,7 +697,7 @@ class TestObjectVersioning(Base):
# test delete
symlink.delete()
sym_info = symlink.info(parms={'symlink': 'get'})
-self.assertEqual("aaaaa", symlink.read())
+self.assertEqual(b"aaaaa", symlink.read())
self.assertEqual(MD5_OF_EMPTY_STRING, sym_info['etag'])
self.assertEqual(
quote(unquote('%s/%s' % (self.env.container.name, target.name))),
@ -690,16 +705,16 @@ class TestObjectVersioning(Base):
def _setup_symlink(self):
target = self.env.container.file('target-object')
-target.write('target object data')
+target.write(b'target object data')
symlink = self.env.container.file('symlink')
-symlink.write('', hdrs={
+symlink.write(b'', hdrs={
'Content-Type': 'application/symlink',
'X-Symlink-Target': '%s/%s' % (
self.env.container.name, target.name)})
return symlink, target
def _assert_symlink(self, symlink, target):
-self.assertEqual('target object data', symlink.read())
+self.assertEqual(b'target object data', symlink.read())
self.assertEqual(target.info(), symlink.info())
self.assertEqual('application/symlink',
symlink.info(parms={
@ -708,7 +723,7 @@ class TestObjectVersioning(Base):
def _check_copy_destination_restore_symlink(self):
# assertions common to x-versions-location and x-history-location modes
symlink, target = self._setup_symlink()
-symlink.write('this is not a symlink')
+symlink.write(b'this is not a symlink')
# the symlink is versioned
version_container_files = self.env.versions_container.files(
parms={'format': 'json'})
@ -736,7 +751,7 @@ class TestObjectVersioning(Base):
# and versioned writes restore
symlink.delete()
self.assertEqual(1, self.env.versions_container.info()['object_count'])
-self.assertEqual('this is not a symlink', symlink.read())
+self.assertEqual(b'this is not a symlink', symlink.read())
symlink.delete()
self.assertEqual(0, self.env.versions_container.info()['object_count'])
self._assert_symlink(symlink, target)
@ -746,7 +761,7 @@ class TestObjectVersioning(Base):
raise SkipTest("Symlinks not enabled")
symlink, target = self._setup_symlink()
-symlink.write('this is not a symlink')
+symlink.write(b'this is not a symlink')
version_container_files = self.env.versions_container.files()
self.assertEqual(1, len(version_container_files))
versioned_obj = self.env.versions_container.file(
@ -812,7 +827,7 @@ class TestObjectVersioningHistoryMode(TestObjectVersioning):
parms={'format': 'json'})])
# update versioned_obj
-versioned_obj.write("eeee", hdrs={'Content-Type': 'text/thanksgiving',
+versioned_obj.write(b"eeee", hdrs={'Content-Type': 'text/thanksgiving',
'X-Object-Meta-Bar': 'foo'})
# verify the PUT object is kept successfully
obj_info = versioned_obj.info()
@ -822,7 +837,7 @@ class TestObjectVersioningHistoryMode(TestObjectVersioning):
self.assertEqual(8, versions_container.info()['object_count'])
# update versioned_obj
-versioned_obj.write("ffff", hdrs={'Content-Type': 'text/teriyaki',
+versioned_obj.write(b"ffff", hdrs={'Content-Type': 'text/teriyaki',
'X-Object-Meta-Food': 'chickin'})
# verify the PUT object is kept successfully
obj_info = versioned_obj.info()
@ -864,7 +879,7 @@ class TestObjectVersioningHistoryMode(TestObjectVersioning):
parms={'format': 'json'})])
# update versioned_obj
-versioned_obj.write("eeee", hdrs={'Content-Type': 'text/thanksgiving',
+versioned_obj.write(b"eeee", hdrs={'Content-Type': 'text/thanksgiving',
'X-Object-Meta-Bar': 'foo'})
# verify the PUT object is kept successfully
obj_info = versioned_obj.info()
@ -874,7 +889,7 @@ class TestObjectVersioningHistoryMode(TestObjectVersioning):
self.assertEqual(8, versions_container.info()['object_count'])
# update versioned_obj
-versioned_obj.write("ffff", hdrs={'Content-Type': 'text/teriyaki',
+versioned_obj.write(b"ffff", hdrs={'Content-Type': 'text/teriyaki',
'X-Object-Meta-Food': 'chickin'})
# verify the PUT object is kept successfully
obj_info = versioned_obj.info()
@ -898,7 +913,7 @@ class TestObjectVersioningHistoryMode(TestObjectVersioning):
self.assertEqual(404, cm.exception.status)
self.assertEqual(7, self.env.versions_container.info()['object_count'])
-expected = ['old content', '112233', 'new content', '']
+expected = [b'old content', b'112233', b'new content', b'']
bodies = [
self.env.versions_container.file(f).read()
@ -919,7 +934,7 @@ class TestObjectVersioningHistoryMode(TestObjectVersioning):
# and delete-marker with empty content
self.assertEqual(3, self.env.versions_container.info()['object_count'])
files = self.env.versions_container.files()
-for actual, expected in zip(files, ['aaaaa', 'bbbbb', '']):
+for actual, expected in zip(files, [b'aaaaa', b'bbbbb', b'']):
prev_version = self.env.versions_container.file(actual)
self.assertEqual(expected, prev_version.read())
@ -981,7 +996,7 @@ class TestSloWithVersioning(unittest2.TestCase):
('b', 1024 * 1024)):
seg_name = letter
file_item = self.segments_container.file(seg_name)
-file_item.write(letter * size)
+file_item.write((letter * size).encode('ascii'))
self.seg_info[seg_name] = {
'size_bytes': size,
'etag': file_item.md5,
@ -991,13 +1006,14 @@ class TestSloWithVersioning(unittest2.TestCase):
# create a manifest in the versioning container
file_item = self.container.file("my-slo-manifest")
file_item.write(
-json.dumps([self.seg_info[seg_name]]),
+json.dumps([self.seg_info[seg_name]]).encode('ascii'),
parms={'multipart-manifest': 'put'})
return file_item
def _assert_is_manifest(self, file_item, seg_name):
manifest_body = file_item.read(parms={'multipart-manifest': 'get'})
-resp_headers = dict(file_item.conn.response.getheaders())
+resp_headers = {
+h.lower(): v for h, v in file_item.conn.response.getheaders()}
self.assertIn('x-static-large-object', resp_headers)
self.assertEqual('application/json; charset=utf-8',
file_item.content_type)
@ -1012,11 +1028,11 @@ class TestSloWithVersioning(unittest2.TestCase):
self.assertEqual(self.seg_info[seg_name][k_client],
manifest[0][k_slo])
-def _assert_is_object(self, file_item, seg_name):
+def _assert_is_object(self, file_item, seg_data):
file_contents = file_item.read()
self.assertEqual(1024 * 1024, len(file_contents))
-self.assertEqual(seg_name, file_contents[0])
-self.assertEqual(seg_name, file_contents[-1])
+self.assertEqual(seg_data, file_contents[:1])
+self.assertEqual(seg_data, file_contents[-1:])
def tearDown(self):
# remove versioning to allow simple container delete
@ -1027,24 +1043,24 @@ class TestSloWithVersioning(unittest2.TestCase):
file_item = self._create_manifest('a')
# sanity check: read the manifest, then the large object
self._assert_is_manifest(file_item, 'a')
-self._assert_is_object(file_item, 'a')
+self._assert_is_object(file_item, b'a')
# upload new manifest
file_item = self._create_manifest('b')
# sanity check: read the manifest, then the large object
self._assert_is_manifest(file_item, 'b')
-self._assert_is_object(file_item, 'b')
+self._assert_is_object(file_item, b'b')
versions_list = self.versions_container.files()
self.assertEqual(1, len(versions_list))
version_file = self.versions_container.file(versions_list[0])
# check the version is still a manifest
self._assert_is_manifest(version_file, 'a')
-self._assert_is_object(version_file, 'a')
+self._assert_is_object(version_file, b'a')
# delete the newest manifest
file_item.delete()
# expect the original manifest file to be restored
self._assert_is_manifest(file_item, 'a')
-self._assert_is_object(file_item, 'a')
+self._assert_is_object(file_item, b'a')
@@ -1290,6 +1290,10 @@ class TestFile(Base):
if not matches:
errors.append('Missing expected header %s' % k)
for (got_k, got_v) in matches:
# The Connection: header is parsed by the cluster's LB and may
# be returned in either the original lowercase or camel-cased.
if k == 'connection':
got_v = got_v.lower()
if got_v != v:
errors.append('Expected %s but got %s for %s' %
(v, got_v, k))
@@ -14,6 +14,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import json
+import os
import time
from os import listdir, unlink
from os.path import join as path_join
@@ -26,7 +28,7 @@ from swift.common import direct_client
from swift.common.exceptions import ClientException
from swift.common.utils import hash_path, readconf
from swift.obj.diskfile import write_metadata, read_metadata, get_data_dir
-from test.probe.common import ReplProbeTest
+from test.probe.common import ReplProbeTest, ECProbeTest
RETRIES = 5
@@ -198,5 +200,61 @@ class TestObjectFailures(ReplProbeTest):
self.run_quarantine_zero_byte_post()
class TestECObjectFailures(ECProbeTest):
def test_ec_missing_all_durable_fragments(self):
# This test helps assert the behavior that when
# the proxy has enough fragments to reconstruct the object
# but none are marked as durable, the proxy should return a 404.
container_name = 'container-%s' % uuid4()
object_name = 'object-%s' % uuid4()
# create EC container
headers = {'X-Storage-Policy': self.policy.name}
client.put_container(self.url, self.token, container_name,
headers=headers)
# PUT object, should go to primary nodes
client.put_object(self.url, self.token, container_name,
object_name, contents='object contents')
# get our node lists
opart, onodes = self.object_ring.get_nodes(
self.account, container_name, object_name)
# sanity test
odata = client.get_object(self.url, self.token, container_name,
object_name)[-1]
self.assertEqual('object contents', odata)
# make all fragments non-durable
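# (EC .data files here are named roughly <timestamp>#<frag_index>#d.data,
# where the trailing '#d' is the durable marker; renaming it away below
# leaves an otherwise intact but non-durable fragment.)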
for node in onodes:
part_dir = self.storage_dir('object', node, part=opart)
for dirs, subdirs, files in os.walk(part_dir):
for fname in files:
if fname.endswith('.data'):
non_durable_fname = fname.replace('#d', '')
os.rename(os.path.join(dirs, fname),
os.path.join(dirs, non_durable_fname))
break
headers = direct_client.direct_head_object(
node, opart, self.account, container_name, object_name,
headers={
'X-Backend-Storage-Policy-Index': self.policy.idx,
'X-Backend-Fragment-Preferences': json.dumps([])})
self.assertNotIn('X-Backend-Durable-Timestamp', headers)
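# (An empty X-Backend-Fragment-Preferences list tells the object server
# the caller will accept non-durable frags, so the HEAD still finds the
# .data file but, as asserted above, reports no durable timestamp.)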
# Now a new GET should return *404* because all fragments
# are non-durable, even if they are reconstructable
try:
client.get_object(self.url, self.token, container_name,
object_name)
except client.ClientException as err:
self.assertEqual(err.http_status, 404)
else:
self.fail("Expected ClientException but didn't get it")
if __name__ == '__main__':
main()
@@ -309,6 +309,62 @@ class TestObjectHandoff(ReplProbeTest):
else:
self.fail("Expected ClientException but didn't get it")
def test_missing_primaries(self):
# Create container
container = 'container-%s' % uuid4()
client.put_container(self.url, self.token, container,
headers={'X-Storage-Policy':
self.policy.name})
# Create container/obj (goes to all three primaries)
obj = 'object-%s' % uuid4()
client.put_object(self.url, self.token, container, obj, 'VERIFY')
odata = client.get_object(self.url, self.token, container, obj)[-1]
if odata != 'VERIFY':
raise Exception('Object GET did not return VERIFY, instead it '
'returned: %s' % repr(odata))
# Kill all primaries obj server
obj = 'object-%s' % uuid4()
opart, onodes = self.object_ring.get_nodes(
self.account, container, obj)
for onode in onodes:
kill_server((onode['ip'], onode['port']), self.ipport2server)
# Indirectly (i.e., through proxy) try to GET object, it should return
# a 503, since all primaries will Timeout and handoffs return a 404.
try:
client.get_object(self.url, self.token, container, obj)
except client.ClientException as err:
self.assertEqual(err.http_status, 503)
else:
self.fail("Expected ClientException but didn't get it")
# Restart the first container/obj primary server again
onode = onodes[0]
start_server((onode['ip'], onode['port']), self.ipport2server)
# Send a delete that will reach first primary and handoff.
# Sure, the DELETE will return a 404 since the handoff doesn't
# have a .data file, but the object server will still write a
# Tombstone in the handoff node!
try:
client.delete_object(self.url, self.token, container, obj)
except client.ClientException as err:
self.assertEqual(err.http_status, 404)
# kill the first container/obj primary server again
kill_server((onode['ip'], onode['port']), self.ipport2server)
# a new GET should return a 404, since all primaries will Timeout
# and the handoff will return a 404 but this time with a tombstone
try:
client.get_object(self.url, self.token, container, obj)
except client.ClientException as err:
self.assertEqual(err.http_status, 404)
else:
self.fail("Expected ClientException but didn't get it")
class TestECObjectHandoff(ECProbeTest):
@@ -523,5 +579,54 @@ class TestECObjectHandoff(ECProbeTest):
# ... all six unique
self.assertEqual(len(frag2count), 6)
def test_ec_primary_timeout(self):
container_name = 'container-%s' % uuid4()
object_name = 'object-%s' % uuid4()
# create EC container
headers = {'X-Storage-Policy': self.policy.name}
client.put_container(self.url, self.token, container_name,
headers=headers)
# PUT object, should go to primary nodes
old_contents = Body()
client.put_object(self.url, self.token, container_name,
object_name, contents=old_contents)
# get our node lists
opart, onodes = self.object_ring.get_nodes(
self.account, container_name, object_name)
# shutdown three of the primary data nodes
for i in range(3):
failed_primary = onodes[i]
failed_primary_device_path = self.device_dir('object',
failed_primary)
self.kill_drive(failed_primary_device_path)
# Indirectly (i.e., through proxy) try to GET object, it should return
# a 503, since all primaries will Timeout and handoffs return a 404.
try:
client.get_object(self.url, self.token, container_name,
object_name)
except client.ClientException as err:
self.assertEqual(err.http_status, 503)
else:
self.fail("Expected ClientException but didn't get it")
# Send a delete to write down tombstones in the handoff nodes
client.delete_object(self.url, self.token, container_name, object_name)
# Now a new GET should return 404 because the handoff nodes
# return a 404 with a Tombstone.
try:
client.get_object(self.url, self.token, container_name,
object_name)
except client.ClientException as err:
self.assertEqual(err.http_status, 404)
else:
self.fail("Expected ClientException but didn't get it")
if __name__ == '__main__':
main()
@@ -1367,7 +1367,8 @@ def xattr_supported_check():
fd, tmppath = mkstemp()
xattr.setxattr(fd, 'user.swift.testing_key', big_val)
except IOError as e:
-if errno.errorcode.get(e.errno) in ('ENOSPC', 'ENOTSUP', 'EOPNOTSUPP'):
+if errno.errorcode.get(e.errno) in ('ENOSPC', 'ENOTSUP', 'EOPNOTSUPP',
+'ERANGE'):
# filesystem does not support xattr of this size
return False
raise
@@ -32,6 +32,7 @@ import xml.dom.minidom
from swift import __version__ as swift_version
from swift.common.swob import (Request, WsgiBytesIO, HTTPNoContent)
from swift.common import constraints
+from swift.account.backend import AccountBroker
from swift.account.server import AccountController
from swift.common.utils import (normalize_timestamp, replication, public,
mkdirs, storage_directory, Timestamp)
@@ -49,7 +50,8 @@ class TestAccountController(unittest.TestCase):
self.testdir = os.path.join(self.testdir_base, 'account_server')
mkdirs(os.path.join(self.testdir, 'sda1'))
self.controller = AccountController(
-{'devices': self.testdir, 'mount_check': 'false'})
+{'devices': self.testdir, 'mount_check': 'false'},
+logger=debug_logger())
def tearDown(self):
"""Tear down for testing swift.account.server.AccountController"""
@@ -522,6 +524,51 @@ class TestAccountController(unittest.TestCase):
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 202)
def test_utf8_metadata(self):
ts_str = normalize_timestamp(1)
def get_test_meta(method, headers):
# Set metadata header
headers.setdefault('X-Timestamp', ts_str)
req = Request.blank(
'/sda1/p/a', environ={'REQUEST_METHOD': method},
headers=headers)
resp = req.get_response(self.controller)
self.assertIn(resp.status_int, (201, 202, 204))
db_path = os.path.join(*next(
(dir_name, file_name)
for dir_name, _, files in os.walk(self.testdir)
for file_name in files if file_name.endswith('.db')
))
broker = AccountBroker(db_path)
# Why not use broker.metadata, you ask? Because we want to get
# as close to the on-disk format as is reasonable.
result = json.loads(broker.get_raw_metadata())
# Clear it out for the next run
with broker.get() as conn:
conn.execute("UPDATE account_stat SET metadata=''")
conn.commit()
return result
wsgi_str = '\xf0\x9f\x91\x8d'
uni_str = u'\U0001f44d'
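# (wsgi_str is the WSGI-native form of the same character: the four
# UTF-8 bytes of U+1F44D as the server sees them; uni_str is the
# decoded unicode code point.)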
self.assertEqual(
get_test_meta('PUT', {'x-account-sysmeta-' + wsgi_str: wsgi_str}),
{u'X-Account-Sysmeta-' + uni_str: [uni_str, ts_str]})
self.assertEqual(
get_test_meta('PUT', {'x-account-meta-' + wsgi_str: wsgi_str}),
{u'X-Account-Meta-' + uni_str: [uni_str, ts_str]})
self.assertEqual(
get_test_meta('POST', {'x-account-sysmeta-' + wsgi_str: wsgi_str}),
{u'X-Account-Sysmeta-' + uni_str: [uni_str, ts_str]})
self.assertEqual(
get_test_meta('POST', {'x-account-meta-' + wsgi_str: wsgi_str}),
{u'X-Account-Meta-' + uni_str: [uni_str, ts_str]})
def test_PUT_GET_metadata(self):
# Set metadata header
req = Request.blank(
@@ -84,6 +84,9 @@ class TestS3ApiBucket(S3ApiTestCase):
'HEAD', '/v1/AUTH_test/junk', swob.HTTPNoContent, {}, None)
self.swift.register(
'HEAD', '/v1/AUTH_test/nojunk', swob.HTTPNotFound, {}, None)
+self.swift.register(
+'HEAD', '/v1/AUTH_test/unavailable', swob.HTTPServiceUnavailable,
+{}, None)
self.swift.register(
'GET', '/v1/AUTH_test/junk', swob.HTTPOk,
{'Content-Type': 'application/json'}, object_list)
@@ -127,6 +130,15 @@ class TestS3ApiBucket(S3ApiTestCase):
self.assertEqual(status.split()[0], '404')
self.assertEqual(body, b'')  # sanity
+def test_bucket_HEAD_503(self):
+req = Request.blank('/unavailable',
+environ={'REQUEST_METHOD': 'HEAD'},
+headers={'Authorization': 'AWS test:tester:hmac',
+'Date': self.get_date_header()})
+status, headers, body = self.call_s3api(req)
+self.assertEqual(status.split()[0], '503')
+self.assertEqual(body, b'')  # sanity
def test_bucket_HEAD_slash(self):
req = Request.blank('/junk/',
environ={'REQUEST_METHOD': 'HEAD'},
@@ -151,6 +163,9 @@ class TestS3ApiBucket(S3ApiTestCase):
self.assertEqual(code, 'AccessDenied')
code = self._test_method_error('GET', '/bucket', swob.HTTPNotFound)
self.assertEqual(code, 'NoSuchBucket')
+code = self._test_method_error('GET', '/bucket',
+swob.HTTPServiceUnavailable)
+self.assertEqual(code, 'ServiceUnavailable')
code = self._test_method_error('GET', '/bucket', swob.HTTPServerError)
self.assertEqual(code, 'InternalError')
@@ -621,7 +636,7 @@ class TestS3ApiBucket(S3ApiTestCase):
self.assertEqual(code, 'InternalError')
code = self._test_method_error(
'PUT', '/bucket', swob.HTTPServiceUnavailable)
-self.assertEqual(code, 'InternalError')
+self.assertEqual(code, 'ServiceUnavailable')
code = self._test_method_error(
'PUT', '/bucket+bucket', swob.HTTPCreated)
self.assertEqual(code, 'InvalidBucketName')
@@ -673,7 +688,7 @@ class TestS3ApiBucket(S3ApiTestCase):
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
-self.assertEqual(status, '500 Internal Server Error')
+self.assertEqual(status, '503 Service Unavailable')
# The last call was PUT not POST for acl set
self.assertEqual(self.swift.calls, [
('PUT', '/v1/AUTH_test/bucket'),
@@ -18,6 +18,7 @@ import json
import unittest
from datetime import datetime
from hashlib import md5
+import mock
from swift.common import swob
from swift.common.swob import Request
@@ -320,6 +321,27 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'MalformedXML')
@s3acl
def test_object_multi_DELETE_unhandled_exception(self):
exploding_resp = mock.MagicMock(
side_effect=Exception('kaboom'))
self.swift.register('DELETE', '/v1/AUTH_test/bucket/Key1',
exploding_resp, {}, None)
elem = Element('Delete')
obj = SubElement(elem, 'Object')
SubElement(obj, 'Key').text = 'Key1'
body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(md5(body).digest()).strip()
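# (S3's multi-object delete requires a Content-MD5 header: the base64
# of the raw 16-byte MD5 digest of the request body, not the hex digest.)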
req = Request.blank('/bucket?delete',
environ={'REQUEST_METHOD': 'POST'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header(),
'Content-MD5': content_md5},
body=body)
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
def _test_object_multi_DELETE(self, account):
self.keys = ['Key1', 'Key2']
self.swift.register(
@@ -161,7 +161,7 @@ class TestS3ApiObj(S3ApiTestCase):
self.swift.register('HEAD', '/v1/AUTH_test/bucket/object',
swob.HTTPServiceUnavailable, {}, None)
status, headers, body = self.call_s3api(req)
-self.assertEqual(status.split()[0], '500')
+self.assertEqual(status.split()[0], '503')
self.assertEqual(body, b'')  # sanity
def test_object_HEAD(self):
@@ -281,7 +281,7 @@ class TestS3ApiObj(S3ApiTestCase):
self.assertEqual(code, 'PreconditionFailed')
code = self._test_method_error('GET', '/bucket/object',
swob.HTTPServiceUnavailable)
-self.assertEqual(code, 'InternalError')
+self.assertEqual(code, 'ServiceUnavailable')
@s3acl
def test_object_GET(self):
@@ -398,7 +398,7 @@ class TestS3ApiObj(S3ApiTestCase):
self.assertEqual(code, 'InternalError')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPServiceUnavailable)
-self.assertEqual(code, 'InternalError')
+self.assertEqual(code, 'ServiceUnavailable')
code = self._test_method_error('PUT', '/bucket/object',
swob.HTTPCreated,
{'X-Amz-Copy-Source': ''})
@@ -582,9 +582,9 @@ class TestS3ApiObj(S3ApiTestCase):
self.assertEqual('200 ', status[:4], body)
# Check that s3api does not return an etag header,
# specified copy source.
-self.assertTrue(headers.get('etag') is None)
+self.assertNotIn('etag', headers)
# Check that s3api does not return custom metadata in response
-self.assertTrue(headers.get('x-amz-meta-something') is None)
+self.assertNotIn('x-amz-meta-something', headers)
_, _, headers = self.swift.calls_with_headers[-1]
# Check that s3api converts a Content-MD5 header into an etag.
@@ -964,7 +964,7 @@ class TestS3ApiObj(S3ApiTestCase):
self.assertEqual(code, 'InternalError')
code = self._test_method_error('DELETE', '/bucket/object',
swob.HTTPServiceUnavailable)
-self.assertEqual(code, 'InternalError')
+self.assertEqual(code, 'ServiceUnavailable')
with patch(
'swift.common.middleware.s3api.s3request.get_container_info',
@@ -478,14 +478,14 @@ class TestDloGetManifest(DloTestCase):
def test_get_multi_range(self):
# DLO doesn't support multi-range GETs. The way that you express that
# in HTTP is to return a 200 response containing the whole entity.
-req = swob.Request.blank('/v1/AUTH_test/mancon/manifest-many-segments',
+req = swob.Request.blank('/v1/AUTH_test/mancon/manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=5-9,15-19'})
-with mock.patch(LIMIT, 3):
+with mock.patch(LIMIT, 30):
status, headers, body = self.call_dlo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, "200 OK")
-self.assertIsNone(headers.get("Content-Length"))
+self.assertEqual(headers.get("Content-Length"), '25')
self.assertIsNone(headers.get("Content-Range"))
self.assertEqual(body, b'aaaaabbbbbcccccdddddeeeee')
@@ -54,7 +54,9 @@ def fake_start_response(*args, **kwargs):
def md5hex(s):
-return hashlib.md5(s.encode('ascii')).hexdigest()
+if not isinstance(s, bytes):
+s = s.encode('ascii')
+return hashlib.md5(s).hexdigest()
class SloTestCase(unittest.TestCase):
@@ -1237,6 +1239,32 @@ class TestSloDeleteManifest(SloTestCase):
'DELETE', '/v1/AUTH_test/deltest-unauth/q_17',
swob.HTTPUnauthorized, {}, None)
self.app.register(
'GET', '/v1/AUTH_test/deltest/manifest-with-too-many-segs',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true'},
json.dumps([{'name': '/deltest/a_1',
'hash': 'a', 'bytes': '1'},
{'name': '/deltest/multi-submanifest', 'sub_slo': True,
'hash': 'submanifest-etag',
'bytes': len(_submanifest_data)},
{'name': '/deltest/b_2',
'hash': 'b', 'bytes': '1'},
{'name': '/deltest/c_3',
'hash': 'c', 'bytes': '1'},
{'name': '/deltest/d_4',
'hash': 'b', 'bytes': '1'},
{'name': '/deltest/e_5',
'hash': 'c', 'bytes': '1'},
{'name': '/deltest/f_6',
'hash': 'b', 'bytes': '1'},
{'name': '/deltest/g_8',
'hash': 'c', 'bytes': '1'},
{'name': '/deltest/g_8',
'hash': 'c', 'bytes': '1'},
{'name': '/deltest/h_9',
'hash': 'd', 'bytes': '3'}]))
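# (registered for test_handle_multipart_delete_nested_too_many_segments
# below, which patches max_manifest_segments down to 1)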
def test_handle_multipart_delete_man(self):
req = Request.blank(
'/v1/AUTH_test/deltest/man',
@@ -1363,11 +1391,11 @@ class TestSloDeleteManifest(SloTestCase):
def test_handle_multipart_delete_nested_too_many_segments(self):
req = Request.blank(
-'/v1/AUTH_test/deltest/manifest-with-submanifest?' +
+'/v1/AUTH_test/deltest/manifest-with-too-many-segs?' +
'multipart-manifest=delete',
environ={'REQUEST_METHOD': 'DELETE',
'HTTP_ACCEPT': 'application/json'})
-with patch.object(slo, 'MAX_BUFFERED_SLO_SEGMENTS', 1):
+with patch.object(self.slo, 'max_manifest_segments', 1):
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
resp_data = json.loads(body)
@@ -1532,12 +1560,13 @@ class TestSloHeadOldManifest(SloTestCase):
'hash': 'seg02-hash',
'content_type': 'text/plain',
'last_modified': '2013-11-19T11:33:45.137447'}])
+self.manifest_json_etag = md5hex(manifest_json)
manifest_headers = {
'Content-Length': str(len(manifest_json)),
'Content-Type': 'test/data',
'X-Static-Large-Object': 'true',
'X-Object-Sysmeta-Artisanal-Etag': 'bespoke',
-'Etag': md5hex(manifest_json)}
+'Etag': self.manifest_json_etag}
manifest_headers.update(getattr(self, 'extra_manifest_headers', {}))
self.manifest_has_sysmeta = all(h in manifest_headers for h in (
'X-Object-Sysmeta-Slo-Etag', 'X-Object-Sysmeta-Slo-Size'))
@@ -1553,6 +1582,7 @@ class TestSloHeadOldManifest(SloTestCase):
self.assertEqual(status, '200 OK')
self.assertIn(('Etag', '"%s"' % self.slo_etag), headers)
+self.assertIn(('X-Manifest-Etag', self.manifest_json_etag), headers)
self.assertIn(('Content-Length', '300'), headers)
self.assertIn(('Content-Type', 'test/data'), headers)
self.assertEqual(body, b'')  # it's a HEAD request, after all
@@ -1829,11 +1859,12 @@ class TestSloGetManifest(SloTestCase):
'bytes': 25},
{'name': '/gettest/d_20', 'hash': md5hex("d" * 20),
'content_type': 'text/plain', 'bytes': '20'}])
+self.abcd_manifest_json_etag = md5hex(_abcd_manifest_json)
self.app.register(
'GET', '/v1/AUTH_test/gettest/manifest-abcd',
swob.HTTPOk, {'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
-'Etag': md5hex(_abcd_manifest_json)},
+'Etag': self.abcd_manifest_json_etag},
_abcd_manifest_json)
# A submanifest segment is created using the response headers from a
@@ -1995,9 +2026,8 @@ class TestSloGetManifest(SloTestCase):
status, headers, body = self.call_slo(req)
self.assertEqual(status, '200 OK')
-self.assertTrue(
-('Content-Type', 'application/json; charset=utf-8') in headers,
-headers)
+self.assertIn(
+('Content-Type', 'application/json; charset=utf-8'), headers)
try:
resp_data = json.loads(body)
except ValueError:
@@ -2010,6 +2040,7 @@ class TestSloGetManifest(SloTestCase):
{'hash': md5hex('c' * 15), 'bytes': '15', 'name': '/gettest/c_15',
'content_type': 'text/plain'}],
body)
+self.assertIn(('Etag', md5hex(body)), headers)
def test_get_nonmanifest_passthrough(self):
req = Request.blank(
@@ -2167,6 +2198,8 @@ class TestSloGetManifest(SloTestCase):
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '50')
self.assertEqual(headers['Etag'], '"%s"' % self.manifest_abcd_etag)
+self.assertEqual(headers['X-Manifest-Etag'],
+self.abcd_manifest_json_etag)
self.assertEqual(
body, b'aaaaabbbbbbbbbbcccccccccccccccdddddddddddddddddddd')
@@ -2402,6 +2435,164 @@ class TestSloGetManifest(SloTestCase):
('GET',
'/v1/AUTH_test/gettest/big_seg?multipart-manifest=get')])
def test_range_get_beyond_manifest_refetch_fails(self):
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPNotFound, {}, None)])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '503 Service Unavailable')
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_beyond_manifest_refetch_finds_old(self):
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPOk, {'X-Backend-Timestamp': '1233'}, [b'small body'])])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '503 Service Unavailable')
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_beyond_manifest_refetch_small_non_slo(self):
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPOk, {'X-Backend-Timestamp': '1235'}, [b'small body'])])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '416 Requested Range Not Satisfiable')
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_beyond_manifest_refetch_big_non_slo(self):
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPOk, {'X-Backend-Timestamp': '1235'},
[b'x' * 1024 * 1024])])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '200 OK') # NOT 416 or 206!
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(len(body), 1024 * 1024)
self.assertEqual(body, b'x' * 1024 * 1024)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_beyond_manifest_refetch_tombstone(self):
big = 'e' * 1024 * 1024
big_etag = md5hex(big)
big_manifest = json.dumps(
[{'name': '/gettest/big_seg', 'hash': big_etag,
'bytes': 1024 * 1024, 'content_type': 'application/foo'}])
self.app.register_responses(
'GET', '/v1/AUTH_test/gettest/big_manifest',
[(swob.HTTPOk, {'Content-Type': 'application/octet-stream',
'X-Static-Large-Object': 'true',
'X-Backend-Timestamp': '1234',
'Etag': md5hex(big_manifest)},
big_manifest),
(swob.HTTPNotFound, {'X-Backend-Timestamp': '1345'}, None)])
req = Request.blank(
'/v1/AUTH_test/gettest/big_manifest',
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=100000-199999'})
status, headers, body = self.call_slo(req)
headers = HeaderKeyDict(headers)
self.assertEqual(status, '404 Not Found')
self.assertNotIn('X-Static-Large-Object', headers)
self.assertEqual(self.app.calls, [
# has Range header, gets 416
('GET', '/v1/AUTH_test/gettest/big_manifest'),
# retry the first one
('GET', '/v1/AUTH_test/gettest/big_manifest'),
])
def test_range_get_bogus_content_range(self):
# Just a little paranoia; Swift currently sends back valid
# Content-Range headers, but if somehow someone sneaks an invalid one
@@ -2857,6 +3048,8 @@ class TestSloGetManifest(SloTestCase):
self.assertEqual(status, '200 OK')
self.assertEqual(headers['Content-Length'], '50')
self.assertEqual(headers['Etag'], '"%s"' % self.manifest_abcd_etag)
+self.assertEqual(headers['X-Manifest-Etag'],
+self.abcd_manifest_json_etag)
self.assertEqual(body, b'')
# Note the lack of recursive descent into manifest-bc. We know the
# content-length from the outer manifest, so there's no need for any
@@ -3614,11 +3807,12 @@ class TestSloConditionalGetOldManifest(SloTestCase):
_bc_manifest_json)
_abcd_manifest_json = json.dumps(self.slo_data)
+self.abcd_manifest_json_etag = md5hex(_abcd_manifest_json)
manifest_headers = {
'Content-Length': str(len(_abcd_manifest_json)),
'Content-Type': 'application/json',
'X-Static-Large-Object': 'true',
-'Etag': md5hex(_abcd_manifest_json),
+'Etag': self.abcd_manifest_json_etag,
'X-Object-Sysmeta-Custom-Etag': 'a custom etag'}
manifest_headers.update(getattr(self, 'extra_manifest_headers', {}))
self.manifest_has_sysmeta = all(h in manifest_headers for h in (
@@ -3927,7 +4121,7 @@ class TestSloConditionalGetNewManifest(TestSloConditionalGetOldManifest):
super(TestSloConditionalGetNewManifest, self).setUp()
-class TestSloBulkLogger(unittest.TestCase):
+class TestSloBulkDeleter(unittest.TestCase):
def test_reused_logger(self):
slo_mware = slo.filter_factory({})('fake app')
self.assertTrue(slo_mware.logger is slo_mware.bulk_deleter.logger)
@@ -3936,6 +4130,13 @@ class TestSloBulkLogger(unittest.TestCase):
slo_mware = slo.filter_factory({'delete_concurrency': 5})('fake app')
self.assertEqual(5, slo_mware.bulk_deleter.delete_concurrency)
+def test_uses_big_max_deletes(self):
+slo_mware = slo.filter_factory(
+{'max_manifest_segments': 123456789})('fake app')
+self.assertGreaterEqual(
+slo_mware.bulk_deleter.max_deletes_per_request,
+123456789)
class TestSwiftInfo(unittest.TestCase):
def setUp(self):
@@ -15,6 +15,7 @@
# limitations under the License.
import unittest
+import io
import json
import mock
@@ -23,7 +24,7 @@ from swift.common import swob
from swift.common.middleware import symlink, copy, versioned_writes, \
listing_formats
from swift.common.swob import Request
-from swift.common.utils import MD5_OF_EMPTY_STRING
+from swift.common.utils import MD5_OF_EMPTY_STRING, get_swift_info
from test.unit.common.middleware.helpers import FakeSwift
from test.unit.common.middleware.test_versioned_writes import FakeCache
@@ -77,6 +78,14 @@ class TestSymlinkMiddlewareBase(unittest.TestCase):
class TestSymlinkMiddleware(TestSymlinkMiddlewareBase):
+def test_symlink_info(self):
+swift_info = get_swift_info()
+self.assertEqual(swift_info['symlink'], {
+'symloop_max': 2,
+'static_links': True,
+})
def test_symlink_simple_put(self):
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/symlink', method='PUT',
@@ -90,6 +99,196 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase):
self.assertNotIn('X-Object-Sysmeta-Symlink-Target-Account', hdrs)
val = hdrs.get('X-Object-Sysmeta-Container-Update-Override-Etag')
self.assertEqual(val, '%s; symlink_target=c1/o' % MD5_OF_EMPTY_STRING)
self.assertEqual('application/symlink', hdrs.get('Content-Type'))
def test_symlink_simple_put_with_content_type(self):
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={'X-Symlink-Target': 'c1/o',
'Content-Type': 'application/linkyfoo'},
body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '201 Created')
method, path, hdrs = self.app.calls_with_headers[0]
val = hdrs.get('X-Object-Sysmeta-Symlink-Target')
self.assertEqual(val, 'c1/o')
self.assertNotIn('X-Object-Sysmeta-Symlink-Target-Account', hdrs)
val = hdrs.get('X-Object-Sysmeta-Container-Update-Override-Etag')
self.assertEqual(val, '%s; symlink_target=c1/o' % MD5_OF_EMPTY_STRING)
self.assertEqual('application/linkyfoo', hdrs.get('Content-Type'))
def test_symlink_simple_put_with_etag(self):
self.app.register('HEAD', '/v1/a/c1/o', swob.HTTPOk, {
'Etag': 'tgt-etag', 'Content-Length': 42,
'Content-Type': 'application/foo'})
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={
'X-Symlink-Target': 'c1/o',
'X-Symlink-Target-Etag': 'tgt-etag',
}, body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '201 Created')
method, path, hdrs = self.app.calls_with_headers[1]
val = hdrs.get('X-Object-Sysmeta-Symlink-Target')
self.assertEqual(val, 'c1/o')
self.assertNotIn('X-Object-Sysmeta-Symlink-Target-Account', hdrs)
val = hdrs.get('X-Object-Sysmeta-Container-Update-Override-Etag')
self.assertEqual(val, '%s; symlink_target=c1/o; '
'symlink_target_etag=tgt-etag; '
'symlink_target_bytes=42' % MD5_OF_EMPTY_STRING)
self.assertEqual([
('HEAD', '/v1/a/c1/o'),
('PUT', '/v1/a/c/symlink'),
], self.app.calls)
self.assertEqual('application/foo',
self.app._calls[-1].headers['Content-Type'])
def test_symlink_simple_put_with_etag_target_missing_content_type(self):
self.app.register('HEAD', '/v1/a/c1/o', swob.HTTPOk, {
'Etag': 'tgt-etag', 'Content-Length': 42})
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={
'X-Symlink-Target': 'c1/o',
'X-Symlink-Target-Etag': 'tgt-etag',
}, body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '201 Created')
method, path, hdrs = self.app.calls_with_headers[1]
val = hdrs.get('X-Object-Sysmeta-Symlink-Target')
self.assertEqual(val, 'c1/o')
self.assertNotIn('X-Object-Sysmeta-Symlink-Target-Account', hdrs)
val = hdrs.get('X-Object-Sysmeta-Container-Update-Override-Etag')
self.assertEqual(val, '%s; symlink_target=c1/o; '
'symlink_target_etag=tgt-etag; '
'symlink_target_bytes=42' % MD5_OF_EMPTY_STRING)
self.assertEqual([
('HEAD', '/v1/a/c1/o'),
('PUT', '/v1/a/c/symlink'),
], self.app.calls)
# N.B. the ObjectController would call _update_content_type on PUT
# regardless, but you actually can't get a HEAD response without swob
# setting a Content-Type
self.assertEqual('text/html; charset=UTF-8',
self.app._calls[-1].headers['Content-Type'])
def test_symlink_simple_put_with_etag_explicit_content_type(self):
self.app.register('HEAD', '/v1/a/c1/o', swob.HTTPOk, {
'Etag': 'tgt-etag', 'Content-Length': 42,
'Content-Type': 'application/foo'})
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={
'X-Symlink-Target': 'c1/o',
'X-Symlink-Target-Etag': 'tgt-etag',
'Content-Type': 'application/bar',
}, body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '201 Created')
method, path, hdrs = self.app.calls_with_headers[1]
val = hdrs.get('X-Object-Sysmeta-Symlink-Target')
self.assertEqual(val, 'c1/o')
self.assertNotIn('X-Object-Sysmeta-Symlink-Target-Account', hdrs)
val = hdrs.get('X-Object-Sysmeta-Container-Update-Override-Etag')
self.assertEqual(val, '%s; symlink_target=c1/o; '
'symlink_target_etag=tgt-etag; '
'symlink_target_bytes=42' % MD5_OF_EMPTY_STRING)
self.assertEqual([
('HEAD', '/v1/a/c1/o'),
('PUT', '/v1/a/c/symlink'),
], self.app.calls)
self.assertEqual('application/bar',
self.app._calls[-1].headers['Content-Type'])
def test_symlink_simple_put_with_unmatched_etag(self):
self.app.register('HEAD', '/v1/a/c1/o', swob.HTTPOk, {
'Etag': 'tgt-etag', 'Content-Length': 42})
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={
'X-Symlink-Target': 'c1/o',
'X-Symlink-Target-Etag': 'not-tgt-etag',
}, body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '409 Conflict')
self.assertIn(('Content-Location', '/v1/a/c1/o'), headers)
self.assertEqual(body, b"Object Etag 'tgt-etag' does not match "
b"X-Symlink-Target-Etag header 'not-tgt-etag'")
def test_symlink_simple_put_to_non_existing_object(self):
self.app.register('HEAD', '/v1/a/c1/o', swob.HTTPNotFound, {})
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={
'X-Symlink-Target': 'c1/o',
'X-Symlink-Target-Etag': 'not-tgt-etag',
}, body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '409 Conflict')
self.assertIn(('Content-Location', '/v1/a/c1/o'), headers)
self.assertIn(b'does not exist', body)
def test_symlink_put_with_prevalidated_etag(self):
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/symlink', method='PUT', headers={
'X-Symlink-Target': 'c1/o',
'X-Object-Sysmeta-Symlink-Target-Etag': 'tgt-etag',
'X-Object-Sysmeta-Symlink-Target-Bytes': '13',
'Content-Type': 'application/foo',
}, body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '201 Created')
self.assertEqual([
# N.B. no HEAD!
('PUT', '/v1/a/c/symlink'),
], self.app.calls)
self.assertEqual('application/foo',
self.app._calls[-1].headers['Content-Type'])
method, path, hdrs = self.app.calls_with_headers[0]
val = hdrs.get('X-Object-Sysmeta-Symlink-Target')
self.assertEqual(val, 'c1/o')
self.assertNotIn('X-Object-Sysmeta-Symlink-Target-Account', hdrs)
val = hdrs.get('X-Object-Sysmeta-Container-Update-Override-Etag')
self.assertEqual(val, '%s; symlink_target=c1/o; '
'symlink_target_etag=tgt-etag; '
'symlink_target_bytes=13' % MD5_OF_EMPTY_STRING)
def test_symlink_put_with_prevalidated_etag_sysmeta_incomplete(self):
req = Request.blank('/v1/a/c/symlink', method='PUT', headers={
'X-Symlink-Target': 'c1/o',
'X-Object-Sysmeta-Symlink-Target-Etag': 'tgt-etag',
}, body='')
with self.assertRaises(KeyError) as cm:
self.call_sym(req)
self.assertEqual(cm.exception.args[0], swob.header_to_environ_key(
'X-Object-Sysmeta-Symlink-Target-Bytes'))
def test_symlink_chunked_put(self):
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={'X-Symlink-Target': 'c1/o'},
environ={'wsgi.input': io.BytesIO(b'')})
self.assertIsNone(req.content_length) # sanity
status, headers, body = self.call_sym(req)
self.assertEqual(status, '201 Created')
method, path, hdrs = self.app.calls_with_headers[0]
val = hdrs.get('X-Object-Sysmeta-Symlink-Target')
self.assertEqual(val, 'c1/o')
self.assertNotIn('X-Object-Sysmeta-Symlink-Target-Account', hdrs)
val = hdrs.get('X-Object-Sysmeta-Container-Update-Override-Etag')
self.assertEqual(val, '%s; symlink_target=c1/o' % MD5_OF_EMPTY_STRING)
def test_symlink_chunked_put_error(self):
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={'X-Symlink-Target': 'c1/o'},
environ={'wsgi.input':
io.BytesIO(b'this has a body')})
self.assertIsNone(req.content_length) # sanity
status, headers, body = self.call_sym(req)
self.assertEqual(status, '400 Bad Request')
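# (A symlink PUT must have a zero-byte body; with no Content-Length the
# middleware presumably has to read wsgi.input to verify that, hence the
# 400 above once a non-empty body shows up.)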
def test_symlink_put_different_account(self):
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
@@ -248,6 +447,64 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase):
self.assertNotIn('X-Symlink-Target-Account', dict(headers))
self.assertNotIn('Content-Location', dict(headers))
def test_get_static_link_mismatched_etag(self):
self.app.register('GET', '/v1/a/c/symlink', swob.HTTPOk,
{'X-Object-Sysmeta-Symlink-Target': 'c1/o',
'X-Object-Sysmeta-Symlink-Target-Etag': 'the-etag'})
# apparently target object was overwritten
self.app.register('GET', '/v1/a/c1/o', swob.HTTPOk,
{'ETag': 'not-the-etag'}, 'resp_body')
req = Request.blank('/v1/a/c/symlink', method='GET')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '409 Conflict')
self.assertEqual(body, b"Object Etag 'not-the-etag' does not "
b"match X-Symlink-Target-Etag header 'the-etag'")
def test_get_static_link_to_symlink(self):
self.app.register('GET', '/v1/a/c/static_link', swob.HTTPOk,
{'X-Object-Sysmeta-Symlink-Target': 'c/symlink',
'X-Object-Sysmeta-Symlink-Target-Etag': 'the-etag'})
self.app.register('GET', '/v1/a/c/symlink', swob.HTTPOk,
{'ETag': 'the-etag',
'X-Object-Sysmeta-Symlink-Target': 'c1/o'})
self.app.register('GET', '/v1/a/c1/o', swob.HTTPOk,
{'ETag': 'not-the-etag'}, 'resp_body')
req = Request.blank('/v1/a/c/static_link', method='GET')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '200 OK')
def test_get_static_link_to_symlink_fails(self):
self.app.register('GET', '/v1/a/c/static_link', swob.HTTPOk,
{'X-Object-Sysmeta-Symlink-Target': 'c/symlink',
'X-Object-Sysmeta-Symlink-Target-Etag': 'the-etag'})
self.app.register('GET', '/v1/a/c/symlink', swob.HTTPOk,
{'ETag': 'not-the-etag',
'X-Object-Sysmeta-Symlink-Target': 'c1/o'})
req = Request.blank('/v1/a/c/static_link', method='GET')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '409 Conflict')
self.assertEqual(body, b"X-Symlink-Target-Etag headers do not match")
def put_static_link_to_symlink(self):
self.app.register('HEAD', '/v1/a/c/symlink', swob.HTTPOk,
{'ETag': 'symlink-etag',
'X-Object-Sysmeta-Symlink-Target': 'c/o',
'Content-Type': 'application/symlink'})
self.app.register('HEAD', '/v1/a/c/o', swob.HTTPOk,
{'ETag': 'tgt-etag',
'Content-Type': 'application/data'}, 'resp_body')
self.app.register('PUT', '/v1/a/c/static_link', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/static_link', method='PUT',
headers={
'X-Symlink-Target': 'c/symlink',
'X-Symlink-Target-Etag': 'symlink-etag',
}, body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '201 Created')
self.assertEqual([], self.app.calls)
self.assertEqual('application/data',
self.app._calls[-1].headers['Content-Type'])
def test_head_symlink(self):
self.app.register('HEAD', '/v1/a/c/symlink', swob.HTTPOk,
{'X-Object-Sysmeta-Symlink-Target': 'c1/o',
@@ -298,15 +555,21 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase):
self.assertFalse(calls[2:])
def test_symlink_too_deep(self):
-self.app.register('HEAD', '/v1/a/c/symlink', swob.HTTPOk,
+self.app.register('GET', '/v1/a/c/symlink', swob.HTTPOk,
{'X-Object-Sysmeta-Symlink-Target': 'c/sym1'})
-self.app.register('HEAD', '/v1/a/c/sym1', swob.HTTPOk,
+self.app.register('GET', '/v1/a/c/sym1', swob.HTTPOk,
{'X-Object-Sysmeta-Symlink-Target': 'c/sym2'})
-self.app.register('HEAD', '/v1/a/c/sym2', swob.HTTPOk,
+self.app.register('GET', '/v1/a/c/sym2', swob.HTTPOk,
{'X-Object-Sysmeta-Symlink-Target': 'c/o'})
req = Request.blank('/v1/a/c/symlink', method='HEAD')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '409 Conflict')
self.assertEqual(body, b'')
req = Request.blank('/v1/a/c/symlink')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '409 Conflict')
self.assertEqual(body, b'Too many levels of symbolic links, '
b'maximum allowed is 2')
def test_symlink_change_symloopmax(self):
# similar test to test_symlink_too_deep, but now changed the limit to 3
@@ -390,11 +653,11 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase):
status, headers, body = self.call_sym(req)
self.assertEqual(status, '404 Not Found')
-def test_check_symlink_header(self):
+def test_validate_and_prep_request_headers(self):
def do_test(headers):
req = Request.blank('/v1/a/c/o', method='PUT',
headers=headers)
-symlink._check_symlink_header(req)
+symlink._validate_and_prep_request_headers(req)
# normal cases
do_test({'X-Symlink-Target': 'c1/o1'})
@@ -419,12 +682,12 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase):
{'X-Symlink-Target': 'cont/obj',
'X-Symlink-Target-Account': swob.wsgi_quote(target)})
-def test_check_symlink_header_invalid_format(self):
+def test_validate_and_prep_request_headers_invalid_format(self):
def do_test(headers, status, err_msg):
req = Request.blank('/v1/a/c/o', method='PUT',
headers=headers)
with self.assertRaises(swob.HTTPException) as cm:
-symlink._check_symlink_header(req)
+symlink._validate_and_prep_request_headers(req)
self.assertEqual(cm.exception.status, status)
self.assertEqual(cm.exception.body, err_msg)
@@ -484,11 +747,11 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase):
'412 Precondition Failed',
b'Account name cannot contain slashes')
-def test_check_symlink_header_points_to_itself(self):
+def test_validate_and_prep_request_headers_points_to_itself(self):
req = Request.blank('/v1/a/c/o', method='PUT',
headers={'X-Symlink-Target': 'c/o'})
with self.assertRaises(swob.HTTPException) as cm:
-symlink._check_symlink_header(req)
+symlink._validate_and_prep_request_headers(req)
self.assertEqual(cm.exception.status, '400 Bad Request')
self.assertEqual(cm.exception.body, b'Symlink cannot target itself')
@@ -497,7 +760,7 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase):
headers={'X-Symlink-Target': 'c/o',
'X-Symlink-Target-Account': 'a'})
with self.assertRaises(swob.HTTPException) as cm:
-symlink._check_symlink_header(req)
+symlink._validate_and_prep_request_headers(req)
self.assertEqual(cm.exception.status, '400 Bad Request')
self.assertEqual(cm.exception.body, b'Symlink cannot target itself')
@@ -505,7 +768,7 @@ class TestSymlinkMiddleware(TestSymlinkMiddlewareBase):
req = Request.blank('/v1/a/c/o', method='PUT',
headers={'X-Symlink-Target': 'c/o',
'X-Symlink-Target-Account': 'a1'})
-symlink._check_symlink_header(req)
+symlink._validate_and_prep_request_headers(req)
def test_symloop_max_config(self):
self.app = FakeSwift()
@@ -665,6 +928,145 @@ class SymlinkCopyingTestCase(TestSymlinkMiddlewareBase):
self.assertEqual(
hdrs.get('X-Object-Sysmeta-Symlink-Target-Account'), 'a2')
def test_static_link_to_new_slo_manifest(self):
self.app.register('HEAD', '/v1/a/c1/o', swob.HTTPOk, {
'X-Static-Large-Object': 'True',
'Etag': 'manifest-etag',
'X-Object-Sysmeta-Slo-Size': '1048576',
'X-Object-Sysmeta-Slo-Etag': 'this-is-not-used',
'Content-Length': 42,
'Content-Type': 'application/big-data',
'X-Object-Sysmeta-Container-Update-Override-Etag':
'956859738870e5ca6aa17eeda58e4df0; '
'slo_etag=71e938d37c1d06dc634dd24660255a88',
})
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={
'X-Symlink-Target': 'c1/o',
'X-Symlink-Target-Etag': 'manifest-etag',
}, body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '201 Created')
self.assertEqual([
('HEAD', '/v1/a/c1/o'),
('PUT', '/v1/a/c/symlink'),
], self.app.calls)
method, path, hdrs = self.app.calls_with_headers[-1]
self.assertEqual('application/big-data', hdrs['Content-Type'])
self.assertEqual(hdrs['X-Object-Sysmeta-Symlink-Target'], 'c1/o')
self.assertEqual(hdrs['X-Object-Sysmeta-Symlink-Target-Etag'],
'manifest-etag')
self.assertEqual(hdrs['X-Object-Sysmeta-Symlink-Target-Bytes'],
'1048576')
self.assertEqual(
hdrs['X-Object-Sysmeta-Container-Update-Override-Etag'],
'd41d8cd98f00b204e9800998ecf8427e; '
'slo_etag=71e938d37c1d06dc634dd24660255a88; '
'symlink_target=c1/o; '
'symlink_target_etag=manifest-etag; '
'symlink_target_bytes=1048576')
def test_static_link_to_old_slo_manifest(self):
self.app.register('HEAD', '/v1/a/c1/o', swob.HTTPOk, {
'X-Static-Large-Object': 'True',
'Etag': 'manifest-etag',
'X-Object-Sysmeta-Slo-Size': '1048576',
'X-Object-Sysmeta-Slo-Etag': '71e938d37c1d06dc634dd24660255a88',
'Content-Length': 42,
'Content-Type': 'application/big-data',
})
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={
'X-Symlink-Target': 'c1/o',
'X-Symlink-Target-Etag': 'manifest-etag',
}, body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '201 Created')
self.assertEqual([
('HEAD', '/v1/a/c1/o'),
('PUT', '/v1/a/c/symlink'),
], self.app.calls)
method, path, hdrs = self.app.calls_with_headers[-1]
self.assertEqual('application/big-data', hdrs['Content-Type'])
self.assertEqual(hdrs['X-Object-Sysmeta-Symlink-Target'], 'c1/o')
self.assertEqual(hdrs['X-Object-Sysmeta-Symlink-Target-Etag'],
'manifest-etag')
self.assertEqual(hdrs['X-Object-Sysmeta-Symlink-Target-Bytes'],
'1048576')
self.assertEqual(
hdrs['X-Object-Sysmeta-Container-Update-Override-Etag'],
'd41d8cd98f00b204e9800998ecf8427e; '
'slo_etag=71e938d37c1d06dc634dd24660255a88; '
'symlink_target=c1/o; '
'symlink_target_etag=manifest-etag; '
'symlink_target_bytes=1048576')
def test_static_link_to_really_old_slo_manifest(self):
self.app.register('HEAD', '/v1/a/c1/o', swob.HTTPOk, {
'X-Static-Large-Object': 'True',
'Etag': 'manifest-etag',
'Content-Length': 42,
'Content-Type': 'application/big-data',
})
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={
'X-Symlink-Target': 'c1/o',
'X-Symlink-Target-Etag': 'manifest-etag',
}, body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '201 Created')
self.assertEqual([
('HEAD', '/v1/a/c1/o'),
('PUT', '/v1/a/c/symlink'),
], self.app.calls)
method, path, hdrs = self.app.calls_with_headers[-1]
self.assertEqual('application/big-data', hdrs['Content-Type'])
self.assertEqual(hdrs['X-Object-Sysmeta-Symlink-Target'], 'c1/o')
self.assertEqual(hdrs['X-Object-Sysmeta-Symlink-Target-Etag'],
'manifest-etag')
# symlink m/w is doing a HEAD; it's not going to read the
# manifest body and sum up the bytes - so we just use the manifest size
self.assertEqual(hdrs['X-Object-Sysmeta-Symlink-Target-Bytes'],
'42')
# no slo_etag, and target_bytes is the manifest size
self.assertEqual(
hdrs['X-Object-Sysmeta-Container-Update-Override-Etag'],
'd41d8cd98f00b204e9800998ecf8427e; '
'symlink_target=c1/o; '
'symlink_target_etag=manifest-etag; '
'symlink_target_bytes=42')
def test_static_link_to_slo_manifest_slo_etag(self):
self.app.register('HEAD', '/v1/a/c1/o', swob.HTTPOk, {
'Etag': 'manifest-etag',
'X-Object-Sysmeta-Slo-Etag': 'slo-etag',
'Content-Length': 42,
})
self.app.register('PUT', '/v1/a/c/symlink', swob.HTTPCreated, {})
# unquoted slo-etag doesn't match
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={
'X-Symlink-Target': 'c1/o',
'X-Symlink-Target-Etag': 'slo-etag',
}, body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '409 Conflict')
# the quoted slo-etag is just straight up invalid
req = Request.blank('/v1/a/c/symlink', method='PUT',
headers={
'X-Symlink-Target': 'c1/o',
'X-Symlink-Target-Etag': '"slo-etag"',
}, body='')
status, headers, body = self.call_sym(req)
self.assertEqual(status, '400 Bad Request')
self.assertEqual(b'Bad X-Symlink-Target-Etag format', body)
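The three expected X-Object-Sysmeta-Container-Update-Override-Etag values above all follow the same pattern: the symlink's own empty-body MD5, an optional slo_etag carried over from the target manifest, then the symlink_target* parameters. A minimal sketch of that composition, assuming a hypothetical helper name (build_override_etag) and plain hashlib for the empty-body digest, not the middleware's actual code:

import hashlib

def build_override_etag(target, target_etag, target_bytes, slo_etag=None):
    # the symlink object itself has no body, so its data etag is
    # md5(b'') == 'd41d8cd98f00b204e9800998ecf8427e', as asserted above
    parts = [hashlib.md5(b'').hexdigest()]
    if slo_etag:
        parts.append('slo_etag=%s' % slo_etag)
    parts.extend([
        'symlink_target=%s' % target,
        'symlink_target_etag=%s' % target_etag,
        'symlink_target_bytes=%s' % target_bytes,
    ])
    return '; '.join(parts)

# build_override_etag('c1/o', 'manifest-etag', 1048576,
#                     slo_etag='71e938d37c1d06dc634dd24660255a88')
# reproduces the value expected in test_static_link_to_new_slo_manifest.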
class SymlinkVersioningTestCase(TestSymlinkMiddlewareBase): class SymlinkVersioningTestCase(TestSymlinkMiddlewareBase):
# verify interaction of versioned_writes and symlink middlewares # verify interaction of versioned_writes and symlink middlewares
@ -793,13 +1195,16 @@ class TestSymlinkContainerContext(TestSymlinkMiddlewareBase):
def test_extract_symlink_path_json_symlink_path(self): def test_extract_symlink_path_json_symlink_path(self):
obj_dict = {"bytes": 6, obj_dict = {"bytes": 6,
"last_modified": "1", "last_modified": "1",
"hash": "etag; symlink_target=c/o", "hash": "etag; symlink_target=c/o; something_else=foo; "
"symlink_target_etag=tgt_etag; symlink_target_bytes=8",
"name": "obj", "name": "obj",
"content_type": "application/octet-stream"} "content_type": "application/octet-stream"}
obj_dict = self.context._extract_symlink_path_json( obj_dict = self.context._extract_symlink_path_json(
obj_dict, 'v1', 'AUTH_a') obj_dict, 'v1', 'AUTH_a')
self.assertEqual(obj_dict['hash'], 'etag') self.assertEqual(obj_dict['hash'], 'etag; something_else=foo')
self.assertEqual(obj_dict['symlink_path'], '/v1/AUTH_a/c/o') self.assertEqual(obj_dict['symlink_path'], '/v1/AUTH_a/c/o')
self.assertEqual(obj_dict['symlink_etag'], 'tgt_etag')
self.assertEqual(obj_dict['symlink_bytes'], 8)
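A rough sketch (an assumption, not the middleware's real implementation) of the listing-side 'hash' handling this test expects: symlink_* parameters are pulled out to feed symlink_path, symlink_etag and symlink_bytes, while unrelated parameters such as something_else=foo stay attached to the hash:

def split_symlink_params(hash_value):
    # 'etag; symlink_target=c/o; something_else=foo; ...' ->
    # ('etag; something_else=foo', {'symlink_target': 'c/o', ...})
    etag, _, rest = hash_value.partition(';')
    kept, symlink = [], {}
    for param in rest.split(';'):
        param = param.strip()
        if not param:
            continue
        key, _, value = param.partition('=')
        if key.startswith('symlink_'):
            symlink[key] = value
        else:
            kept.append(param)
    return '; '.join([etag.strip()] + kept), symlink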
def test_extract_symlink_path_json_symlink_path_and_account(self): def test_extract_symlink_path_json_symlink_path_and_account(self):
obj_dict = { obj_dict = {

View File

@ -417,7 +417,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
self.assertRequestEqual(req, self.authorized[1]) self.assertRequestEqual(req, self.authorized[1])
self.assertEqual(3, self.app.call_count) self.assertEqual(3, self.app.call_count)
self.assertEqual([ self.assertEqual([
('GET', '/v1/a/c/o'), ('GET', '/v1/a/c/o?symlink=get'),
('PUT', '/v1/a/ver_cont/001o/0000000060.00000'), ('PUT', '/v1/a/ver_cont/001o/0000000060.00000'),
('PUT', '/v1/a/c/o'), ('PUT', '/v1/a/c/o'),
], self.app.calls) ], self.app.calls)
@ -449,7 +449,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
self.assertRequestEqual(req, self.authorized[1]) self.assertRequestEqual(req, self.authorized[1])
self.assertEqual(3, self.app.call_count) self.assertEqual(3, self.app.call_count)
self.assertEqual([ self.assertEqual([
('GET', '/v1/a/c/o'), ('GET', '/v1/a/c/o?symlink=get'),
('PUT', '/v1/a/ver_cont/001o/0000003600.00000'), ('PUT', '/v1/a/ver_cont/001o/0000003600.00000'),
('PUT', '/v1/a/c/o'), ('PUT', '/v1/a/c/o'),
], self.app.calls) ], self.app.calls)
@ -682,7 +682,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&' prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [ self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'), ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/2'), ('GET', '/v1/a/ver_cont/001o/2?symlink=get'),
('PUT', '/v1/a/c/o'), ('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/2'), ('DELETE', '/v1/a/ver_cont/001o/2'),
]) ])
@ -777,7 +777,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
self.assertEqual(self.app.calls, [ self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'), ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('HEAD', '/v1/a/c/o'), ('HEAD', '/v1/a/c/o'),
('GET', '/v1/a/ver_cont/001o/1'), ('GET', '/v1/a/ver_cont/001o/1?symlink=get'),
('PUT', '/v1/a/c/o'), ('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/1'), ('DELETE', '/v1/a/ver_cont/001o/1'),
('DELETE', '/v1/a/ver_cont/001o/2'), ('DELETE', '/v1/a/ver_cont/001o/2'),
@ -941,7 +941,7 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&' prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [ self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'), ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/1'), ('GET', '/v1/a/ver_cont/001o/1?symlink=get'),
('PUT', '/v1/a/c/o'), ('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/1'), ('DELETE', '/v1/a/ver_cont/001o/1'),
]) ])
@ -989,8 +989,8 @@ class VersionedWritesTestCase(VersionedWritesBaseTestCase):
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&' prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [ self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'), ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/2'), ('GET', '/v1/a/ver_cont/001o/2?symlink=get'),
('GET', '/v1/a/ver_cont/001o/1'), ('GET', '/v1/a/ver_cont/001o/1?symlink=get'),
('PUT', '/v1/a/c/o'), ('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/1'), ('DELETE', '/v1/a/ver_cont/001o/1'),
]) ])
@ -1114,7 +1114,7 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
self.assertEqual(self.app.calls, [ self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'), ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', prefix_listing_prefix + 'marker=001o/2'), ('GET', prefix_listing_prefix + 'marker=001o/2'),
('GET', '/v1/a/ver_cont/001o/2'), ('GET', '/v1/a/ver_cont/001o/2?symlink=get'),
('PUT', '/v1/a/c/o'), ('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/2'), ('DELETE', '/v1/a/ver_cont/001o/2'),
]) ])
@ -1167,8 +1167,8 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
self.assertEqual(self.app.calls, [ self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'), ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', prefix_listing_prefix + 'marker=001o/2'), ('GET', prefix_listing_prefix + 'marker=001o/2'),
('GET', '/v1/a/ver_cont/001o/2'), ('GET', '/v1/a/ver_cont/001o/2?symlink=get'),
('GET', '/v1/a/ver_cont/001o/1'), ('GET', '/v1/a/ver_cont/001o/1?symlink=get'),
('PUT', '/v1/a/c/o'), ('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/1'), ('DELETE', '/v1/a/ver_cont/001o/1'),
]) ])
@ -1282,14 +1282,14 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&' prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [ self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'), ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/4'), ('GET', '/v1/a/ver_cont/001o/4?symlink=get'),
('GET', '/v1/a/ver_cont/001o/3'), ('GET', '/v1/a/ver_cont/001o/3?symlink=get'),
('GET', '/v1/a/ver_cont/001o/2'), ('GET', '/v1/a/ver_cont/001o/2?symlink=get'),
('GET', prefix_listing_prefix + 'marker=001o/2&reverse=on'), ('GET', prefix_listing_prefix + 'marker=001o/2&reverse=on'),
('GET', prefix_listing_prefix + 'marker=&end_marker=001o/2'), ('GET', prefix_listing_prefix + 'marker=&end_marker=001o/2'),
('GET', prefix_listing_prefix + 'marker=001o/0&end_marker=001o/2'), ('GET', prefix_listing_prefix + 'marker=001o/0&end_marker=001o/2'),
('GET', prefix_listing_prefix + 'marker=001o/1&end_marker=001o/2'), ('GET', prefix_listing_prefix + 'marker=001o/1&end_marker=001o/2'),
('GET', '/v1/a/ver_cont/001o/1'), ('GET', '/v1/a/ver_cont/001o/1?symlink=get'),
('PUT', '/v1/a/c/o'), ('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/1'), ('DELETE', '/v1/a/ver_cont/001o/1'),
]) ])
@ -1354,13 +1354,13 @@ class VersionedWritesOldContainersTestCase(VersionedWritesBaseTestCase):
prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&' prefix_listing_prefix = '/v1/a/ver_cont?prefix=001o/&'
self.assertEqual(self.app.calls, [ self.assertEqual(self.app.calls, [
('GET', prefix_listing_prefix + 'marker=&reverse=on'), ('GET', prefix_listing_prefix + 'marker=&reverse=on'),
('GET', '/v1/a/ver_cont/001o/4'), ('GET', '/v1/a/ver_cont/001o/4?symlink=get'),
('GET', '/v1/a/ver_cont/001o/3'), ('GET', '/v1/a/ver_cont/001o/3?symlink=get'),
('GET', prefix_listing_prefix + 'marker=001o/3&reverse=on'), ('GET', prefix_listing_prefix + 'marker=001o/3&reverse=on'),
('GET', prefix_listing_prefix + 'marker=&end_marker=001o/3'), ('GET', prefix_listing_prefix + 'marker=&end_marker=001o/3'),
('GET', prefix_listing_prefix + 'marker=001o/1&end_marker=001o/3'), ('GET', prefix_listing_prefix + 'marker=001o/1&end_marker=001o/3'),
('GET', prefix_listing_prefix + 'marker=001o/2&end_marker=001o/3'), ('GET', prefix_listing_prefix + 'marker=001o/2&end_marker=001o/3'),
('GET', '/v1/a/ver_cont/001o/2'), ('GET', '/v1/a/ver_cont/001o/2?symlink=get'),
('PUT', '/v1/a/c/o'), ('PUT', '/v1/a/c/o'),
('DELETE', '/v1/a/ver_cont/001o/2'), ('DELETE', '/v1/a/ver_cont/001o/2'),
]) ])
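Every expected GET of a version object in this file now carries a symlink=get query parameter; presumably this is so versioned_writes copies or restores a symlink as a symlink instead of resolving it to the target's data. A hedged, illustrative helper (not the middleware's real code) for the path form these assertions describe:

def with_symlink_get(path):
    # ask the object layer for the symlink object itself rather than
    # following it to its target
    sep = '&' if '?' in path else '?'
    return path + sep + 'symlink=get'

# with_symlink_get('/v1/a/ver_cont/001o/2')
# -> '/v1/a/ver_cont/001o/2?symlink=get'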

View File

@ -15,6 +15,7 @@
import unittest import unittest
from swift.common.header_key_dict import HeaderKeyDict from swift.common.header_key_dict import HeaderKeyDict
from swift.common.swob import bytes_to_wsgi
class TestHeaderKeyDict(unittest.TestCase): class TestHeaderKeyDict(unittest.TestCase):
@ -27,6 +28,20 @@ class TestHeaderKeyDict(unittest.TestCase):
self.assertEqual(headers['content-length'], '20') self.assertEqual(headers['content-length'], '20')
self.assertEqual(headers['CONTENT-LENGTH'], '20') self.assertEqual(headers['CONTENT-LENGTH'], '20')
def test_unicode(self):
def mkstr(prefix):
return bytes_to_wsgi((prefix + u'\U0001f44d').encode('utf8'))
headers = HeaderKeyDict()
headers[mkstr('x-object-meta-')] = 'ok'
self.assertIn(mkstr('x-object-meta-'), headers)
self.assertIn(mkstr('X-Object-Meta-'), headers)
self.assertIn(mkstr('X-OBJECT-META-'), headers)
keys = list(headers)
self.assertNotIn(mkstr('x-object-meta-'), keys)
self.assertIn(mkstr('X-Object-Meta-'), keys)
self.assertNotIn(mkstr('X-OBJECT-META-'), keys)
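For context, the new test pins down the usual HeaderKeyDict contract, here with a non-ASCII WSGI-string key: keys are title-cased on store, so lookups are case-insensitive while iteration yields only the canonical spelling. A small usage sketch with an ASCII key:

from swift.common.header_key_dict import HeaderKeyDict

headers = HeaderKeyDict()
headers['x-object-meta-color'] = 'blue'
assert headers['X-OBJECT-META-COLOR'] == 'blue'   # lookups ignore case
assert list(headers) == ['X-Object-Meta-Color']   # stored title-cased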
def test_setdefault(self): def test_setdefault(self):
headers = HeaderKeyDict() headers = HeaderKeyDict()

View File

@ -107,7 +107,7 @@ class GetMetadataInternalClient(internal_client.InternalClient):
self.metadata = 'some_metadata' self.metadata = 'some_metadata'
def _get_metadata(self, path, metadata_prefix, acceptable_statuses=None, def _get_metadata(self, path, metadata_prefix, acceptable_statuses=None,
headers=None): headers=None, params=None):
self.get_metadata_called += 1 self.get_metadata_called += 1
self.test.assertEqual(self.path, path) self.test.assertEqual(self.path, path)
self.test.assertEqual(self.metadata_prefix, metadata_prefix) self.test.assertEqual(self.metadata_prefix, metadata_prefix)
@ -649,7 +649,7 @@ class TestInternalClient(unittest.TestCase):
def make_request( def make_request(
self, method, path, headers, acceptable_statuses, self, method, path, headers, acceptable_statuses,
body_file=None): body_file=None, params=None):
self.make_request_called += 1 self.make_request_called += 1
self.test.assertEqual('HEAD', method) self.test.assertEqual('HEAD', method)
self.test.assertEqual(self.path, path) self.test.assertEqual(self.path, path)

View File

@ -5939,6 +5939,13 @@ class TestAuditLocationGenerator(unittest.TestCase):
class TestGreenAsyncPile(unittest.TestCase): class TestGreenAsyncPile(unittest.TestCase):
def setUp(self):
self.timeout = Timeout(5.0)
def tearDown(self):
self.timeout.cancel()
def test_runs_everything(self): def test_runs_everything(self):
def run_test(): def run_test():
tests_ran[0] += 1 tests_ran[0] += 1
@ -6045,6 +6052,58 @@ class TestGreenAsyncPile(unittest.TestCase):
# pending remains 0 # pending remains 0
self.assertEqual(0, pile._pending) self.assertEqual(0, pile._pending)
def _exploder(self, arg):
if isinstance(arg, Exception):
raise arg
else:
return arg
def test_blocking_last_next_explodes(self):
pile = utils.GreenAsyncPile(2)
pile.spawn(self._exploder, 1)
pile.spawn(self._exploder, 2)
pile.spawn(self._exploder, Exception('kaboom'))
self.assertEqual(1, next(pile))
self.assertEqual(2, next(pile))
with self.assertRaises(StopIteration):
next(pile)
self.assertEqual(pile.inflight, 0)
self.assertEqual(pile._pending, 0)
def test_no_blocking_last_next_explodes(self):
pile = utils.GreenAsyncPile(10)
pile.spawn(self._exploder, 1)
self.assertEqual(1, next(pile))
pile.spawn(self._exploder, 2)
self.assertEqual(2, next(pile))
pile.spawn(self._exploder, Exception('kaboom'))
with self.assertRaises(StopIteration):
next(pile)
self.assertEqual(pile.inflight, 0)
self.assertEqual(pile._pending, 0)
def test_exceptions_in_streaming_pile(self):
with utils.StreamingPile(2) as pile:
results = list(pile.asyncstarmap(self._exploder, [
(1,),
(Exception('kaboom'),),
(3,),
]))
self.assertEqual(results, [1, 3])
self.assertEqual(pile.inflight, 0)
self.assertEqual(pile._pending, 0)
def test_exceptions_at_end_of_streaming_pile(self):
with utils.StreamingPile(2) as pile:
results = list(pile.asyncstarmap(self._exploder, [
(1,),
(2,),
(Exception('kaboom'),),
]))
self.assertEqual(results, [1, 2])
self.assertEqual(pile.inflight, 0)
self.assertEqual(pile._pending, 0)
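Taken together, these new pile tests assert that a spawned task which raises simply produces no result: the exception is swallowed rather than re-raised, iteration ends normally, and both inflight and _pending drop back to zero. A minimal usage sketch of that behaviour:

from swift.common.utils import GreenAsyncPile

def work(x):
    if x == 2:
        raise ValueError('kaboom')
    return x * 10

pile = GreenAsyncPile(3)
for x in (1, 2, 3):
    pile.spawn(work, x)
print(sorted(pile))  # [10, 30] -- the failed task yields nothing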
class TestLRUCache(unittest.TestCase): class TestLRUCache(unittest.TestCase):

View File

@ -92,7 +92,7 @@ def setup_servers(the_object_server=object_server, extra_conf=None):
conf = {'devices': _testdir, 'swift_dir': _testdir, conf = {'devices': _testdir, 'swift_dir': _testdir,
'mount_check': 'false', 'allowed_headers': 'mount_check': 'false', 'allowed_headers':
'content-encoding, x-object-manifest, content-disposition, foo', 'content-encoding, x-object-manifest, content-disposition, foo',
'allow_versions': 't'} 'allow_versions': 't', 'node_timeout': 20}
if extra_conf: if extra_conf:
conf.update(extra_conf) conf.update(extra_conf)
prolis = listen_zero() prolis = listen_zero()

View File

@ -5031,16 +5031,18 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
archive_bodies = encode_frag_archive_bodies(self.policy, body) archive_bodies = encode_frag_archive_bodies(self.policy, body)
# pop the index to the destination node # pop the index to the destination node
archive_bodies.pop(1) archive_bodies.pop(1)
ec_archive_dict[ key = (md5(body).hexdigest(), next(ts).internal, bool(i % 2))
(md5(body).hexdigest(), next(ts).internal)] = archive_bodies ec_archive_dict[key] = archive_bodies
responses = list() responses = list()
# fill out response list by 3 different etag bodies # fill out response list by 3 different etag bodies
for etag, ts in itertools.cycle(ec_archive_dict): for etag, ts, durable in itertools.cycle(ec_archive_dict):
body = ec_archive_dict[(etag, ts)].pop(0) body = ec_archive_dict[(etag, ts, durable)].pop(0)
headers = get_header_frag_index(self, body) headers = get_header_frag_index(self, body)
headers.update({'X-Object-Sysmeta-Ec-Etag': etag, headers.update({'X-Object-Sysmeta-Ec-Etag': etag,
'X-Backend-Timestamp': ts}) 'X-Backend-Timestamp': ts})
if durable:
headers['X-Backend-Durable-Timestamp'] = ts
responses.append((200, body, headers)) responses.append((200, body, headers))
if len(responses) >= (self.policy.object_ring.replicas - 1): if len(responses) >= (self.policy.object_ring.replicas - 1):
break break
@ -5063,7 +5065,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
# 1 error log per etag to report not enough responses # 1 error log per etag to report not enough responses
self.assertEqual(3, len(error_lines)) self.assertEqual(3, len(error_lines))
for error_line in error_lines: for error_line in error_lines:
for expected_etag, ts in ec_archive_dict: for expected_etag, ts, durable in ec_archive_dict:
if expected_etag in error_line: if expected_etag in error_line:
break break
else: else:
@ -5072,13 +5074,15 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
(list(ec_archive_dict), error_line)) (list(ec_archive_dict), error_line))
# remove the found etag which should not be found in the # remove the found etag which should not be found in the
# following error lines # following error lines
del ec_archive_dict[(expected_etag, ts)] del ec_archive_dict[(expected_etag, ts, durable)]
expected = 'Unable to get enough responses (%s/10) to ' \ expected = 'Unable to get enough responses (%s/10) to ' \
'reconstruct 10.0.0.1:1001/sdb/0%s policy#0 ' \ 'reconstruct %s 10.0.0.1:1001/sdb/0%s policy#0 ' \
'frag#1 with ETag' % \ 'frag#1 with ETag %s and timestamp %s' % \
(etag_count[expected_etag], (etag_count[expected_etag],
self.obj_path.decode('utf8')) 'durable' if durable else 'non-durable',
self.obj_path.decode('utf8'),
expected_etag, ts)
self.assertIn( self.assertIn(
expected, error_line, expected, error_line,
"Unexpected error line found: Expected: %s Got: %s" "Unexpected error line found: Expected: %s Got: %s"

View File

@ -19,6 +19,7 @@ import os
import mock import mock
from gzip import GzipFile from gzip import GzipFile
from shutil import rmtree from shutil import rmtree
import six
import six.moves.cPickle as pickle import six.moves.cPickle as pickle
import time import time
import tempfile import tempfile
@ -124,7 +125,8 @@ def _mock_process(ret):
MockProcess.captured_log = captured_log MockProcess.captured_log = captured_log
orig_process = subprocess.Popen orig_process = subprocess.Popen
MockProcess.ret_code = (i[0] for i in ret) MockProcess.ret_code = (i[0] for i in ret)
MockProcess.ret_log = (i[1] for i in ret) MockProcess.ret_log = (i[1] if six.PY2 else i[1].encode('utf8')
for i in ret)
MockProcess.check_args = (i[2] for i in ret) MockProcess.check_args = (i[2] for i in ret)
object_replicator.subprocess.Popen = MockProcess object_replicator.subprocess.Popen = MockProcess
yield captured_log yield captured_log

View File

@ -38,6 +38,7 @@ from swift.obj.diskfile import (
from swift.common.ring import RingData from swift.common.ring import RingData
from swift.common import utils from swift.common import utils
from swift.common.header_key_dict import HeaderKeyDict from swift.common.header_key_dict import HeaderKeyDict
from swift.common.swob import bytes_to_wsgi
from swift.common.utils import ( from swift.common.utils import (
hash_path, normalize_timestamp, mkdirs, write_pickle) hash_path, normalize_timestamp, mkdirs, write_pickle)
from swift.common.storage_policy import StoragePolicy, POLICIES from swift.common.storage_policy import StoragePolicy, POLICIES
@ -504,13 +505,13 @@ class TestObjectUpdater(unittest.TestCase):
self.assertEqual(inc.readline(), self.assertEqual(inc.readline(),
b'PUT /sda1/0/a/c/o HTTP/1.1\r\n') b'PUT /sda1/0/a/c/o HTTP/1.1\r\n')
headers = HeaderKeyDict() headers = HeaderKeyDict()
line = inc.readline() line = bytes_to_wsgi(inc.readline())
while line and line != b'\r\n': while line and line != '\r\n':
headers[line.split(b':')[0]] = \ headers[line.split(':')[0]] = \
line.split(b':')[1].strip() line.split(':')[1].strip()
line = inc.readline() line = bytes_to_wsgi(inc.readline())
self.assertIn(b'x-container-timestamp', headers) self.assertIn('x-container-timestamp', headers)
self.assertIn(b'X-Backend-Storage-Policy-Index', self.assertIn('X-Backend-Storage-Policy-Index',
headers) headers)
except BaseException as err: except BaseException as err:
return err return err

View File

@ -1570,6 +1570,45 @@ class TestReplicatedObjController(CommonObjectControllerMixin,
resp = req.get_response(self.app) resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 404) self.assertEqual(resp.status_int, 404)
def test_GET_primaries_explode(self):
req = swift.common.swob.Request.blank('/v1/a/c/o')
codes = [Exception('kaboom!')] * self.obj_ring.replicas + (
[404] * self.obj_ring.max_more_nodes)
with set_http_connect(*codes):
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 503)
def test_GET_primaries_timeout(self):
req = swift.common.swob.Request.blank('/v1/a/c/o')
codes = [Timeout()] * self.obj_ring.replicas + (
[404] * self.obj_ring.max_more_nodes)
with set_http_connect(*codes):
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 503)
def test_GET_primaries_mixed_explode_and_timeout(self):
req = swift.common.swob.Request.blank('/v1/a/c/o')
primaries = []
for i in range(self.obj_ring.replicas):
if i % 2:
primaries.append(Timeout())
else:
primaries.append(Exception('kaboom!'))
codes = primaries + [404] * self.obj_ring.max_more_nodes
with set_http_connect(*codes):
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 503)
def test_primary_returns_some_nonsense_timestamp(self):
req = swift.common.swob.Request.blank('/v1/a/c/o')
# an un-handled ValueError in _make_node_request should just continue
# to the next node rather than hang the request
headers = [{'X-Backend-Timestamp': 'not-a-timestamp'}, {}]
codes = [200, 200]
with set_http_connect(*codes, headers=headers):
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200)
def test_GET_not_found_when_404_newer(self): def test_GET_not_found_when_404_newer(self):
# if proxy receives a 404, it keeps waiting for other connections until # if proxy receives a 404, it keeps waiting for other connections until
# max number of nodes in hopes of finding an object, but if 404 is # max number of nodes in hopes of finding an object, but if 404 is
@ -2136,7 +2175,7 @@ class ECObjectControllerMixin(CommonObjectControllerMixin):
{'obj': obj1, 'frag': 13}, {'obj': obj1, 'frag': 13},
] ]
# ... and the rests are 404s which is limited by request_count # ... and the rest are 404s which is limited by request_count
# (2 * replicas in default) rather than max_extra_requests limitation # (2 * replicas in default) rather than max_extra_requests limitation
# because the retries will be in ResumingGetter if the responses # because the retries will be in ResumingGetter if the responses
# are 404s # are 404s
@ -2147,7 +2186,7 @@ class ECObjectControllerMixin(CommonObjectControllerMixin):
with capture_http_requests(fake_response) as log: with capture_http_requests(fake_response) as log:
resp = req.get_response(self.app) resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 404) self.assertEqual(resp.status_int, 503)
# expect a request to all nodes # expect a request to all nodes
self.assertEqual(2 * self.replicas(), len(log)) self.assertEqual(2 * self.replicas(), len(log))
@ -2693,7 +2732,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
with capture_http_requests(fake_response) as log: with capture_http_requests(fake_response) as log:
resp = req.get_response(self.app) resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 404) self.assertEqual(resp.status_int, 503)
collected_responses = defaultdict(set) collected_responses = defaultdict(set)
for conn in log: for conn in log:
@ -2792,7 +2831,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
collected_indexes[fi].append(conn) collected_indexes[fi].append(conn)
self.assertEqual(len(collected_indexes), 7) self.assertEqual(len(collected_indexes), 7)
def test_GET_with_mixed_frags_and_no_quorum_will_503(self): def test_GET_with_mixed_nondurable_frags_and_no_quorum_will_503(self):
# all nodes have a frag but there is no one set that reaches quorum, # all nodes have a frag but there is no one set that reaches quorum,
# which means there is no backend 404 response, but proxy should still # which means there is no backend 404 response, but proxy should still
# return 404 rather than 503 # return 404 rather than 503
@ -2801,6 +2840,68 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
obj3 = self._make_ec_object_stub(pattern='obj3') obj3 = self._make_ec_object_stub(pattern='obj3')
obj4 = self._make_ec_object_stub(pattern='obj4') obj4 = self._make_ec_object_stub(pattern='obj4')
node_frags = [
{'obj': obj1, 'frag': 0, 'durable': False},
{'obj': obj2, 'frag': 0, 'durable': False},
{'obj': obj3, 'frag': 0, 'durable': False},
{'obj': obj1, 'frag': 1, 'durable': False},
{'obj': obj2, 'frag': 1, 'durable': False},
{'obj': obj3, 'frag': 1, 'durable': False},
{'obj': obj1, 'frag': 2, 'durable': False},
{'obj': obj2, 'frag': 2, 'durable': False},
{'obj': obj3, 'frag': 2, 'durable': False},
{'obj': obj1, 'frag': 3, 'durable': False},
{'obj': obj2, 'frag': 3, 'durable': False},
{'obj': obj3, 'frag': 3, 'durable': False},
{'obj': obj1, 'frag': 4, 'durable': False},
{'obj': obj2, 'frag': 4, 'durable': False},
{'obj': obj3, 'frag': 4, 'durable': False},
{'obj': obj1, 'frag': 5, 'durable': False},
{'obj': obj2, 'frag': 5, 'durable': False},
{'obj': obj3, 'frag': 5, 'durable': False},
{'obj': obj1, 'frag': 6, 'durable': False},
{'obj': obj2, 'frag': 6, 'durable': False},
{'obj': obj3, 'frag': 6, 'durable': False},
{'obj': obj1, 'frag': 7, 'durable': False},
{'obj': obj2, 'frag': 7, 'durable': False},
{'obj': obj3, 'frag': 7, 'durable': False},
{'obj': obj1, 'frag': 8, 'durable': False},
{'obj': obj2, 'frag': 8, 'durable': False},
{'obj': obj3, 'frag': 8, 'durable': False},
{'obj': obj4, 'frag': 8, 'durable': False},
]
fake_response = self._fake_ec_node_response(node_frags)
req = swob.Request.blank('/v1/a/c/o')
with capture_http_requests(fake_response) as log:
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 404)
collected_etags = set()
collected_status = set()
for conn in log:
etag = conn.resp.headers['X-Object-Sysmeta-Ec-Etag']
collected_etags.add(etag)
collected_status.add(conn.resp.status)
# default node_iter will exhaust at 2 * replicas
self.assertEqual(len(log), 2 * self.replicas())
self.assertEqual(
{obj1['etag'], obj2['etag'], obj3['etag'], obj4['etag']},
collected_etags)
self.assertEqual({200}, collected_status)
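The only difference from the durable variant added just below is the durable flag on every frag; on the wire, the backend advertises durability with the X-Backend-Durable-Timestamp header (the same header the reconstructor test earlier in this change sets). A trivial sketch of the distinction, not the proxy's actual logic:

def frag_is_durable(resp_headers):
    # only a frag whose backend reports a durable timestamp is evidence
    # that a complete, durable copy of the object exists somewhere
    return 'X-Backend-Durable-Timestamp' in resp_headers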
def test_GET_with_mixed_frags_and_no_quorum_will_503(self):
# all nodes have a frag but there is no one set that reaches quorum,
# but since they're all marked durable (so we *should* be able to
# reconstruct), proxy will 503
obj1 = self._make_ec_object_stub(pattern='obj1')
obj2 = self._make_ec_object_stub(pattern='obj2')
obj3 = self._make_ec_object_stub(pattern='obj3')
obj4 = self._make_ec_object_stub(pattern='obj4')
node_frags = [ node_frags = [
{'obj': obj1, 'frag': 0}, {'obj': obj1, 'frag': 0},
{'obj': obj2, 'frag': 0}, {'obj': obj2, 'frag': 0},
@ -3229,9 +3330,9 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
with capture_http_requests(fake_response) as log: with capture_http_requests(fake_response) as log:
resp = req.get_response(self.app) resp = req.get_response(self.app)
# read body to provoke any EC decode errors # read body to provoke any EC decode errors
self.assertFalse(resp.body) self.assertTrue(resp.body)
self.assertEqual(resp.status_int, 404) self.assertEqual(resp.status_int, 503)
self.assertEqual(len(log), self.replicas() * 2) self.assertEqual(len(log), self.replicas() * 2)
collected_etags = set() collected_etags = set()
for conn in log: for conn in log:
@ -3240,7 +3341,10 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual({obj1['etag'], obj2['etag'], None}, collected_etags) self.assertEqual({obj1['etag'], obj2['etag'], None}, collected_etags)
log_lines = self.app.logger.get_lines_for_level('error') log_lines = self.app.logger.get_lines_for_level('error')
self.assertEqual(log_lines, self.assertEqual(log_lines,
['Problem with fragment response: ETag mismatch'] * 7) ['Problem with fragment response: ETag mismatch'] * 7
+ ['Object returning 503 for []'])
# Note the empty list above -- that log line comes out of
# best_response but we've already thrown out the "good" responses :-/
def test_GET_mixed_success_with_range(self): def test_GET_mixed_success_with_range(self):
fragment_size = self.policy.fragment_size fragment_size = self.policy.fragment_size
@ -3926,7 +4030,7 @@ class TestECDuplicationObjController(
{'obj': obj1, 'frag': 8}, {'obj': obj1, 'frag': 8},
{'obj': obj2, 'frag': 8}, {'obj': obj2, 'frag': 8},
] ]
# ... and the rests are 404s which is limited by request_count # ... and the rest are 404s which is limited by request_count
# (2 * replicas in default) rather than max_extra_requests limitation # (2 * replicas in default) rather than max_extra_requests limitation
# because the retries will be in ResumingGetter if the responses # because the retries will be in ResumingGetter if the responses
# are 404s # are 404s
@ -3937,7 +4041,7 @@ class TestECDuplicationObjController(
with capture_http_requests(fake_response) as log: with capture_http_requests(fake_response) as log:
resp = req.get_response(self.app) resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 404) self.assertEqual(resp.status_int, 503)
collected_responses = defaultdict(set) collected_responses = defaultdict(set)
for conn in log: for conn in log:
@ -4267,9 +4371,9 @@ class TestECDuplicationObjController(
with capture_http_requests(fake_response) as log: with capture_http_requests(fake_response) as log:
resp = req.get_response(self.app) resp = req.get_response(self.app)
# read body to provoke any EC decode errors # read body to provoke any EC decode errors
self.assertFalse(resp.body) self.assertTrue(resp.body)
self.assertEqual(resp.status_int, 404) self.assertEqual(resp.status_int, 503)
self.assertEqual(len(log), self.replicas() * 2) self.assertEqual(len(log), self.replicas() * 2)
collected_etags = set() collected_etags = set()
for conn in log: for conn in log:
@ -4278,7 +4382,8 @@ class TestECDuplicationObjController(
self.assertEqual({obj1['etag'], obj2['etag'], None}, collected_etags) self.assertEqual({obj1['etag'], obj2['etag'], None}, collected_etags)
log_lines = self.app.logger.get_lines_for_level('error') log_lines = self.app.logger.get_lines_for_level('error')
self.assertEqual(log_lines, self.assertEqual(log_lines,
['Problem with fragment response: ETag mismatch'] * 7) ['Problem with fragment response: ETag mismatch'] * 7
+ ['Object returning 503 for []'])
def _test_determine_chunk_destinations_prioritize( def _test_determine_chunk_destinations_prioritize(
self, missing_two, missing_one): self, missing_two, missing_one):

View File

@ -3975,6 +3975,13 @@ class TestReplicatedObjectController(
test_status_map((200, 200, 404, 404, 500), 404) test_status_map((200, 200, 404, 404, 500), 404)
test_status_map((200, 200, 500, 500, 500), 503) test_status_map((200, 200, 500, 500, 500), 503)
POLICIES.default.object_ring.max_more_nodes = 3
test_status_map(
(200, 200,
Timeout(), Timeout(), Timeout(), # Can't reach primaries
404, 404, 404), # No surprise: handoffs know nothing
503)
def test_HEAD_newest(self): def test_HEAD_newest(self):
with save_globals(): with save_globals():
def test_status_map(statuses, expected, timestamps, def test_status_map(statuses, expected, timestamps,
@ -7527,7 +7534,7 @@ class TestECGets(unittest.TestCase):
} }
resp = self._setup_nodes_and_do_GET(objs, node_state) resp = self._setup_nodes_and_do_GET(objs, node_state)
self.assertEqual(resp.status_int, 503) self.assertEqual(resp.status_int, 404)
def test_GET_with_multiple_frags_per_node(self): def test_GET_with_multiple_frags_per_node(self):
# verify object GET behavior when multiple fragments are on same node # verify object GET behavior when multiple fragments are on same node
@ -7632,13 +7639,26 @@ class TestECGets(unittest.TestCase):
# will be sent frag prefs that exclude frag_index 1) # will be sent frag prefs that exclude frag_index 1)
node_state = { node_state = {
0: [dict(ref='obj1a', frag_index=1, durable=False)], 0: [dict(ref='obj1a', frag_index=1, durable=False)],
1: [dict(ref='obj1b', frag_index=1, durable=True)], 1: [dict(ref='obj1b', frag_index=1, durable=False)],
2: [dict(ref='obj1c', frag_index=1, durable=False)] 2: [dict(ref='obj1c', frag_index=1, durable=False)]
} }
resp = self._setup_nodes_and_do_GET(objs, node_state) resp = self._setup_nodes_and_do_GET(objs, node_state)
self.assertEqual(resp.status_int, 404) self.assertEqual(resp.status_int, 404)
# if we know it should be durable, we can be more specific.
# note that we need to set *both* of the first two frags durable
# to avoid a flaky test -- in the future we can be smarter and
# let the durability bubble up, even from a duplicate frag
node_state = {
0: [dict(ref='obj1a', frag_index=1, durable=True)],
1: [dict(ref='obj1b', frag_index=1, durable=True)],
2: [dict(ref='obj1c', frag_index=1, durable=False)]
}
resp = self._setup_nodes_and_do_GET(objs, node_state)
self.assertEqual(resp.status_int, 503)
class TestObjectDisconnectCleanup(unittest.TestCase): class TestObjectDisconnectCleanup(unittest.TestCase):

tox.ini
View File

@ -48,18 +48,49 @@ commands = ./.functests {posargs}
basepython = python3 basepython = python3
commands = commands =
nosetests {posargs: \ nosetests {posargs: \
test/functional/s3api/test_acl.py \
test/functional/s3api/test_multi_delete.py \
test/functional/s3api/test_multi_upload.py \
test/functional/s3api/test_object.py \
test/functional/s3api/test_presigned.py \
test/functional/s3api/test_service.py \
test/functional/test_access_control.py \
test/functional/test_account.py \
test/functional/test_container.py \
test/functional/test_dlo.py \
test/functional/test_domain_remap.py \ test/functional/test_domain_remap.py \
test/functional/test_object.py \
test/functional/test_slo.py \
test/functional/test_staticweb.py \ test/functional/test_staticweb.py \
test/functional/test_symlink.py \ test/functional/test_symlink.py \
test/functional/test_tempurl.py \ test/functional/test_tempurl.py \
test/functional/test_versioned_writes.py \
test/functional/tests.py} test/functional/tests.py}
[testenv:func-ec-py3]
basepython = python3
commands = {[testenv:func-py3]commands}
setenv = SWIFT_TEST_IN_PROCESS=1
SWIFT_TEST_IN_PROCESS_CONF_LOADER=ec
[testenv:func-s3api-py3]
basepython = python3
commands = {[testenv:func-py3]commands}
setenv = SWIFT_TEST_IN_PROCESS=1
SWIFT_TEST_IN_PROCESS_CONF_LOADER=s3api
[testenv:func-encryption-py3]
basepython = python3
commands = {[testenv:func-py3]commands}
setenv = SWIFT_TEST_IN_PROCESS=1
SWIFT_TEST_IN_PROCESS_CONF_LOADER=encryption
[testenv:func-domain-remap-staticweb-py3] [testenv:func-domain-remap-staticweb-py3]
basepython = python3 basepython = python3
commands = {[testenv:func-py3]commands} commands = {[testenv:func-py3]commands}
setenv = SWIFT_TEST_IN_PROCESS=1 setenv = SWIFT_TEST_IN_PROCESS=1
SWIFT_TEST_IN_PROCESS_CONF_LOADER=domain_remap_staticweb SWIFT_TEST_IN_PROCESS_CONF_LOADER=domain_remap_staticweb
[testenv:func-encryption] [testenv:func-encryption]
commands = ./.functests {posargs} commands = ./.functests {posargs}
setenv = SWIFT_TEST_IN_PROCESS=1 setenv = SWIFT_TEST_IN_PROCESS=1
@ -95,7 +126,7 @@ commands = sphinx-build -W -b html doc/source doc/build/html
[testenv:api-ref] [testenv:api-ref]
# This environment is called from CI scripts to test and publish # This environment is called from CI scripts to test and publish
# the API Ref to developer.openstack.org. # the API Ref to docs.openstack.org.
basepython = python3 basepython = python3
deps = -r{toxinidir}/doc/requirements.txt deps = -r{toxinidir}/doc/requirements.txt
commands = commands =