replace md5 with swift utils version

md5 is not an approved algorithm in FIPS mode, and trying to
instantiate a hashlib.md5() will fail when the system is running in
FIPS mode.

md5 is allowed when in a non-security context.  There is a plan to
add a keyword parameter (usedforsecurity) to hashlib.md5() to annotate
whether or not the instance is being used in a security context.

In the case where it is not, the instantiation of md5 will be allowed.
See https://bugs.python.org/issue9216 for more details.

Some downstream python versions already support this parameter.  To
support these versions, a new encapsulation of md5() is added to
swift/common/utils.py.  This encapsulation is identical to the one being
added to oslo.utils, but is recreated here to avoid adding a dependency.

This patch is to replace the instances of hashlib.md5() with this new
encapsulation, adding an annotation indicating whether the usage is
a security context or not.

While this patch seems large, it is really just the same change over and
over again.  Reviewers need to pay particular attention to whether the
keyword parameter (usedforsecurity) is set correctly.  Right now, none
of the converted call sites is in a security context.

Now that all the instances have been converted, we can update the bandit
run to look for these instances and ensure that new invocations do not
creep in.

With this latest patch, the functional and unit tests all pass
on a FIPS-enabled system.

Co-Authored-By: Pete Zaitcev
Change-Id: Ibb4917da4c083e1e094156d748708b87387f2d87
This commit is contained in:
Ade Lee 2020-09-11 16:28:11 -04:00
parent 36107b4a69
commit 5320ecbaf2
61 changed files with 759 additions and 509 deletions

View File

@ -80,7 +80,7 @@
# B703 : django_mark_safe # B703 : django_mark_safe
# (optional) list included test IDs here, eg '[B101, B406]': # (optional) list included test IDs here, eg '[B101, B406]':
tests: [B102, B103, B302, B306, B308, B309, B310, B401, B501, B502, B506, B601, B602, B609] tests: [B102, B103, B302, B303, B304, B305, B306, B308, B309, B310, B401, B501, B502, B506, B601, B602, B609]
# (optional) list skipped test IDs here, eg '[B101, B406]': # (optional) list skipped test IDs here, eg '[B101, B406]':
skips: skips:

View File

@ -19,7 +19,6 @@ import socket
from logging import DEBUG from logging import DEBUG
from math import sqrt from math import sqrt
from time import time from time import time
from hashlib import md5
import itertools import itertools
from eventlet import GreenPool, sleep, Timeout from eventlet import GreenPool, sleep, Timeout
@ -35,7 +34,7 @@ from swift.common.request_helpers import USE_REPLICATION_NETWORK_HEADER
from swift.common.ring import Ring from swift.common.ring import Ring
from swift.common.ring.utils import is_local_device from swift.common.ring.utils import is_local_device
from swift.common.utils import get_logger, whataremyips, config_true_value, \ from swift.common.utils import get_logger, whataremyips, config_true_value, \
Timestamp Timestamp, md5
from swift.common.daemon import Daemon from swift.common.daemon import Daemon
from swift.common.storage_policy import POLICIES, PolicyError from swift.common.storage_policy import POLICIES, PolicyError
@ -271,8 +270,9 @@ class AccountReaper(Daemon):
container_ = container.encode('utf-8') container_ = container.encode('utf-8')
else: else:
container_ = container container_ = container
this_shard = int(md5(container_).hexdigest(), 16) % \ this_shard = (
len(nodes) int(md5(container_, usedforsecurity=False)
.hexdigest(), 16) % len(nodes))
if container_shard not in (this_shard, None): if container_shard not in (this_shard, None):
continue continue

View File

@ -15,7 +15,6 @@ import itertools
import json import json
import os import os
import sqlite3 import sqlite3
from hashlib import md5
from collections import defaultdict from collections import defaultdict
from six.moves import urllib from six.moves import urllib
@ -32,6 +31,7 @@ from swift.obj.diskfile import get_data_dir, read_metadata, DATADIR_BASE, \
extract_policy extract_policy
from swift.common.storage_policy import POLICIES from swift.common.storage_policy import POLICIES
from swift.common.middleware.crypto.crypto_utils import load_crypto_meta from swift.common.middleware.crypto.crypto_utils import load_crypto_meta
from swift.common.utils import md5
class InfoSystemExit(Exception): class InfoSystemExit(Exception):
@ -545,7 +545,7 @@ def print_obj(datafile, check_etag=True, swift_dir='/etc/swift',
# Optional integrity check; it's useful, but slow. # Optional integrity check; it's useful, but slow.
file_len = None file_len = None
if check_etag: if check_etag:
h = md5() h = md5(usedforsecurity=False)
file_len = 0 file_len = 0
while True: while True:
data = fp.read(64 * 1024) data = fp.read(64 * 1024)

View File

@ -17,7 +17,6 @@
from contextlib import contextmanager, closing from contextlib import contextmanager, closing
import base64 import base64
import hashlib
import json import json
import logging import logging
import os import os
@ -36,7 +35,7 @@ import sqlite3
from swift.common.constraints import MAX_META_COUNT, MAX_META_OVERALL_SIZE, \ from swift.common.constraints import MAX_META_COUNT, MAX_META_OVERALL_SIZE, \
check_utf8 check_utf8
from swift.common.utils import Timestamp, renamer, \ from swift.common.utils import Timestamp, renamer, \
mkdirs, lock_parent_directory, fallocate mkdirs, lock_parent_directory, fallocate, md5
from swift.common.exceptions import LockTimeout from swift.common.exceptions import LockTimeout
from swift.common.swob import HTTPBadRequest from swift.common.swob import HTTPBadRequest
@ -186,7 +185,8 @@ def chexor(old, name, timestamp):
""" """
if name is None: if name is None:
raise Exception('name is None!') raise Exception('name is None!')
new = hashlib.md5(('%s-%s' % (name, timestamp)).encode('utf8')).hexdigest() new = md5(('%s-%s' % (name, timestamp)).encode('utf8'),
usedforsecurity=False).hexdigest()
return '%032x' % (int(old, 16) ^ int(new, 16)) return '%032x' % (int(old, 16) ^ int(new, 16))

View File

@ -50,13 +50,13 @@ import json
import logging import logging
import time import time
from bisect import bisect from bisect import bisect
from hashlib import md5
from eventlet.green import socket from eventlet.green import socket
from eventlet.pools import Pool from eventlet.pools import Pool
from eventlet import Timeout from eventlet import Timeout
from six.moves import range from six.moves import range
from swift.common import utils from swift.common import utils
from swift.common.utils import md5
DEFAULT_MEMCACHED_PORT = 11211 DEFAULT_MEMCACHED_PORT = 11211
@ -81,7 +81,7 @@ def md5hash(key):
key = key.encode('utf-8') key = key.encode('utf-8')
else: else:
key = key.encode('utf-8', errors='surrogateescape') key = key.encode('utf-8', errors='surrogateescape')
return md5(key).hexdigest().encode('ascii') return md5(key, usedforsecurity=False).hexdigest().encode('ascii')
def sanitize_timeout(timeout): def sanitize_timeout(timeout):

View File

@ -27,7 +27,7 @@ from swift.common.request_helpers import get_object_transient_sysmeta, \
from swift.common.swob import Request, Match, HTTPException, \ from swift.common.swob import Request, Match, HTTPException, \
HTTPUnprocessableEntity, wsgi_to_bytes, bytes_to_wsgi, normalize_etag HTTPUnprocessableEntity, wsgi_to_bytes, bytes_to_wsgi, normalize_etag
from swift.common.utils import get_logger, config_true_value, \ from swift.common.utils import get_logger, config_true_value, \
MD5_OF_EMPTY_STRING MD5_OF_EMPTY_STRING, md5
def encrypt_header_val(crypto, value, key): def encrypt_header_val(crypto, value, key):
@ -91,8 +91,8 @@ class EncInputWrapper(object):
self.body_crypto_meta['key_id'] = self.keys['id'] self.body_crypto_meta['key_id'] = self.keys['id']
self.body_crypto_ctxt = self.crypto.create_encryption_ctxt( self.body_crypto_ctxt = self.crypto.create_encryption_ctxt(
body_key, self.body_crypto_meta.get('iv')) body_key, self.body_crypto_meta.get('iv'))
self.plaintext_md5 = hashlib.md5() self.plaintext_md5 = md5(usedforsecurity=False)
self.ciphertext_md5 = hashlib.md5() self.ciphertext_md5 = md5(usedforsecurity=False)
def install_footers_callback(self, req): def install_footers_callback(self, req):
# the proxy controller will call back for footer metadata after # the proxy controller will call back for footer metadata after

View File

@ -122,7 +122,6 @@ import json
import six import six
from hashlib import md5
from swift.common import constraints from swift.common import constraints
from swift.common.exceptions import ListingIterError, SegmentError from swift.common.exceptions import ListingIterError, SegmentError
from swift.common.http import is_success from swift.common.http import is_success
@ -131,7 +130,7 @@ from swift.common.swob import Request, Response, HTTPException, \
str_to_wsgi, wsgi_to_str, wsgi_quote, wsgi_unquote, normalize_etag str_to_wsgi, wsgi_to_str, wsgi_quote, wsgi_unquote, normalize_etag
from swift.common.utils import get_logger, \ from swift.common.utils import get_logger, \
RateLimitedIterator, quote, close_if_possible, closing_if_possible, \ RateLimitedIterator, quote, close_if_possible, closing_if_possible, \
drain_and_close drain_and_close, md5
from swift.common.request_helpers import SegmentedIterable, \ from swift.common.request_helpers import SegmentedIterable, \
update_ignore_range_header update_ignore_range_header
from swift.common.wsgi import WSGIContext, make_subrequest, load_app_config from swift.common.wsgi import WSGIContext, make_subrequest, load_app_config
@ -333,7 +332,7 @@ class GetContext(WSGIContext):
if have_complete_listing: if have_complete_listing:
response_headers = [(h, v) for h, v in response_headers response_headers = [(h, v) for h, v in response_headers
if h.lower() != "etag"] if h.lower() != "etag"]
etag = md5() etag = md5(usedforsecurity=False)
for seg_dict in segments: for seg_dict in segments:
etag.update(normalize_etag(seg_dict['hash']).encode('utf8')) etag.update(normalize_etag(seg_dict['hash']).encode('utf8'))
response_headers.append(('Etag', '"%s"' % etag.hexdigest())) response_headers.append(('Etag', '"%s"' % etag.hexdigest()))

View File

@ -61,7 +61,6 @@ Static Large Object when the multipart upload is completed.
import binascii import binascii
import copy import copy
from hashlib import md5
import os import os
import re import re
import time import time
@ -69,7 +68,7 @@ import time
import six import six
from swift.common.swob import Range, bytes_to_wsgi, normalize_etag from swift.common.swob import Range, bytes_to_wsgi, normalize_etag
from swift.common.utils import json, public, reiterate from swift.common.utils import json, public, reiterate, md5
from swift.common.db import utf8encode from swift.common.db import utf8encode
from swift.common.request_helpers import get_container_update_override_key from swift.common.request_helpers import get_container_update_override_key
@ -636,7 +635,7 @@ class UploadController(Controller):
headers['Content-Type'] = content_type headers['Content-Type'] = content_type
container = req.container_name + MULTIUPLOAD_SUFFIX container = req.container_name + MULTIUPLOAD_SUFFIX
s3_etag_hasher = md5() s3_etag_hasher = md5(usedforsecurity=False)
manifest = [] manifest = []
previous_number = 0 previous_number = 0
try: try:
@ -646,7 +645,8 @@ class UploadController(Controller):
if 'content-md5' in req.headers: if 'content-md5' in req.headers:
# If an MD5 was provided, we need to verify it. # If an MD5 was provided, we need to verify it.
# Note that S3Request already took care of translating to ETag # Note that S3Request already took care of translating to ETag
if req.headers['etag'] != md5(xml).hexdigest(): if req.headers['etag'] != md5(
xml, usedforsecurity=False).hexdigest():
raise BadDigest(content_md5=req.headers['content-md5']) raise BadDigest(content_md5=req.headers['content-md5'])
# We're only interested in the body here, in the # We're only interested in the body here, in the
# multipart-upload controller -- *don't* let it get # multipart-upload controller -- *don't* let it get

View File

@ -17,7 +17,7 @@ import base64
import binascii import binascii
from collections import defaultdict, OrderedDict from collections import defaultdict, OrderedDict
from email.header import Header from email.header import Header
from hashlib import sha1, sha256, md5 from hashlib import sha1, sha256
import hmac import hmac
import re import re
import six import six
@ -26,7 +26,7 @@ from six.moves.urllib.parse import quote, unquote, parse_qsl
import string import string
from swift.common.utils import split_path, json, get_swift_info, \ from swift.common.utils import split_path, json, get_swift_info, \
close_if_possible close_if_possible, md5
from swift.common import swob from swift.common import swob
from swift.common.http import HTTP_OK, HTTP_CREATED, HTTP_ACCEPTED, \ from swift.common.http import HTTP_OK, HTTP_CREATED, HTTP_ACCEPTED, \
HTTP_NO_CONTENT, HTTP_UNAUTHORIZED, HTTP_FORBIDDEN, HTTP_NOT_FOUND, \ HTTP_NO_CONTENT, HTTP_UNAUTHORIZED, HTTP_FORBIDDEN, HTTP_NOT_FOUND, \
@ -866,7 +866,8 @@ class S3Request(swob.Request):
raise InvalidRequest('Missing required header for this request: ' raise InvalidRequest('Missing required header for this request: '
'Content-MD5') 'Content-MD5')
digest = base64.b64encode(md5(body).digest()).strip().decode('ascii') digest = base64.b64encode(md5(
body, usedforsecurity=False).digest()).strip().decode('ascii')
if self.environ['HTTP_CONTENT_MD5'] != digest: if self.environ['HTTP_CONTENT_MD5'] != digest:
raise BadDigest(content_md5=self.environ['HTTP_CONTENT_MD5']) raise BadDigest(content_md5=self.environ['HTTP_CONTENT_MD5'])

View File

@ -330,7 +330,6 @@ import json
import mimetypes import mimetypes
import re import re
import time import time
from hashlib import md5
import six import six
@ -348,7 +347,7 @@ from swift.common.utils import get_logger, config_true_value, \
get_valid_utf8_str, override_bytes_from_content_type, split_path, \ get_valid_utf8_str, override_bytes_from_content_type, split_path, \
register_swift_info, RateLimitedIterator, quote, close_if_possible, \ register_swift_info, RateLimitedIterator, quote, close_if_possible, \
closing_if_possible, LRUCache, StreamingPile, strict_b64decode, \ closing_if_possible, LRUCache, StreamingPile, strict_b64decode, \
Timestamp, drain_and_close, get_expirer_container Timestamp, drain_and_close, get_expirer_container, md5
from swift.common.request_helpers import SegmentedIterable, \ from swift.common.request_helpers import SegmentedIterable, \
get_sys_meta_prefix, update_etag_is_at_header, resolve_etag_is_at_header, \ get_sys_meta_prefix, update_etag_is_at_header, resolve_etag_is_at_header, \
get_container_update_override_key, update_ignore_range_header get_container_update_override_key, update_ignore_range_header
@ -927,7 +926,9 @@ class SloGetContext(WSGIContext):
if header.lower() == 'content-length': if header.lower() == 'content-length':
new_headers.append(('Content-Length', len(json_data))) new_headers.append(('Content-Length', len(json_data)))
elif header.lower() == 'etag': elif header.lower() == 'etag':
new_headers.append(('Etag', md5(json_data).hexdigest())) new_headers.append(
('Etag', md5(json_data, usedforsecurity=False)
.hexdigest()))
else: else:
new_headers.append((header, value)) new_headers.append((header, value))
self._response_headers = new_headers self._response_headers = new_headers
@ -965,7 +966,7 @@ class SloGetContext(WSGIContext):
# Prep to calculate content_length & etag if necessary # Prep to calculate content_length & etag if necessary
if slo_etag is None: if slo_etag is None:
calculated_etag = md5() calculated_etag = md5(usedforsecurity=False)
if content_length is None: if content_length is None:
calculated_content_length = 0 calculated_content_length = 0
@ -977,7 +978,8 @@ class SloGetContext(WSGIContext):
if slo_etag is None: if slo_etag is None:
if 'raw_data' in seg_dict: if 'raw_data' in seg_dict:
r = md5(seg_dict['raw_data']).hexdigest() r = md5(seg_dict['raw_data'],
usedforsecurity=False).hexdigest()
elif seg_dict.get('range'): elif seg_dict.get('range'):
r = '%s:%s;' % (seg_dict['hash'], seg_dict['range']) r = '%s:%s;' % (seg_dict['hash'], seg_dict['range'])
else: else:
@ -1347,11 +1349,11 @@ class StaticLargeObject(object):
out_content_type, resp_dict, problem_segments, 'upload') out_content_type, resp_dict, problem_segments, 'upload')
return return
slo_etag = md5() slo_etag = md5(usedforsecurity=False)
for seg_data in data_for_storage: for seg_data in data_for_storage:
if 'data' in seg_data: if 'data' in seg_data:
raw_data = base64.b64decode(seg_data['data']) raw_data = base64.b64decode(seg_data['data'])
r = md5(raw_data).hexdigest() r = md5(raw_data, usedforsecurity=False).hexdigest()
elif seg_data.get('range'): elif seg_data.get('range'):
r = '%s:%s;' % (seg_data['hash'], seg_data['range']) r = '%s:%s;' % (seg_data['hash'], seg_data['range'])
else: else:
@ -1386,7 +1388,7 @@ class StaticLargeObject(object):
SYSMETA_SLO_ETAG: slo_etag, SYSMETA_SLO_ETAG: slo_etag,
SYSMETA_SLO_SIZE: total_size, SYSMETA_SLO_SIZE: total_size,
'X-Static-Large-Object': 'True', 'X-Static-Large-Object': 'True',
'Etag': md5(json_data).hexdigest(), 'Etag': md5(json_data, usedforsecurity=False).hexdigest(),
}) })
# Ensure container listings have both etags. However, if any # Ensure container listings have both etags. However, if any

View File

@ -20,7 +20,6 @@ Why not swift.common.utils, you ask? Because this way we can import things
from swob in here without creating circular imports. from swob in here without creating circular imports.
""" """
import hashlib
import itertools import itertools
import sys import sys
import time import time
@ -40,7 +39,7 @@ from swift.common.utils import split_path, validate_device_partition, \
close_if_possible, maybe_multipart_byteranges_to_document_iters, \ close_if_possible, maybe_multipart_byteranges_to_document_iters, \
multipart_byteranges_to_document_iters, parse_content_type, \ multipart_byteranges_to_document_iters, parse_content_type, \
parse_content_range, csv_append, list_from_csv, Spliterator, quote, \ parse_content_range, csv_append, list_from_csv, Spliterator, quote, \
RESERVED, config_true_value RESERVED, config_true_value, md5
from swift.common.wsgi import make_subrequest from swift.common.wsgi import make_subrequest
@ -604,7 +603,7 @@ class SegmentedIterable(object):
seg_hash = None seg_hash = None
if seg_resp.etag and not seg_req.headers.get('Range'): if seg_resp.etag and not seg_req.headers.get('Range'):
# Only calculate the MD5 if it we can use it to validate # Only calculate the MD5 if it we can use it to validate
seg_hash = hashlib.md5() seg_hash = md5(usedforsecurity=False)
document_iters = maybe_multipart_byteranges_to_document_iters( document_iters = maybe_multipart_byteranges_to_document_iters(
seg_resp.app_iter, seg_resp.app_iter,

View File

@ -22,7 +22,6 @@ from os.path import getmtime
import struct import struct
from time import time from time import time
import os import os
from hashlib import md5
from itertools import chain, count from itertools import chain, count
from tempfile import NamedTemporaryFile from tempfile import NamedTemporaryFile
import sys import sys
@ -32,7 +31,7 @@ import six
from six.moves import range from six.moves import range
from swift.common.exceptions import RingLoadError from swift.common.exceptions import RingLoadError
from swift.common.utils import hash_path, validate_configuration from swift.common.utils import hash_path, validate_configuration, md5
from swift.common.ring.utils import tiers_for_dev from swift.common.ring.utils import tiers_for_dev
@ -53,7 +52,7 @@ class RingReader(object):
self._buffer = b'' self._buffer = b''
self.size = 0 self.size = 0
self.raw_size = 0 self.raw_size = 0
self._md5 = md5() self._md5 = md5(usedforsecurity=False)
self._decomp = zlib.decompressobj(32 + zlib.MAX_WBITS) self._decomp = zlib.decompressobj(32 + zlib.MAX_WBITS)
@property @property
@ -538,7 +537,8 @@ class Ring(object):
(d['region'], d['zone'], d['ip']) for d in primary_nodes) (d['region'], d['zone'], d['ip']) for d in primary_nodes)
parts = len(self._replica2part2dev_id[0]) parts = len(self._replica2part2dev_id[0])
part_hash = md5(str(part).encode('ascii')).digest() part_hash = md5(str(part).encode('ascii'),
usedforsecurity=False).digest()
start = struct.unpack_from('>I', part_hash)[0] >> self._part_shift start = struct.unpack_from('>I', part_hash)[0] >> self._part_shift
inc = int(parts / 65536) or 1 inc = int(parts / 65536) or 1
# Multiple loops for execution speed; the checks and bookkeeping get # Multiple loops for execution speed; the checks and bookkeeping get

View File

@ -40,7 +40,7 @@ import uuid
import functools import functools
import platform import platform
import email.parser import email.parser
from hashlib import md5, sha1 from hashlib import sha1
from random import random, shuffle from random import random, shuffle
from contextlib import contextmanager, closing from contextlib import contextmanager, closing
import ctypes import ctypes
@ -674,7 +674,10 @@ class StrAnonymizer(str):
if not self: if not self:
return self return self
else: else:
h = getattr(hashlib, self.method)() if self.method == 'md5':
h = md5(usedforsecurity=False)
else:
h = getattr(hashlib, self.method)()
if self.salt: if self.salt:
h.update(six.b(self.salt)) h.update(six.b(self.salt))
h.update(six.b(self)) h.update(six.b(self))
@ -2735,10 +2738,10 @@ def hash_path(account, container=None, object=None, raw_digest=False):
else object.encode('utf8')) else object.encode('utf8'))
if raw_digest: if raw_digest:
return md5(HASH_PATH_PREFIX + b'/' + b'/'.join(paths) return md5(HASH_PATH_PREFIX + b'/' + b'/'.join(paths)
+ HASH_PATH_SUFFIX).digest() + HASH_PATH_SUFFIX, usedforsecurity=False).digest()
else: else:
return md5(HASH_PATH_PREFIX + b'/' + b'/'.join(paths) return md5(HASH_PATH_PREFIX + b'/' + b'/'.join(paths)
+ HASH_PATH_SUFFIX).hexdigest() + HASH_PATH_SUFFIX, usedforsecurity=False).hexdigest()
def get_zero_indexed_base_string(base, index): def get_zero_indexed_base_string(base, index):
@ -4858,6 +4861,31 @@ def get_md5_socket():
return md5_sockfd return md5_sockfd
try:
_test_md5 = hashlib.md5(usedforsecurity=False) # nosec
def md5(string=b'', usedforsecurity=True):
"""Return an md5 hashlib object using usedforsecurity parameter
For python distributions that support the usedforsecurity keyword
parameter, this passes the parameter through as expected.
See https://bugs.python.org/issue9216
"""
return hashlib.md5(string, usedforsecurity=usedforsecurity) # nosec
except TypeError:
def md5(string=b'', usedforsecurity=True):
"""Return an md5 hashlib object without usedforsecurity parameter
For python distributions that do not yet support this keyword
parameter, we drop the parameter
"""
return hashlib.md5(string) # nosec
def md5_factory():
return md5(usedforsecurity=False)
class ShardRange(object): class ShardRange(object):
""" """
A ShardRange encapsulates sharding state related to a container including A ShardRange encapsulates sharding state related to a container including
@ -4999,7 +5027,8 @@ class ShardRange(object):
if not isinstance(parent_container, bytes): if not isinstance(parent_container, bytes):
parent_container = parent_container.encode('utf-8') parent_container = parent_container.encode('utf-8')
return "%s-%s-%s-%s" % (root_container, return "%s-%s-%s-%s" % (root_container,
hashlib.md5(parent_container).hexdigest(), md5(parent_container,
usedforsecurity=False).hexdigest(),
cls._to_timestamp(timestamp).internal, cls._to_timestamp(timestamp).internal,
index) index)
@ -5583,7 +5612,7 @@ def md5_hash_for_file(fname):
:returns: MD5 checksum, hex encoded :returns: MD5 checksum, hex encoded
""" """
with open(fname, 'rb') as f: with open(fname, 'rb') as f:
md5sum = md5() md5sum = md5(usedforsecurity=False)
for block in iter(lambda: f.read(MD5_BLOCK_READ_BYTES), b''): for block in iter(lambda: f.read(MD5_BLOCK_READ_BYTES), b''):
md5sum.update(block) md5sum.update(block)
return md5sum.hexdigest() return md5sum.hexdigest()

View File

@ -40,7 +40,6 @@ import os
import re import re
import time import time
import uuid import uuid
from hashlib import md5
import logging import logging
import traceback import traceback
import xattr import xattr
@ -66,7 +65,8 @@ from swift.common.utils import mkdirs, Timestamp, \
config_true_value, listdir, split_path, remove_file, \ config_true_value, listdir, split_path, remove_file, \
get_md5_socket, F_SETPIPE_SZ, decode_timestamps, encode_timestamps, \ get_md5_socket, F_SETPIPE_SZ, decode_timestamps, encode_timestamps, \
MD5_OF_EMPTY_STRING, link_fd_to_path, \ MD5_OF_EMPTY_STRING, link_fd_to_path, \
O_TMPFILE, makedirs_count, replace_partition_in_path, remove_directory O_TMPFILE, makedirs_count, replace_partition_in_path, remove_directory, \
md5, md5_factory
from swift.common.splice import splice, tee from swift.common.splice import splice, tee
from swift.common.exceptions import DiskFileQuarantined, DiskFileNotExist, \ from swift.common.exceptions import DiskFileQuarantined, DiskFileNotExist, \
DiskFileCollision, DiskFileNoSpace, DiskFileDeviceUnavailable, \ DiskFileCollision, DiskFileNoSpace, DiskFileDeviceUnavailable, \
@ -222,14 +222,16 @@ def read_metadata(fd, add_missing_checksum=False):
# exist. This is fine; it just means that this object predates the # exist. This is fine; it just means that this object predates the
# introduction of metadata checksums. # introduction of metadata checksums.
if add_missing_checksum: if add_missing_checksum:
new_checksum = md5(metadata).hexdigest().encode('ascii') new_checksum = (md5(metadata, usedforsecurity=False)
.hexdigest().encode('ascii'))
try: try:
xattr.setxattr(fd, METADATA_CHECKSUM_KEY, new_checksum) xattr.setxattr(fd, METADATA_CHECKSUM_KEY, new_checksum)
except (IOError, OSError) as e: except (IOError, OSError) as e:
logging.error("Error adding metadata: %s" % e) logging.error("Error adding metadata: %s" % e)
if metadata_checksum: if metadata_checksum:
computed_checksum = md5(metadata).hexdigest().encode('ascii') computed_checksum = (md5(metadata, usedforsecurity=False)
.hexdigest().encode('ascii'))
if metadata_checksum != computed_checksum: if metadata_checksum != computed_checksum:
raise DiskFileBadMetadataChecksum( raise DiskFileBadMetadataChecksum(
"Metadata checksum mismatch for %s: " "Metadata checksum mismatch for %s: "
@ -254,7 +256,8 @@ def write_metadata(fd, metadata, xattr_size=65536):
:param metadata: metadata to write :param metadata: metadata to write
""" """
metastr = pickle.dumps(_encode_metadata(metadata), PICKLE_PROTOCOL) metastr = pickle.dumps(_encode_metadata(metadata), PICKLE_PROTOCOL)
metastr_md5 = md5(metastr).hexdigest().encode('ascii') metastr_md5 = (
md5(metastr, usedforsecurity=False).hexdigest().encode('ascii'))
key = 0 key = 0
try: try:
while metastr: while metastr:
@ -1113,11 +1116,11 @@ class BaseDiskFileManager(object):
:param policy: storage policy used :param policy: storage policy used
""" """
if six.PY2: if six.PY2:
hashes = defaultdict(md5) hashes = defaultdict(md5_factory)
else: else:
class shim(object): class shim(object):
def __init__(self): def __init__(self):
self.md5 = md5() self.md5 = md5(usedforsecurity=False)
def update(self, s): def update(self, s):
if isinstance(s, str): if isinstance(s, str):
@ -1686,7 +1689,7 @@ class BaseDiskFileWriter(object):
self._fd = None self._fd = None
self._tmppath = None self._tmppath = None
self._size = size self._size = size
self._chunks_etag = md5() self._chunks_etag = md5(usedforsecurity=False)
self._bytes_per_sync = bytes_per_sync self._bytes_per_sync = bytes_per_sync
self._diskfile = diskfile self._diskfile = diskfile
self.next_part_power = next_part_power self.next_part_power = next_part_power
@ -2003,7 +2006,7 @@ class BaseDiskFileReader(object):
def _init_checks(self): def _init_checks(self):
if self._fp.tell() == 0: if self._fp.tell() == 0:
self._started_at_0 = True self._started_at_0 = True
self._iter_etag = md5() self._iter_etag = md5(usedforsecurity=False)
def _update_checks(self, chunk): def _update_checks(self, chunk):
if self._iter_etag: if self._iter_etag:

View File

@ -20,7 +20,6 @@ from time import time
from os.path import join from os.path import join
from swift import gettext_ as _ from swift import gettext_ as _
from collections import defaultdict, deque from collections import defaultdict, deque
import hashlib
from eventlet import sleep, Timeout from eventlet import sleep, Timeout
from eventlet.greenpool import GreenPool from eventlet.greenpool import GreenPool
@ -30,7 +29,7 @@ from swift.common.daemon import Daemon
from swift.common.internal_client import InternalClient, UnexpectedResponse from swift.common.internal_client import InternalClient, UnexpectedResponse
from swift.common.utils import get_logger, dump_recon_cache, split_path, \ from swift.common.utils import get_logger, dump_recon_cache, split_path, \
Timestamp, config_true_value, normalize_delete_at_timestamp, \ Timestamp, config_true_value, normalize_delete_at_timestamp, \
RateLimitedIterator RateLimitedIterator, md5
from swift.common.http import HTTP_NOT_FOUND, HTTP_CONFLICT, \ from swift.common.http import HTTP_NOT_FOUND, HTTP_CONFLICT, \
HTTP_PRECONDITION_FAILED HTTP_PRECONDITION_FAILED
from swift.common.swob import wsgi_quote, str_to_wsgi from swift.common.swob import wsgi_quote, str_to_wsgi
@ -218,7 +217,8 @@ class ObjectExpirer(Daemon):
if not isinstance(name, bytes): if not isinstance(name, bytes):
name = name.encode('utf8') name = name.encode('utf8')
# md5 is only used for shuffling mod # md5 is only used for shuffling mod
return int(hashlib.md5(name).hexdigest(), 16) % divisor return int(md5(
name, usedforsecurity=False).hexdigest(), 16) % divisor
def iter_task_accounts_to_expire(self): def iter_task_accounts_to_expire(self):
""" """

View File

@ -17,7 +17,6 @@
import io import io
import time import time
import hashlib
from contextlib import contextmanager from contextlib import contextmanager
from eventlet import Timeout from eventlet import Timeout
@ -27,6 +26,7 @@ from swift.common.exceptions import DiskFileQuarantined, DiskFileNotExist, \
DiskFileCollision, DiskFileDeleted, DiskFileNotOpen DiskFileCollision, DiskFileDeleted, DiskFileNotOpen
from swift.common.request_helpers import is_sys_meta from swift.common.request_helpers import is_sys_meta
from swift.common.swob import multi_range_iterator from swift.common.swob import multi_range_iterator
from swift.common.utils import md5
from swift.obj.diskfile import DATAFILE_SYSTEM_META, RESERVED_DATAFILE_META from swift.obj.diskfile import DATAFILE_SYSTEM_META, RESERVED_DATAFILE_META
@ -103,7 +103,7 @@ class DiskFileWriter(object):
self._name = name self._name = name
self._fp = None self._fp = None
self._upload_size = 0 self._upload_size = 0
self._chunks_etag = hashlib.md5() self._chunks_etag = md5(usedforsecurity=False)
def open(self): def open(self):
""" """
@ -197,7 +197,7 @@ class DiskFileReader(object):
self._read_to_eof = False self._read_to_eof = False
if self._fp.tell() == 0: if self._fp.tell() == 0:
self._started_at_0 = True self._started_at_0 = True
self._iter_etag = hashlib.md5() self._iter_etag = md5(usedforsecurity=False)
while True: while True:
chunk = self._fp.read() chunk = self._fp.read()
if chunk: if chunk:

View File

@ -26,7 +26,6 @@ import traceback
import socket import socket
import math import math
from swift import gettext_ as _ from swift import gettext_ as _
from hashlib import md5
from eventlet import sleep, wsgi, Timeout, tpool from eventlet import sleep, wsgi, Timeout, tpool
from eventlet.greenthread import spawn from eventlet.greenthread import spawn
@ -37,7 +36,7 @@ from swift.common.utils import public, get_logger, \
get_expirer_container, parse_mime_headers, \ get_expirer_container, parse_mime_headers, \
iter_multipart_mime_documents, extract_swift_bytes, safe_json_loads, \ iter_multipart_mime_documents, extract_swift_bytes, safe_json_loads, \
config_auto_int_value, split_path, get_redirect_data, \ config_auto_int_value, split_path, get_redirect_data, \
normalize_timestamp normalize_timestamp, md5
from swift.common.bufferedhttp import http_connect from swift.common.bufferedhttp import http_connect
from swift.common.constraints import check_object_creation, \ from swift.common.constraints import check_object_creation, \
valid_timestamp, check_utf8, AUTO_CREATE_ACCOUNT_PREFIX valid_timestamp, check_utf8, AUTO_CREATE_ACCOUNT_PREFIX
@ -583,7 +582,7 @@ class ObjectController(BaseStorageServer):
footer_md5 = footer_hdrs.get('Content-MD5') footer_md5 = footer_hdrs.get('Content-MD5')
if not footer_md5: if not footer_md5:
raise HTTPBadRequest(body="no Content-MD5 in footer") raise HTTPBadRequest(body="no Content-MD5 in footer")
if footer_md5 != md5(footer_body).hexdigest(): if footer_md5 != md5(footer_body, usedforsecurity=False).hexdigest():
raise HTTPUnprocessableEntity(body="footer MD5 mismatch") raise HTTPUnprocessableEntity(body="footer MD5 mismatch")
try: try:

View File

@ -35,7 +35,6 @@ import mimetypes
import time import time
import math import math
import random import random
from hashlib import md5
import sys import sys
from greenlet import GreenletExit from greenlet import GreenletExit
@ -49,7 +48,8 @@ from swift.common.utils import (
GreenAsyncPile, GreenthreadSafeIterator, Timestamp, WatchdogTimeout, GreenAsyncPile, GreenthreadSafeIterator, Timestamp, WatchdogTimeout,
normalize_delete_at_timestamp, public, get_expirer_container, normalize_delete_at_timestamp, public, get_expirer_container,
document_iters_to_http_response_body, parse_content_range, document_iters_to_http_response_body, parse_content_range,
quorum_size, reiterate, close_if_possible, safe_json_loads) quorum_size, reiterate, close_if_possible, safe_json_loads, md5,
md5_factory)
from swift.common.bufferedhttp import http_connect from swift.common.bufferedhttp import http_connect
from swift.common.constraints import check_metadata, check_object_creation from swift.common.constraints import check_metadata, check_object_creation
from swift.common import constraints from swift.common import constraints
@ -1784,7 +1784,8 @@ class MIMEPutter(Putter):
self._start_object_data() self._start_object_data()
footer_body = json.dumps(footer_metadata).encode('ascii') footer_body = json.dumps(footer_metadata).encode('ascii')
footer_md5 = md5(footer_body).hexdigest().encode('ascii') footer_md5 = md5(
footer_body, usedforsecurity=False).hexdigest().encode('ascii')
tail_boundary = (b"--%s" % (self.mime_boundary,)) tail_boundary = (b"--%s" % (self.mime_boundary,))
if not self.multiphase: if not self.multiphase:
@ -3178,7 +3179,7 @@ class ECObjectController(BaseObjectController):
bytes_transferred = 0 bytes_transferred = 0
chunk_transform = chunk_transformer(policy) chunk_transform = chunk_transformer(policy)
chunk_transform.send(None) chunk_transform.send(None)
frag_hashers = collections.defaultdict(md5) frag_hashers = collections.defaultdict(md5_factory)
def send_chunk(chunk): def send_chunk(chunk):
# Note: there's two different hashers in here. etag_hasher is # Note: there's two different hashers in here. etag_hasher is
@ -3411,7 +3412,7 @@ class ECObjectController(BaseObjectController):
# the same as the request body sent proxy -> object, we # the same as the request body sent proxy -> object, we
# can't rely on the object-server to do the etag checking - # can't rely on the object-server to do the etag checking -
# so we have to do it here. # so we have to do it here.
etag_hasher = md5() etag_hasher = md5(usedforsecurity=False)
min_conns = policy.quorum min_conns = policy.quorum
putters = self._get_put_connections( putters = self._get_put_connections(

View File

@ -23,13 +23,13 @@ import boto
# pylint: disable-msg=E0611,F0401 # pylint: disable-msg=E0611,F0401
from distutils.version import StrictVersion from distutils.version import StrictVersion
from hashlib import md5
from six.moves import zip, zip_longest from six.moves import zip, zip_longest
import test.functional as tf import test.functional as tf
from swift.common.middleware.s3api.etree import fromstring, tostring, \ from swift.common.middleware.s3api.etree import fromstring, tostring, \
Element, SubElement Element, SubElement
from swift.common.middleware.s3api.utils import mktime from swift.common.middleware.s3api.utils import mktime
from swift.common.utils import md5
from test.functional.s3api import S3ApiBase from test.functional.s3api import S3ApiBase
from test.functional.s3api.s3_test_client import Connection from test.functional.s3api.s3_test_client import Connection
@ -180,7 +180,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
# Upload Part # Upload Part
key, upload_id = uploads[0] key, upload_id = uploads[0]
content = b'a' * self.min_segment_size content = b'a' * self.min_segment_size
etag = md5(content).hexdigest() etag = md5(content, usedforsecurity=False).hexdigest()
status, headers, body = \ status, headers, body = \
self._upload_part(bucket, key, upload_id, content) self._upload_part(bucket, key, upload_id, content)
self.assertEqual(status, 200) self.assertEqual(status, 200)
@ -196,7 +196,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
src_bucket = 'bucket2' src_bucket = 'bucket2'
src_obj = 'obj3' src_obj = 'obj3'
src_content = b'b' * self.min_segment_size src_content = b'b' * self.min_segment_size
etag = md5(src_content).hexdigest() etag = md5(src_content, usedforsecurity=False).hexdigest()
# prepare src obj # prepare src obj
self.conn.make_request('PUT', src_bucket) self.conn.make_request('PUT', src_bucket)
@ -312,7 +312,8 @@ class TestS3ApiMultiUpload(S3ApiBase):
concatted_etags = b''.join( concatted_etags = b''.join(
etag.strip('"').encode('ascii') for etag in etags) etag.strip('"').encode('ascii') for etag in etags)
exp_etag = '"%s-%s"' % ( exp_etag = '"%s-%s"' % (
md5(binascii.unhexlify(concatted_etags)).hexdigest(), len(etags)) md5(binascii.unhexlify(concatted_etags),
usedforsecurity=False).hexdigest(), len(etags))
etag = elem.find('ETag').text etag = elem.find('ETag').text
self.assertEqual(etag, exp_etag) self.assertEqual(etag, exp_etag)
@ -324,7 +325,8 @@ class TestS3ApiMultiUpload(S3ApiBase):
self.assertEqual(headers['content-type'], 'foo/bar') self.assertEqual(headers['content-type'], 'foo/bar')
self.assertEqual(headers['x-amz-meta-baz'], 'quux') self.assertEqual(headers['x-amz-meta-baz'], 'quux')
swift_etag = '"%s"' % md5(concatted_etags).hexdigest() swift_etag = '"%s"' % md5(
concatted_etags, usedforsecurity=False).hexdigest()
# TODO: GET via swift api, check against swift_etag # TODO: GET via swift api, check against swift_etag
# Should be safe to retry # Should be safe to retry
@ -375,7 +377,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
self.assertIsNotNone(last_modified) self.assertIsNotNone(last_modified)
exp_content = b'a' * self.min_segment_size exp_content = b'a' * self.min_segment_size
etag = md5(exp_content).hexdigest() etag = md5(exp_content, usedforsecurity=False).hexdigest()
self.assertEqual(resp_etag, etag) self.assertEqual(resp_etag, etag)
# Also check that the etag is correct in part listings # Also check that the etag is correct in part listings
@ -858,7 +860,9 @@ class TestS3ApiMultiUpload(S3ApiBase):
src_content = b'y' * (self.min_segment_size // 2) + b'z' * \ src_content = b'y' * (self.min_segment_size // 2) + b'z' * \
self.min_segment_size self.min_segment_size
src_range = 'bytes=0-%d' % (self.min_segment_size - 1) src_range = 'bytes=0-%d' % (self.min_segment_size - 1)
etag = md5(src_content[:self.min_segment_size]).hexdigest() etag = md5(
src_content[:self.min_segment_size],
usedforsecurity=False).hexdigest()
# prepare src obj # prepare src obj
self.conn.make_request('PUT', src_bucket) self.conn.make_request('PUT', src_bucket)
@ -951,7 +955,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
src_obj = 'obj4' src_obj = 'obj4'
src_content = b'y' * (self.min_segment_size // 2) + b'z' * \ src_content = b'y' * (self.min_segment_size // 2) + b'z' * \
self.min_segment_size self.min_segment_size
etags = [md5(src_content).hexdigest()] etags = [md5(src_content, usedforsecurity=False).hexdigest()]
# prepare null-version src obj # prepare null-version src obj
self.conn.make_request('PUT', src_bucket) self.conn.make_request('PUT', src_bucket)
@ -969,7 +973,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
src_obj2 = 'obj5' src_obj2 = 'obj5'
src_content2 = b'stub' src_content2 = b'stub'
etags.append(md5(src_content2).hexdigest()) etags.append(md5(src_content2, usedforsecurity=False).hexdigest())
# prepare src obj w/ real version # prepare src obj w/ real version
self.conn.make_request('PUT', src_bucket, src_obj2, body=src_content2) self.conn.make_request('PUT', src_bucket, src_obj2, body=src_content2)
@ -1098,7 +1102,7 @@ class TestS3ApiMultiUploadSigV4(TestS3ApiMultiUpload):
# Complete Multipart Upload # Complete Multipart Upload
key, upload_id = uploads[0] key, upload_id = uploads[0]
etags = [md5(content).hexdigest()] etags = [md5(content, usedforsecurity=False).hexdigest()]
xml = self._gen_comp_xml(etags) xml = self._gen_comp_xml(etags)
status, headers, body = \ status, headers, body = \
self._complete_multi_upload(bucket, key, upload_id, xml) self._complete_multi_upload(bucket, key, upload_id, xml)

View File

@ -24,13 +24,13 @@ from distutils.version import StrictVersion
import email.parser import email.parser
from email.utils import formatdate, parsedate from email.utils import formatdate, parsedate
from time import mktime from time import mktime
from hashlib import md5
import six import six
from six.moves.urllib.parse import quote from six.moves.urllib.parse import quote
import test.functional as tf import test.functional as tf
from swift.common.middleware.s3api.etree import fromstring from swift.common.middleware.s3api.etree import fromstring
from swift.common.utils import md5
from test.functional.s3api import S3ApiBase from test.functional.s3api import S3ApiBase
from test.functional.s3api.s3_test_client import Connection from test.functional.s3api.s3_test_client import Connection
@ -61,7 +61,7 @@ class TestS3ApiObject(S3ApiBase):
def test_object(self): def test_object(self):
obj = 'object name with %-sign' obj = 'object name with %-sign'
content = b'abc123' content = b'abc123'
etag = md5(content).hexdigest() etag = md5(content, usedforsecurity=False).hexdigest()
# PUT Object # PUT Object
status, headers, body = \ status, headers, body = \
@ -252,7 +252,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_content_encoding(self): def test_put_object_content_encoding(self):
obj = 'object' obj = 'object'
etag = md5().hexdigest() etag = md5(usedforsecurity=False).hexdigest()
headers = {'Content-Encoding': 'gzip'} headers = {'Content-Encoding': 'gzip'}
status, headers, body = \ status, headers, body = \
self.conn.make_request('PUT', self.bucket, obj, headers) self.conn.make_request('PUT', self.bucket, obj, headers)
@ -267,7 +267,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_content_md5(self): def test_put_object_content_md5(self):
obj = 'object' obj = 'object'
content = b'abcdefghij' content = b'abcdefghij'
etag = md5(content).hexdigest() etag = md5(content, usedforsecurity=False).hexdigest()
headers = {'Content-MD5': calculate_md5(content)} headers = {'Content-MD5': calculate_md5(content)}
status, headers, body = \ status, headers, body = \
self.conn.make_request('PUT', self.bucket, obj, headers, content) self.conn.make_request('PUT', self.bucket, obj, headers, content)
@ -278,7 +278,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_content_type(self): def test_put_object_content_type(self):
obj = 'object' obj = 'object'
content = b'abcdefghij' content = b'abcdefghij'
etag = md5(content).hexdigest() etag = md5(content, usedforsecurity=False).hexdigest()
headers = {'Content-Type': 'text/plain'} headers = {'Content-Type': 'text/plain'}
status, headers, body = \ status, headers, body = \
self.conn.make_request('PUT', self.bucket, obj, headers, content) self.conn.make_request('PUT', self.bucket, obj, headers, content)
@ -320,7 +320,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_expect(self): def test_put_object_expect(self):
obj = 'object' obj = 'object'
content = b'abcdefghij' content = b'abcdefghij'
etag = md5(content).hexdigest() etag = md5(content, usedforsecurity=False).hexdigest()
headers = {'Expect': '100-continue'} headers = {'Expect': '100-continue'}
status, headers, body = \ status, headers, body = \
self.conn.make_request('PUT', self.bucket, obj, headers, content) self.conn.make_request('PUT', self.bucket, obj, headers, content)
@ -333,7 +333,7 @@ class TestS3ApiObject(S3ApiBase):
expected_headers = req_headers expected_headers = req_headers
obj = 'object' obj = 'object'
content = b'abcdefghij' content = b'abcdefghij'
etag = md5(content).hexdigest() etag = md5(content, usedforsecurity=False).hexdigest()
status, headers, body = \ status, headers, body = \
self.conn.make_request('PUT', self.bucket, obj, self.conn.make_request('PUT', self.bucket, obj,
req_headers, content) req_headers, content)
@ -389,7 +389,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_storage_class(self): def test_put_object_storage_class(self):
obj = 'object' obj = 'object'
content = b'abcdefghij' content = b'abcdefghij'
etag = md5(content).hexdigest() etag = md5(content, usedforsecurity=False).hexdigest()
headers = {'X-Amz-Storage-Class': 'STANDARD'} headers = {'X-Amz-Storage-Class': 'STANDARD'}
status, headers, body = \ status, headers, body = \
self.conn.make_request('PUT', self.bucket, obj, headers, content) self.conn.make_request('PUT', self.bucket, obj, headers, content)
@ -435,7 +435,7 @@ class TestS3ApiObject(S3ApiBase):
def test_put_object_copy_source(self): def test_put_object_copy_source(self):
obj = 'object' obj = 'object'
content = b'abcdefghij' content = b'abcdefghij'
etag = md5(content).hexdigest() etag = md5(content, usedforsecurity=False).hexdigest()
self.conn.make_request('PUT', self.bucket, obj, body=content) self.conn.make_request('PUT', self.bucket, obj, body=content)
dst_bucket = 'dst-bucket' dst_bucket = 'dst-bucket'
@ -521,7 +521,7 @@ class TestS3ApiObject(S3ApiBase):
obj = 'object' obj = 'object'
dst_bucket = 'dst-bucket' dst_bucket = 'dst-bucket'
dst_obj = 'dst_object' dst_obj = 'dst_object'
etag = md5().hexdigest() etag = md5(usedforsecurity=False).hexdigest()
self.conn.make_request('PUT', self.bucket, obj) self.conn.make_request('PUT', self.bucket, obj)
self.conn.make_request('PUT', dst_bucket) self.conn.make_request('PUT', dst_bucket)
@ -541,7 +541,7 @@ class TestS3ApiObject(S3ApiBase):
obj = 'object' obj = 'object'
dst_bucket = 'dst-bucket' dst_bucket = 'dst-bucket'
dst_obj = 'dst_object' dst_obj = 'dst_object'
etag = md5().hexdigest() etag = md5(usedforsecurity=False).hexdigest()
self.conn.make_request('PUT', self.bucket, obj) self.conn.make_request('PUT', self.bucket, obj)
self.conn.make_request('PUT', dst_bucket) self.conn.make_request('PUT', dst_bucket)
@ -561,7 +561,7 @@ class TestS3ApiObject(S3ApiBase):
obj = 'object' obj = 'object'
dst_bucket = 'dst-bucket' dst_bucket = 'dst-bucket'
dst_obj = 'dst_object' dst_obj = 'dst_object'
etag = md5().hexdigest() etag = md5(usedforsecurity=False).hexdigest()
self.conn.make_request('PUT', self.bucket, obj) self.conn.make_request('PUT', self.bucket, obj)
self.conn.make_request('PUT', dst_bucket) self.conn.make_request('PUT', dst_bucket)
@ -580,7 +580,7 @@ class TestS3ApiObject(S3ApiBase):
obj = 'object' obj = 'object'
dst_bucket = 'dst-bucket' dst_bucket = 'dst-bucket'
dst_obj = 'dst_object' dst_obj = 'dst_object'
etag = md5().hexdigest() etag = md5(usedforsecurity=False).hexdigest()
self.conn.make_request('PUT', self.bucket, obj) self.conn.make_request('PUT', self.bucket, obj)
self.conn.make_request('PUT', dst_bucket) self.conn.make_request('PUT', dst_bucket)

View File

@ -14,8 +14,8 @@
# limitations under the License. # limitations under the License.
from base64 import b64encode from base64 import b64encode
from hashlib import md5
from swift.common.middleware.s3api.etree import fromstring from swift.common.middleware.s3api.etree import fromstring
from swift.common.utils import md5
def get_error_code(body): def get_error_code(body):
@ -29,4 +29,5 @@ def get_error_msg(body):
def calculate_md5(body): def calculate_md5(body):
return b64encode(md5(body).digest()).strip().decode('ascii') return b64encode(
md5(body, usedforsecurity=False).digest()).strip().decode('ascii')

View File

@ -13,7 +13,6 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import hashlib
import io import io
import json import json
import os import os
@ -33,7 +32,7 @@ from swiftclient import get_auth
from swift.common import constraints from swift.common import constraints
from swift.common.http import is_success from swift.common.http import is_success
from swift.common.swob import str_to_wsgi, wsgi_to_str from swift.common.swob import str_to_wsgi, wsgi_to_str
from swift.common.utils import config_true_value from swift.common.utils import config_true_value, md5
from test import safe_repr from test import safe_repr
@ -851,7 +850,7 @@ class File(Base):
if isinstance(data, bytes): if isinstance(data, bytes):
data = io.BytesIO(data) data = io.BytesIO(data)
checksum = hashlib.md5() checksum = md5(usedforsecurity=False)
buff = data.read(block_size) buff = data.read(block_size)
while buff: while buff:
checksum.update(buff) checksum.update(buff)
@ -1058,7 +1057,7 @@ class File(Base):
raise ResponseError(self.conn.response, 'GET', raise ResponseError(self.conn.response, 'GET',
self.conn.make_path(self.path)) self.conn.make_path(self.path))
checksum = hashlib.md5() checksum = md5(usedforsecurity=False)
scratch = self.conn.response.read(8192) scratch = self.conn.response.read(8192)
while len(scratch) > 0: while len(scratch) > 0:

View File

@ -16,7 +16,6 @@
# limitations under the License. # limitations under the License.
import datetime import datetime
import hashlib
import json import json
import unittest import unittest
from uuid import uuid4 from uuid import uuid4
@ -29,6 +28,7 @@ from six.moves import range
from test.functional import check_response, retry, requires_acls, \ from test.functional import check_response, retry, requires_acls, \
requires_policies, SkipTest, requires_bulk requires_policies, SkipTest, requires_bulk
import test.functional as tf import test.functional as tf
from swift.common.utils import md5
def setUpModule(): def setUpModule():
@ -1741,7 +1741,7 @@ class TestObject(unittest.TestCase):
expect_quoted = tf.cluster_info.get('etag_quoter', {}).get( expect_quoted = tf.cluster_info.get('etag_quoter', {}).get(
'enable_by_default', False) 'enable_by_default', False)
expected_etag = hashlib.md5(b'test').hexdigest() expected_etag = md5(b'test', usedforsecurity=False).hexdigest()
if expect_quoted: if expect_quoted:
expected_etag = '"%s"' % expected_etag expected_etag = '"%s"' % expected_etag
self.assertEqual(resp.headers['etag'], expected_etag) self.assertEqual(resp.headers['etag'], expected_etag)

View File

@ -21,13 +21,12 @@ import time
import six import six
from copy import deepcopy from copy import deepcopy
from hashlib import md5
from six.moves.urllib.parse import quote, unquote from six.moves.urllib.parse import quote, unquote
import test.functional as tf import test.functional as tf
from swift.common.swob import normalize_etag from swift.common.swob import normalize_etag
from swift.common.utils import MD5_OF_EMPTY_STRING, config_true_value from swift.common.utils import MD5_OF_EMPTY_STRING, config_true_value, md5
from swift.common.middleware.versioned_writes.object_versioning import \ from swift.common.middleware.versioned_writes.object_versioning import \
DELETE_MARKER_CONTENT_TYPE DELETE_MARKER_CONTENT_TYPE
@ -338,7 +337,9 @@ class TestObjectVersioning(TestObjectVersioningBase):
obj = self.env.unversioned_container.file(oname) obj = self.env.unversioned_container.file(oname)
resp = obj.write(body, return_resp=True) resp = obj.write(body, return_resp=True)
etag = resp.getheader('etag') etag = resp.getheader('etag')
self.assertEqual(md5(body).hexdigest(), normalize_etag(etag)) self.assertEqual(
md5(body, usedforsecurity=False).hexdigest(),
normalize_etag(etag))
# un-versioned object is cool with with if-match # un-versioned object is cool with with if-match
self.assertEqual(body, obj.read(hdrs={'if-match': etag})) self.assertEqual(body, obj.read(hdrs={'if-match': etag}))
@ -569,7 +570,7 @@ class TestObjectVersioning(TestObjectVersioningBase):
'name': obj_name, 'name': obj_name,
'content_type': version['content_type'], 'content_type': version['content_type'],
'version_id': version['version_id'], 'version_id': version['version_id'],
'hash': md5(version['body']).hexdigest(), 'hash': md5(version['body'], usedforsecurity=False).hexdigest(),
'bytes': len(version['body'],) 'bytes': len(version['body'],)
} for version in reversed(versions)] } for version in reversed(versions)]
for item, is_latest in zip(expected, (True, False, False)): for item, is_latest in zip(expected, (True, False, False)):
@ -1263,14 +1264,14 @@ class TestContainerOperations(TestObjectVersioningBase):
# v1 # v1
resp = obj.write(b'version1', hdrs={ resp = obj.write(b'version1', hdrs={
'Content-Type': 'text/jibberish11', 'Content-Type': 'text/jibberish11',
'ETag': md5(b'version1').hexdigest(), 'ETag': md5(b'version1', usedforsecurity=False).hexdigest(),
}, return_resp=True) }, return_resp=True)
obj1_v1['id'] = resp.getheader('x-object-version-id') obj1_v1['id'] = resp.getheader('x-object-version-id')
# v2 # v2
resp = obj.write(b'version2', hdrs={ resp = obj.write(b'version2', hdrs={
'Content-Type': 'text/jibberish12', 'Content-Type': 'text/jibberish12',
'ETag': md5(b'version2').hexdigest(), 'ETag': md5(b'version2', usedforsecurity=False).hexdigest(),
}, return_resp=True) }, return_resp=True)
obj1_v2 = {} obj1_v2 = {}
obj1_v2['name'] = obj1_v1['name'] obj1_v2['name'] = obj1_v1['name']
@ -1279,7 +1280,7 @@ class TestContainerOperations(TestObjectVersioningBase):
# v3 # v3
resp = obj.write(b'version3', hdrs={ resp = obj.write(b'version3', hdrs={
'Content-Type': 'text/jibberish13', 'Content-Type': 'text/jibberish13',
'ETag': md5(b'version3').hexdigest(), 'ETag': md5(b'version3', usedforsecurity=False).hexdigest(),
}, return_resp=True) }, return_resp=True)
obj1_v3 = {} obj1_v3 = {}
obj1_v3['name'] = obj1_v1['name'] obj1_v3['name'] = obj1_v1['name']
@ -1333,20 +1334,20 @@ class TestContainerOperations(TestObjectVersioningBase):
obj = self.env.unversioned_container.file(objs[0]) obj = self.env.unversioned_container.file(objs[0])
obj.write(b'data', hdrs={ obj.write(b'data', hdrs={
'Content-Type': 'text/jibberish11', 'Content-Type': 'text/jibberish11',
'ETag': md5(b'data').hexdigest(), 'ETag': md5(b'data', usedforsecurity=False).hexdigest(),
}) })
obj.delete() obj.delete()
obj = self.env.unversioned_container.file(objs[1]) obj = self.env.unversioned_container.file(objs[1])
obj.write(b'first', hdrs={ obj.write(b'first', hdrs={
'Content-Type': 'text/blah-blah-blah', 'Content-Type': 'text/blah-blah-blah',
'ETag': md5(b'first').hexdigest(), 'ETag': md5(b'first', usedforsecurity=False).hexdigest(),
}) })
obj = self.env.unversioned_container.file(objs[2]) obj = self.env.unversioned_container.file(objs[2])
obj.write(b'second', hdrs={ obj.write(b'second', hdrs={
'Content-Type': 'text/plain', 'Content-Type': 'text/plain',
'ETag': md5(b'second').hexdigest(), 'ETag': md5(b'second', usedforsecurity=False).hexdigest(),
}) })
return objs return objs
@ -1385,21 +1386,21 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj1_v3['name'], 'name': obj1_v3['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish13', 'content_type': 'text/jibberish13',
'hash': md5(b'version3').hexdigest(), 'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v3['id'], 'version_id': obj1_v3['id'],
}, { }, {
'name': obj1_v2['name'], 'name': obj1_v2['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish12', 'content_type': 'text/jibberish12',
'hash': md5(b'version2').hexdigest(), 'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v2['id'], 'version_id': obj1_v2['id'],
}, { }, {
'name': obj1_v1['name'], 'name': obj1_v1['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish11', 'content_type': 'text/jibberish11',
'hash': md5(b'version1').hexdigest(), 'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v1['id'], 'version_id': obj1_v1['id'],
}]) }])
@ -1418,21 +1419,21 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj1_v1['name'], 'name': obj1_v1['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish11', 'content_type': 'text/jibberish11',
'hash': md5(b'version1').hexdigest(), 'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v1['id'], 'version_id': obj1_v1['id'],
}, { }, {
'name': obj1_v2['name'], 'name': obj1_v2['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish12', 'content_type': 'text/jibberish12',
'hash': md5(b'version2').hexdigest(), 'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v2['id'], 'version_id': obj1_v2['id'],
}, { }, {
'name': obj1_v3['name'], 'name': obj1_v3['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish13', 'content_type': 'text/jibberish13',
'hash': md5(b'version3').hexdigest(), 'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v3['id'], 'version_id': obj1_v3['id'],
}, { }, {
@ -1481,21 +1482,21 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj1_v3['name'], 'name': obj1_v3['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish13', 'content_type': 'text/jibberish13',
'hash': md5(b'version3').hexdigest(), 'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v3['id'], 'version_id': obj1_v3['id'],
}, { }, {
'name': obj1_v2['name'], 'name': obj1_v2['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish12', 'content_type': 'text/jibberish12',
'hash': md5(b'version2').hexdigest(), 'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v2['id'], 'version_id': obj1_v2['id'],
}, { }, {
'name': obj1_v1['name'], 'name': obj1_v1['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish11', 'content_type': 'text/jibberish11',
'hash': md5(b'version1').hexdigest(), 'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v1['id'], 'version_id': obj1_v1['id'],
}]) }])
@ -1516,21 +1517,21 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj1_v1['name'], 'name': obj1_v1['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish11', 'content_type': 'text/jibberish11',
'hash': md5(b'version1').hexdigest(), 'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v1['id'], 'version_id': obj1_v1['id'],
}, { }, {
'name': obj1_v1['name'], 'name': obj1_v1['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish12', 'content_type': 'text/jibberish12',
'hash': md5(b'version2').hexdigest(), 'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v2['id'], 'version_id': obj1_v2['id'],
}, { }, {
'name': obj1_v1['name'], 'name': obj1_v1['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish13', 'content_type': 'text/jibberish13',
'hash': md5(b'version3').hexdigest(), 'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v3['id'], 'version_id': obj1_v3['id'],
}, { }, {
@ -1601,7 +1602,7 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj1_v3['name'], 'name': obj1_v3['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish13', 'content_type': 'text/jibberish13',
'hash': md5(b'version3').hexdigest(), 'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v3['id'], 'version_id': obj1_v3['id'],
}]) }])
@ -1623,14 +1624,14 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj1_v2['name'], 'name': obj1_v2['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish12', 'content_type': 'text/jibberish12',
'hash': md5(b'version2').hexdigest(), 'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v2['id'], 'version_id': obj1_v2['id'],
}, { }, {
'name': obj1_v1['name'], 'name': obj1_v1['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish11', 'content_type': 'text/jibberish11',
'hash': md5(b'version1').hexdigest(), 'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v1['id'], 'version_id': obj1_v1['id'],
}]) }])
@ -1690,14 +1691,14 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj1_v2['name'], 'name': obj1_v2['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish12', 'content_type': 'text/jibberish12',
'hash': md5(b'version2').hexdigest(), 'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v2['id'], 'version_id': obj1_v2['id'],
}, { }, {
'name': obj1_v1['name'], 'name': obj1_v1['name'],
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish11', 'content_type': 'text/jibberish11',
'hash': md5(b'version1').hexdigest(), 'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj1_v1['id'], 'version_id': obj1_v1['id'],
}]) }])
@ -2052,7 +2053,7 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj2, 'name': obj2,
'bytes': 5, 'bytes': 5,
'content_type': 'text/blah-blah-blah', 'content_type': 'text/blah-blah-blah',
'hash': md5(b'first').hexdigest(), 'hash': md5(b'first', usedforsecurity=False).hexdigest(),
'is_latest': True, 'is_latest': True,
'version_id': 'null', 'version_id': 'null',
} }
@ -2060,7 +2061,7 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj3, 'name': obj3,
'bytes': 6, 'bytes': 6,
'content_type': 'text/plain', 'content_type': 'text/plain',
'hash': md5(b'second').hexdigest(), 'hash': md5(b'second', usedforsecurity=False).hexdigest(),
'is_latest': True, 'is_latest': True,
'version_id': 'null', 'version_id': 'null',
} }
@ -2112,14 +2113,14 @@ class TestContainerOperations(TestObjectVersioningBase):
# v1 # v1
resp = obj.write(b'version1', hdrs={ resp = obj.write(b'version1', hdrs={
'Content-Type': 'text/jibberish11', 'Content-Type': 'text/jibberish11',
'ETag': md5(b'version1').hexdigest(), 'ETag': md5(b'version1', usedforsecurity=False).hexdigest(),
}, return_resp=True) }, return_resp=True)
obj_v1 = resp.getheader('x-object-version-id') obj_v1 = resp.getheader('x-object-version-id')
# v2 # v2
resp = obj.write(b'version2', hdrs={ resp = obj.write(b'version2', hdrs={
'Content-Type': 'text/jibberish12', 'Content-Type': 'text/jibberish12',
'ETag': md5(b'version2').hexdigest(), 'ETag': md5(b'version2', usedforsecurity=False).hexdigest(),
}, return_resp=True) }, return_resp=True)
obj_v2 = resp.getheader('x-object-version-id') obj_v2 = resp.getheader('x-object-version-id')
@ -2130,7 +2131,7 @@ class TestContainerOperations(TestObjectVersioningBase):
resp = obj.write(b'version4', hdrs={ resp = obj.write(b'version4', hdrs={
'Content-Type': 'text/jibberish14', 'Content-Type': 'text/jibberish14',
'ETag': md5(b'version4').hexdigest(), 'ETag': md5(b'version4', usedforsecurity=False).hexdigest(),
}, return_resp=True) }, return_resp=True)
obj_v4 = resp.getheader('x-object-version-id') obj_v4 = resp.getheader('x-object-version-id')
@ -2143,7 +2144,7 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj.name, 'name': obj.name,
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish14', 'content_type': 'text/jibberish14',
'hash': md5(b'version4').hexdigest(), 'hash': md5(b'version4', usedforsecurity=False).hexdigest(),
'is_latest': True, 'is_latest': True,
'version_id': obj_v4, 'version_id': obj_v4,
}, { }, {
@ -2157,14 +2158,14 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj.name, 'name': obj.name,
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish12', 'content_type': 'text/jibberish12',
'hash': md5(b'version2').hexdigest(), 'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj_v2, 'version_id': obj_v2,
}, { }, {
'name': obj.name, 'name': obj.name,
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish11', 'content_type': 'text/jibberish11',
'hash': md5(b'version1').hexdigest(), 'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj_v1, 'version_id': obj_v1,
}]) }])
@ -2175,7 +2176,7 @@ class TestContainerOperations(TestObjectVersioningBase):
# v5 - non-versioned # v5 - non-versioned
obj.write(b'version5', hdrs={ obj.write(b'version5', hdrs={
'Content-Type': 'text/jibberish15', 'Content-Type': 'text/jibberish15',
'ETag': md5(b'version5').hexdigest(), 'ETag': md5(b'version5', usedforsecurity=False).hexdigest(),
}) })
listing_parms = {'format': 'json', 'versions': None} listing_parms = {'format': 'json', 'versions': None}
@ -2187,14 +2188,14 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj.name, 'name': obj.name,
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish15', 'content_type': 'text/jibberish15',
'hash': md5(b'version5').hexdigest(), 'hash': md5(b'version5', usedforsecurity=False).hexdigest(),
'is_latest': True, 'is_latest': True,
'version_id': 'null', 'version_id': 'null',
}, { }, {
'name': obj.name, 'name': obj.name,
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish14', 'content_type': 'text/jibberish14',
'hash': md5(b'version4').hexdigest(), 'hash': md5(b'version4', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj_v4, 'version_id': obj_v4,
}, { }, {
@ -2208,14 +2209,14 @@ class TestContainerOperations(TestObjectVersioningBase):
'name': obj.name, 'name': obj.name,
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish12', 'content_type': 'text/jibberish12',
'hash': md5(b'version2').hexdigest(), 'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj_v2, 'version_id': obj_v2,
}, { }, {
'name': obj.name, 'name': obj.name,
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish11', 'content_type': 'text/jibberish11',
'hash': md5(b'version1').hexdigest(), 'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
'is_latest': False, 'is_latest': False,
'version_id': obj_v1, 'version_id': obj_v1,
}]) }])
@ -2496,19 +2497,19 @@ class TestVersionsLocationWithVersioning(TestObjectVersioningBase):
# v1 # v1
obj.write(b'version1', hdrs={ obj.write(b'version1', hdrs={
'Content-Type': 'text/jibberish11', 'Content-Type': 'text/jibberish11',
'ETag': md5(b'version1').hexdigest(), 'ETag': md5(b'version1', usedforsecurity=False).hexdigest(),
}) })
# v2 # v2
obj.write(b'version2', hdrs={ obj.write(b'version2', hdrs={
'Content-Type': 'text/jibberish12', 'Content-Type': 'text/jibberish12',
'ETag': md5(b'version2').hexdigest(), 'ETag': md5(b'version2', usedforsecurity=False).hexdigest(),
}) })
# v3 # v3
obj.write(b'version3', hdrs={ obj.write(b'version3', hdrs={
'Content-Type': 'text/jibberish13', 'Content-Type': 'text/jibberish13',
'ETag': md5(b'version3').hexdigest(), 'ETag': md5(b'version3', usedforsecurity=False).hexdigest(),
}) })
return obj return obj
@ -2526,7 +2527,7 @@ class TestVersionsLocationWithVersioning(TestObjectVersioningBase):
'name': obj_name, 'name': obj_name,
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish13', 'content_type': 'text/jibberish13',
'hash': md5(b'version3').hexdigest(), 'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
'is_latest': True, 'is_latest': True,
'version_id': 'null' 'version_id': 'null'
}]) }])
@ -2543,13 +2544,13 @@ class TestVersionsLocationWithVersioning(TestObjectVersioningBase):
self.assertEqual(prev_versions, [{ self.assertEqual(prev_versions, [{
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish11', 'content_type': 'text/jibberish11',
'hash': md5(b'version1').hexdigest(), 'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
'is_latest': True, 'is_latest': True,
'version_id': 'null', 'version_id': 'null',
}, { }, {
'bytes': 8, 'bytes': 8,
'content_type': 'text/jibberish12', 'content_type': 'text/jibberish12',
'hash': md5(b'version2').hexdigest(), 'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
'is_latest': True, 'is_latest': True,
'version_id': 'null' 'version_id': 'null'
}]) }])

View File

@ -16,7 +16,6 @@
import base64 import base64
import email.parser import email.parser
import hashlib
import itertools import itertools
import json import json
from copy import deepcopy from copy import deepcopy
@ -24,6 +23,7 @@ from copy import deepcopy
import six import six
from swift.common.swob import normalize_etag from swift.common.swob import normalize_etag
from swift.common.utils import md5
import test.functional as tf import test.functional as tf
from test.functional import cluster_info, SkipTest from test.functional import cluster_info, SkipTest
@ -124,9 +124,9 @@ class TestSloEnv(BaseEnv):
cd_json = json.dumps([ cd_json = json.dumps([
seg_info['seg_c'], seg_info['seg_d']]).encode('ascii') seg_info['seg_c'], seg_info['seg_d']]).encode('ascii')
file_item.write(cd_json, parms={'multipart-manifest': 'put'}) file_item.write(cd_json, parms={'multipart-manifest': 'put'})
cd_etag = hashlib.md5(( cd_etag = md5((
seg_info['seg_c']['etag'] + seg_info['seg_d']['etag'] seg_info['seg_c']['etag'] + seg_info['seg_d']['etag']
).encode('ascii')).hexdigest() ).encode('ascii'), usedforsecurity=False).hexdigest()
file_item = cls.container.file("manifest-bcd-submanifest") file_item = cls.container.file("manifest-bcd-submanifest")
file_item.write( file_item.write(
@ -137,8 +137,9 @@ class TestSloEnv(BaseEnv):
'path': '/%s/%s' % (cls.container.name, 'path': '/%s/%s' % (cls.container.name,
'manifest-cd')}]).encode('ascii'), 'manifest-cd')}]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
bcd_submanifest_etag = hashlib.md5(( bcd_submanifest_etag = md5((
seg_info['seg_b']['etag'] + cd_etag).encode('ascii')).hexdigest() seg_info['seg_b']['etag'] + cd_etag).encode('ascii'),
usedforsecurity=False).hexdigest()
file_item = cls.container.file("manifest-abcde-submanifest") file_item = cls.container.file("manifest-abcde-submanifest")
file_item.write( file_item.write(
@ -152,9 +153,10 @@ class TestSloEnv(BaseEnv):
'manifest-bcd-submanifest')}, 'manifest-bcd-submanifest')},
seg_info['seg_e']]).encode('ascii'), seg_info['seg_e']]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
abcde_submanifest_etag = hashlib.md5(( abcde_submanifest_etag = md5((
seg_info['seg_a']['etag'] + bcd_submanifest_etag + seg_info['seg_a']['etag'] + bcd_submanifest_etag +
seg_info['seg_e']['etag']).encode('ascii')).hexdigest() seg_info['seg_e']['etag']).encode('ascii'),
usedforsecurity=False).hexdigest()
abcde_submanifest_size = (seg_info['seg_a']['size_bytes'] + abcde_submanifest_size = (seg_info['seg_a']['size_bytes'] +
seg_info['seg_b']['size_bytes'] + seg_info['seg_b']['size_bytes'] +
seg_info['seg_c']['size_bytes'] + seg_info['seg_c']['size_bytes'] +
@ -180,11 +182,11 @@ class TestSloEnv(BaseEnv):
'manifest-abcde-submanifest'), 'manifest-abcde-submanifest'),
'range': '3145727-3145728'}]).encode('ascii'), # 'cd' 'range': '3145727-3145728'}]).encode('ascii'), # 'cd'
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
ranged_manifest_etag = hashlib.md5(( ranged_manifest_etag = md5((
abcde_submanifest_etag + ':3145727-4194304;' + abcde_submanifest_etag + ':3145727-4194304;' +
abcde_submanifest_etag + ':524288-1572863;' + abcde_submanifest_etag + ':524288-1572863;' +
abcde_submanifest_etag + ':3145727-3145728;' abcde_submanifest_etag + ':3145727-3145728;'
).encode('ascii')).hexdigest() ).encode('ascii'), usedforsecurity=False).hexdigest()
ranged_manifest_size = 2 * 1024 * 1024 + 4 ranged_manifest_size = 2 * 1024 * 1024 + 4
file_item = cls.container.file("ranged-submanifest") file_item = cls.container.file("ranged-submanifest")
@ -263,12 +265,13 @@ class TestSlo(Base):
"Expected slo_enabled to be True/False, got %r" % "Expected slo_enabled to be True/False, got %r" %
(self.env.slo_enabled,)) (self.env.slo_enabled,))
manifest_abcde_hash = hashlib.md5() manifest_abcde_hash = md5(usedforsecurity=False)
for letter in (b'a', b'b', b'c', b'd'): for letter in (b'a', b'b', b'c', b'd'):
manifest_abcde_hash.update(hashlib.md5( manifest_abcde_hash.update(
letter * 1024 * 1024).hexdigest().encode('ascii')) md5(letter * 1024 * 1024, usedforsecurity=False)
manifest_abcde_hash.update(hashlib.md5( .hexdigest().encode('ascii'))
b'e').hexdigest().encode('ascii')) manifest_abcde_hash.update(
md5(b'e', usedforsecurity=False).hexdigest().encode('ascii'))
self.manifest_abcde_etag = manifest_abcde_hash.hexdigest() self.manifest_abcde_etag = manifest_abcde_hash.hexdigest()
def test_slo_get_simple_manifest(self): def test_slo_get_simple_manifest(self):
@ -501,7 +504,7 @@ class TestSlo(Base):
def test_slo_etag_is_quote_wrapped_hash_of_etags_submanifests(self): def test_slo_etag_is_quote_wrapped_hash_of_etags_submanifests(self):
def hd(x): def hd(x):
return hashlib.md5(x).hexdigest().encode('ascii') return md5(x, usedforsecurity=False).hexdigest().encode('ascii')
expected_etag = hd(hd(b'a' * 1024 * 1024) + expected_etag = hd(hd(b'a' * 1024 * 1024) +
hd(hd(b'b' * 1024 * 1024) + hd(hd(b'b' * 1024 * 1024) +
@ -534,7 +537,9 @@ class TestSlo(Base):
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'size_bytes': 1024 * 1024 - 1, 'size_bytes': 1024 * 1024 - 1,
'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(), 'etag': md5(
b'a' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_a'), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'), }]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
@ -549,7 +554,8 @@ class TestSlo(Base):
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'size_bytes': 1024 * 1024, 'size_bytes': 1024 * 1024,
'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(), 'etag': md5(b'a' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_a'), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'), }]).encode('ascii'),
parms={'multipart-manifest': 'put'}, parms={'multipart-manifest': 'put'},
@ -559,8 +565,8 @@ class TestSlo(Base):
def test_slo_client_etag(self): def test_slo_client_etag(self):
file_item = self.env.container.file("manifest-a-b-etag") file_item = self.env.container.file("manifest-a-b-etag")
etag_a = hashlib.md5(b'a' * 1024 * 1024).hexdigest() etag_a = md5(b'a' * 1024 * 1024, usedforsecurity=False).hexdigest()
etag_b = hashlib.md5(b'b' * 1024 * 1024).hexdigest() etag_b = md5(b'b' * 1024 * 1024, usedforsecurity=False).hexdigest()
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'size_bytes': 1024 * 1024, 'size_bytes': 1024 * 1024,
@ -571,7 +577,8 @@ class TestSlo(Base):
'path': '/%s/%s' % (self.env.container.name, 'seg_b'), 'path': '/%s/%s' % (self.env.container.name, 'seg_b'),
}]).encode('ascii'), }]).encode('ascii'),
parms={'multipart-manifest': 'put'}, parms={'multipart-manifest': 'put'},
hdrs={'Etag': hashlib.md5((etag_a + etag_b).encode()).hexdigest()}) hdrs={'Etag': md5((etag_a + etag_b).encode(),
usedforsecurity=False).hexdigest()})
self.assert_status(201) self.assert_status(201)
def test_slo_unspecified_etag(self): def test_slo_unspecified_etag(self):
@ -590,7 +597,8 @@ class TestSlo(Base):
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'size_bytes': None, 'size_bytes': None,
'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(), 'etag': md5(b'a' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_a'), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'), }]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
@ -622,7 +630,8 @@ class TestSlo(Base):
file_item = self.env.container.file("manifest-a-missing-size") file_item = self.env.container.file("manifest-a-missing-size")
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(), 'etag': md5(b'a' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_a'), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'), }]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
@ -642,7 +651,8 @@ class TestSlo(Base):
try: try:
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'teag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(), 'teag': md5(b'a' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'size_bytes': 1024 * 1024, 'size_bytes': 1024 * 1024,
'path': '/%s/%s' % (self.env.container.name, 'seg_a'), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'), }]).encode('ascii'),
@ -657,7 +667,8 @@ class TestSlo(Base):
try: try:
file_item.write( file_item.write(
json.dumps([{ json.dumps([{
'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(), 'etag': md5(b'a' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'siz_bytes': 1024 * 1024, 'siz_bytes': 1024 * 1024,
'path': '/%s/%s' % (self.env.container.name, 'seg_a'), 'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
}]).encode('ascii'), }]).encode('ascii'),
@ -673,13 +684,16 @@ class TestSlo(Base):
file_item.write( file_item.write(
json.dumps([ json.dumps([
{'size_bytes': 1024 * 1024, {'size_bytes': 1024 * 1024,
'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(), 'etag': md5(b'a' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_a')}, 'path': '/%s/%s' % (self.env.container.name, 'seg_a')},
{'size_bytes': 1024 * 1024, {'size_bytes': 1024 * 1024,
'etag': hashlib.md5(b'b' * 1024 * 1024).hexdigest(), 'etag': md5(b'b' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_b')}, 'path': '/%s/%s' % (self.env.container.name, 'seg_b')},
{'size_bytes': 1024 * 1024, {'size_bytes': 1024 * 1024,
'etag': hashlib.md5(b'c' * 1024 * 1024).hexdigest(), 'etag': md5(b'c' * 1024 * 1024,
usedforsecurity=False).hexdigest(),
'path': '/%s/%s' % (self.env.container.name, 'seg_c')}, 'path': '/%s/%s' % (self.env.container.name, 'seg_c')},
]).encode('ascii'), ]).encode('ascii'),
parms={'multipart-manifest': 'put'}) parms={'multipart-manifest': 'put'})
@ -722,7 +736,7 @@ class TestSlo(Base):
source.initialize(parms={'multipart-manifest': 'get'}) source.initialize(parms={'multipart-manifest': 'get'})
source_contents = source.read(parms={'multipart-manifest': 'get'}) source_contents = source.read(parms={'multipart-manifest': 'get'})
source_json = json.loads(source_contents) source_json = json.loads(source_contents)
manifest_etag = hashlib.md5(source_contents).hexdigest() manifest_etag = md5(source_contents, usedforsecurity=False).hexdigest()
if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'): if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
manifest_etag = '"%s"' % manifest_etag manifest_etag = '"%s"' % manifest_etag
self.assertEqual(manifest_etag, source.etag) self.assertEqual(manifest_etag, source.etag)
@ -798,7 +812,7 @@ class TestSlo(Base):
source.initialize(parms={'multipart-manifest': 'get'}) source.initialize(parms={'multipart-manifest': 'get'})
source_contents = source.read(parms={'multipart-manifest': 'get'}) source_contents = source.read(parms={'multipart-manifest': 'get'})
source_json = json.loads(source_contents) source_json = json.loads(source_contents)
manifest_etag = hashlib.md5(source_contents).hexdigest() manifest_etag = md5(source_contents, usedforsecurity=False).hexdigest()
if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'): if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
manifest_etag = '"%s"' % manifest_etag manifest_etag = '"%s"' % manifest_etag
self.assertEqual(manifest_etag, source.etag) self.assertEqual(manifest_etag, source.etag)
@ -1091,16 +1105,18 @@ class TestSlo(Base):
self.assertEqual(len(value), 2) self.assertEqual(len(value), 2)
self.assertEqual(value[0]['bytes'], 1024 * 1024) self.assertEqual(value[0]['bytes'], 1024 * 1024)
self.assertEqual(value[0]['hash'], self.assertEqual(
hashlib.md5(b'd' * 1024 * 1024).hexdigest()) value[0]['hash'],
md5(b'd' * 1024 * 1024, usedforsecurity=False).hexdigest())
expected_name = '/%s/seg_d' % self.env.container.name expected_name = '/%s/seg_d' % self.env.container.name
if six.PY2: if six.PY2:
expected_name = expected_name.decode("utf-8") expected_name = expected_name.decode("utf-8")
self.assertEqual(value[0]['name'], expected_name) self.assertEqual(value[0]['name'], expected_name)
self.assertEqual(value[1]['bytes'], 1024 * 1024) self.assertEqual(value[1]['bytes'], 1024 * 1024)
self.assertEqual(value[1]['hash'], self.assertEqual(
hashlib.md5(b'b' * 1024 * 1024).hexdigest()) value[1]['hash'],
md5(b'b' * 1024 * 1024, usedforsecurity=False).hexdigest())
expected_name = '/%s/seg_b' % self.env.container.name expected_name = '/%s/seg_b' % self.env.container.name
if six.PY2: if six.PY2:
expected_name = expected_name.decode("utf-8") expected_name = expected_name.decode("utf-8")
@ -1110,7 +1126,8 @@ class TestSlo(Base):
manifest = self.env.container.file("manifest-db") manifest = self.env.container.file("manifest-db")
got_body = manifest.read(parms={'multipart-manifest': 'get', got_body = manifest.read(parms={'multipart-manifest': 'get',
'format': 'raw'}) 'format': 'raw'})
self.assert_etag(hashlib.md5(got_body).hexdigest()) self.assert_etag(
md5(got_body, usedforsecurity=False).hexdigest())
# raw format should have the actual manifest object content-type # raw format should have the actual manifest object content-type
self.assertEqual('application/octet-stream', manifest.content_type) self.assertEqual('application/octet-stream', manifest.content_type)
@ -1124,15 +1141,17 @@ class TestSlo(Base):
set(value[0].keys()), set(('size_bytes', 'etag', 'path'))) set(value[0].keys()), set(('size_bytes', 'etag', 'path')))
self.assertEqual(len(value), 2) self.assertEqual(len(value), 2)
self.assertEqual(value[0]['size_bytes'], 1024 * 1024) self.assertEqual(value[0]['size_bytes'], 1024 * 1024)
self.assertEqual(value[0]['etag'], self.assertEqual(
hashlib.md5(b'd' * 1024 * 1024).hexdigest()) value[0]['etag'],
md5(b'd' * 1024 * 1024, usedforsecurity=False).hexdigest())
expected_name = '/%s/seg_d' % self.env.container.name expected_name = '/%s/seg_d' % self.env.container.name
if six.PY2: if six.PY2:
expected_name = expected_name.decode("utf-8") expected_name = expected_name.decode("utf-8")
self.assertEqual(value[0]['path'], expected_name) self.assertEqual(value[0]['path'], expected_name)
self.assertEqual(value[1]['size_bytes'], 1024 * 1024) self.assertEqual(value[1]['size_bytes'], 1024 * 1024)
self.assertEqual(value[1]['etag'], self.assertEqual(
hashlib.md5(b'b' * 1024 * 1024).hexdigest()) value[1]['etag'],
md5(b'b' * 1024 * 1024, usedforsecurity=False).hexdigest())
expected_name = '/%s/seg_b' % self.env.container.name expected_name = '/%s/seg_b' % self.env.container.name
if six.PY2: if six.PY2:
expected_name = expected_name.decode("utf-8") expected_name = expected_name.decode("utf-8")

View File

@ -26,7 +26,7 @@ from uuid import uuid4
from swift.common.http import is_success from swift.common.http import is_success
from swift.common.swob import normalize_etag from swift.common.swob import normalize_etag
from swift.common.utils import json, MD5_OF_EMPTY_STRING from swift.common.utils import json, MD5_OF_EMPTY_STRING, md5
from swift.common.middleware.slo import SloGetContext from swift.common.middleware.slo import SloGetContext
from test.functional import check_response, retry, requires_acls, \ from test.functional import check_response, retry, requires_acls, \
cluster_info, SkipTest cluster_info, SkipTest
@ -1798,11 +1798,13 @@ class TestSymlinkToSloSegments(Base):
self.fail('Failed to find manifest file in container listing') self.fail('Failed to find manifest file in container listing')
def test_slo_etag_is_hash_of_etags(self): def test_slo_etag_is_hash_of_etags(self):
expected_hash = hashlib.md5() expected_hash = md5(usedforsecurity=False)
expected_hash.update(hashlib.md5( expected_hash.update((
b'a' * 1024 * 1024).hexdigest().encode('ascii')) md5(b'a' * 1024 * 1024, usedforsecurity=False)
expected_hash.update(hashlib.md5( .hexdigest().encode('ascii')))
b'b' * 1024 * 1024).hexdigest().encode('ascii')) expected_hash.update((
md5(b'b' * 1024 * 1024, usedforsecurity=False)
.hexdigest().encode('ascii')))
expected_etag = expected_hash.hexdigest() expected_etag = expected_hash.hexdigest()
file_item = self.env.container.file('manifest-linkto-ab') file_item = self.env.container.file('manifest-linkto-ab')
@ -1823,7 +1825,7 @@ class TestSymlinkToSloSegments(Base):
source = self.env.container.file("manifest-linkto-ab") source = self.env.container.file("manifest-linkto-ab")
source_contents = source.read(parms={'multipart-manifest': 'get'}) source_contents = source.read(parms={'multipart-manifest': 'get'})
source_json = json.loads(source_contents) source_json = json.loads(source_contents)
manifest_etag = hashlib.md5(source_contents).hexdigest() manifest_etag = md5(source_contents, usedforsecurity=False).hexdigest()
if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'): if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
manifest_etag = '"%s"' % manifest_etag manifest_etag = '"%s"' % manifest_etag

View File

@ -15,7 +15,6 @@
# limitations under the License. # limitations under the License.
from datetime import datetime from datetime import datetime
import hashlib
import io import io
import locale import locale
import random import random
@ -28,6 +27,7 @@ from copy import deepcopy
import eventlet import eventlet
from swift.common.http import is_success, is_client_error from swift.common.http import is_success, is_client_error
from swift.common.swob import normalize_etag from swift.common.swob import normalize_etag
from swift.common.utils import md5
from email.utils import parsedate from email.utils import parsedate
if six.PY2: if six.PY2:
@ -1371,7 +1371,8 @@ class TestFile(Base):
'x-object-meta-fruit': 'Banana', 'x-object-meta-fruit': 'Banana',
'accept-ranges': 'bytes', 'accept-ranges': 'bytes',
'content-type': 'application/test', 'content-type': 'application/test',
'etag': hashlib.md5(obj_data).hexdigest(), 'etag': md5(
obj_data, usedforsecurity=False).hexdigest(),
'last-modified': mock.ANY, 'last-modified': mock.ANY,
'date': mock.ANY, 'date': mock.ANY,
'x-delete-at': mock.ANY, 'x-delete-at': mock.ANY,
@ -1538,7 +1539,7 @@ class TestFile(Base):
self.assertTrue(dest_cont.create()) self.assertTrue(dest_cont.create())
expected_body = data[100:201] expected_body = data[100:201]
expected_etag = hashlib.md5(expected_body) expected_etag = md5(expected_body, usedforsecurity=False)
# copy both from within and across containers # copy both from within and across containers
for cont in (self.env.container, dest_cont): for cont in (self.env.container, dest_cont):
# copy both with and without initial slash # copy both with and without initial slash

View File

@ -24,7 +24,6 @@ from textwrap import dedent
from time import sleep, time from time import sleep, time
from collections import defaultdict from collections import defaultdict
import unittest import unittest
from hashlib import md5
from uuid import uuid4 from uuid import uuid4
import shutil import shutil
from six.moves.http_client import HTTPConnection from six.moves.http_client import HTTPConnection
@ -34,7 +33,8 @@ from swiftclient import get_auth, head_account, client
from swift.common import internal_client, direct_client from swift.common import internal_client, direct_client
from swift.common.direct_client import DirectClientException from swift.common.direct_client import DirectClientException
from swift.common.ring import Ring from swift.common.ring import Ring
from swift.common.utils import readconf, renamer, rsync_module_interpolation from swift.common.utils import readconf, renamer, \
rsync_module_interpolation, md5
from swift.common.manager import Manager from swift.common.manager import Manager
from swift.common.storage_policy import POLICIES, EC_POLICY, REPL_POLICY from swift.common.storage_policy import POLICIES, EC_POLICY, REPL_POLICY
from swift.obj.diskfile import get_data_dir from swift.obj.diskfile import get_data_dir
@ -297,7 +297,7 @@ class Body(object):
def __init__(self, total=3.5 * 2 ** 20): def __init__(self, total=3.5 * 2 ** 20):
self.length = int(total) self.length = int(total)
self.hasher = md5() self.hasher = md5(usedforsecurity=False)
self.read_amount = 0 self.read_amount = 0
self.chunk = uuid4().hex.encode('ascii') * 2 ** 10 self.chunk = uuid4().hex.encode('ascii') * 2 ** 10
self.buff = b'' self.buff = b''

View File

@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from hashlib import md5
import time import time
import uuid import uuid
import random import random
@ -23,6 +22,7 @@ from swift.common.internal_client import InternalClient
from swift.common import utils, direct_client from swift.common import utils, direct_client
from swift.common.storage_policy import POLICIES from swift.common.storage_policy import POLICIES
from swift.common.http import HTTP_NOT_FOUND from swift.common.http import HTTP_NOT_FOUND
from swift.common.utils import md5
from swift.container.reconciler import MISPLACED_OBJECTS_ACCOUNT from swift.container.reconciler import MISPLACED_OBJECTS_ACCOUNT
from test.probe.brain import BrainSplitter, InternalBrainSplitter from test.probe.brain import BrainSplitter, InternalBrainSplitter
from swift.common.request_helpers import get_reserved_name from swift.common.request_helpers import get_reserved_name
@ -266,7 +266,7 @@ class TestContainerMergePolicyIndex(ReplProbeTest):
part_name = self.get_object_name('manifest_part_%0.2d' % i) part_name = self.get_object_name('manifest_part_%0.2d' % i)
manifest_entry = { manifest_entry = {
"path": "/%s/%s" % (self.container_name, part_name), "path": "/%s/%s" % (self.container_name, part_name),
"etag": md5(body).hexdigest(), "etag": md5(body, usedforsecurity=False).hexdigest(),
"size_bytes": len(body), "size_bytes": len(body),
} }
self.brain.client.put_object(self.container_name, part_name, {}, self.brain.client.put_object(self.container_name, part_name, {},

View File

@ -19,7 +19,6 @@ from __future__ import print_function
from unittest import main from unittest import main
from uuid import uuid4 from uuid import uuid4
import random import random
from hashlib import md5
from collections import defaultdict from collections import defaultdict
import os import os
import socket import socket
@ -30,6 +29,7 @@ from swiftclient import client
from swift.common import direct_client from swift.common import direct_client
from swift.common.exceptions import ClientException from swift.common.exceptions import ClientException
from swift.common.manager import Manager from swift.common.manager import Manager
from swift.common.utils import md5
from test.probe.common import (kill_server, start_server, ReplProbeTest, from test.probe.common import (kill_server, start_server, ReplProbeTest,
ECProbeTest, Body) ECProbeTest, Body)
@ -373,7 +373,7 @@ class TestECObjectHandoff(ECProbeTest):
container_name, container_name,
object_name, object_name,
resp_chunk_size=64 * 2 ** 10) resp_chunk_size=64 * 2 ** 10)
resp_checksum = md5() resp_checksum = md5(usedforsecurity=False)
for chunk in body: for chunk in body:
resp_checksum.update(chunk) resp_checksum.update(chunk)
return resp_checksum.hexdigest() return resp_checksum.hexdigest()

View File

@ -17,7 +17,6 @@
import errno import errno
import json import json
from contextlib import contextmanager from contextlib import contextmanager
from hashlib import md5
import unittest import unittest
import uuid import uuid
import shutil import shutil
@ -27,6 +26,7 @@ import time
import six import six
from swift.common.direct_client import DirectClientException from swift.common.direct_client import DirectClientException
from swift.common.utils import md5
from test.probe.common import ECProbeTest from test.probe.common import ECProbeTest
from swift.common import direct_client from swift.common import direct_client
@ -40,7 +40,7 @@ class Body(object):
def __init__(self, total=3.5 * 2 ** 20): def __init__(self, total=3.5 * 2 ** 20):
self.total = int(total) self.total = int(total)
self.hasher = md5() self.hasher = md5(usedforsecurity=False)
self.size = 0 self.size = 0
self.chunk = b'test' * 16 * 2 ** 10 self.chunk = b'test' * 16 * 2 ** 10
@ -118,7 +118,7 @@ class TestReconstructorRebuild(ECProbeTest):
self.container_name, self.container_name,
self.object_name, self.object_name,
resp_chunk_size=64 * 2 ** 10) resp_chunk_size=64 * 2 ** 10)
resp_checksum = md5() resp_checksum = md5(usedforsecurity=False)
for chunk in body: for chunk in body:
resp_checksum.update(chunk) resp_checksum.update(chunk)
return headers, resp_checksum.hexdigest() return headers, resp_checksum.hexdigest()
@ -140,7 +140,7 @@ class TestReconstructorRebuild(ECProbeTest):
headers, data = direct_client.direct_get_object( headers, data = direct_client.direct_get_object(
node, part, acc, con, obj, headers=req_headers, node, part, acc, con, obj, headers=req_headers,
resp_chunk_size=64 * 2 ** 20) resp_chunk_size=64 * 2 ** 20)
hasher = md5() hasher = md5(usedforsecurity=False)
for chunk in data: for chunk in data:
hasher.update(chunk) hasher.update(chunk)
return headers, hasher.hexdigest() return headers, hasher.hexdigest()

View File

@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from hashlib import md5
import itertools import itertools
import unittest import unittest
import uuid import uuid
@ -27,6 +26,7 @@ from test.probe.common import ECProbeTest, Body
from swift.common import direct_client from swift.common import direct_client
from swift.common.storage_policy import EC_POLICY from swift.common.storage_policy import EC_POLICY
from swift.common.manager import Manager from swift.common.manager import Manager
from swift.common.utils import md5
from swift.obj import reconstructor from swift.obj import reconstructor
from swiftclient import client from swiftclient import client
@ -49,7 +49,7 @@ class TestReconstructorRevert(ECProbeTest):
self.container_name, self.container_name,
self.object_name, self.object_name,
resp_chunk_size=64 * 2 ** 10) resp_chunk_size=64 * 2 ** 10)
resp_checksum = md5() resp_checksum = md5(usedforsecurity=False)
for chunk in body: for chunk in body:
resp_checksum.update(chunk) resp_checksum.update(chunk)
return resp_checksum.hexdigest() return resp_checksum.hexdigest()
@ -60,7 +60,7 @@ class TestReconstructorRevert(ECProbeTest):
node, part, self.account, self.container_name, node, part, self.account, self.container_name,
self.object_name, headers=req_headers, self.object_name, headers=req_headers,
resp_chunk_size=64 * 2 ** 20) resp_chunk_size=64 * 2 ** 20)
hasher = md5() hasher = md5(usedforsecurity=False)
for chunk in data: for chunk in data:
hasher.update(chunk) hasher.update(chunk)
return hasher.hexdigest() return hasher.hexdigest()

View File

@ -12,7 +12,6 @@
# implied. # implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import hashlib
import json import json
import os import os
import shutil import shutil
@ -27,7 +26,7 @@ from swift.common import direct_client, utils
from swift.common.manager import Manager from swift.common.manager import Manager
from swift.common.memcached import MemcacheRing from swift.common.memcached import MemcacheRing
from swift.common.utils import ShardRange, parse_db_filename, get_db_files, \ from swift.common.utils import ShardRange, parse_db_filename, get_db_files, \
quorum_size, config_true_value, Timestamp quorum_size, config_true_value, Timestamp, md5
from swift.container.backend import ContainerBroker, UNSHARDED, SHARDING from swift.container.backend import ContainerBroker, UNSHARDED, SHARDING
from swift.container.sharder import CleavingContext from swift.container.sharder import CleavingContext
from swiftclient import client, get_auth, ClientException from swiftclient import client, get_auth, ClientException
@ -2082,7 +2081,8 @@ class TestContainerSharding(BaseTestContainerSharding):
shard_broker.merge_items( shard_broker.merge_items(
[{'name': name, 'created_at': Timestamp.now().internal, [{'name': name, 'created_at': Timestamp.now().internal,
'size': 0, 'content_type': 'text/plain', 'size': 0, 'content_type': 'text/plain',
'etag': hashlib.md5().hexdigest(), 'deleted': deleted, 'etag': md5(usedforsecurity=False).hexdigest(),
'deleted': deleted,
'storage_policy_index': shard_broker.storage_policy_index}]) 'storage_policy_index': shard_broker.storage_policy_index}])
return shard_nodes[0] return shard_nodes[0]

View File

@ -14,13 +14,13 @@
# limitations under the License. # limitations under the License.
import time import time
import hashlib
from collections import defaultdict from collections import defaultdict
from botocore.exceptions import ClientError from botocore.exceptions import ClientError
import six import six
from swift.common.header_key_dict import HeaderKeyDict from swift.common.header_key_dict import HeaderKeyDict
from swift.common.utils import md5
from test.s3api import BaseS3TestCase from test.s3api import BaseS3TestCase
@ -123,7 +123,7 @@ class TestObjectVersioning(BaseS3TestCase):
def test_upload_fileobj_versioned(self): def test_upload_fileobj_versioned(self):
obj_data = self.create_name('some-data').encode('ascii') obj_data = self.create_name('some-data').encode('ascii')
obj_etag = hashlib.md5(obj_data).hexdigest() obj_etag = md5(obj_data, usedforsecurity=False).hexdigest()
obj_name = self.create_name('versioned-obj') obj_name = self.create_name('versioned-obj')
self.client.upload_fileobj(six.BytesIO(obj_data), self.client.upload_fileobj(six.BytesIO(obj_data),
self.bucket_name, obj_name) self.bucket_name, obj_name)
@ -157,7 +157,7 @@ class TestObjectVersioning(BaseS3TestCase):
# overwrite the object # overwrite the object
new_obj_data = self.create_name('some-new-data').encode('ascii') new_obj_data = self.create_name('some-new-data').encode('ascii')
new_obj_etag = hashlib.md5(new_obj_data).hexdigest() new_obj_etag = md5(new_obj_data, usedforsecurity=False).hexdigest()
self.client.upload_fileobj(six.BytesIO(new_obj_data), self.client.upload_fileobj(six.BytesIO(new_obj_data),
self.bucket_name, obj_name) self.bucket_name, obj_name)
@ -199,7 +199,7 @@ class TestObjectVersioning(BaseS3TestCase):
obj_name = self.create_name('versioned-obj') obj_name = self.create_name('versioned-obj')
for i in range(3): for i in range(3):
obj_data = self.create_name('some-data-%s' % i).encode('ascii') obj_data = self.create_name('some-data-%s' % i).encode('ascii')
etags.insert(0, hashlib.md5(obj_data).hexdigest()) etags.insert(0, md5(obj_data, usedforsecurity=False).hexdigest())
self.client.upload_fileobj(six.BytesIO(obj_data), self.client.upload_fileobj(six.BytesIO(obj_data),
self.bucket_name, obj_name) self.bucket_name, obj_name)
@ -319,7 +319,7 @@ class TestObjectVersioning(BaseS3TestCase):
obj_name = self.create_name('versioned-obj') obj_name = self.create_name('versioned-obj')
for i in range(3): for i in range(3):
obj_data = self.create_name('some-data-%s' % i).encode('ascii') obj_data = self.create_name('some-data-%s' % i).encode('ascii')
etags.insert(0, hashlib.md5(obj_data).hexdigest()) etags.insert(0, md5(obj_data, usedforsecurity=False).hexdigest())
self.client.upload_fileobj(six.BytesIO(obj_data), self.client.upload_fileobj(six.BytesIO(obj_data),
self.bucket_name, obj_name) self.bucket_name, obj_name)
# and make a delete marker # and make a delete marker
@ -490,7 +490,7 @@ class TestObjectVersioning(BaseS3TestCase):
for i in range(3): for i in range(3):
obj_data = self.create_name('some-data-%s' % i).encode('ascii') obj_data = self.create_name('some-data-%s' % i).encode('ascii')
# TODO: pull etag from response instead # TODO: pull etag from response instead
etags.insert(0, hashlib.md5(obj_data).hexdigest()) etags.insert(0, md5(obj_data, usedforsecurity=False).hexdigest())
self.client.upload_fileobj( self.client.upload_fileobj(
six.BytesIO(obj_data), self.bucket_name, obj_name) six.BytesIO(obj_data), self.bucket_name, obj_name)
@ -571,12 +571,14 @@ class TestObjectVersioning(BaseS3TestCase):
etags = [] etags = []
for i in range(3): for i in range(3):
obj_data = self.create_name('some-data-%s' % i).encode('ascii') obj_data = self.create_name('some-data-%s' % i).encode('ascii')
etags.insert(0, '"%s"' % hashlib.md5(obj_data).hexdigest()) etags.insert(0, '"%s"' % md5(
obj_data, usedforsecurity=False).hexdigest())
self.client.upload_fileobj( self.client.upload_fileobj(
six.BytesIO(obj_data), self.bucket_name, obj01_name) six.BytesIO(obj_data), self.bucket_name, obj01_name)
for i in range(3): for i in range(3):
obj_data = self.create_name('some-data-%s' % i).encode('ascii') obj_data = self.create_name('some-data-%s' % i).encode('ascii')
etags.insert(0, '"%s"' % hashlib.md5(obj_data).hexdigest()) etags.insert(0, '"%s"' % md5(
obj_data, usedforsecurity=False).hexdigest())
self.client.upload_fileobj( self.client.upload_fileobj(
six.BytesIO(obj_data), self.bucket_name, obj00_name) six.BytesIO(obj_data), self.bucket_name, obj00_name)
resp = self.client.list_object_versions(Bucket=self.bucket_name) resp = self.client.list_object_versions(Bucket=self.bucket_name)
@ -653,7 +655,8 @@ class TestObjectVersioning(BaseS3TestCase):
obj_name = self.create_name('versioned-obj') obj_name = self.create_name('versioned-obj')
for i in range(3): for i in range(3):
obj_data = self.create_name('some-data-%s' % i).encode('ascii') obj_data = self.create_name('some-data-%s' % i).encode('ascii')
etags[obj_name].insert(0, hashlib.md5(obj_data).hexdigest()) etags[obj_name].insert(0, md5(
obj_data, usedforsecurity=False).hexdigest())
self.client.upload_fileobj( self.client.upload_fileobj(
six.BytesIO(obj_data), self.bucket_name, obj_name) six.BytesIO(obj_data), self.bucket_name, obj_name)
@ -708,7 +711,8 @@ class TestObjectVersioning(BaseS3TestCase):
obj_name = self.create_name('versioned-obj') obj_name = self.create_name('versioned-obj')
for i in range(3): for i in range(3):
obj_data = self.create_name('some-data-%s' % i).encode('ascii') obj_data = self.create_name('some-data-%s' % i).encode('ascii')
etags.insert(0, hashlib.md5(obj_data).hexdigest()) etags.insert(0, md5(
obj_data, usedforsecurity=False).hexdigest())
self.client.upload_fileobj( self.client.upload_fileobj(
six.BytesIO(obj_data), self.bucket_name, obj_name) six.BytesIO(obj_data), self.bucket_name, obj_name)

View File

@ -23,7 +23,6 @@ import logging.handlers
import sys import sys
from contextlib import contextmanager, closing from contextlib import contextmanager, closing
from collections import defaultdict, Iterable from collections import defaultdict, Iterable
from hashlib import md5
import itertools import itertools
from numbers import Number from numbers import Number
from tempfile import NamedTemporaryFile from tempfile import NamedTemporaryFile
@ -48,7 +47,7 @@ from six.moves.http_client import HTTPException
from swift.common import storage_policy, swob, utils from swift.common import storage_policy, swob, utils
from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy, from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy,
VALID_EC_TYPES) VALID_EC_TYPES)
from swift.common.utils import Timestamp, NOTICE from swift.common.utils import Timestamp, NOTICE, md5
from test import get_config from test import get_config
from swift.common.header_key_dict import HeaderKeyDict from swift.common.header_key_dict import HeaderKeyDict
from swift.common.ring import Ring, RingData, RingBuilder from swift.common.ring import Ring, RingData, RingBuilder
@ -65,7 +64,7 @@ class SkipTest(unittest.SkipTest):
pass pass
EMPTY_ETAG = md5().hexdigest() EMPTY_ETAG = md5(usedforsecurity=False).hexdigest()
# try not to import this module from swift # try not to import this module from swift
if not os.path.basename(sys.argv[0]).startswith('swift'): if not os.path.basename(sys.argv[0]).startswith('swift'):
@ -970,7 +969,8 @@ def fake_http_connect(*code_iter, **kwargs):
etag = self.etag etag = self.etag
if not etag: if not etag:
if isinstance(self.body, bytes): if isinstance(self.body, bytes):
etag = '"' + md5(self.body).hexdigest() + '"' etag = ('"' + md5(
self.body, usedforsecurity=False).hexdigest() + '"')
else: else:
etag = '"68b329da9893e34099c7d8ad5cb9c940"' etag = '"68b329da9893e34099c7d8ad5cb9c940"'
@ -1262,7 +1262,7 @@ def make_ec_object_stub(test_body, policy, timestamp):
test_body = test_body or ( test_body = test_body or (
b'test' * segment_size)[:-random.randint(1, 1000)] b'test' * segment_size)[:-random.randint(1, 1000)]
timestamp = timestamp or utils.Timestamp.now() timestamp = timestamp or utils.Timestamp.now()
etag = md5(test_body).hexdigest() etag = md5(test_body, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(policy, test_body) ec_archive_bodies = encode_frag_archive_bodies(policy, test_body)
return { return {

View File

@ -16,7 +16,6 @@
""" Tests for swift.account.backend """ """ Tests for swift.account.backend """
from collections import defaultdict from collections import defaultdict
import hashlib
import json import json
import unittest import unittest
import pickle import pickle
@ -40,6 +39,7 @@ from test.unit import patch_policies, with_tempdir, make_timestamp_iter
from swift.common.db import DatabaseConnectionError from swift.common.db import DatabaseConnectionError
from swift.common.request_helpers import get_reserved_name from swift.common.request_helpers import get_reserved_name
from swift.common.storage_policy import StoragePolicy, POLICIES from swift.common.storage_policy import StoragePolicy, POLICIES
from swift.common.utils import md5
from test.unit.common import test_db from test.unit.common import test_db
@ -821,10 +821,10 @@ class TestAccountBroker(unittest.TestCase):
POLICIES.default.idx) POLICIES.default.idx)
text = '%s-%s' % ('a', "%s-%s-%s-%s" % ( text = '%s-%s' % ('a', "%s-%s-%s-%s" % (
Timestamp(1).internal, Timestamp(0).internal, 0, 0)) Timestamp(1).internal, Timestamp(0).internal, 0, 0))
hasha = hashlib.md5(text.encode('ascii')).digest() hasha = md5(text.encode('ascii'), usedforsecurity=False).digest()
text = '%s-%s' % ('b', "%s-%s-%s-%s" % ( text = '%s-%s' % ('b', "%s-%s-%s-%s" % (
Timestamp(2).internal, Timestamp(0).internal, 0, 0)) Timestamp(2).internal, Timestamp(0).internal, 0, 0))
hashb = hashlib.md5(text.encode('ascii')).digest() hashb = md5(text.encode('ascii'), usedforsecurity=False).digest()
hashc = ''.join(('%02x' % (ord(a) ^ ord(b) if six.PY2 else a ^ b) hashc = ''.join(('%02x' % (ord(a) ^ ord(b) if six.PY2 else a ^ b)
for a, b in zip(hasha, hashb))) for a, b in zip(hasha, hashb)))
self.assertEqual(broker.get_info()['hash'], hashc) self.assertEqual(broker.get_info()['hash'], hashc)
@ -833,7 +833,7 @@ class TestAccountBroker(unittest.TestCase):
POLICIES.default.idx) POLICIES.default.idx)
text = '%s-%s' % ('b', "%s-%s-%s-%s" % ( text = '%s-%s' % ('b', "%s-%s-%s-%s" % (
Timestamp(3).internal, Timestamp(0).internal, 0, 0)) Timestamp(3).internal, Timestamp(0).internal, 0, 0))
hashb = hashlib.md5(text.encode('ascii')).digest() hashb = md5(text.encode('ascii'), usedforsecurity=False).digest()
hashc = ''.join(('%02x' % (ord(a) ^ ord(b) if six.PY2 else a ^ b) hashc = ''.join(('%02x' % (ord(a) ^ ord(b) if six.PY2 else a ^ b)
for a, b in zip(hasha, hashb))) for a, b in zip(hasha, hashb)))
self.assertEqual(broker.get_info()['hash'], hashc) self.assertEqual(broker.get_info()['hash'], hashc)

View File

@ -13,10 +13,10 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import base64 import base64
import hashlib
from swift.common.exceptions import UnknownSecretIdError from swift.common.exceptions import UnknownSecretIdError
from swift.common.middleware.crypto.crypto_utils import Crypto from swift.common.middleware.crypto.crypto_utils import Crypto
from swift.common.utils import md5
def fetch_crypto_keys(key_id=None): def fetch_crypto_keys(key_id=None):
@ -41,7 +41,7 @@ def fetch_crypto_keys(key_id=None):
def md5hex(s): def md5hex(s):
return hashlib.md5(s).hexdigest() return md5(s, usedforsecurity=False).hexdigest()
def encrypt(val, key=None, iv=None, ctxt=None): def encrypt(val, key=None, iv=None, ctxt=None):

View File

@ -16,14 +16,13 @@
# This stuff can't live in test/unit/__init__.py due to its swob dependency. # This stuff can't live in test/unit/__init__.py due to its swob dependency.
from collections import defaultdict, namedtuple from collections import defaultdict, namedtuple
from hashlib import md5
from six.moves.urllib import parse from six.moves.urllib import parse
from swift.common import swob from swift.common import swob
from swift.common.header_key_dict import HeaderKeyDict from swift.common.header_key_dict import HeaderKeyDict
from swift.common.request_helpers import is_user_meta, \ from swift.common.request_helpers import is_user_meta, \
is_object_transient_sysmeta, resolve_etag_is_at_header is_object_transient_sysmeta, resolve_etag_is_at_header
from swift.common.swob import HTTPNotImplemented from swift.common.swob import HTTPNotImplemented
from swift.common.utils import split_path from swift.common.utils import split_path, md5
from test.unit import FakeLogger, FakeRing from test.unit import FakeLogger, FakeRing
@ -159,7 +158,7 @@ class FakeSwift(object):
footers = HeaderKeyDict() footers = HeaderKeyDict()
env['swift.callback.update_footers'](footers) env['swift.callback.update_footers'](footers)
req.headers.update(footers) req.headers.update(footers)
etag = md5(req_body).hexdigest() etag = md5(req_body, usedforsecurity=False).hexdigest()
headers.setdefault('Etag', etag) headers.setdefault('Etag', etag)
headers.setdefault('Content-Length', len(req_body)) headers.setdefault('Content-Length', len(req_body))

View File

@ -18,13 +18,13 @@ import unittest
import mock import mock
from io import BytesIO from io import BytesIO
from hashlib import md5
from swift.common.swob import Request, HTTPAccepted from swift.common.swob import Request, HTTPAccepted
from swift.common.middleware.s3api.etree import fromstring, tostring, \ from swift.common.middleware.s3api.etree import fromstring, tostring, \
Element, SubElement, XMLNS_XSI Element, SubElement, XMLNS_XSI
from swift.common.middleware.s3api.s3response import InvalidArgument from swift.common.middleware.s3api.s3response import InvalidArgument
from swift.common.middleware.s3api.acl_utils import handle_acl_header from swift.common.middleware.s3api.acl_utils import handle_acl_header
from swift.common.utils import md5
from test.unit.common.middleware.s3api import S3ApiTestCase from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.helpers import UnreadableInput from test.unit.common.middleware.s3api.helpers import UnreadableInput
@ -133,7 +133,8 @@ class TestS3ApiAcl(S3ApiTestCase):
def _test_put_no_body(self, use_content_length=False, def _test_put_no_body(self, use_content_length=False,
use_transfer_encoding=False, string_to_md5=b''): use_transfer_encoding=False, string_to_md5=b''):
content_md5 = base64.b64encode(md5(string_to_md5).digest()).strip() content_md5 = base64.b64encode(
md5(string_to_md5, usedforsecurity=False).digest()).strip()
with UnreadableInput(self) as fake_input: with UnreadableInput(self) as fake_input:
req = Request.blank( req = Request.blank(
'/bucket?acl', '/bucket?acl',

View File

@ -17,7 +17,6 @@ import base64
import json import json
import unittest import unittest
from datetime import datetime from datetime import datetime
from hashlib import md5
import mock import mock
from swift.common import swob from swift.common import swob
@ -28,6 +27,7 @@ from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.helpers import UnreadableInput from test.unit.common.middleware.s3api.helpers import UnreadableInput
from swift.common.middleware.s3api.etree import fromstring, tostring, \ from swift.common.middleware.s3api.etree import fromstring, tostring, \
Element, SubElement Element, SubElement
from swift.common.utils import md5
from test.unit.common.middleware.s3api.test_s3_acl import s3acl from test.unit.common.middleware.s3api.test_s3_acl import s3acl
@ -47,7 +47,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
obj = SubElement(elem, 'Object') obj = SubElement(elem, 'Object')
SubElement(obj, 'Key').text = 'object' SubElement(obj, 'Key').text = 'object'
body = tostring(elem, use_s3ns=False) body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(md5(body).digest()).strip() content_md5 = base64.b64encode(
md5(body, usedforsecurity=False).digest()).strip()
req = Request.blank('/bucket/object?delete', req = Request.blank('/bucket/object?delete',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
@ -84,7 +85,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
obj = SubElement(elem, 'Object') obj = SubElement(elem, 'Object')
SubElement(obj, 'Key').text = key SubElement(obj, 'Key').text = key
body = tostring(elem, use_s3ns=False) body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(md5(body).digest()).strip() content_md5 = base64.b64encode(
md5(body, usedforsecurity=False).digest()).strip()
req = Request.blank('/bucket?delete', req = Request.blank('/bucket?delete',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
@ -138,7 +140,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
obj = SubElement(elem, 'Object') obj = SubElement(elem, 'Object')
SubElement(obj, 'Key').text = key SubElement(obj, 'Key').text = key
body = tostring(elem, use_s3ns=False) body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(md5(body).digest()).strip() content_md5 = base64.b64encode(
md5(body, usedforsecurity=False).digest()).strip()
req = Request.blank('/bucket?delete', req = Request.blank('/bucket?delete',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
@ -186,7 +189,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
obj = SubElement(elem, 'Object') obj = SubElement(elem, 'Object')
SubElement(obj, 'Key').text = key SubElement(obj, 'Key').text = key
body = tostring(elem, use_s3ns=False) body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(md5(body).digest()).strip() content_md5 = base64.b64encode(
md5(body, usedforsecurity=False).digest()).strip()
req = Request.blank('/bucket?delete', req = Request.blank('/bucket?delete',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
@ -213,7 +217,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
obj = SubElement(elem, 'Object') obj = SubElement(elem, 'Object')
SubElement(obj, 'Key') SubElement(obj, 'Key')
body = tostring(elem, use_s3ns=False) body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(md5(body).digest()).strip() content_md5 = base64.b64encode(
md5(body, usedforsecurity=False).digest()).strip()
req = Request.blank('/bucket?delete', req = Request.blank('/bucket?delete',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
@ -262,7 +267,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
if version: if version:
SubElement(obj, 'VersionId').text = version SubElement(obj, 'VersionId').text = version
body = tostring(elem, use_s3ns=False) body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(md5(body).digest()).strip() content_md5 = base64.b64encode(
md5(body, usedforsecurity=False).digest()).strip()
req = Request.blank('/bucket?delete', req = Request.blank('/bucket?delete',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
@ -319,7 +325,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
if ts: if ts:
SubElement(obj, 'VersionId').text = ts.normal SubElement(obj, 'VersionId').text = ts.normal
body = tostring(elem, use_s3ns=False) body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(md5(body).digest()).strip() content_md5 = base64.b64encode(
md5(body, usedforsecurity=False).digest()).strip()
req = Request.blank('/bucket?delete', req = Request.blank('/bucket?delete',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
@ -392,7 +399,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
obj = SubElement(elem, 'Object') obj = SubElement(elem, 'Object')
SubElement(obj, 'Key').text = name SubElement(obj, 'Key').text = name
body = tostring(elem, use_s3ns=False) body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(md5(body).digest()).strip() content_md5 = (base64.b64encode(
md5(body, usedforsecurity=False).digest()).strip())
req = Request.blank('/bucket?delete', req = Request.blank('/bucket?delete',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
@ -414,7 +422,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
obj = SubElement(elem, 'Object') obj = SubElement(elem, 'Object')
SubElement(obj, 'Key').text = 'x' * 1000 + str(i) SubElement(obj, 'Key').text = 'x' * 1000 + str(i)
body = tostring(elem, use_s3ns=False) body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(md5(body).digest()).strip() content_md5 = (base64.b64encode(
md5(body, usedforsecurity=False).digest()).strip())
req = Request.blank('/bucket?delete', req = Request.blank('/bucket?delete',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
@ -435,7 +444,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
obj = SubElement(elem, 'Object') obj = SubElement(elem, 'Object')
SubElement(obj, 'Key').text = 'Key1' SubElement(obj, 'Key').text = 'Key1'
body = tostring(elem, use_s3ns=False) body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(md5(body).digest()).strip() content_md5 = (base64.b64encode(
md5(body, usedforsecurity=False).digest()).strip())
req = Request.blank('/bucket?delete', req = Request.blank('/bucket?delete',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
@ -460,7 +470,9 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
obj = SubElement(elem, 'Object') obj = SubElement(elem, 'Object')
SubElement(obj, 'Key').text = key SubElement(obj, 'Key').text = key
body = tostring(elem, use_s3ns=False) body = tostring(elem, use_s3ns=False)
content_md5 = base64.b64encode(md5(body).digest()).strip() content_md5 = (
base64.b64encode(md5(body, usedforsecurity=False).digest())
.strip())
req = Request.blank('/bucket?delete', req = Request.blank('/bucket?delete',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
@ -502,7 +514,9 @@ class TestS3ApiMultiDelete(S3ApiTestCase):
def _test_no_body(self, use_content_length=False, def _test_no_body(self, use_content_length=False,
use_transfer_encoding=False, string_to_md5=b''): use_transfer_encoding=False, string_to_md5=b''):
content_md5 = base64.b64encode(md5(string_to_md5).digest()).strip() content_md5 = (base64.b64encode(
md5(string_to_md5, usedforsecurity=False).digest())
.strip())
with UnreadableInput(self) as fake_input: with UnreadableInput(self) as fake_input:
req = Request.blank( req = Request.blank(
'/bucket?delete', '/bucket?delete',

View File

@ -15,7 +15,6 @@
import base64 import base64
import binascii import binascii
import hashlib
from mock import patch from mock import patch
import os import os
import time import time
@ -24,7 +23,7 @@ from six.moves.urllib.parse import quote, quote_plus
from swift.common import swob from swift.common import swob
from swift.common.swob import Request from swift.common.swob import Request
from swift.common.utils import json from swift.common.utils import json, md5
from test.unit import FakeMemcache, patch_policies from test.unit import FakeMemcache, patch_policies
from test.unit.common.middleware.s3api import S3ApiTestCase from test.unit.common.middleware.s3api import S3ApiTestCase
@ -70,9 +69,9 @@ MULTIPARTS_TEMPLATE = \
('subdir/object/Z/2', '2014-05-07T19:47:58.592270', 'fedcba9876543210', ('subdir/object/Z/2', '2014-05-07T19:47:58.592270', 'fedcba9876543210',
41)) 41))
S3_ETAG = '"%s-2"' % hashlib.md5(binascii.a2b_hex( S3_ETAG = '"%s-2"' % md5(binascii.a2b_hex(
'0123456789abcdef0123456789abcdef' '0123456789abcdef0123456789abcdef'
'fedcba9876543210fedcba9876543210')).hexdigest() 'fedcba9876543210fedcba9876543210'), usedforsecurity=False).hexdigest()
class TestS3ApiMultiUpload(S3ApiTestCase): class TestS3ApiMultiUpload(S3ApiTestCase):
@ -826,8 +825,8 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
self.assertEqual(self._get_error_code(body), 'NoSuchBucket') self.assertEqual(self._get_error_code(body), 'NoSuchBucket')
def test_object_multipart_upload_complete(self): def test_object_multipart_upload_complete(self):
content_md5 = base64.b64encode(hashlib.md5( content_md5 = base64.b64encode(md5(
XML.encode('ascii')).digest()) XML.encode('ascii'), usedforsecurity=False).digest())
req = Request.blank('/bucket/object?uploadId=X', req = Request.blank('/bucket/object?uploadId=X',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
headers={'Authorization': 'AWS test:tester:hmac', headers={'Authorization': 'AWS test:tester:hmac',
@ -863,8 +862,8 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
self.assertEqual(headers.get('X-Object-Sysmeta-S3Api-Upload-Id'), 'X') self.assertEqual(headers.get('X-Object-Sysmeta-S3Api-Upload-Id'), 'X')
def test_object_multipart_upload_retry_complete(self): def test_object_multipart_upload_retry_complete(self):
content_md5 = base64.b64encode(hashlib.md5( content_md5 = base64.b64encode(md5(
XML.encode('ascii')).digest()) XML.encode('ascii'), usedforsecurity=False).digest())
self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X', self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X',
swob.HTTPNotFound, {}, None) swob.HTTPNotFound, {}, None)
recent_ts = S3Timestamp.now(delta=-1000000).internal # 10s ago recent_ts = S3Timestamp.now(delta=-1000000).internal # 10s ago
@ -899,8 +898,8 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
]) ])
def test_object_multipart_upload_retry_complete_etag_mismatch(self): def test_object_multipart_upload_retry_complete_etag_mismatch(self):
content_md5 = base64.b64encode(hashlib.md5( content_md5 = base64.b64encode(md5(
XML.encode('ascii')).digest()) XML.encode('ascii'), usedforsecurity=False).digest())
self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X', self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X',
swob.HTTPNotFound, {}, None) swob.HTTPNotFound, {}, None)
recent_ts = S3Timestamp.now(delta=-1000000).internal recent_ts = S3Timestamp.now(delta=-1000000).internal
@ -948,8 +947,8 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
self.assertEqual(headers.get('X-Object-Sysmeta-S3Api-Upload-Id'), 'X') self.assertEqual(headers.get('X-Object-Sysmeta-S3Api-Upload-Id'), 'X')
def test_object_multipart_upload_retry_complete_upload_id_mismatch(self): def test_object_multipart_upload_retry_complete_upload_id_mismatch(self):
content_md5 = base64.b64encode(hashlib.md5( content_md5 = base64.b64encode(md5(
XML.encode('ascii')).digest()) XML.encode('ascii'), usedforsecurity=False).digest())
self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X', self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X',
swob.HTTPNotFound, {}, None) swob.HTTPNotFound, {}, None)
recent_ts = S3Timestamp.now(delta=-1000000).internal recent_ts = S3Timestamp.now(delta=-1000000).internal
@ -982,8 +981,9 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
]) ])
def test_object_multipart_upload_invalid_md5(self): def test_object_multipart_upload_invalid_md5(self):
bad_md5 = base64.b64encode(hashlib.md5( bad_md5 = base64.b64encode(md5(
XML.encode('ascii') + b'some junk').digest()) XML.encode('ascii') + b'some junk', usedforsecurity=False)
.digest())
req = Request.blank('/bucket/object?uploadId=X', req = Request.blank('/bucket/object?uploadId=X',
environ={'REQUEST_METHOD': 'POST'}, environ={'REQUEST_METHOD': 'POST'},
headers={'Authorization': 'AWS test:tester:hmac', headers={'Authorization': 'AWS test:tester:hmac',
@ -1413,8 +1413,9 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
self.assertEqual(status.split()[0], '200') self.assertEqual(status.split()[0], '200')
elem = fromstring(body, 'CompleteMultipartUploadResult') elem = fromstring(body, 'CompleteMultipartUploadResult')
self.assertNotIn('Etag', headers) self.assertNotIn('Etag', headers)
expected_etag = '"%s-3"' % hashlib.md5(binascii.unhexlify(''.join( expected_etag = ('"%s-3"' % md5(binascii.unhexlify(''.join(
x['hash'] for x in object_list))).hexdigest() x['hash'] for x in object_list)), usedforsecurity=False)
.hexdigest())
self.assertEqual(elem.find('ETag').text, expected_etag) self.assertEqual(elem.find('ETag').text, expected_etag)
self.assertEqual(self.swift.calls, [ self.assertEqual(self.swift.calls, [
@ -2217,7 +2218,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase):
def _test_no_body(self, use_content_length=False, def _test_no_body(self, use_content_length=False,
use_transfer_encoding=False, string_to_md5=b''): use_transfer_encoding=False, string_to_md5=b''):
raw_md5 = hashlib.md5(string_to_md5).digest() raw_md5 = md5(string_to_md5, usedforsecurity=False).digest()
content_md5 = base64.b64encode(raw_md5).strip() content_md5 = base64.b64encode(raw_md5).strip()
with UnreadableInput(self) as fake_input: with UnreadableInput(self) as fake_input:
req = Request.blank( req = Request.blank(

View File

@ -16,7 +16,7 @@
import binascii import binascii
import unittest import unittest
from datetime import datetime from datetime import datetime
import hashlib from hashlib import sha256
import os import os
from os.path import join from os.path import join
import time import time
@ -36,6 +36,7 @@ from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.utils import mktime, S3Timestamp from swift.common.middleware.s3api.utils import mktime, S3Timestamp
from swift.common.middleware.versioned_writes.object_versioning import \ from swift.common.middleware.versioned_writes.object_versioning import \
DELETE_MARKER_CONTENT_TYPE DELETE_MARKER_CONTENT_TYPE
from swift.common.utils import md5
class TestS3ApiObj(S3ApiTestCase): class TestS3ApiObj(S3ApiTestCase):
@ -44,7 +45,7 @@ class TestS3ApiObj(S3ApiTestCase):
super(TestS3ApiObj, self).setUp() super(TestS3ApiObj, self).setUp()
self.object_body = b'hello' self.object_body = b'hello'
self.etag = hashlib.md5(self.object_body).hexdigest() self.etag = md5(self.object_body, usedforsecurity=False).hexdigest()
self.last_modified = 'Fri, 01 Apr 2014 12:00:00 GMT' self.last_modified = 'Fri, 01 Apr 2014 12:00:00 GMT'
self.response_headers = {'Content-Type': 'text/html', self.response_headers = {'Content-Type': 'text/html',
@ -643,7 +644,7 @@ class TestS3ApiObj(S3ApiTestCase):
@s3acl @s3acl
def test_object_PUT_v4(self): def test_object_PUT_v4(self):
body_sha = hashlib.sha256(self.object_body).hexdigest() body_sha = sha256(self.object_body).hexdigest()
req = Request.blank( req = Request.blank(
'/bucket/object', '/bucket/object',
environ={'REQUEST_METHOD': 'PUT'}, environ={'REQUEST_METHOD': 'PUT'},

View File

@ -18,7 +18,6 @@ import unittest
from mock import patch, MagicMock from mock import patch, MagicMock
import calendar import calendar
from datetime import datetime from datetime import datetime
import hashlib
import mock import mock
import requests import requests
import json import json
@ -29,6 +28,7 @@ import swift.common.middleware.s3api
from swift.common.middleware.keystoneauth import KeystoneAuth from swift.common.middleware.keystoneauth import KeystoneAuth
from swift.common import swob, utils from swift.common import swob, utils
from swift.common.swob import Request from swift.common.swob import Request
from swift.common.utils import md5
from keystonemiddleware.auth_token import AuthProtocol from keystonemiddleware.auth_token import AuthProtocol
from keystoneauth1.access import AccessInfoV2 from keystoneauth1.access import AccessInfoV2
@ -177,7 +177,7 @@ class TestS3ApiMiddleware(S3ApiTestCase):
def verify(hash, path, headers): def verify(hash, path, headers):
s = canonical_string(path, headers) s = canonical_string(path, headers)
self.assertEqual(hash, hashlib.md5(s).hexdigest()) self.assertEqual(hash, md5(s, usedforsecurity=False).hexdigest())
verify('6dd08c75e42190a1ce9468d1fd2eb787', '/bucket/object', verify('6dd08c75e42190a1ce9468d1fd2eb787', '/bucket/object',
{'Content-Type': 'text/plain', 'X-Amz-Something': 'test', {'Content-Type': 'text/plain', 'X-Amz-Something': 'test',
@ -563,7 +563,7 @@ class TestS3ApiMiddleware(S3ApiTestCase):
self.assertEqual(self._get_error_code(body), 'InvalidDigest') self.assertEqual(self._get_error_code(body), 'InvalidDigest')
def test_object_create_bad_md5_too_short(self): def test_object_create_bad_md5_too_short(self):
too_short_digest = hashlib.md5(b'hey').digest()[:-1] too_short_digest = md5(b'hey', usedforsecurity=False).digest()[:-1]
md5_str = base64.b64encode(too_short_digest).strip() md5_str = base64.b64encode(too_short_digest).strip()
if not six.PY2: if not six.PY2:
md5_str = md5_str.decode('ascii') md5_str = md5_str.decode('ascii')
@ -577,7 +577,8 @@ class TestS3ApiMiddleware(S3ApiTestCase):
self.assertEqual(self._get_error_code(body), 'InvalidDigest') self.assertEqual(self._get_error_code(body), 'InvalidDigest')
def test_object_create_bad_md5_too_long(self): def test_object_create_bad_md5_too_long(self):
too_long_digest = hashlib.md5(b'hey').digest() + b'suffix' too_long_digest = md5(
b'hey', usedforsecurity=False).digest() + b'suffix'
md5_str = base64.b64encode(too_long_digest).strip() md5_str = base64.b64encode(too_long_digest).strip()
if not six.PY2: if not six.PY2:
md5_str = md5_str.decode('ascii') md5_str = md5_str.decode('ascii')

View File

@ -32,6 +32,7 @@ from swift.common.middleware.s3api.s3request import S3Request, \
from swift.common.middleware.s3api.s3response import InvalidArgument, \ from swift.common.middleware.s3api.s3response import InvalidArgument, \
NoSuchBucket, InternalError, \ NoSuchBucket, InternalError, \
AccessDenied, SignatureDoesNotMatch, RequestTimeTooSkewed AccessDenied, SignatureDoesNotMatch, RequestTimeTooSkewed
from swift.common.utils import md5, md5_factory
from test.unit import DebugLogger from test.unit import DebugLogger
@ -823,8 +824,9 @@ class TestRequest(S3ApiTestCase):
class TestHashingInput(S3ApiTestCase): class TestHashingInput(S3ApiTestCase):
def test_good(self): def test_good(self):
raw = b'123456789' raw = b'123456789'
wrapped = HashingInput(BytesIO(raw), 9, hashlib.md5, wrapped = HashingInput(
hashlib.md5(raw).hexdigest()) BytesIO(raw), 9, md5_factory,
md5(raw, usedforsecurity=False).hexdigest())
self.assertEqual(b'1234', wrapped.read(4)) self.assertEqual(b'1234', wrapped.read(4))
self.assertEqual(b'56', wrapped.read(2)) self.assertEqual(b'56', wrapped.read(2))
# trying to read past the end gets us whatever's left # trying to read past the end gets us whatever's left
@ -848,8 +850,9 @@ class TestHashingInput(S3ApiTestCase):
def test_too_long(self): def test_too_long(self):
raw = b'123456789' raw = b'123456789'
wrapped = HashingInput(BytesIO(raw), 8, hashlib.md5, wrapped = HashingInput(
hashlib.md5(raw).hexdigest()) BytesIO(raw), 8, md5_factory,
md5(raw, usedforsecurity=False).hexdigest())
self.assertEqual(b'1234', wrapped.read(4)) self.assertEqual(b'1234', wrapped.read(4))
self.assertEqual(b'56', wrapped.read(2)) self.assertEqual(b'56', wrapped.read(2))
# even though the hash matches, there was more data than we expected # even though the hash matches, there was more data than we expected
@ -861,8 +864,9 @@ class TestHashingInput(S3ApiTestCase):
def test_too_short(self): def test_too_short(self):
raw = b'123456789' raw = b'123456789'
wrapped = HashingInput(BytesIO(raw), 10, hashlib.md5, wrapped = HashingInput(
hashlib.md5(raw).hexdigest()) BytesIO(raw), 10, md5_factory,
md5(raw, usedforsecurity=False).hexdigest())
self.assertEqual(b'1234', wrapped.read(4)) self.assertEqual(b'1234', wrapped.read(4))
self.assertEqual(b'56', wrapped.read(2)) self.assertEqual(b'56', wrapped.read(2))
# even though the hash matches, there was more data than we expected # even though the hash matches, there was more data than we expected
@ -873,8 +877,9 @@ class TestHashingInput(S3ApiTestCase):
def test_bad_hash(self): def test_bad_hash(self):
raw = b'123456789' raw = b'123456789'
wrapped = HashingInput(BytesIO(raw), 9, hashlib.sha256, wrapped = HashingInput(
hashlib.md5(raw).hexdigest()) BytesIO(raw), 9, hashlib.sha256,
md5(raw, usedforsecurity=False).hexdigest())
self.assertEqual(b'1234', wrapped.read(4)) self.assertEqual(b'1234', wrapped.read(4))
self.assertEqual(b'5678', wrapped.read(4)) self.assertEqual(b'5678', wrapped.read(4))
with self.assertRaises(swob.HTTPException) as raised: with self.assertRaises(swob.HTTPException) as raised:

View File

@ -16,14 +16,13 @@
import mock import mock
import unittest import unittest
from hashlib import md5
from six.moves import urllib from six.moves import urllib
from swift.common import swob from swift.common import swob
from swift.common.middleware import copy from swift.common.middleware import copy
from swift.common.storage_policy import POLICIES from swift.common.storage_policy import POLICIES
from swift.common.swob import Request, HTTPException from swift.common.swob import Request, HTTPException
from swift.common.utils import closing_if_possible from swift.common.utils import closing_if_possible, md5
from test.unit import patch_policies, debug_logger, FakeRing from test.unit import patch_policies, debug_logger, FakeRing
from test.unit.common.middleware.helpers import FakeSwift from test.unit.common.middleware.helpers import FakeSwift
from test.unit.proxy.controllers.test_obj import set_http_connect, \ from test.unit.proxy.controllers.test_obj import set_http_connect, \
@ -1386,7 +1385,7 @@ class TestServerSideCopyMiddlewareWithEC(unittest.TestCase):
def _test_invalid_ranges(self, method, real_body, segment_size, req_range): def _test_invalid_ranges(self, method, real_body, segment_size, req_range):
# make a request with range starts from more than real size. # make a request with range starts from more than real size.
body_etag = md5(real_body).hexdigest() body_etag = md5(real_body, usedforsecurity=False).hexdigest()
req = swob.Request.blank( req = swob.Request.blank(
'/v1/a/c/o', method=method, '/v1/a/c/o', method=method,
headers={'Destination': 'c1/o', headers={'Destination': 'c1/o',

View File

@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import hashlib
import json import json
import mock import mock
import shutil import shutil
@ -26,7 +25,7 @@ import unittest
from swift.common import swob from swift.common import swob
from swift.common.header_key_dict import HeaderKeyDict from swift.common.header_key_dict import HeaderKeyDict
from swift.common.middleware import dlo from swift.common.middleware import dlo
from swift.common.utils import closing_if_possible from swift.common.utils import closing_if_possible, md5
from test.unit.common.middleware.helpers import FakeSwift from test.unit.common.middleware.helpers import FakeSwift
@ -36,7 +35,7 @@ LIMIT = 'swift.common.constraints.CONTAINER_LISTING_LIMIT'
def md5hex(s): def md5hex(s):
if not isinstance(s, bytes): if not isinstance(s, bytes):
s = s.encode('utf-8') s = s.encode('utf-8')
return hashlib.md5(s).hexdigest() return md5(s, usedforsecurity=False).hexdigest()
class DloTestCase(unittest.TestCase): class DloTestCase(unittest.TestCase):
@ -738,7 +737,8 @@ class TestDloGetManifest(DloTestCase):
status, headers, body = self.call_dlo(req) status, headers, body = self.call_dlo(req)
headers = HeaderKeyDict(headers) headers = HeaderKeyDict(headers)
self.assertEqual(headers["Etag"], self.assertEqual(headers["Etag"],
'"' + hashlib.md5(b"abcdef").hexdigest() + '"') '"' + md5(b"abcdef",
usedforsecurity=False).hexdigest() + '"')
def test_object_prefix_quoting(self): def test_object_prefix_quoting(self):
self.app.register( self.app.register(

View File

@ -19,7 +19,6 @@ import os
import time import time
import mock import mock
import unittest import unittest
from hashlib import md5
import six import six
from six.moves import urllib from six.moves import urllib
from swift.common import swob, utils from swift.common import swob, utils
@ -33,6 +32,7 @@ from swift.common.middleware.versioned_writes.object_versioning import \
SYSMETA_VERSIONS_SYMLINK, DELETE_MARKER_CONTENT_TYPE SYSMETA_VERSIONS_SYMLINK, DELETE_MARKER_CONTENT_TYPE
from swift.common.request_helpers import get_reserved_name from swift.common.request_helpers import get_reserved_name
from swift.common.storage_policy import StoragePolicy from swift.common.storage_policy import StoragePolicy
from swift.common.utils import md5
from swift.proxy.controllers.base import get_cache_key from swift.proxy.controllers.base import get_cache_key
from test.unit import patch_policies, FakeMemcache, make_timestamp_iter from test.unit import patch_policies, FakeMemcache, make_timestamp_iter
from test.unit.common.middleware.helpers import FakeSwift from test.unit.common.middleware.helpers import FakeSwift
@ -580,7 +580,8 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase):
'/v1/a/c/o', method='PUT', body=put_body, '/v1/a/c/o', method='PUT', body=put_body,
headers={'Content-Type': 'text/plain', headers={'Content-Type': 'text/plain',
'ETag': md5( 'ETag': md5(
put_body.encode('utf8')).hexdigest(), put_body.encode('utf8'),
usedforsecurity=False).hexdigest(),
'Content-Length': len(put_body)}, 'Content-Length': len(put_body)},
environ={'swift.cache': self.cache_version_on, environ={'swift.cache': self.cache_version_on,
'swift.trans_id': 'fake_trans_id'}) 'swift.trans_id': 'fake_trans_id'})
@ -607,7 +608,7 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase):
TGT_OBJ_SYSMETA_SYMLINK_HDR: TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'), self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5( 'x-object-sysmeta-symlink-target-etag': md5(
put_body.encode('utf8')).hexdigest(), put_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)),
} }
symlink_put_headers = self.app._calls[-1].headers symlink_put_headers = self.app._calls[-1].headers
@ -757,7 +758,9 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase):
req = Request.blank( req = Request.blank(
'/v1/a/c/o', method='PUT', body=put_body, '/v1/a/c/o', method='PUT', body=put_body,
headers={'Content-Type': 'text/plain', headers={'Content-Type': 'text/plain',
'ETag': md5(put_body.encode('utf8')).hexdigest(), 'ETag': md5(
put_body.encode('utf8'),
usedforsecurity=False).hexdigest(),
'Content-Length': len(put_body)}, 'Content-Length': len(put_body)},
environ={'swift.cache': self.cache_version_on, environ={'swift.cache': self.cache_version_on,
'swift.trans_id': 'fake_trans_id'}) 'swift.trans_id': 'fake_trans_id'})
@ -784,7 +787,7 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase):
TGT_OBJ_SYSMETA_SYMLINK_HDR: TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'), self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5( 'x-object-sysmeta-symlink-target-etag': md5(
put_body.encode('utf8')).hexdigest(), put_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)),
} }
symlink_put_headers = self.app._calls[-1].headers symlink_put_headers = self.app._calls[-1].headers
@ -842,7 +845,7 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase):
TGT_OBJ_SYSMETA_SYMLINK_HDR: TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'), self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5( 'x-object-sysmeta-symlink-target-etag': md5(
put_body.encode('utf8')).hexdigest(), put_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)),
} }
for k, v in expected_headers.items(): for k, v in expected_headers.items():
@ -904,7 +907,7 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase):
TGT_OBJ_SYSMETA_SYMLINK_HDR: TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'), self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5( 'x-object-sysmeta-symlink-target-etag': md5(
put_body.encode('utf8')).hexdigest(), put_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)),
} }
for k, v in expected_headers.items(): for k, v in expected_headers.items():
@ -984,7 +987,7 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase):
TGT_OBJ_SYSMETA_SYMLINK_HDR: TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'), self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5( 'x-object-sysmeta-symlink-target-etag': md5(
put_body.encode('utf8')).hexdigest(), put_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)),
} }
for k, v in expected_headers.items(): for k, v in expected_headers.items():
@ -1013,7 +1016,8 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase):
'/v1/a/c/o', method='PUT', body=put_body, '/v1/a/c/o', method='PUT', body=put_body,
headers={'Content-Type': 'text/plain', headers={'Content-Type': 'text/plain',
'ETag': md5( 'ETag': md5(
put_body.encode('utf8')).hexdigest(), put_body.encode('utf8'),
usedforsecurity=False).hexdigest(),
'Content-Length': len(put_body)}, 'Content-Length': len(put_body)},
environ={'swift.cache': self.cache_version_on, environ={'swift.cache': self.cache_version_on,
'swift.trans_id': 'fake_trans_id'}) 'swift.trans_id': 'fake_trans_id'})
@ -1042,7 +1046,7 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase):
TGT_OBJ_SYSMETA_SYMLINK_HDR: TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'), self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5( 'x-object-sysmeta-symlink-target-etag': md5(
put_body.encode('utf8')).hexdigest(), put_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)),
} }
symlink_put_headers = self.app._calls[-1].headers symlink_put_headers = self.app._calls[-1].headers
@ -1196,7 +1200,9 @@ class ObjectVersioningTestDisabled(ObjectVersioningBaseTestCase):
req = Request.blank( req = Request.blank(
'/v1/a/c/o', method='PUT', body=put_body, '/v1/a/c/o', method='PUT', body=put_body,
headers={'Content-Type': 'text/plain', headers={'Content-Type': 'text/plain',
'ETag': md5(put_body.encode('utf8')).hexdigest(), 'ETag': md5(
put_body.encode('utf8'),
usedforsecurity=False).hexdigest(),
'Content-Length': len(put_body)}, 'Content-Length': len(put_body)},
environ={'swift.cache': self.cache_version_off, environ={'swift.cache': self.cache_version_off,
'swift.trans_id': 'fake_trans_id'}) 'swift.trans_id': 'fake_trans_id'})
@ -1283,7 +1289,9 @@ class ObjectVersioningTestDisabled(ObjectVersioningBaseTestCase):
req = Request.blank( req = Request.blank(
'/v1/a/c/o', method='PUT', body=put_body, '/v1/a/c/o', method='PUT', body=put_body,
headers={'Content-Type': 'text/plain', headers={'Content-Type': 'text/plain',
'ETag': md5(put_body.encode('utf8')).hexdigest(), 'ETag': md5(
put_body.encode('utf8'),
usedforsecurity=False).hexdigest(),
'Content-Length': len(put_body)}, 'Content-Length': len(put_body)},
environ={'swift.cache': self.cache_version_off, environ={'swift.cache': self.cache_version_off,
'swift.trans_id': 'fake_trans_id'}) 'swift.trans_id': 'fake_trans_id'})
@ -1580,7 +1588,7 @@ class ObjectVersioningTestCopy(ObjectVersioningBaseTestCase):
TGT_OBJ_SYSMETA_SYMLINK_HDR: TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'), self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5( 'x-object-sysmeta-symlink-target-etag': md5(
src_body.encode('utf8')).hexdigest(), src_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(src_body)), 'x-object-sysmeta-symlink-target-bytes': str(len(src_body)),
} }
symlink_put_headers = self.app._calls[-1].headers symlink_put_headers = self.app._calls[-1].headers
@ -1633,7 +1641,7 @@ class ObjectVersioningTestCopy(ObjectVersioningBaseTestCase):
TGT_OBJ_SYSMETA_SYMLINK_HDR: TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'), self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5( 'x-object-sysmeta-symlink-target-etag': md5(
src_body.encode('utf8')).hexdigest(), src_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(src_body)), 'x-object-sysmeta-symlink-target-bytes': str(len(src_body)),
} }
symlink_put_headers = self.app._calls[-1].headers symlink_put_headers = self.app._calls[-1].headers
@ -1680,7 +1688,7 @@ class ObjectVersioningTestCopy(ObjectVersioningBaseTestCase):
TGT_OBJ_SYSMETA_SYMLINK_HDR: TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'), self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5( 'x-object-sysmeta-symlink-target-etag': md5(
src_body.encode('utf8')).hexdigest(), src_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(src_body)), 'x-object-sysmeta-symlink-target-bytes': str(len(src_body)),
} }
symlink_put_headers = self.app._calls[-1].headers symlink_put_headers = self.app._calls[-1].headers
@ -1730,7 +1738,7 @@ class ObjectVersioningTestCopy(ObjectVersioningBaseTestCase):
TGT_OBJ_SYSMETA_SYMLINK_HDR: TGT_OBJ_SYSMETA_SYMLINK_HDR:
self.build_symlink_path('c', 'o', '9999998765.99999'), self.build_symlink_path('c', 'o', '9999998765.99999'),
'x-object-sysmeta-symlink-target-etag': md5( 'x-object-sysmeta-symlink-target-etag': md5(
src_body.encode('utf8')).hexdigest(), src_body.encode('utf8'), usedforsecurity=False).hexdigest(),
'x-object-sysmeta-symlink-target-bytes': str(len(src_body)), 'x-object-sysmeta-symlink-target-bytes': str(len(src_body)),
} }
symlink_put_headers = self.app._calls[-1].headers symlink_put_headers = self.app._calls[-1].headers
@ -1792,7 +1800,7 @@ class ObjectVersioningTestVersionAPI(ObjectVersioningBaseTestCase):
timestamp = next(self.ts) timestamp = next(self.ts)
version_path = '%s?symlink=get' % self.build_versions_path( version_path = '%s?symlink=get' % self.build_versions_path(
obj='o', version=(~timestamp).normal) obj='o', version=(~timestamp).normal)
etag = md5(b'old-version-etag').hexdigest() etag = md5(b'old-version-etag', usedforsecurity=False).hexdigest()
self.app.register('HEAD', version_path, swob.HTTPNoContent, { self.app.register('HEAD', version_path, swob.HTTPNoContent, {
'Content-Length': 10, 'Content-Length': 10,
'Content-Type': 'application/old-version', 'Content-Type': 'application/old-version',
@ -2128,7 +2136,7 @@ class ObjectVersioningVersionAPIWhileDisabled(ObjectVersioningBaseTestCase):
timestamp = next(self.ts) timestamp = next(self.ts)
version_path = '%s?symlink=get' % self.build_versions_path( version_path = '%s?symlink=get' % self.build_versions_path(
obj='o', version=(~timestamp).normal) obj='o', version=(~timestamp).normal)
etag = md5(b'old-version-etag').hexdigest() etag = md5(b'old-version-etag', usedforsecurity=False).hexdigest()
self.app.register('HEAD', version_path, swob.HTTPNoContent, { self.app.register('HEAD', version_path, swob.HTTPNoContent, {
'Content-Length': 10, 'Content-Length': 10,
'Content-Type': 'application/old-version', 'Content-Type': 'application/old-version',

View File

@ -15,7 +15,6 @@
# limitations under the License. # limitations under the License.
import base64 import base64
import hashlib
import json import json
import time import time
import unittest import unittest
@ -32,7 +31,7 @@ from swift.common.swob import Request, HTTPException, str_to_wsgi, \
bytes_to_wsgi bytes_to_wsgi
from swift.common.utils import quote, closing_if_possible, close_if_possible, \ from swift.common.utils import quote, closing_if_possible, close_if_possible, \
parse_content_type, iter_multipart_mime_documents, parse_mime_headers, \ parse_content_type, iter_multipart_mime_documents, parse_mime_headers, \
Timestamp, get_expirer_container Timestamp, get_expirer_container, md5
from test.unit.common.middleware.helpers import FakeSwift from test.unit.common.middleware.helpers import FakeSwift
@ -57,7 +56,7 @@ def fake_start_response(*args, **kwargs):
def md5hex(s): def md5hex(s):
if not isinstance(s, bytes): if not isinstance(s, bytes):
s = s.encode('ascii') s = s.encode('ascii')
return hashlib.md5(s).hexdigest() return md5(s, usedforsecurity=False).hexdigest()
class SloTestCase(unittest.TestCase): class SloTestCase(unittest.TestCase):
@ -3004,7 +3003,7 @@ class TestSloGetManifest(SloTestCase):
def test_get_segment_with_non_ascii_path(self): def test_get_segment_with_non_ascii_path(self):
segment_body = u"a møøse once bit my sister".encode("utf-8") segment_body = u"a møøse once bit my sister".encode("utf-8")
segment_etag = hashlib.md5(segment_body).hexdigest() segment_etag = md5(segment_body, usedforsecurity=False).hexdigest()
if six.PY2: if six.PY2:
path = u'/v1/AUTH_test/ünicode/öbject-segment'.encode('utf-8') path = u'/v1/AUTH_test/ünicode/öbject-segment'.encode('utf-8')
else: else:

View File

@ -16,7 +16,6 @@
import array import array
import collections import collections
import six.moves.cPickle as pickle import six.moves.cPickle as pickle
import hashlib
import os import os
import unittest import unittest
import stat import stat
@ -30,9 +29,9 @@ import copy
import mock import mock
from six.moves import range from six.moves import range
from swift.common import ring, utils from swift.common import ring, utils
from swift.common.ring import utils as ring_utils from swift.common.ring import utils as ring_utils
from swift.common.utils import md5
class TestRingBase(unittest.TestCase): class TestRingBase(unittest.TestCase):
@ -236,7 +235,7 @@ class TestRing(TestRingBase):
self.assertIsNone(self.ring.version) self.assertIsNone(self.ring.version)
with open(self.testgz, 'rb') as fp: with open(self.testgz, 'rb') as fp:
expected_md5 = hashlib.md5() expected_md5 = md5(usedforsecurity=False)
expected_size = 0 expected_size = 0
for chunk in iter(lambda: fp.read(2 ** 16), b''): for chunk in iter(lambda: fp.read(2 ** 16), b''):
expected_md5.update(chunk) expected_md5.update(chunk)

View File

@ -18,7 +18,6 @@ import json
import unittest import unittest
import os import os
from contextlib import contextmanager from contextlib import contextmanager
from hashlib import md5
import time import time
import pickle import pickle
@ -29,7 +28,7 @@ from swift.common import direct_client
from swift.common.direct_client import DirectClientException from swift.common.direct_client import DirectClientException
from swift.common.exceptions import ClientException from swift.common.exceptions import ClientException
from swift.common.header_key_dict import HeaderKeyDict from swift.common.header_key_dict import HeaderKeyDict
from swift.common.utils import Timestamp, quote from swift.common.utils import Timestamp, quote, md5
from swift.common.swob import RESPONSE_REASONS from swift.common.swob import RESPONSE_REASONS
from swift.common.storage_policy import POLICIES from swift.common.storage_policy import POLICIES
from six.moves.http_client import HTTPException from six.moves.http_client import HTTPException
@ -81,7 +80,7 @@ class FakeConn(object):
def send(self, data): def send(self, data):
if not self.etag: if not self.etag:
self.etag = md5() self.etag = md5(usedforsecurity=False)
self.etag.update(data) self.etag.update(data)
@ -546,7 +545,9 @@ class TestDirectClient(unittest.TestCase):
self.assertEqual(conn.req_headers['User-Agent'], 'my UA') self.assertEqual(conn.req_headers['User-Agent'], 'my UA')
self.assertTrue('x-timestamp' in conn.req_headers) self.assertTrue('x-timestamp' in conn.req_headers)
self.assertEqual('bar', conn.req_headers.get('x-foo')) self.assertEqual('bar', conn.req_headers.get('x-foo'))
self.assertEqual(md5(body).hexdigest(), conn.etag.hexdigest()) self.assertEqual(
md5(body, usedforsecurity=False).hexdigest(),
conn.etag.hexdigest())
self.assertIsNone(rv) self.assertIsNone(rv)
def test_direct_put_container_chunked(self): def test_direct_put_container_chunked(self):
@ -568,8 +569,9 @@ class TestDirectClient(unittest.TestCase):
self.assertEqual('bar', conn.req_headers.get('x-foo')) self.assertEqual('bar', conn.req_headers.get('x-foo'))
self.assertNotIn('Content-Length', conn.req_headers) self.assertNotIn('Content-Length', conn.req_headers)
expected_sent = b'%0x\r\n%s\r\n0\r\n\r\n' % (len(body), body) expected_sent = b'%0x\r\n%s\r\n0\r\n\r\n' % (len(body), body)
self.assertEqual(md5(expected_sent).hexdigest(), self.assertEqual(
conn.etag.hexdigest()) md5(expected_sent, usedforsecurity=False).hexdigest(),
conn.etag.hexdigest())
self.assertIsNone(rv) self.assertIsNone(rv)
def test_direct_put_container_fail(self): def test_direct_put_container_fail(self):
@ -849,7 +851,9 @@ class TestDirectClient(unittest.TestCase):
self.assertEqual(conn.port, self.node['port']) self.assertEqual(conn.port, self.node['port'])
self.assertEqual(conn.method, 'PUT') self.assertEqual(conn.method, 'PUT')
self.assertEqual(conn.path, self.obj_path) self.assertEqual(conn.path, self.obj_path)
self.assertEqual(md5(b'123456').hexdigest(), resp) self.assertEqual(
md5(b'123456', usedforsecurity=False).hexdigest(),
resp)
def test_direct_put_object_fail(self): def test_direct_put_object_fail(self):
contents = io.BytesIO(b'123456') contents = io.BytesIO(b'123456')
@ -876,7 +880,10 @@ class TestDirectClient(unittest.TestCase):
self.assertEqual(conn.port, self.node['port']) self.assertEqual(conn.port, self.node['port'])
self.assertEqual(conn.method, 'PUT') self.assertEqual(conn.method, 'PUT')
self.assertEqual(conn.path, self.obj_path) self.assertEqual(conn.path, self.obj_path)
self.assertEqual(md5(b'6\r\n123456\r\n0\r\n\r\n').hexdigest(), resp) self.assertEqual(
md5(b'6\r\n123456\r\n0\r\n\r\n',
usedforsecurity=False).hexdigest(),
resp)
def test_direct_put_object_args(self): def test_direct_put_object_args(self):
# One test to cover all missing checks # One test to cover all missing checks
@ -891,7 +898,9 @@ class TestDirectClient(unittest.TestCase):
self.assertEqual(self.obj_path, conn.path) self.assertEqual(self.obj_path, conn.path)
self.assertEqual(conn.req_headers['Content-Length'], '0') self.assertEqual(conn.req_headers['Content-Length'], '0')
self.assertEqual(conn.req_headers['Content-Type'], 'Text') self.assertEqual(conn.req_headers['Content-Type'], 'Text')
self.assertEqual(md5(b'0\r\n\r\n').hexdigest(), resp) self.assertEqual(
md5(b'0\r\n\r\n', usedforsecurity=False).hexdigest(),
resp)
def test_direct_put_object_header_content_length(self): def test_direct_put_object_header_content_length(self):
contents = io.BytesIO(b'123456') contents = io.BytesIO(b'123456')
@ -906,7 +915,9 @@ class TestDirectClient(unittest.TestCase):
self.assertEqual(conn.port, self.node['port']) self.assertEqual(conn.port, self.node['port'])
self.assertEqual('PUT', conn.method) self.assertEqual('PUT', conn.method)
self.assertEqual(conn.req_headers['Content-length'], '6') self.assertEqual(conn.req_headers['Content-length'], '6')
self.assertEqual(md5(b'123456').hexdigest(), resp) self.assertEqual(
md5(b'123456', usedforsecurity=False).hexdigest(),
resp)
def test_retry(self): def test_retry(self):
headers = HeaderKeyDict({'key': 'value'}) headers = HeaderKeyDict({'key': 'value'})

View File

@ -18,7 +18,6 @@
from collections import defaultdict from collections import defaultdict
import errno import errno
from hashlib import md5
import io import io
import logging import logging
import six import six
@ -34,6 +33,7 @@ from eventlet import GreenPool, sleep, Queue
from eventlet.pools import Pool from eventlet.pools import Pool
from swift.common import memcached from swift.common import memcached
from swift.common.utils import md5
from mock import patch, MagicMock from mock import patch, MagicMock
from test.unit import debug_logger from test.unit import debug_logger
@ -337,7 +337,8 @@ class TestMemcached(unittest.TestCase):
mock = MockMemcached() mock = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool( memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
[(mock, mock)] * 2) [(mock, mock)] * 2)
cache_key = md5(b'some_key').hexdigest().encode('ascii') cache_key = md5(b'some_key',
usedforsecurity=False).hexdigest().encode('ascii')
memcache_client.set('some_key', [1, 2, 3]) memcache_client.set('some_key', [1, 2, 3])
self.assertEqual(memcache_client.get('some_key'), [1, 2, 3]) self.assertEqual(memcache_client.get('some_key'), [1, 2, 3])
@ -443,7 +444,8 @@ class TestMemcached(unittest.TestCase):
mock = MockMemcached() mock = MockMemcached()
memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool( memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool(
[(mock, mock)] * 2) [(mock, mock)] * 2)
cache_key = md5(b'some_key').hexdigest().encode('ascii') cache_key = md5(b'some_key',
usedforsecurity=False).hexdigest().encode('ascii')
memcache_client.incr('some_key', delta=5, time=55) memcache_client.incr('some_key', delta=5, time=55)
self.assertEqual(memcache_client.get('some_key'), b'5') self.assertEqual(memcache_client.get('some_key'), b'5')
@ -653,7 +655,7 @@ class TestMemcached(unittest.TestCase):
memcache_client.get_multi(('some_key2', 'some_key1'), 'multi_key'), memcache_client.get_multi(('some_key2', 'some_key1'), 'multi_key'),
[[4, 5, 6], [1, 2, 3]]) [[4, 5, 6], [1, 2, 3]])
for key in (b'some_key1', b'some_key2'): for key in (b'some_key1', b'some_key2'):
key = md5(key).hexdigest().encode('ascii') key = md5(key, usedforsecurity=False).hexdigest().encode('ascii')
self.assertIn(key, mock.cache) self.assertIn(key, mock.cache)
_junk, cache_timeout, _junk = mock.cache[key] _junk, cache_timeout, _junk = mock.cache[key]
self.assertEqual(cache_timeout, b'0') self.assertEqual(cache_timeout, b'0')
@ -662,7 +664,7 @@ class TestMemcached(unittest.TestCase):
{'some_key1': [1, 2, 3], 'some_key2': [4, 5, 6]}, 'multi_key', {'some_key1': [1, 2, 3], 'some_key2': [4, 5, 6]}, 'multi_key',
time=20) time=20)
for key in (b'some_key1', b'some_key2'): for key in (b'some_key1', b'some_key2'):
key = md5(key).hexdigest().encode('ascii') key = md5(key, usedforsecurity=False).hexdigest().encode('ascii')
_junk, cache_timeout, _junk = mock.cache[key] _junk, cache_timeout, _junk = mock.cache[key]
self.assertEqual(cache_timeout, b'20') self.assertEqual(cache_timeout, b'20')
@ -672,7 +674,7 @@ class TestMemcached(unittest.TestCase):
{'some_key1': [1, 2, 3], 'some_key2': [4, 5, 6]}, 'multi_key', {'some_key1': [1, 2, 3], 'some_key2': [4, 5, 6]}, 'multi_key',
time=fortydays) time=fortydays)
for key in (b'some_key1', b'some_key2'): for key in (b'some_key1', b'some_key2'):
key = md5(key).hexdigest().encode('ascii') key = md5(key, usedforsecurity=False).hexdigest().encode('ascii')
_junk, cache_timeout, _junk = mock.cache[key] _junk, cache_timeout, _junk = mock.cache[key]
self.assertAlmostEqual(float(cache_timeout), esttimeout, delta=1) self.assertAlmostEqual(float(cache_timeout), esttimeout, delta=1)
self.assertEqual(memcache_client.get_multi( self.assertEqual(memcache_client.get_multi(
@ -709,14 +711,15 @@ class TestMemcached(unittest.TestCase):
memcache_client.get_multi(('some_key1', 'some_key0'), 'multi_key'), memcache_client.get_multi(('some_key1', 'some_key0'), 'multi_key'),
[[4, 5, 6], [1, 2, 3]]) [[4, 5, 6], [1, 2, 3]])
for key in (b'some_key0', b'some_key1'): for key in (b'some_key0', b'some_key1'):
key = md5(key).hexdigest().encode('ascii') key = md5(key, usedforsecurity=False).hexdigest().encode('ascii')
self.assertIn(key, mock1.cache) self.assertIn(key, mock1.cache)
_junk, cache_timeout, _junk = mock1.cache[key] _junk, cache_timeout, _junk = mock1.cache[key]
self.assertEqual(cache_timeout, b'0') self.assertEqual(cache_timeout, b'0')
memcache_client.set('some_key0', [7, 8, 9]) memcache_client.set('some_key0', [7, 8, 9])
self.assertEqual(memcache_client.get('some_key0'), [7, 8, 9]) self.assertEqual(memcache_client.get('some_key0'), [7, 8, 9])
key = md5(b'some_key0').hexdigest().encode('ascii') key = md5(b'some_key0',
usedforsecurity=False).hexdigest().encode('ascii')
self.assertIn(key, mock2.cache) self.assertIn(key, mock2.cache)
# Delete 'some_key0' with server_key='multi_key' # Delete 'some_key0' with server_key='multi_key'

View File

@ -74,7 +74,7 @@ from swift.common.exceptions import Timeout, MessageTimeout, \
MimeInvalid MimeInvalid
from swift.common import utils from swift.common import utils
from swift.common.utils import is_valid_ip, is_valid_ipv4, is_valid_ipv6, \ from swift.common.utils import is_valid_ip, is_valid_ipv4, is_valid_ipv6, \
set_swift_dir set_swift_dir, md5
from swift.common.container_sync_realms import ContainerSyncRealms from swift.common.container_sync_realms import ContainerSyncRealms
from swift.common.header_key_dict import HeaderKeyDict from swift.common.header_key_dict import HeaderKeyDict
from swift.common.storage_policy import POLICIES, reload_storage_policies from swift.common.storage_policy import POLICIES, reload_storage_policies
@ -1024,6 +1024,13 @@ class TestUtils(unittest.TestCase):
def setUp(self): def setUp(self):
utils.HASH_PATH_SUFFIX = b'endcap' utils.HASH_PATH_SUFFIX = b'endcap'
utils.HASH_PATH_PREFIX = b'startcap' utils.HASH_PATH_PREFIX = b'startcap'
self.md5_test_data = "Openstack forever".encode('utf-8')
try:
self.md5_digest = hashlib.md5(self.md5_test_data).hexdigest()
self.fips_enabled = False
except ValueError:
self.md5_digest = '0d6dc3c588ae71a04ce9a6beebbbba06'
self.fips_enabled = True
def test_get_zero_indexed_base_string(self): def test_get_zero_indexed_base_string(self):
self.assertEqual(utils.get_zero_indexed_base_string('something', 0), self.assertEqual(utils.get_zero_indexed_base_string('something', 0),
@ -4501,6 +4508,79 @@ cluster_dfw1 = http://dfw1.host/v1/
self.assertEqual(msg, b'READY=1') self.assertEqual(msg, b'READY=1')
self.assertNotIn('NOTIFY_SOCKET', os.environ) self.assertNotIn('NOTIFY_SOCKET', os.environ)
def test_md5_with_data(self):
    """md5() seeded with data matches hashlib unless FIPS forbids it.

    The default call and usedforsecurity=True must behave exactly like
    hashlib.md5 on a non-FIPS system and raise ValueError under FIPS;
    usedforsecurity=False must succeed in both environments.
    """
    if self.fips_enabled:
        # On a FIPS enabled system instantiation throws a ValueError:
        # [digital envelope routines: EVP_DigestInit_ex] disabled for FIPS
        self.assertRaises(ValueError, md5, self.md5_test_data)
        self.assertRaises(
            ValueError, md5, self.md5_test_data, usedforsecurity=True)
    else:
        self.assertEqual(
            md5(self.md5_test_data).hexdigest(), self.md5_digest)
        self.assertEqual(
            md5(self.md5_test_data, usedforsecurity=True).hexdigest(),
            self.md5_digest)
    # Annotated as non-security: allowed even when FIPS mode is active.
    self.assertEqual(
        md5(self.md5_test_data, usedforsecurity=False).hexdigest(),
        self.md5_digest)
def test_md5_without_data(self):
    """md5() with no initial data supports incremental update().

    Security-context constructions (default and usedforsecurity=True)
    raise ValueError under FIPS; usedforsecurity=False always works.
    """
    if self.fips_enabled:
        # FIPS rejects any security-context md5 construction outright.
        self.assertRaises(ValueError, md5)
        self.assertRaises(ValueError, md5, usedforsecurity=True)
    else:
        # Default and explicit usedforsecurity=True are equivalent here.
        for kwargs in ({}, {'usedforsecurity': True}):
            hasher = md5(**kwargs)
            hasher.update(self.md5_test_data)
            self.assertEqual(hasher.hexdigest(), self.md5_digest)
    # The non-security annotation must work regardless of FIPS mode.
    hasher = md5(usedforsecurity=False)
    hasher.update(self.md5_test_data)
    self.assertEqual(hasher.hexdigest(), self.md5_digest)
@unittest.skipIf(sys.version_info.major == 2,
                 "hashlib.md5 does not raise TypeError here in py2")
def test_string_data_raises_type_error(self):
    """Passing unicode str (not bytes) to md5 must be rejected.

    Under FIPS the constructor fails first with ValueError; otherwise
    the hash object rejects str input with TypeError.
    """
    # FIPS blocks security-context construction before the str is seen.
    expected = ValueError if self.fips_enabled else TypeError
    self.assertRaises(expected, hashlib.md5, u'foo')
    self.assertRaises(expected, md5, u'foo')
    self.assertRaises(expected, md5, u'foo', usedforsecurity=True)
    # usedforsecurity=False bypasses the FIPS restriction, so the
    # underlying constructor always gets far enough to raise TypeError.
    self.assertRaises(TypeError, md5, u'foo', usedforsecurity=False)
def test_none_data_raises_type_error(self):
    """Passing None as the data argument to md5 must be rejected.

    Under FIPS the security-context constructor fails first with
    ValueError; otherwise None input raises TypeError.
    """
    # FIPS blocks security-context construction before None is inspected.
    expected = ValueError if self.fips_enabled else TypeError
    self.assertRaises(expected, hashlib.md5, None)
    self.assertRaises(expected, md5, None)
    self.assertRaises(expected, md5, None, usedforsecurity=True)
    # usedforsecurity=False bypasses the FIPS restriction, so the
    # underlying constructor always gets far enough to raise TypeError.
    self.assertRaises(TypeError, md5, None, usedforsecurity=False)
class ResellerConfReader(unittest.TestCase): class ResellerConfReader(unittest.TestCase):
@ -8180,7 +8260,7 @@ class TestShardRange(unittest.TestCase):
def test_make_path(self): def test_make_path(self):
ts = utils.Timestamp.now() ts = utils.Timestamp.now()
actual = utils.ShardRange.make_path('a', 'root', 'parent', ts, 0) actual = utils.ShardRange.make_path('a', 'root', 'parent', ts, 0)
parent_hash = hashlib.md5(b'parent').hexdigest() parent_hash = md5(b'parent', usedforsecurity=False).hexdigest()
self.assertEqual('a/root-%s-%s-0' % (parent_hash, ts.internal), actual) self.assertEqual('a/root-%s-%s-0' % (parent_hash, ts.internal), actual)
actual = utils.ShardRange.make_path('a', 'root', 'parent', ts, 3) actual = utils.ShardRange.make_path('a', 'root', 'parent', ts, 3)
self.assertEqual('a/root-%s-%s-3' % (parent_hash, ts.internal), actual) self.assertEqual('a/root-%s-%s-3' % (parent_hash, ts.internal), actual)

View File

@ -17,7 +17,6 @@
import base64 import base64
import errno import errno
import os import os
import hashlib
import inspect import inspect
import unittest import unittest
from time import sleep, time from time import sleep, time
@ -40,7 +39,7 @@ from swift.container.backend import ContainerBroker, \
from swift.common.db import DatabaseAlreadyExists, GreenDBConnection from swift.common.db import DatabaseAlreadyExists, GreenDBConnection
from swift.common.request_helpers import get_reserved_name from swift.common.request_helpers import get_reserved_name
from swift.common.utils import Timestamp, encode_timestamps, hash_path, \ from swift.common.utils import Timestamp, encode_timestamps, hash_path, \
ShardRange, make_db_file_path ShardRange, make_db_file_path, md5
from swift.common.storage_policy import POLICIES from swift.common.storage_policy import POLICIES
import mock import mock
@ -3161,7 +3160,7 @@ class TestContainerBroker(unittest.TestCase):
def md5_str(s): def md5_str(s):
if not isinstance(s, bytes): if not isinstance(s, bytes):
s = s.encode('utf8') s = s.encode('utf8')
return hashlib.md5(s).hexdigest() return md5(s, usedforsecurity=False).hexdigest()
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(Timestamp('1').internal, 0) broker.initialize(Timestamp('1').internal, 0)

View File

@ -12,7 +12,6 @@
# implied. # implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import hashlib
import json import json
import random import random
@ -42,7 +41,7 @@ from swift.container.sharder import ContainerSharder, sharding_enabled, \
CleavingContext, DEFAULT_SHARD_SHRINK_POINT, \ CleavingContext, DEFAULT_SHARD_SHRINK_POINT, \
DEFAULT_SHARD_CONTAINER_THRESHOLD DEFAULT_SHARD_CONTAINER_THRESHOLD
from swift.common.utils import ShardRange, Timestamp, hash_path, \ from swift.common.utils import ShardRange, Timestamp, hash_path, \
encode_timestamps, parse_db_filename, quorum_size, Everything encode_timestamps, parse_db_filename, quorum_size, Everything, md5
from test import annotate_failure from test import annotate_failure
from test.unit import debug_logger, FakeRing, \ from test.unit import debug_logger, FakeRing, \
@ -65,7 +64,8 @@ class BaseTestSharder(unittest.TestCase):
def _make_broker(self, account='a', container='c', epoch=None, def _make_broker(self, account='a', container='c', epoch=None,
device='sda', part=0, hash_=None): device='sda', part=0, hash_=None):
hash_ = hash_ or hashlib.md5(container.encode('utf-8')).hexdigest() hash_ = hash_ or md5(
container.encode('utf-8'), usedforsecurity=False).hexdigest()
datadir = os.path.join( datadir = os.path.join(
self.tempdir, device, 'containers', str(part), hash_[-3:], hash_) self.tempdir, device, 'containers', str(part), hash_[-3:], hash_)
if epoch: if epoch:

View File

@ -12,7 +12,6 @@
# implied. # implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import hashlib
import os import os
import shutil import shutil
import tempfile import tempfile
@ -20,7 +19,7 @@ import unittest
from swift.common import utils from swift.common import utils
from swift.common.storage_policy import POLICIES from swift.common.storage_policy import POLICIES
from swift.common.utils import Timestamp from swift.common.utils import Timestamp, md5
def write_diskfile(df, timestamp, data=b'test data', frag_index=None, def write_diskfile(df, timestamp, data=b'test data', frag_index=None,
@ -31,7 +30,7 @@ def write_diskfile(df, timestamp, data=b'test data', frag_index=None,
with df.create() as writer: with df.create() as writer:
writer.write(data) writer.write(data)
metadata = { metadata = {
'ETag': hashlib.md5(data).hexdigest(), 'ETag': md5(data, usedforsecurity=False).hexdigest(),
'X-Timestamp': timestamp.internal, 'X-Timestamp': timestamp.internal,
'Content-Length': str(len(data)), 'Content-Length': str(len(data)),
} }

View File

@ -23,7 +23,6 @@ import time
import string import string
import xattr import xattr
from shutil import rmtree from shutil import rmtree
from hashlib import md5
from tempfile import mkdtemp from tempfile import mkdtemp
import textwrap import textwrap
from os.path import dirname, basename from os.path import dirname, basename
@ -35,7 +34,7 @@ from swift.obj.diskfile import (
DiskFileManager, ECDiskFileManager, AuditLocation, clear_auditor_status, DiskFileManager, ECDiskFileManager, AuditLocation, clear_auditor_status,
get_auditor_status, HASH_FILE, HASH_INVALIDATIONS_FILE) get_auditor_status, HASH_FILE, HASH_INVALIDATIONS_FILE)
from swift.common.utils import ( from swift.common.utils import (
mkdirs, normalize_timestamp, Timestamp, readconf) mkdirs, normalize_timestamp, Timestamp, readconf, md5)
from swift.common.storage_policy import ( from swift.common.storage_policy import (
ECStoragePolicy, StoragePolicy, POLICIES, EC_POLICY) ECStoragePolicy, StoragePolicy, POLICIES, EC_POLICY)
from test.unit.obj.common import write_diskfile from test.unit.obj.common import write_diskfile
@ -160,7 +159,7 @@ class TestAuditor(unittest.TestCase):
data = b'0' * 1024 data = b'0' * 1024
if disk_file.policy.policy_type == EC_POLICY: if disk_file.policy.policy_type == EC_POLICY:
data = disk_file.policy.pyeclib_driver.encode(data)[0] data = disk_file.policy.pyeclib_driver.encode(data)[0]
etag = md5() etag = md5(usedforsecurity=False)
with disk_file.create() as writer: with disk_file.create() as writer:
writer.write(data) writer.write(data)
etag.update(data) etag.update(data)
@ -198,7 +197,7 @@ class TestAuditor(unittest.TestCase):
# simulate a PUT # simulate a PUT
now = time.time() now = time.time()
data = b'boots and cats and ' * 1024 data = b'boots and cats and ' * 1024
hasher = md5() hasher = md5(usedforsecurity=False)
with disk_file.create() as writer: with disk_file.create() as writer:
writer.write(data) writer.write(data)
hasher.update(data) hasher.update(data)
@ -257,14 +256,16 @@ class TestAuditor(unittest.TestCase):
checksum = xattr.getxattr( checksum = xattr.getxattr(
file_path, "user.swift.metadata_checksum") file_path, "user.swift.metadata_checksum")
self.assertEqual(checksum, self.assertEqual(
md5(metadata).hexdigest().encode('ascii')) checksum,
(md5(metadata, usedforsecurity=False).hexdigest()
.encode('ascii')))
def test_object_audit_diff_data(self): def test_object_audit_diff_data(self):
auditor_worker = auditor.AuditorWorker(self.conf, self.logger, auditor_worker = auditor.AuditorWorker(self.conf, self.logger,
self.rcache, self.devices) self.rcache, self.devices)
data = b'0' * 1024 data = b'0' * 1024
etag = md5() etag = md5(usedforsecurity=False)
timestamp = str(normalize_timestamp(time.time())) timestamp = str(normalize_timestamp(time.time()))
with self.disk_file.create() as writer: with self.disk_file.create() as writer:
writer.write(data) writer.write(data)
@ -287,7 +288,7 @@ class TestAuditor(unittest.TestCase):
AuditLocation(self.disk_file._datadir, 'sda', '0', AuditLocation(self.disk_file._datadir, 'sda', '0',
policy=POLICIES.legacy)) policy=POLICIES.legacy))
self.assertEqual(auditor_worker.quarantines, pre_quarantines) self.assertEqual(auditor_worker.quarantines, pre_quarantines)
etag = md5(b'1' + b'0' * 1023).hexdigest() etag = md5(b'1' + b'0' * 1023, usedforsecurity=False).hexdigest()
metadata['ETag'] = etag metadata['ETag'] = etag
with self.disk_file.create() as writer: with self.disk_file.create() as writer:
@ -305,7 +306,7 @@ class TestAuditor(unittest.TestCase):
def do_test(data): def do_test(data):
# create diskfile and set ETag and content-length to match the data # create diskfile and set ETag and content-length to match the data
etag = md5(data).hexdigest() etag = md5(data, usedforsecurity=False).hexdigest()
timestamp = str(normalize_timestamp(time.time())) timestamp = str(normalize_timestamp(time.time()))
with disk_file.create() as writer: with disk_file.create() as writer:
writer.write(data) writer.write(data)
@ -515,7 +516,7 @@ class TestAuditor(unittest.TestCase):
policy=self.disk_file.policy) policy=self.disk_file.policy)
data = b'VERIFY' data = b'VERIFY'
etag = md5() etag = md5(usedforsecurity=False)
timestamp = str(normalize_timestamp(time.time())) timestamp = str(normalize_timestamp(time.time()))
with self.disk_file.create() as writer: with self.disk_file.create() as writer:
writer.write(data) writer.write(data)
@ -593,7 +594,7 @@ class TestAuditor(unittest.TestCase):
timestamp = str(normalize_timestamp(time.time())) timestamp = str(normalize_timestamp(time.time()))
pre_errors = auditor_worker.errors pre_errors = auditor_worker.errors
data = b'0' * 1024 data = b'0' * 1024
etag = md5() etag = md5(usedforsecurity=False)
with self.disk_file.create() as writer: with self.disk_file.create() as writer:
writer.write(data) writer.write(data)
etag.update(data) etag.update(data)
@ -622,7 +623,7 @@ class TestAuditor(unittest.TestCase):
with df.create() as writer: with df.create() as writer:
writer.write(data) writer.write(data)
metadata = { metadata = {
'ETag': md5(data).hexdigest(), 'ETag': md5(data, usedforsecurity=False).hexdigest(),
'X-Timestamp': timestamp, 'X-Timestamp': timestamp,
'Content-Length': str(os.fstat(writer._fd).st_size), 'Content-Length': str(os.fstat(writer._fd).st_size),
} }
@ -648,7 +649,7 @@ class TestAuditor(unittest.TestCase):
with df.create() as writer: with df.create() as writer:
writer.write(data) writer.write(data)
metadata = { metadata = {
'ETag': md5(data).hexdigest(), 'ETag': md5(data, usedforsecurity=False).hexdigest(),
'X-Timestamp': timestamp, 'X-Timestamp': timestamp,
'Content-Length': str(os.fstat(writer._fd).st_size), 'Content-Length': str(os.fstat(writer._fd).st_size),
} }
@ -696,7 +697,7 @@ class TestAuditor(unittest.TestCase):
with self.disk_file.create() as writer: with self.disk_file.create() as writer:
writer.write(data) writer.write(data)
metadata = { metadata = {
'ETag': md5(data).hexdigest(), 'ETag': md5(data, usedforsecurity=False).hexdigest(),
'X-Timestamp': ts.normal, 'X-Timestamp': ts.normal,
'Content-Length': str(os.fstat(writer._fd).st_size), 'Content-Length': str(os.fstat(writer._fd).st_size),
} }
@ -768,7 +769,7 @@ class TestAuditor(unittest.TestCase):
# pretend that we logged (and reset counters) just now # pretend that we logged (and reset counters) just now
auditor_worker.last_logged = time.time() auditor_worker.last_logged = time.time()
data = b'0' * 1024 data = b'0' * 1024
etag = md5() etag = md5(usedforsecurity=False)
with self.disk_file.create() as writer: with self.disk_file.create() as writer:
writer.write(data) writer.write(data)
etag.update(data) etag.update(data)
@ -792,7 +793,7 @@ class TestAuditor(unittest.TestCase):
timestamp = str(normalize_timestamp(time.time())) timestamp = str(normalize_timestamp(time.time()))
pre_quarantines = auditor_worker.quarantines pre_quarantines = auditor_worker.quarantines
data = b'0' * 10 data = b'0' * 10
etag = md5() etag = md5(usedforsecurity=False)
with self.disk_file.create() as writer: with self.disk_file.create() as writer:
writer.write(data) writer.write(data)
etag.update(data) etag.update(data)
@ -808,7 +809,7 @@ class TestAuditor(unittest.TestCase):
self.disk_file = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'ob', self.disk_file = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'ob',
policy=POLICIES.legacy) policy=POLICIES.legacy)
data = b'1' * 10 data = b'1' * 10
etag = md5() etag = md5(usedforsecurity=False)
with self.disk_file.create() as writer: with self.disk_file.create() as writer:
writer.write(data) writer.write(data)
etag.update(data) etag.update(data)
@ -828,7 +829,7 @@ class TestAuditor(unittest.TestCase):
self.auditor = auditor.ObjectAuditor(self.conf) self.auditor = auditor.ObjectAuditor(self.conf)
self.auditor.log_time = 0 self.auditor.log_time = 0
data = b'0' * 1024 data = b'0' * 1024
etag = md5() etag = md5(usedforsecurity=False)
with self.disk_file.create() as writer: with self.disk_file.create() as writer:
writer.write(data) writer.write(data)
etag.update(data) etag.update(data)
@ -841,7 +842,7 @@ class TestAuditor(unittest.TestCase):
} }
writer.put(metadata) writer.put(metadata)
writer.commit(Timestamp(timestamp)) writer.commit(Timestamp(timestamp))
etag = md5() etag = md5(usedforsecurity=False)
etag.update(b'1' + b'0' * 1023) etag.update(b'1' + b'0' * 1023)
etag = etag.hexdigest() etag = etag.hexdigest()
metadata['ETag'] = etag metadata['ETag'] = etag
@ -863,7 +864,7 @@ class TestAuditor(unittest.TestCase):
timestamp = Timestamp.now() timestamp = Timestamp.now()
self.auditor = auditor.ObjectAuditor(self.conf) self.auditor = auditor.ObjectAuditor(self.conf)
self.auditor.log_time = 0 self.auditor.log_time = 0
etag = md5() etag = md5(usedforsecurity=False)
with self.disk_file.create() as writer: with self.disk_file.create() as writer:
etag = etag.hexdigest() etag = etag.hexdigest()
metadata = { metadata = {
@ -873,7 +874,7 @@ class TestAuditor(unittest.TestCase):
} }
writer.put(metadata) writer.put(metadata)
writer.commit(Timestamp(timestamp)) writer.commit(Timestamp(timestamp))
etag = md5() etag = md5(usedforsecurity=False)
etag = etag.hexdigest() etag = etag.hexdigest()
metadata['ETag'] = etag metadata['ETag'] = etag
write_metadata(writer._fd, metadata) write_metadata(writer._fd, metadata)
@ -1424,7 +1425,7 @@ class TestAuditor(unittest.TestCase):
ts = Timestamp(time.time()) ts = Timestamp(time.time())
with self.disk_file.create() as writer: with self.disk_file.create() as writer:
metadata = { metadata = {
'ETag': md5(b'').hexdigest(), 'ETag': md5(b'', usedforsecurity=False).hexdigest(),
'X-Timestamp': ts.normal, 'X-Timestamp': ts.normal,
'Content-Length': str(os.fstat(writer._fd).st_size), 'Content-Length': str(os.fstat(writer._fd).st_size),
} }

View File

@ -35,7 +35,6 @@ from random import shuffle, randint
from shutil import rmtree from shutil import rmtree
from time import time from time import time
from tempfile import mkdtemp from tempfile import mkdtemp
from hashlib import md5 as _md5
from contextlib import closing, contextmanager from contextlib import closing, contextmanager
from gzip import GzipFile from gzip import GzipFile
import pyeclib.ec_iface import pyeclib.ec_iface
@ -50,7 +49,7 @@ from test.unit import (mock as unit_mock, temptree, mock_check_drive,
from swift.obj import diskfile from swift.obj import diskfile
from swift.common import utils from swift.common import utils
from swift.common.utils import hash_path, mkdirs, Timestamp, \ from swift.common.utils import hash_path, mkdirs, Timestamp, \
encode_timestamps, O_TMPFILE encode_timestamps, O_TMPFILE, md5 as _md5
from swift.common import ring from swift.common import ring
from swift.common.splice import splice from swift.common.splice import splice
from swift.common.exceptions import DiskFileNotExist, DiskFileQuarantined, \ from swift.common.exceptions import DiskFileNotExist, DiskFileQuarantined, \
@ -76,7 +75,7 @@ class md5(object):
def __init__(self, s=b''): def __init__(self, s=b''):
if not isinstance(s, bytes): if not isinstance(s, bytes):
s = s.encode('ascii') s = s.encode('ascii')
self.md = _md5(s) self.md = _md5(s, usedforsecurity=False)
def update(self, s=b''): def update(self, s=b''):
if not isinstance(s, bytes): if not isinstance(s, bytes):

View File

@ -16,7 +16,6 @@ import itertools
import json import json
import unittest import unittest
import os import os
from hashlib import md5
import mock import mock
import six import six
import six.moves.cPickle as pickle import six.moves.cPickle as pickle
@ -37,7 +36,7 @@ from six.moves.urllib.parse import unquote
from swift.common import utils from swift.common import utils
from swift.common.exceptions import DiskFileError from swift.common.exceptions import DiskFileError
from swift.common.header_key_dict import HeaderKeyDict from swift.common.header_key_dict import HeaderKeyDict
from swift.common.utils import dump_recon_cache from swift.common.utils import dump_recon_cache, md5
from swift.obj import diskfile, reconstructor as object_reconstructor from swift.obj import diskfile, reconstructor as object_reconstructor
from swift.common import ring from swift.common import ring
from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy, from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy,
@ -4414,7 +4413,7 @@ class TestObjectReconstructor(BaseTestObjectReconstructor):
metadata = { metadata = {
'X-Timestamp': ts.internal, 'X-Timestamp': ts.internal,
'Content-Length': len(test_data), 'Content-Length': len(test_data),
'Etag': md5(test_data).hexdigest(), 'Etag': md5(test_data, usedforsecurity=False).hexdigest(),
'X-Object-Sysmeta-Ec-Frag-Index': frag_index, 'X-Object-Sysmeta-Ec-Frag-Index': frag_index,
} }
writer.put(metadata) writer.put(metadata)
@ -4553,7 +4552,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
node['backend_index'] = self.policy.get_backend_index(node['index']) node['backend_index'] = self.policy.get_backend_index(node['index'])
test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data)
broken_body = ec_archive_bodies.pop(1) broken_body = ec_archive_bodies.pop(1)
@ -4583,8 +4582,8 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
self.assertEqual(0, df.content_length) self.assertEqual(0, df.content_length)
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body).hexdigest()) md5(broken_body, usedforsecurity=False).hexdigest())
self.assertEqual(len(part_nodes) - 1, len(called_headers), self.assertEqual(len(part_nodes) - 1, len(called_headers),
'Expected %d calls, got %r' % (len(part_nodes) - 1, 'Expected %d calls, got %r' % (len(part_nodes) - 1,
called_headers)) called_headers))
@ -4617,7 +4616,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
node['backend_index'] = self.policy.get_backend_index(node['index']) node['backend_index'] = self.policy.get_backend_index(node['index'])
test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data)
broken_body = ec_archive_bodies.pop(4) broken_body = ec_archive_bodies.pop(4)
@ -4641,8 +4640,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
job, node, dict(self.obj_metadata)) job, node, dict(self.obj_metadata))
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
def test_reconstruct_fa_error_with_invalid_header(self): def test_reconstruct_fa_error_with_invalid_header(self):
job = { job = {
@ -4654,7 +4654,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
node['backend_index'] = self.policy.get_backend_index(node['index']) node['backend_index'] = self.policy.get_backend_index(node['index'])
test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data)
broken_body = ec_archive_bodies.pop(4) broken_body = ec_archive_bodies.pop(4)
@ -4687,8 +4687,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
# ... this bad response should be ignored like any other failure # ... this bad response should be ignored like any other failure
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
def test_reconstruct_parity_fa_with_data_node_failure(self): def test_reconstruct_parity_fa_with_data_node_failure(self):
job = { job = {
@ -4702,7 +4703,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
# make up some data (trim some amount to make it unaligned with # make up some data (trim some amount to make it unaligned with
# segment size) # segment size)
test_data = (b'rebuild' * self.policy.ec_segment_size)[:-454] test_data = (b'rebuild' * self.policy.ec_segment_size)[:-454]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data)
# the scheme is 10+4, so this gets a parity node # the scheme is 10+4, so this gets a parity node
broken_body = ec_archive_bodies.pop(-4) broken_body = ec_archive_bodies.pop(-4)
@ -4724,8 +4725,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
job, node, dict(self.obj_metadata)) job, node, dict(self.obj_metadata))
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
def test_reconstruct_fa_exceptions_fails(self): def test_reconstruct_fa_exceptions_fails(self):
job = { job = {
@ -4792,7 +4794,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
node['backend_index'] = self.policy.get_backend_index(node['index']) node['backend_index'] = self.policy.get_backend_index(node['index'])
test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data)
# bad response # bad response
@ -4826,8 +4828,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
job, node, self.obj_metadata) job, node, self.obj_metadata)
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
# no error and warning # no error and warning
self.assertFalse(self.logger.get_lines_for_level('error')) self.assertFalse(self.logger.get_lines_for_level('error'))
@ -4843,7 +4846,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
node['backend_index'] = self.policy.get_backend_index(node['index']) node['backend_index'] = self.policy.get_backend_index(node['index'])
test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data)
broken_body = ec_archive_bodies.pop(1) broken_body = ec_archive_bodies.pop(1)
@ -4865,8 +4868,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
job, node, dict(self.obj_metadata)) job, node, dict(self.obj_metadata))
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
# one newer etag won't spoil the bunch # one newer etag won't spoil the bunch
new_index = random.randint(0, self.policy.ec_ndata - 1) new_index = random.randint(0, self.policy.ec_ndata - 1)
@ -4881,8 +4885,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
job, node, dict(self.obj_metadata)) job, node, dict(self.obj_metadata))
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
# no error and warning # no error and warning
self.assertFalse(self.logger.get_lines_for_level('error')) self.assertFalse(self.logger.get_lines_for_level('error'))
@ -4898,7 +4903,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
node['backend_index'] = self.policy.get_backend_index(node['index']) node['backend_index'] = self.policy.get_backend_index(node['index'])
test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data)
broken_body = ec_archive_bodies.pop(1) broken_body = ec_archive_bodies.pop(1)
@ -4917,8 +4922,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
job, node, dict(self.obj_metadata)) job, node, dict(self.obj_metadata))
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
# a response at same timestamp but different etag won't spoil the bunch # a response at same timestamp but different etag won't spoil the bunch
# N.B. (FIXME). if we choose the first response as garbage, the # N.B. (FIXME). if we choose the first response as garbage, the
@ -4937,8 +4943,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
job, node, dict(self.obj_metadata)) job, node, dict(self.obj_metadata))
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
# expect an error log but no warnings # expect an error log but no warnings
error_log_lines = self.logger.get_lines_for_level('error') error_log_lines = self.logger.get_lines_for_level('error')
@ -4968,7 +4975,8 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
archive_bodies = encode_frag_archive_bodies(self.policy, body) archive_bodies = encode_frag_archive_bodies(self.policy, body)
# pop the index to the destination node # pop the index to the destination node
archive_bodies.pop(1) archive_bodies.pop(1)
key = (md5(body).hexdigest(), next(ts).internal, bool(i % 2)) key = (md5(body, usedforsecurity=False).hexdigest(),
next(ts).internal, bool(i % 2))
ec_archive_dict[key] = archive_bodies ec_archive_dict[key] = archive_bodies
responses = list() responses = list()
@ -5041,7 +5049,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
node['backend_index'] = self.policy.get_backend_index(node['index']) node['backend_index'] = self.policy.get_backend_index(node['index'])
test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data)
# instead of popping the broken body, we'll just leave it in the list # instead of popping the broken body, we'll just leave it in the list
@ -5062,8 +5070,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
job, node, self.obj_metadata) job, node, self.obj_metadata)
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
# no error, no warning # no error, no warning
self.assertFalse(self.logger.get_lines_for_level('error')) self.assertFalse(self.logger.get_lines_for_level('error'))
@ -5100,7 +5109,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
node['backend_index'] = self.policy.get_backend_index(node['index']) node['backend_index'] = self.policy.get_backend_index(node['index'])
test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data)
broken_body = ec_archive_bodies.pop(1) broken_body = ec_archive_bodies.pop(1)
@ -5122,8 +5131,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
job, node, self.obj_metadata) job, node, self.obj_metadata)
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
# no error and warning # no error and warning
self.assertFalse(self.logger.get_lines_for_level('error')) self.assertFalse(self.logger.get_lines_for_level('error'))
@ -5150,7 +5160,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
node['backend_index'] = self.policy.get_backend_index(node['index']) node['backend_index'] = self.policy.get_backend_index(node['index'])
test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data)
broken_body = ec_archive_bodies.pop(1) broken_body = ec_archive_bodies.pop(1)
@ -5180,8 +5190,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
job, node, self.obj_metadata) job, node, self.obj_metadata)
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
# no errors # no errors
self.assertFalse(self.logger.get_lines_for_level('error')) self.assertFalse(self.logger.get_lines_for_level('error'))
@ -5222,7 +5233,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
node['backend_index'] = self.policy.get_backend_index(node['index']) node['backend_index'] = self.policy.get_backend_index(node['index'])
test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data)
broken_body = ec_archive_bodies.pop(1) broken_body = ec_archive_bodies.pop(1)
@ -5247,8 +5258,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor):
job, node, self.obj_metadata) job, node, self.obj_metadata)
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
# no errors # no errors
self.assertFalse(self.logger.get_lines_for_level('error')) self.assertFalse(self.logger.get_lines_for_level('error'))
@ -5305,7 +5317,7 @@ class TestObjectReconstructorECDuplicationFactor(TestObjectReconstructor):
} }
test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data)
broken_body = ec_archive_bodies.pop(index) broken_body = ec_archive_bodies.pop(index)
@ -5343,8 +5355,9 @@ class TestObjectReconstructorECDuplicationFactor(TestObjectReconstructor):
job, node, metadata) job, node, metadata)
fixed_body = b''.join(df.reader()) fixed_body = b''.join(df.reader())
self.assertEqual(len(fixed_body), len(broken_body)) self.assertEqual(len(fixed_body), len(broken_body))
self.assertEqual(md5(fixed_body).hexdigest(), self.assertEqual(
md5(broken_body).hexdigest()) md5(fixed_body, usedforsecurity=False).hexdigest(),
md5(broken_body, usedforsecurity=False).hexdigest())
for called_header in called_headers: for called_header in called_headers:
called_header = HeaderKeyDict(called_header) called_header = HeaderKeyDict(called_header)
self.assertIn('Content-Length', called_header) self.assertIn('Content-Length', called_header)

View File

@ -31,7 +31,6 @@ import random
from shutil import rmtree from shutil import rmtree
from time import gmtime, strftime, time, struct_time from time import gmtime, strftime, time, struct_time
from tempfile import mkdtemp from tempfile import mkdtemp
from hashlib import md5
from collections import defaultdict from collections import defaultdict
from contextlib import contextmanager from contextlib import contextmanager
from textwrap import dedent from textwrap import dedent
@ -53,7 +52,7 @@ from swift.common import utils, bufferedhttp
from swift.common.header_key_dict import HeaderKeyDict from swift.common.header_key_dict import HeaderKeyDict
from swift.common.utils import hash_path, mkdirs, normalize_timestamp, \ from swift.common.utils import hash_path, mkdirs, normalize_timestamp, \
NullLogger, storage_directory, public, replication, encode_timestamps, \ NullLogger, storage_directory, public, replication, encode_timestamps, \
Timestamp Timestamp, md5
from swift.common import constraints from swift.common import constraints
from swift.common.request_helpers import get_reserved_name from swift.common.request_helpers import get_reserved_name
from swift.common.swob import Request, WsgiBytesIO from swift.common.swob import Request, WsgiBytesIO
@ -261,7 +260,7 @@ class TestObjectController(unittest.TestCase):
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers) headers=headers)
req.body = b'VERIFY' req.body = b'VERIFY'
etag = '"%s"' % md5(b'VERIFY').hexdigest() etag = '"%s"' % md5(b'VERIFY', usedforsecurity=False).hexdigest()
resp = req.get_response(self.object_controller) resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 201) self.assertEqual(resp.status_int, 201)
self.assertEqual(dict(resp.headers), { self.assertEqual(dict(resp.headers), {
@ -1725,9 +1724,10 @@ class TestObjectController(unittest.TestCase):
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'}, 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
environ={'REQUEST_METHOD': 'PUT'}) environ={'REQUEST_METHOD': 'PUT'})
obj_etag = md5(b"obj data").hexdigest() obj_etag = md5(b"obj data", usedforsecurity=False).hexdigest()
footer_meta = json.dumps({"Etag": obj_etag}).encode('ascii') footer_meta = json.dumps({"Etag": obj_etag}).encode('ascii')
footer_meta_cksum = md5(footer_meta).hexdigest().encode('ascii') footer_meta_cksum = md5(
footer_meta, usedforsecurity=False).hexdigest().encode('ascii')
req.body = b"\r\n".join(( req.body = b"\r\n".join((
b"--boundary", b"--boundary",
@ -1773,11 +1773,12 @@ class TestObjectController(unittest.TestCase):
'/sda1/p/a/c/o', headers=headers, '/sda1/p/a/c/o', headers=headers,
environ={'REQUEST_METHOD': 'PUT'}) environ={'REQUEST_METHOD': 'PUT'})
obj_etag = md5(b"obj data").hexdigest() obj_etag = md5(b"obj data", usedforsecurity=False).hexdigest()
footers = {'Etag': obj_etag} footers = {'Etag': obj_etag}
footers.update(override_footers) footers.update(override_footers)
footer_meta = json.dumps(footers).encode('ascii') footer_meta = json.dumps(footers).encode('ascii')
footer_meta_cksum = md5(footer_meta).hexdigest().encode('ascii') footer_meta_cksum = md5(
footer_meta, usedforsecurity=False).hexdigest().encode('ascii')
req.body = b"\r\n".join(( req.body = b"\r\n".join((
b"--boundary", b"--boundary",
@ -1863,9 +1864,10 @@ class TestObjectController(unittest.TestCase):
'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'}, 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'},
environ={'REQUEST_METHOD': 'PUT'}) environ={'REQUEST_METHOD': 'PUT'})
footers = {"Etag": md5(b"green").hexdigest()} footers = {"Etag": md5(b"green", usedforsecurity=False).hexdigest()}
footer_meta = json.dumps(footers).encode('ascii') footer_meta = json.dumps(footers).encode('ascii')
footer_meta_cksum = md5(footer_meta).hexdigest().encode('ascii') footer_meta_cksum = md5(
footer_meta, usedforsecurity=False).hexdigest().encode('ascii')
req.body = b"\r\n".join(( req.body = b"\r\n".join((
b"--boundary", b"--boundary",
@ -1899,7 +1901,8 @@ class TestObjectController(unittest.TestCase):
'X-Object-Meta-X': 'Y', 'X-Object-Meta-X': 'Y',
'X-Object-Sysmeta-X': 'Y', 'X-Object-Sysmeta-X': 'Y',
}).encode('ascii') }).encode('ascii')
footer_meta_cksum = md5(footer_meta).hexdigest().encode('ascii') footer_meta_cksum = md5(
footer_meta, usedforsecurity=False).hexdigest().encode('ascii')
req.body = b"\r\n".join(( req.body = b"\r\n".join((
b"--boundary", b"--boundary",
@ -1937,7 +1940,7 @@ class TestObjectController(unittest.TestCase):
environ={'REQUEST_METHOD': 'PUT'}) environ={'REQUEST_METHOD': 'PUT'})
footer_meta = json.dumps({ footer_meta = json.dumps({
"Etag": md5(b"obj data").hexdigest() "Etag": md5(b"obj data", usedforsecurity=False).hexdigest()
}).encode('ascii') }).encode('ascii')
req.body = b"\r\n".join(( req.body = b"\r\n".join((
@ -1967,10 +1970,11 @@ class TestObjectController(unittest.TestCase):
environ={'REQUEST_METHOD': 'PUT'}) environ={'REQUEST_METHOD': 'PUT'})
footer_meta = json.dumps({ footer_meta = json.dumps({
"Etag": md5(b"obj data").hexdigest() "Etag": md5(b"obj data", usedforsecurity=False).hexdigest()
}).encode('ascii') }).encode('ascii')
bad_footer_meta_cksum = \ bad_footer_meta_cksum = \
md5(footer_meta + b"bad").hexdigest().encode('ascii') md5(footer_meta + b"bad",
usedforsecurity=False).hexdigest().encode('ascii')
req.body = b"\r\n".join(( req.body = b"\r\n".join((
b"--boundary", b"--boundary",
@ -1999,7 +2003,8 @@ class TestObjectController(unittest.TestCase):
environ={'REQUEST_METHOD': 'PUT'}) environ={'REQUEST_METHOD': 'PUT'})
footer_meta = b"{{{[[{{[{[[{[{[[{{{[{{{{[[{{[{[" footer_meta = b"{{{[[{{[{[[{[{[[{{{[{{{{[[{{[{["
footer_meta_cksum = md5(footer_meta).hexdigest().encode('ascii') footer_meta_cksum = md5(
footer_meta, usedforsecurity=False).hexdigest().encode('ascii')
req.body = b"\r\n".join(( req.body = b"\r\n".join((
b"--boundary", b"--boundary",
@ -2030,7 +2035,8 @@ class TestObjectController(unittest.TestCase):
footer_meta = json.dumps({ footer_meta = json.dumps({
'X-Object-Meta-Mint': 'pepper' 'X-Object-Meta-Mint': 'pepper'
}).encode('ascii') }).encode('ascii')
footer_meta_cksum = md5(footer_meta).hexdigest().encode('ascii') footer_meta_cksum = md5(
footer_meta, usedforsecurity=False).hexdigest().encode('ascii')
req.body = b"\r\n".join(( req.body = b"\r\n".join((
b"--boundary", b"--boundary",
@ -3921,7 +3927,7 @@ class TestObjectController(unittest.TestCase):
policy=POLICIES.legacy) policy=POLICIES.legacy)
disk_file.open() disk_file.open()
file_name = os.path.basename(disk_file._data_file) file_name = os.path.basename(disk_file._data_file)
etag = md5() etag = md5(usedforsecurity=False)
etag.update(b'VERIF') etag.update(b'VERIF')
etag = etag.hexdigest() etag = etag.hexdigest()
metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o', metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o',
@ -3983,7 +3989,7 @@ class TestObjectController(unittest.TestCase):
policy=POLICIES.legacy) policy=POLICIES.legacy)
disk_file.open(timestamp) disk_file.open(timestamp)
file_name = os.path.basename(disk_file._data_file) file_name = os.path.basename(disk_file._data_file)
etag = md5() etag = md5(usedforsecurity=False)
etag.update(b'VERIF') etag.update(b'VERIF')
etag = etag.hexdigest() etag = etag.hexdigest()
metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o', metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o',
@ -4314,7 +4320,7 @@ class TestObjectController(unittest.TestCase):
self.assertEqual(path, '/sda1/p/a/c/o') self.assertEqual(path, '/sda1/p/a/c/o')
expected = { expected = {
'X-Size': len(b'test1'), 'X-Size': len(b'test1'),
'X-Etag': md5(b'test1').hexdigest(), 'X-Etag': md5(b'test1', usedforsecurity=False).hexdigest(),
'X-Content-Type': 'text/plain', 'X-Content-Type': 'text/plain',
'X-Timestamp': create_timestamp, 'X-Timestamp': create_timestamp,
} }
@ -4354,7 +4360,7 @@ class TestObjectController(unittest.TestCase):
self.assertEqual(path, '/sda1/p/a/c/o') self.assertEqual(path, '/sda1/p/a/c/o')
expected = { expected = {
'X-Size': len(b'test2'), 'X-Size': len(b'test2'),
'X-Etag': md5(b'test2').hexdigest(), 'X-Etag': md5(b'test2', usedforsecurity=False).hexdigest(),
'X-Content-Type': 'text/html', 'X-Content-Type': 'text/html',
'X-Timestamp': offset_timestamp, 'X-Timestamp': offset_timestamp,
} }
@ -4393,7 +4399,7 @@ class TestObjectController(unittest.TestCase):
self.assertEqual(path, '/sda1/p/a/c/o') self.assertEqual(path, '/sda1/p/a/c/o')
expected = { expected = {
'X-Size': len(b'test3'), 'X-Size': len(b'test3'),
'X-Etag': md5(b'test3').hexdigest(), 'X-Etag': md5(b'test3', usedforsecurity=False).hexdigest(),
'X-Content-Type': 'text/enriched', 'X-Content-Type': 'text/enriched',
'X-Timestamp': overwrite_timestamp, 'X-Timestamp': overwrite_timestamp,
} }
@ -4562,7 +4568,7 @@ class TestObjectController(unittest.TestCase):
return False return False
def my_hash_path(*args): def my_hash_path(*args):
return md5(b'collide').hexdigest() return md5(b'collide', usedforsecurity=False).hexdigest()
with mock.patch("swift.obj.diskfile.hash_path", my_hash_path): with mock.patch("swift.obj.diskfile.hash_path", my_hash_path):
with mock.patch("swift.obj.server.check_object_creation", with mock.patch("swift.obj.server.check_object_creation",
@ -7677,10 +7683,11 @@ class TestObjectController(unittest.TestCase):
test_data = b'obj data' test_data = b'obj data'
footer_meta = { footer_meta = {
"X-Object-Sysmeta-Ec-Frag-Index": "7", "X-Object-Sysmeta-Ec-Frag-Index": "7",
"Etag": md5(test_data).hexdigest(), "Etag": md5(test_data, usedforsecurity=False).hexdigest(),
} }
footer_json = json.dumps(footer_meta).encode('ascii') footer_json = json.dumps(footer_meta).encode('ascii')
footer_meta_cksum = md5(footer_json).hexdigest().encode('ascii') footer_meta_cksum = md5(
footer_json, usedforsecurity=False).hexdigest().encode('ascii')
test_doc = b"\r\n".join(( test_doc = b"\r\n".join((
b"--boundary123", b"--boundary123",
b"X-Document: object body", b"X-Document: object body",
@ -7921,10 +7928,11 @@ class TestObjectServer(unittest.TestCase):
test_data = encode_frag_archive_bodies(POLICIES[1], b'obj data')[0] test_data = encode_frag_archive_bodies(POLICIES[1], b'obj data')[0]
footer_meta = { footer_meta = {
"X-Object-Sysmeta-Ec-Frag-Index": "2", "X-Object-Sysmeta-Ec-Frag-Index": "2",
"Etag": md5(test_data).hexdigest(), "Etag": md5(test_data, usedforsecurity=False).hexdigest(),
} }
footer_json = json.dumps(footer_meta).encode('ascii') footer_json = json.dumps(footer_meta).encode('ascii')
footer_meta_cksum = md5(footer_json).hexdigest().encode('ascii') footer_meta_cksum = md5(
footer_json, usedforsecurity=False).hexdigest().encode('ascii')
test_doc = test_doc or b"\r\n".join(( test_doc = test_doc or b"\r\n".join((
b"--boundary123", b"--boundary123",
b"X-Document: object body", b"X-Document: object body",
@ -8193,10 +8201,11 @@ class TestObjectServer(unittest.TestCase):
# make footer doc # make footer doc
footer_meta = { footer_meta = {
"X-Object-Sysmeta-Ec-Frag-Index": "2", "X-Object-Sysmeta-Ec-Frag-Index": "2",
"Etag": md5(test_data).hexdigest(), "Etag": md5(test_data, usedforsecurity=False).hexdigest(),
} }
footer_json = json.dumps(footer_meta).encode('ascii') footer_json = json.dumps(footer_meta).encode('ascii')
footer_meta_cksum = md5(footer_json).hexdigest().encode('ascii') footer_meta_cksum = md5(
footer_json, usedforsecurity=False).hexdigest().encode('ascii')
# send most of the footer doc # send most of the footer doc
footer_doc = b"\r\n".join(( footer_doc = b"\r\n".join((

View File

@ -23,7 +23,6 @@ import unittest
from collections import defaultdict from collections import defaultdict
from contextlib import contextmanager from contextlib import contextmanager
import json import json
from hashlib import md5
import mock import mock
from eventlet import Timeout, sleep from eventlet import Timeout, sleep
@ -41,7 +40,7 @@ else:
import swift import swift
from swift.common import utils, swob, exceptions from swift.common import utils, swob, exceptions
from swift.common.exceptions import ChunkWriteTimeout from swift.common.exceptions import ChunkWriteTimeout
from swift.common.utils import Timestamp, list_from_csv from swift.common.utils import Timestamp, list_from_csv, md5
from swift.proxy import server as proxy_server from swift.proxy import server as proxy_server
from swift.proxy.controllers import obj from swift.proxy.controllers import obj
from swift.proxy.controllers.base import \ from swift.proxy.controllers.base import \
@ -143,7 +142,8 @@ def make_footers_callback(body=None):
crypto_etag = '20242af0cd21dd7195a10483eb7472c9' crypto_etag = '20242af0cd21dd7195a10483eb7472c9'
etag_crypto_meta = \ etag_crypto_meta = \
'{"cipher": "AES_CTR_256", "iv": "sD+PSw/DfqYwpsVGSo0GEw=="}' '{"cipher": "AES_CTR_256", "iv": "sD+PSw/DfqYwpsVGSo0GEw=="}'
etag = md5(body).hexdigest() if body is not None else None etag = md5(body,
usedforsecurity=False).hexdigest() if body is not None else None
footers_to_add = { footers_to_add = {
'X-Object-Sysmeta-Container-Update-Override-Etag': cont_etag, 'X-Object-Sysmeta-Container-Update-Override-Etag': cont_etag,
'X-Object-Sysmeta-Crypto-Etag': crypto_etag, 'X-Object-Sysmeta-Crypto-Etag': crypto_etag,
@ -1078,7 +1078,7 @@ class TestReplicatedObjController(CommonObjectControllerMixin,
body=test_body) body=test_body)
if chunked: if chunked:
req.headers['Transfer-Encoding'] = 'chunked' req.headers['Transfer-Encoding'] = 'chunked'
etag = md5(test_body).hexdigest() etag = md5(test_body, usedforsecurity=False).hexdigest()
req.headers['Etag'] = etag req.headers['Etag'] = etag
put_requests = defaultdict( put_requests = defaultdict(
@ -2188,7 +2188,8 @@ class ECObjectControllerMixin(CommonObjectControllerMixin):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj1['etag']) self.assertEqual(resp.headers['etag'], obj1['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj1['etag'])
# expect a request to all primaries plus one handoff # expect a request to all primaries plus one handoff
self.assertEqual(self.replicas() + 1, len(log)) self.assertEqual(self.replicas() + 1, len(log))
@ -2524,7 +2525,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
def test_GET_with_slow_primaries(self): def test_GET_with_slow_primaries(self):
segment_size = self.policy.ec_segment_size segment_size = self.policy.ec_segment_size
test_data = (b'test' * segment_size)[:-743] test_data = (b'test' * segment_size)[:-743]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = self._make_ec_archive_bodies(test_data) ec_archive_bodies = self._make_ec_archive_bodies(test_data)
ts = self.ts() ts = self.ts()
headers = [] headers = []
@ -2562,7 +2563,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
def test_GET_with_some_slow_primaries(self): def test_GET_with_some_slow_primaries(self):
segment_size = self.policy.ec_segment_size segment_size = self.policy.ec_segment_size
test_data = (b'test' * segment_size)[:-289] test_data = (b'test' * segment_size)[:-289]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = self._make_ec_archive_bodies(test_data) ec_archive_bodies = self._make_ec_archive_bodies(test_data)
ts = self.ts() ts = self.ts()
headers = [] headers = []
@ -2649,7 +2650,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
def test_GET_with_slow_nodes_and_failures(self): def test_GET_with_slow_nodes_and_failures(self):
segment_size = self.policy.ec_segment_size segment_size = self.policy.ec_segment_size
test_data = (b'test' * segment_size)[:-289] test_data = (b'test' * segment_size)[:-289]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = self._make_ec_archive_bodies(test_data) ec_archive_bodies = self._make_ec_archive_bodies(test_data)
ts = self.ts() ts = self.ts()
headers = [] headers = []
@ -2703,7 +2704,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
def test_GET_with_one_slow_frag_lane(self): def test_GET_with_one_slow_frag_lane(self):
segment_size = self.policy.ec_segment_size segment_size = self.policy.ec_segment_size
test_data = (b'test' * segment_size)[:-454] test_data = (b'test' * segment_size)[:-454]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = self._make_ec_archive_bodies(test_data) ec_archive_bodies = self._make_ec_archive_bodies(test_data)
ts = self.ts() ts = self.ts()
headers = [] headers = []
@ -2748,7 +2749,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
def test_GET_with_concurrent_ec_extra_requests(self): def test_GET_with_concurrent_ec_extra_requests(self):
segment_size = self.policy.ec_segment_size segment_size = self.policy.ec_segment_size
test_data = (b'test' * segment_size)[:-454] test_data = (b'test' * segment_size)[:-454]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = self._make_ec_archive_bodies(test_data) ec_archive_bodies = self._make_ec_archive_bodies(test_data)
ts = self.ts() ts = self.ts()
headers = [] headers = []
@ -2825,7 +2826,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
def test_GET_with_frags_swapped_around(self): def test_GET_with_frags_swapped_around(self):
segment_size = self.policy.ec_segment_size segment_size = self.policy.ec_segment_size
test_data = (b'test' * segment_size)[:-657] test_data = (b'test' * segment_size)[:-657]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = self._make_ec_archive_bodies(test_data) ec_archive_bodies = self._make_ec_archive_bodies(test_data)
_part, primary_nodes = self.obj_ring.get_nodes('a', 'c', 'o') _part, primary_nodes = self.obj_ring.get_nodes('a', 'c', 'o')
@ -2909,7 +2910,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj1['etag']) self.assertEqual(resp.headers['etag'], obj1['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj1['etag'])
collected_responses = defaultdict(list) collected_responses = defaultdict(list)
for conn in log: for conn in log:
@ -2964,7 +2966,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
obj1['etag']: {True}, obj1['etag']: {True},
obj2['etag']: {False}, obj2['etag']: {False},
}, closed_conn) }, closed_conn)
self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj2['etag'])
self.assertEqual({True}, {conn.closed for conn in log}) self.assertEqual({True}, {conn.closed for conn in log})
collected_responses = defaultdict(set) collected_responses = defaultdict(set)
@ -3011,7 +3014,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj2['etag']) self.assertEqual(resp.headers['etag'], obj2['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj2['etag'])
collected_responses = defaultdict(set) collected_responses = defaultdict(set)
for conn in log: for conn in log:
@ -3075,7 +3079,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj2['etag']) self.assertEqual(resp.headers['etag'], obj2['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj2['etag'])
collected_responses = defaultdict(set) collected_responses = defaultdict(set)
for conn in log: for conn in log:
@ -3195,7 +3200,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj1['etag']) self.assertEqual(resp.headers['etag'], obj1['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj1['etag'])
# Expect a maximum of one request to each primary plus one extra # Expect a maximum of one request to each primary plus one extra
# request to node 1. Actual value could be less if the extra request # request to node 1. Actual value could be less if the extra request
@ -3489,7 +3495,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj1['etag']) self.assertEqual(resp.headers['etag'], obj1['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj1['etag'])
self.assertGreaterEqual(len(log), self.policy.ec_ndata) self.assertGreaterEqual(len(log), self.policy.ec_ndata)
collected_durables = [] collected_durables = []
@ -3534,7 +3541,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj1['etag']) self.assertEqual(resp.headers['etag'], obj1['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj1['etag'])
collected_durables = [] collected_durables = []
for conn in log: for conn in log:
@ -3625,7 +3633,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj1['etag']) self.assertEqual(resp.headers['etag'], obj1['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj1['etag'])
# Quorum of non-durables for a different object won't # Quorum of non-durables for a different object won't
# prevent us hunting down the durable object # prevent us hunting down the durable object
@ -3670,7 +3679,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj1['etag']) self.assertEqual(resp.headers['etag'], obj1['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj1['etag'])
def test_GET_with_missing_durables_and_older_durables(self): def test_GET_with_missing_durables_and_older_durables(self):
# scenario: non-durable frags of newer obj1 obscure all durable frags # scenario: non-durable frags of newer obj1 obscure all durable frags
@ -3725,7 +3735,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj2['etag']) self.assertEqual(resp.headers['etag'], obj2['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj2['etag'])
# max: proxy will GET all non-durable obj1 frags and then 10 obj frags # max: proxy will GET all non-durable obj1 frags and then 10 obj frags
self.assertLessEqual(len(log), self.replicas() + self.policy.ec_ndata) self.assertLessEqual(len(log), self.replicas() + self.policy.ec_ndata)
# min: proxy will GET 10 non-durable obj1 frags and then 10 obj frags # min: proxy will GET 10 non-durable obj1 frags and then 10 obj frags
@ -3770,7 +3781,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj3['etag']) self.assertEqual(resp.headers['etag'], obj3['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj3['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj3['etag'])
self.assertGreaterEqual(len(log), self.policy.ec_ndata + 1) self.assertGreaterEqual(len(log), self.policy.ec_ndata + 1)
self.assertLessEqual(len(log), (self.policy.ec_ndata * 2) + 1) self.assertLessEqual(len(log), (self.policy.ec_ndata * 2) + 1)
@ -3817,7 +3829,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj2['etag']) self.assertEqual(resp.headers['etag'], obj2['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) self.assertEqual(md5(
resp.body, usedforsecurity=False).hexdigest(), obj2['etag'])
# max: proxy will GET all non-durable obj1 frags and then 10 obj2 frags # max: proxy will GET all non-durable obj1 frags and then 10 obj2 frags
self.assertLessEqual(len(log), self.replicas() + self.policy.ec_ndata) self.assertLessEqual(len(log), self.replicas() + self.policy.ec_ndata)
# min: proxy will GET 10 non-durable obj1 frags and then 10 obj2 frags # min: proxy will GET 10 non-durable obj1 frags and then 10 obj2 frags
@ -4055,10 +4068,10 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
segment_size = self.policy.ec_segment_size segment_size = self.policy.ec_segment_size
frag_size = self.policy.fragment_size frag_size = self.policy.fragment_size
new_data = (b'test' * segment_size)[:-492] new_data = (b'test' * segment_size)[:-492]
new_etag = md5(new_data).hexdigest() new_etag = md5(new_data, usedforsecurity=False).hexdigest()
new_archives = self._make_ec_archive_bodies(new_data) new_archives = self._make_ec_archive_bodies(new_data)
old_data = (b'junk' * segment_size)[:-492] old_data = (b'junk' * segment_size)[:-492]
old_etag = md5(old_data).hexdigest() old_etag = md5(old_data, usedforsecurity=False).hexdigest()
old_archives = self._make_ec_archive_bodies(old_data) old_archives = self._make_ec_archive_bodies(old_data)
frag_archive_size = len(new_archives[0]) frag_archive_size = len(new_archives[0])
@ -4135,8 +4148,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
# N.B. the object data *length* here is different # N.B. the object data *length* here is different
test_data2 = (b'blah1' * segment_size)[:-333] test_data2 = (b'blah1' * segment_size)[:-333]
etag1 = md5(test_data1).hexdigest() etag1 = md5(test_data1, usedforsecurity=False).hexdigest()
etag2 = md5(test_data2).hexdigest() etag2 = md5(test_data2, usedforsecurity=False).hexdigest()
ec_archive_bodies1 = self._make_ec_archive_bodies(test_data1) ec_archive_bodies1 = self._make_ec_archive_bodies(test_data1)
ec_archive_bodies2 = self._make_ec_archive_bodies(test_data2) ec_archive_bodies2 = self._make_ec_archive_bodies(test_data2)
@ -4161,7 +4174,9 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
headers=headers): headers=headers):
resp = req.get_response(self.app) resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(md5(resp.body).hexdigest(), etag1) self.assertEqual(
md5(resp.body, usedforsecurity=False).hexdigest(),
etag1)
# sanity check responses2 # sanity check responses2
responses = responses2[:self.policy.ec_ndata] responses = responses2[:self.policy.ec_ndata]
@ -4170,7 +4185,9 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
headers=headers): headers=headers):
resp = req.get_response(self.app) resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(md5(resp.body).hexdigest(), etag2) self.assertEqual(
md5(resp.body, usedforsecurity=False).hexdigest(),
etag2)
# now mix the responses a bit # now mix the responses a bit
mix_index = random.randint(0, self.policy.ec_ndata - 1) mix_index = random.randint(0, self.policy.ec_ndata - 1)
@ -4199,7 +4216,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
def test_GET_read_timeout(self): def test_GET_read_timeout(self):
segment_size = self.policy.ec_segment_size segment_size = self.policy.ec_segment_size
test_data = (b'test' * segment_size)[:-333] test_data = (b'test' * segment_size)[:-333]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = self._make_ec_archive_bodies(test_data) ec_archive_bodies = self._make_ec_archive_bodies(test_data)
headers = {'X-Object-Sysmeta-Ec-Etag': etag} headers = {'X-Object-Sysmeta-Ec-Etag': etag}
self.app.recoverable_node_timeout = 0.01 self.app.recoverable_node_timeout = 0.01
@ -4219,7 +4236,9 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
# do this inside the fake http context manager, it'll try to # do this inside the fake http context manager, it'll try to
# resume but won't be able to give us all the right bytes # resume but won't be able to give us all the right bytes
self.assertNotEqual(md5(resp.body).hexdigest(), etag) self.assertNotEqual(
md5(resp.body, usedforsecurity=False).hexdigest(),
etag)
error_lines = self.logger.get_lines_for_level('error') error_lines = self.logger.get_lines_for_level('error')
nparity = self.policy.ec_nparity nparity = self.policy.ec_nparity
self.assertGreater(len(error_lines), nparity) self.assertGreater(len(error_lines), nparity)
@ -4233,7 +4252,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
def test_GET_read_timeout_resume(self): def test_GET_read_timeout_resume(self):
segment_size = self.policy.ec_segment_size segment_size = self.policy.ec_segment_size
test_data = (b'test' * segment_size)[:-333] test_data = (b'test' * segment_size)[:-333]
etag = md5(test_data).hexdigest() etag = md5(test_data, usedforsecurity=False).hexdigest()
ec_archive_bodies = self._make_ec_archive_bodies(test_data) ec_archive_bodies = self._make_ec_archive_bodies(test_data)
headers = { headers = {
'X-Object-Sysmeta-Ec-Etag': etag, 'X-Object-Sysmeta-Ec-Etag': etag,
@ -4255,7 +4274,9 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
headers=headers): headers=headers):
resp = req.get_response(self.app) resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(md5(resp.body).hexdigest(), etag) self.assertEqual(
md5(resp.body, usedforsecurity=False).hexdigest(),
etag)
error_lines = self.logger.get_lines_for_level('error') error_lines = self.logger.get_lines_for_level('error')
self.assertEqual(1, len(error_lines)) self.assertEqual(1, len(error_lines))
self.assertIn('retrying', error_lines[0]) self.assertIn('retrying', error_lines[0])
@ -4303,8 +4324,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
segment_size = self.policy.ec_segment_size segment_size = self.policy.ec_segment_size
test_data2 = (b'blah1' * segment_size)[:-333] test_data2 = (b'blah1' * segment_size)[:-333]
test_data1 = (b'test' * segment_size)[:-333] test_data1 = (b'test' * segment_size)[:-333]
etag2 = md5(test_data2).hexdigest() etag2 = md5(test_data2, usedforsecurity=False).hexdigest()
etag1 = md5(test_data1).hexdigest() etag1 = md5(test_data1, usedforsecurity=False).hexdigest()
ec_archive_bodies2 = self._make_ec_archive_bodies(test_data2) ec_archive_bodies2 = self._make_ec_archive_bodies(test_data2)
ec_archive_bodies1 = self._make_ec_archive_bodies(test_data1) ec_archive_bodies1 = self._make_ec_archive_bodies(test_data1)
headers2 = {'X-Object-Sysmeta-Ec-Etag': etag2, headers2 = {'X-Object-Sysmeta-Ec-Etag': etag2,
@ -4382,7 +4403,9 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
headers=headers) as log: headers=headers) as log:
resp = req.get_response(self.app) resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(md5(resp.body).hexdigest(), etag1) self.assertEqual(
md5(resp.body, usedforsecurity=False).hexdigest(),
etag1)
error_lines = self.logger.get_lines_for_level('error') error_lines = self.logger.get_lines_for_level('error')
self.assertEqual(2, len(error_lines)) self.assertEqual(2, len(error_lines))
for line in error_lines: for line in error_lines:
@ -4439,7 +4462,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
def _test_invalid_ranges(self, method, real_body, segment_size, req_range): def _test_invalid_ranges(self, method, real_body, segment_size, req_range):
# make a request with range starts from more than real size. # make a request with range starts from more than real size.
body_etag = md5(real_body).hexdigest() body_etag = md5(real_body, usedforsecurity=False).hexdigest()
req = swift.common.swob.Request.blank( req = swift.common.swob.Request.blank(
'/v1/a/c/o', method=method, '/v1/a/c/o', method=method,
headers={'Destination': 'c1/o', headers={'Destination': 'c1/o',
@ -4643,7 +4666,9 @@ class TestECDuplicationObjController(
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj['etag']) self.assertEqual(resp.headers['etag'], obj['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj['etag']) self.assertEqual(
md5(resp.body, usedforsecurity=False).hexdigest(),
obj['etag'])
collected_responses = defaultdict(set) collected_responses = defaultdict(set)
for conn in log: for conn in log:
@ -4819,7 +4844,9 @@ class TestECDuplicationObjController(
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj2['etag']) self.assertEqual(resp.headers['etag'], obj2['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) self.assertEqual(
md5(resp.body, usedforsecurity=False).hexdigest(),
obj2['etag'])
collected_responses = defaultdict(set) collected_responses = defaultdict(set)
for conn in log: for conn in log:
@ -4891,7 +4918,9 @@ class TestECDuplicationObjController(
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['etag'], obj2['etag']) self.assertEqual(resp.headers['etag'], obj2['etag'])
self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) self.assertEqual(
md5(resp.body, usedforsecurity=False).hexdigest(),
obj2['etag'])
collected_responses = defaultdict(set) collected_responses = defaultdict(set)
for conn in log: for conn in log:
@ -5414,7 +5443,7 @@ class TestECObjControllerMimePutter(BaseObjectControllerMixin,
env = {'swift.callback.update_footers': footers_callback} env = {'swift.callback.update_footers': footers_callback}
req = swift.common.swob.Request.blank( req = swift.common.swob.Request.blank(
'/v1/a/c/o', method='PUT', environ=env) '/v1/a/c/o', method='PUT', environ=env)
etag = md5(test_body).hexdigest() etag = md5(test_body, usedforsecurity=False).hexdigest()
size = len(test_body) size = len(test_body)
req.body = test_body req.body = test_body
if chunked: if chunked:
@ -5515,7 +5544,7 @@ class TestECObjControllerMimePutter(BaseObjectControllerMixin,
'X-Object-Sysmeta-Ec-Etag': etag, 'X-Object-Sysmeta-Ec-Etag': etag,
'X-Backend-Container-Update-Override-Etag': etag, 'X-Backend-Container-Update-Override-Etag': etag,
'X-Object-Sysmeta-Ec-Segment-Size': str(segment_size), 'X-Object-Sysmeta-Ec-Segment-Size': str(segment_size),
'Etag': md5(obj_payload).hexdigest()}) 'Etag': md5(obj_payload, usedforsecurity=False).hexdigest()})
for header, value in expected.items(): for header, value in expected.items():
self.assertEqual(footer_metadata[header], value) self.assertEqual(footer_metadata[header], value)
@ -5547,7 +5576,7 @@ class TestECObjControllerMimePutter(BaseObjectControllerMixin,
# trailing metadata # trailing metadata
segment_size = self.policy.ec_segment_size segment_size = self.policy.ec_segment_size
test_body = (b'asdf' * segment_size)[:-10] test_body = (b'asdf' * segment_size)[:-10]
etag = md5(test_body).hexdigest() etag = md5(test_body, usedforsecurity=False).hexdigest()
size = len(test_body) size = len(test_body)
codes = [201] * self.replicas() codes = [201] * self.replicas()
resp_headers = { resp_headers = {
@ -5623,7 +5652,9 @@ class TestECObjControllerMimePutter(BaseObjectControllerMixin,
'X-Object-Sysmeta-Ec-Content-Length': str(size), 'X-Object-Sysmeta-Ec-Content-Length': str(size),
'X-Object-Sysmeta-Ec-Etag': etag, 'X-Object-Sysmeta-Ec-Etag': etag,
'X-Object-Sysmeta-Ec-Segment-Size': str(segment_size), 'X-Object-Sysmeta-Ec-Segment-Size': str(segment_size),
'Etag': md5(obj_part.get_payload(decode=True)).hexdigest()} 'Etag': md5(
obj_part.get_payload(decode=True),
usedforsecurity=False).hexdigest()}
expected.update(expect_added) expected.update(expect_added)
for header, value in expected.items(): for header, value in expected.items():
self.assertIn(header, footer_metadata) self.assertIn(header, footer_metadata)

View File

@ -30,7 +30,6 @@ from shutil import rmtree, copyfile, move
import gc import gc
import time import time
from textwrap import dedent from textwrap import dedent
from hashlib import md5
import collections import collections
from pyeclib.ec_iface import ECDriverError from pyeclib.ec_iface import ECDriverError
from tempfile import mkdtemp, NamedTemporaryFile from tempfile import mkdtemp, NamedTemporaryFile
@ -71,7 +70,7 @@ from swift.common.exceptions import ChunkReadTimeout, DiskFileNotExist, \
from swift.common import utils, constraints from swift.common import utils, constraints
from swift.common.utils import hash_path, storage_directory, \ from swift.common.utils import hash_path, storage_directory, \
parse_content_type, parse_mime_headers, \ parse_content_type, parse_mime_headers, \
iter_multipart_mime_documents, public, mkdirs, NullLogger iter_multipart_mime_documents, public, mkdirs, NullLogger, md5
from swift.common.wsgi import loadapp, ConfigString, SwiftHttpProtocol from swift.common.wsgi import loadapp, ConfigString, SwiftHttpProtocol
from swift.proxy.controllers import base as proxy_base from swift.proxy.controllers import base as proxy_base
from swift.proxy.controllers.base import get_cache_key, cors_validation, \ from swift.proxy.controllers.base import get_cache_key, cors_validation, \
@ -2152,8 +2151,8 @@ class BaseTestObjectController(object):
self.put_container(policy.name, container_name) self.put_container(policy.name, container_name)
obj = b'this object has an etag and is otherwise unimportant' obj = b'this object has an etag and is otherwise unimportant'
etag = md5(obj).hexdigest() etag = md5(obj, usedforsecurity=False).hexdigest()
not_etag = md5(obj + b"blahblah").hexdigest() not_etag = md5(obj + b"blahblah", usedforsecurity=False).hexdigest()
prolis = _test_sockets[0] prolis = _test_sockets[0]
prosrv = _test_servers[0] prosrv = _test_servers[0]
@ -5687,7 +5686,8 @@ class TestReplicatedObjectController(
self.assertEqual(headers[:len(exp)], exp) self.assertEqual(headers[:len(exp)], exp)
# request with both If-None-Match and Range # request with both If-None-Match and Range
etag = md5(b"abcdefghij").hexdigest().encode('ascii') etag = md5(b"abcdefghij",
usedforsecurity=False).hexdigest().encode('ascii')
sock = connect_tcp(('localhost', prolis.getsockname()[1])) sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile('rwb') fd = sock.makefile('rwb')
fd.write(b'GET /v1/a/con/o HTTP/1.1\r\n' + fd.write(b'GET /v1/a/con/o HTTP/1.1\r\n' +
@ -6562,7 +6562,8 @@ class BaseTestECObjectController(BaseTestObjectController):
'Content-Length: %d\r\n' 'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n' 'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n' 'Content-Type: application/octet-stream\r\n'
'\r\n' % (self.ec_policy.name, md5(obj).hexdigest(), '\r\n' % (self.ec_policy.name,
md5(obj, usedforsecurity=False).hexdigest(),
len(obj))).encode('ascii')) len(obj))).encode('ascii'))
fd.write(obj) fd.write(obj)
fd.flush() fd.flush()
@ -6597,7 +6598,7 @@ class BaseTestECObjectController(BaseTestObjectController):
self.assertEqual( self.assertEqual(
lmeta['x-object-sysmeta-ec-etag'], lmeta['x-object-sysmeta-ec-etag'],
md5(obj).hexdigest()) md5(obj, usedforsecurity=False).hexdigest())
self.assertEqual( self.assertEqual(
lmeta['x-object-sysmeta-ec-content-length'], lmeta['x-object-sysmeta-ec-content-length'],
str(len(obj))) str(len(obj)))
@ -6609,7 +6610,7 @@ class BaseTestECObjectController(BaseTestObjectController):
'%s 2+1' % DEFAULT_TEST_EC_TYPE) '%s 2+1' % DEFAULT_TEST_EC_TYPE)
self.assertEqual( self.assertEqual(
lmeta['etag'], lmeta['etag'],
md5(contents).hexdigest()) md5(contents, usedforsecurity=False).hexdigest())
# check presence for a durable data file for the timestamp # check presence for a durable data file for the timestamp
durable_file = ( durable_file = (
@ -6738,7 +6739,8 @@ class BaseTestECObjectController(BaseTestObjectController):
'X-Storage-Token: t\r\n' 'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n' 'Content-Type: application/octet-stream\r\n'
'\r\n' % (self.ec_policy.name, '\r\n' % (self.ec_policy.name,
md5(b'something else').hexdigest(), md5(b'something else',
usedforsecurity=False).hexdigest(),
len(obj))).encode('ascii')) len(obj))).encode('ascii'))
fd.write(obj) fd.write(obj)
fd.flush() fd.flush()
@ -6769,8 +6771,8 @@ class BaseTestECObjectController(BaseTestObjectController):
self.ec_policy.object_ring.replica_count - self.ec_policy.ec_ndata) self.ec_policy.object_ring.replica_count - self.ec_policy.ec_ndata)
countdown = [count] countdown = [count]
def busted_md5_constructor(initial_str=b""): def busted_md5_constructor(initial_str=b"", usedforsecurity=True):
hasher = md5(initial_str) hasher = md5(initial_str, usedforsecurity=usedforsecurity)
if countdown[0] > 0: if countdown[0] > 0:
hasher.update(b'wrong') hasher.update(b'wrong')
countdown[0] -= 1 countdown[0] -= 1
@ -6790,7 +6792,8 @@ class BaseTestECObjectController(BaseTestObjectController):
'Content-Length: %d\r\n' 'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n' 'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n' 'Content-Type: application/octet-stream\r\n'
'\r\n' % (self.ec_policy.name, md5(obj).hexdigest(), '\r\n' % (self.ec_policy.name,
md5(obj, usedforsecurity=False).hexdigest(),
len(obj))).encode('ascii')) len(obj))).encode('ascii'))
fd.write(obj) fd.write(obj)
fd.flush() fd.flush()
@ -6830,9 +6833,9 @@ class BaseTestECObjectController(BaseTestObjectController):
def test_PUT_ec_fragment_quorum_archive_etag_mismatch(self): def test_PUT_ec_fragment_quorum_archive_etag_mismatch(self):
self.put_container("ec", "ec-con") self.put_container("ec", "ec-con")
def busted_md5_constructor(initial_str=""): def busted_md5_constructor(initial_str=b"", usedforsecurity=True):
hasher = md5(initial_str) hasher = md5(initial_str, usedforsecurity=usedforsecurity)
hasher.update('wrong') hasher.update(b'wrong')
return hasher return hasher
obj = b'uvarovite-esurience-cerated-symphysic' obj = b'uvarovite-esurience-cerated-symphysic'
@ -6848,7 +6851,7 @@ class BaseTestECObjectController(BaseTestObjectController):
commit_confirmation = \ commit_confirmation = \
'swift.proxy.controllers.obj.MIMEPutter.send_commit_confirmation' 'swift.proxy.controllers.obj.MIMEPutter.send_commit_confirmation'
diskfile_md5 = 'swift.obj.diskfile.md5' diskfile_md5 = 'swift.obj.diskfile.md5'
mem_diskfile_md5 = 'swift.obj.mem_diskfile.hashlib.md5' mem_diskfile_md5 = 'swift.obj.mem_diskfile.md5'
with mock.patch(diskfile_md5, busted_md5_constructor), \ with mock.patch(diskfile_md5, busted_md5_constructor), \
mock.patch(mem_diskfile_md5, busted_md5_constructor), \ mock.patch(mem_diskfile_md5, busted_md5_constructor), \
@ -6861,7 +6864,7 @@ class BaseTestECObjectController(BaseTestObjectController):
'Content-Length: %d\r\n' 'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n' 'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n' 'Content-Type: application/octet-stream\r\n'
'\r\n' % (md5(obj).hexdigest(), '\r\n' % (md5(obj, usedforsecurity=False).hexdigest(),
len(obj))).encode('ascii')) len(obj))).encode('ascii'))
fd.write(obj) fd.write(obj)
fd.flush() fd.flush()
@ -6917,7 +6920,7 @@ class BaseTestECObjectController(BaseTestObjectController):
'Content-Length: %d\r\n' 'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n' 'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n' 'Content-Type: application/octet-stream\r\n'
'\r\n' % (md5(obj).hexdigest(), '\r\n' % (md5(obj, usedforsecurity=False).hexdigest(),
len(obj))).encode('ascii')) len(obj))).encode('ascii'))
fd.write(obj) fd.write(obj)
fd.flush() fd.flush()
@ -6958,7 +6961,8 @@ class BaseTestECObjectController(BaseTestObjectController):
'Content-Length: %d\r\n' 'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n' 'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n' 'Content-Type: application/octet-stream\r\n'
'\r\n' % (self.ec_policy.name, md5(obj).hexdigest(), '\r\n' % (self.ec_policy.name,
md5(obj, usedforsecurity=False).hexdigest(),
len(obj))).encode('ascii')) len(obj))).encode('ascii'))
fd.write(obj) fd.write(obj)
fd.flush() fd.flush()
@ -6976,7 +6980,8 @@ class BaseTestECObjectController(BaseTestObjectController):
'Content-Length: %d\r\n' 'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n' 'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n' 'Content-Type: application/octet-stream\r\n'
'\r\n' % (self.ec_policy.name, md5(obj).hexdigest(), '\r\n' % (self.ec_policy.name,
md5(obj, usedforsecurity=False).hexdigest(),
len(obj))).encode('ascii')) len(obj))).encode('ascii'))
fd.write(obj) fd.write(obj)
fd.flush() fd.flush()
@ -7022,7 +7027,9 @@ class BaseTestECObjectController(BaseTestObjectController):
headers = parse_headers_string(headers) headers = parse_headers_string(headers)
self.assertEqual(str(len(obj)), headers['Content-Length']) self.assertEqual(str(len(obj)), headers['Content-Length'])
self.assertEqual(md5(obj).hexdigest(), headers['Etag']) self.assertEqual(
md5(obj, usedforsecurity=False).hexdigest(),
headers['Etag'])
self.assertEqual('chartreuse', headers['X-Object-Meta-Color']) self.assertEqual('chartreuse', headers['X-Object-Meta-Color'])
gotten_obj = b'' gotten_obj = b''
@ -7084,7 +7091,9 @@ class BaseTestECObjectController(BaseTestObjectController):
headers = parse_headers_string(headers) headers = parse_headers_string(headers)
self.assertEqual(str(len(obj)), headers['Content-Length']) self.assertEqual(str(len(obj)), headers['Content-Length'])
self.assertEqual(md5(obj).hexdigest(), headers['Etag']) self.assertEqual(
md5(obj, usedforsecurity=False).hexdigest(),
headers['Etag'])
gotten_obj = b'' gotten_obj = b''
while True: while True:
@ -7165,7 +7174,9 @@ class BaseTestECObjectController(BaseTestObjectController):
headers = parse_headers_string(headers) headers = parse_headers_string(headers)
self.assertEqual(str(len(obj)), headers['Content-Length']) self.assertEqual(str(len(obj)), headers['Content-Length'])
self.assertEqual(md5(obj).hexdigest(), headers['Etag']) self.assertEqual(
md5(obj, usedforsecurity=False).hexdigest(),
headers['Etag'])
gotten_obj = b'' gotten_obj = b''
try: try:
@ -7221,7 +7232,9 @@ class BaseTestECObjectController(BaseTestObjectController):
headers = parse_headers_string(headers) headers = parse_headers_string(headers)
self.assertEqual(str(len(obj)), headers['Content-Length']) self.assertEqual(str(len(obj)), headers['Content-Length'])
self.assertEqual(md5(obj).hexdigest(), headers['Etag']) self.assertEqual(
md5(obj, usedforsecurity=False).hexdigest(),
headers['Etag'])
self.assertEqual('chartreuse', headers['X-Object-Meta-Color']) self.assertEqual('chartreuse', headers['X-Object-Meta-Color'])
error_lines = prosrv.logger.get_lines_for_level('error') error_lines = prosrv.logger.get_lines_for_level('error')
@ -7310,7 +7323,8 @@ class BaseTestECObjectController(BaseTestObjectController):
obj = b'abCD' * 10 obj = b'abCD' * 10
extra_trans_data = [ extra_trans_data = [
'Etag: "%s"\r\n' % md5(obj).hexdigest(), 'Etag: "%s"\r\n' % md5(
obj, usedforsecurity=False).hexdigest(),
'Content-Length: %d\r\n' % len(obj), 'Content-Length: %d\r\n' % len(obj),
'Content-Type: application/octet-stream\r\n', 'Content-Type: application/octet-stream\r\n',
'\r\n%s' % obj.decode('ascii') '\r\n%s' % obj.decode('ascii')
@ -8214,7 +8228,7 @@ class TestObjectECRangedGET(unittest.TestCase):
str(s) for s in range(431)).encode('ascii') str(s) for s in range(431)).encode('ascii')
assert seg_size * 4 > len(cls.obj) > seg_size * 3, \ assert seg_size * 4 > len(cls.obj) > seg_size * 3, \
"object is wrong number of segments" "object is wrong number of segments"
cls.obj_etag = md5(cls.obj).hexdigest() cls.obj_etag = md5(cls.obj, usedforsecurity=False).hexdigest()
cls.tiny_obj = b'tiny, tiny object' cls.tiny_obj = b'tiny, tiny object'
assert len(cls.tiny_obj) < seg_size, "tiny_obj too large" assert len(cls.tiny_obj) < seg_size, "tiny_obj too large"