diff --git a/bandit.yaml b/bandit.yaml
index e21301446e..ed1c2b61e1 100644
--- a/bandit.yaml
+++ b/bandit.yaml
@@ -80,7 +80,7 @@ # B703 : django_mark_safe
 # (optional) list included test IDs here, eg '[B101, B406]':
-tests: [B102, B103, B302, B306, B308, B309, B310, B401, B501, B502, B506, B601, B602, B609]
+tests: [B102, B103, B302, B303, B304, B305, B306, B308, B309, B310, B401, B501, B502, B506, B601, B602, B609]
 # (optional) list skipped test IDs here, eg '[B101, B406]':
 skips:
diff --git a/swift/account/reaper.py b/swift/account/reaper.py
index 7b62df03fc..bd4ee98e4c 100644
--- a/swift/account/reaper.py
+++ b/swift/account/reaper.py
@@ -19,7 +19,6 @@ import socket
 from logging import DEBUG
 from math import sqrt
 from time import time
-from hashlib import md5
 import itertools
 from eventlet import GreenPool, sleep, Timeout
@@ -35,7 +34,7 @@ from swift.common.request_helpers import USE_REPLICATION_NETWORK_HEADER
 from swift.common.ring import Ring
 from swift.common.ring.utils import is_local_device
 from swift.common.utils import get_logger, whataremyips, config_true_value, \
-    Timestamp
+    Timestamp, md5
 from swift.common.daemon import Daemon
 from swift.common.storage_policy import POLICIES, PolicyError
@@ -271,8 +270,9 @@ class AccountReaper(Daemon):
                 container_ = container.encode('utf-8')
             else:
                 container_ = container
-            this_shard = int(md5(container_).hexdigest(), 16) % \
                 len(nodes)
+            this_shard = (
+                int(md5(container_, usedforsecurity=False)
+                    .hexdigest(), 16) % len(nodes))
             if container_shard not in (this_shard, None):
                 continue
diff --git a/swift/cli/info.py b/swift/cli/info.py
index 08a04e361e..529f210434 100644
--- a/swift/cli/info.py
+++ b/swift/cli/info.py
@@ -15,7 +15,6 @@ import itertools
 import json
 import os
 import sqlite3
-from hashlib import md5
 from collections import defaultdict
 from six.moves import urllib
@@ -32,6 +31,7 @@ from swift.obj.diskfile import get_data_dir, read_metadata, DATADIR_BASE, \
     extract_policy
 from swift.common.storage_policy import POLICIES
 from swift.common.middleware.crypto.crypto_utils import load_crypto_meta
+from swift.common.utils import md5
 class InfoSystemExit(Exception):
@@ -545,7 +545,7 @@ def print_obj(datafile, check_etag=True, swift_dir='/etc/swift',
     # Optional integrity check; it's useful, but slow.
     file_len = None
     if check_etag:
-        h = md5()
+        h = md5(usedforsecurity=False)
         file_len = 0
         while True:
             data = fp.read(64 * 1024)
diff --git a/swift/common/db.py b/swift/common/db.py
index e06baf5c6c..48aeb5fcb5 100644
--- a/swift/common/db.py
+++ b/swift/common/db.py
@@ -17,7 +17,6 @@
 from contextlib import contextmanager, closing
 import base64
-import hashlib
 import json
 import logging
 import os
@@ -36,7 +35,7 @@ import sqlite3
 from swift.common.constraints import MAX_META_COUNT, MAX_META_OVERALL_SIZE, \
     check_utf8
 from swift.common.utils import Timestamp, renamer, \
-    mkdirs, lock_parent_directory, fallocate
+    mkdirs, lock_parent_directory, fallocate, md5
 from swift.common.exceptions import LockTimeout
 from swift.common.swob import HTTPBadRequest
@@ -186,7 +185,8 @@ def chexor(old, name, timestamp):
     """
     if name is None:
         raise Exception('name is None!')
-    new = hashlib.md5(('%s-%s' % (name, timestamp)).encode('utf8')).hexdigest()
+    new = md5(('%s-%s' % (name, timestamp)).encode('utf8'),
+              usedforsecurity=False).hexdigest()
     return '%032x' % (int(old, 16) ^ int(new, 16))
diff --git a/swift/common/memcached.py b/swift/common/memcached.py
index bc1216283a..6efe5669c5 100644
--- a/swift/common/memcached.py
+++ b/swift/common/memcached.py
@@ -50,13 +50,13 @@ import json
 import logging
 import time
 from bisect import bisect
-from hashlib import md5
 from eventlet.green import socket
 from eventlet.pools import Pool
 from eventlet import Timeout
 from six.moves import range
 from swift.common import utils
+from swift.common.utils import md5
 DEFAULT_MEMCACHED_PORT = 11211
@@ -81,7 +81,7 @@ def md5hash(key):
         key = key.encode('utf-8')
     else:
         key = key.encode('utf-8', errors='surrogateescape')
-    return md5(key).hexdigest().encode('ascii')
+    return md5(key, usedforsecurity=False).hexdigest().encode('ascii')
 def sanitize_timeout(timeout):
diff --git a/swift/common/middleware/crypto/encrypter.py b/swift/common/middleware/crypto/encrypter.py
index 15f92d7641..c450694b38 100644
--- a/swift/common/middleware/crypto/encrypter.py
+++ b/swift/common/middleware/crypto/encrypter.py
@@ -27,7 +27,7 @@ from swift.common.request_helpers import get_object_transient_sysmeta, \
 from swift.common.swob import Request, Match, HTTPException, \
     HTTPUnprocessableEntity, wsgi_to_bytes, bytes_to_wsgi, normalize_etag
 from swift.common.utils import get_logger, config_true_value, \
-    MD5_OF_EMPTY_STRING
+    MD5_OF_EMPTY_STRING, md5
 def encrypt_header_val(crypto, value, key):
@@ -91,8 +91,8 @@ class EncInputWrapper(object):
             self.body_crypto_meta['key_id'] = self.keys['id']
             self.body_crypto_ctxt = self.crypto.create_encryption_ctxt(
                 body_key, self.body_crypto_meta.get('iv'))
-            self.plaintext_md5 = hashlib.md5()
-            self.ciphertext_md5 = hashlib.md5()
+            self.plaintext_md5 = md5(usedforsecurity=False)
+            self.ciphertext_md5 = md5(usedforsecurity=False)
     def install_footers_callback(self, req):
         # the proxy controller will call back for footer metadata after
diff --git a/swift/common/middleware/dlo.py b/swift/common/middleware/dlo.py
index d9222273df..5cb5223099 100644
--- a/swift/common/middleware/dlo.py
+++ b/swift/common/middleware/dlo.py
@@ -122,7 +122,6 @@ import json
 import six
-from hashlib import md5
 from swift.common import constraints
 from swift.common.exceptions import ListingIterError, SegmentError
 from swift.common.http import is_success
@@ -131,7 +130,7 @@ from swift.common.swob import Request, Response, HTTPException, \
     str_to_wsgi, wsgi_to_str, wsgi_quote, wsgi_unquote, normalize_etag
 from swift.common.utils import get_logger, \
     RateLimitedIterator, quote, close_if_possible, closing_if_possible, \
-    drain_and_close
+    drain_and_close, md5
 from swift.common.request_helpers import SegmentedIterable, \
     update_ignore_range_header
 from swift.common.wsgi import WSGIContext, make_subrequest, load_app_config
@@ -333,7 +332,7 @@ class GetContext(WSGIContext):
         if have_complete_listing:
             response_headers = [(h, v) for h, v in response_headers
                                 if h.lower() != "etag"]
-            etag = md5()
+            etag = md5(usedforsecurity=False)
             for seg_dict in segments:
                 etag.update(normalize_etag(seg_dict['hash']).encode('utf8'))
             response_headers.append(('Etag', '"%s"' % etag.hexdigest()))
diff --git a/swift/common/middleware/s3api/controllers/multi_upload.py b/swift/common/middleware/s3api/controllers/multi_upload.py
index 3ab7e78016..e7b998460d 100644
--- a/swift/common/middleware/s3api/controllers/multi_upload.py
+++ b/swift/common/middleware/s3api/controllers/multi_upload.py
@@ -61,7 +61,6 @@ Static Large Object when the multipart upload is completed.
 import binascii
 import copy
-from hashlib import md5
 import os
 import re
 import time
@@ -69,7 +68,7 @@ import time
 import six
 from swift.common.swob import Range, bytes_to_wsgi, normalize_etag
-from swift.common.utils import json, public, reiterate
+from swift.common.utils import json, public, reiterate, md5
 from swift.common.db import utf8encode
 from swift.common.request_helpers import get_container_update_override_key
@@ -636,7 +635,7 @@ class UploadController(Controller):
             headers['Content-Type'] = content_type
         container = req.container_name + MULTIUPLOAD_SUFFIX
-        s3_etag_hasher = md5()
+        s3_etag_hasher = md5(usedforsecurity=False)
         manifest = []
         previous_number = 0
         try:
@@ -646,7 +645,8 @@ class UploadController(Controller):
             if 'content-md5' in req.headers:
                 # If an MD5 was provided, we need to verify it.
                 # Note that S3Request already took care of translating to ETag
-                if req.headers['etag'] != md5(xml).hexdigest():
+                if req.headers['etag'] != md5(
+                        xml, usedforsecurity=False).hexdigest():
                     raise BadDigest(content_md5=req.headers['content-md5'])
                 # We're only interested in the body here, in the
                 # multipart-upload controller -- *don't* let it get
diff --git a/swift/common/middleware/s3api/s3request.py b/swift/common/middleware/s3api/s3request.py
index 384b645381..f3ae063076 100644
--- a/swift/common/middleware/s3api/s3request.py
+++ b/swift/common/middleware/s3api/s3request.py
@@ -17,7 +17,7 @@ import base64
 import binascii
 from collections import defaultdict, OrderedDict
 from email.header import Header
-from hashlib import sha1, sha256, md5
+from hashlib import sha1, sha256
 import hmac
 import re
 import six
@@ -26,7 +26,7 @@ from six.moves.urllib.parse import quote, unquote, parse_qsl
 import string
 from swift.common.utils import split_path, json, get_swift_info, \
-    close_if_possible
+    close_if_possible, md5
 from swift.common import swob
 from swift.common.http import HTTP_OK, HTTP_CREATED, HTTP_ACCEPTED, \
     HTTP_NO_CONTENT, HTTP_UNAUTHORIZED, HTTP_FORBIDDEN, HTTP_NOT_FOUND, \
@@ -866,7 +866,8 @@ class S3Request(swob.Request):
             raise InvalidRequest('Missing required header for this request: '
                                  'Content-MD5')
-        digest = base64.b64encode(md5(body).digest()).strip().decode('ascii')
+        digest = base64.b64encode(md5(
+            body, usedforsecurity=False).digest()).strip().decode('ascii')
         if self.environ['HTTP_CONTENT_MD5'] != digest:
             raise BadDigest(content_md5=self.environ['HTTP_CONTENT_MD5'])
diff --git a/swift/common/middleware/slo.py b/swift/common/middleware/slo.py
index e9d46b0a75..88554e1cb4 100644
--- a/swift/common/middleware/slo.py
+++ b/swift/common/middleware/slo.py
@@ -330,7 +330,6 @@ import json
 import mimetypes
 import re
 import time
-from hashlib import md5
 import six
@@ -348,7 +347,7 @@ from swift.common.utils import get_logger, config_true_value, \
     get_valid_utf8_str, override_bytes_from_content_type, split_path, \
     register_swift_info, RateLimitedIterator, quote, close_if_possible, \
     closing_if_possible, LRUCache, StreamingPile, strict_b64decode, \
-    Timestamp, drain_and_close, get_expirer_container
+    Timestamp, drain_and_close, get_expirer_container, md5
 from swift.common.request_helpers import SegmentedIterable, \
     get_sys_meta_prefix, update_etag_is_at_header, resolve_etag_is_at_header, \
     get_container_update_override_key, update_ignore_range_header
@@ -927,7 +926,9 @@ class SloGetContext(WSGIContext):
             if header.lower() == 'content-length':
                 new_headers.append(('Content-Length', len(json_data)))
             elif header.lower() == 'etag':
-                new_headers.append(('Etag', md5(json_data).hexdigest()))
+                new_headers.append(
+                    ('Etag', md5(json_data, usedforsecurity=False)
+                     .hexdigest()))
             else:
                 new_headers.append((header, value))
         self._response_headers = new_headers
@@ -965,7 +966,7 @@ class SloGetContext(WSGIContext):
         # Prep to calculate content_length & etag if necessary
         if slo_etag is None:
-            calculated_etag = md5()
+            calculated_etag = md5(usedforsecurity=False)
         if content_length is None:
             calculated_content_length = 0
@@ -977,7 +978,8 @@ class SloGetContext(WSGIContext):
             if slo_etag is None:
                 if 'raw_data' in seg_dict:
-                    r = md5(seg_dict['raw_data']).hexdigest()
+                    r = md5(seg_dict['raw_data'],
+                            usedforsecurity=False).hexdigest()
                 elif seg_dict.get('range'):
                     r = '%s:%s;' % (seg_dict['hash'], seg_dict['range'])
                 else:
@@ -1347,11 +1349,11 @@ class StaticLargeObject(object):
                     out_content_type, resp_dict,
                    problem_segments, 'upload')
                return
-        slo_etag = md5()
+        slo_etag = md5(usedforsecurity=False)
        for seg_data in data_for_storage:
            if 'data' in seg_data:
                raw_data = base64.b64decode(seg_data['data'])
-                r = md5(raw_data).hexdigest()
+                r = md5(raw_data, usedforsecurity=False).hexdigest()
            elif seg_data.get('range'):
                r = '%s:%s;' % (seg_data['hash'], seg_data['range'])
            else:
@@ -1386,7 +1388,7 @@ class StaticLargeObject(object):
            SYSMETA_SLO_ETAG: slo_etag,
            SYSMETA_SLO_SIZE: total_size,
            'X-Static-Large-Object': 'True',
-            'Etag': md5(json_data).hexdigest(),
+            'Etag': md5(json_data, usedforsecurity=False).hexdigest(),
        })
        # Ensure container listings have both etags. However, if any
diff --git a/swift/common/request_helpers.py b/swift/common/request_helpers.py
index 0bf4196543..8b3940fe54 100644
--- a/swift/common/request_helpers.py
+++ b/swift/common/request_helpers.py
@@ -20,7 +20,6 @@ Why not swift.common.utils, you ask? Because this way we can import things
 from swob in here without creating circular imports.
 """
-import hashlib
 import itertools
 import sys
 import time
@@ -40,7 +39,7 @@ from swift.common.utils import split_path, validate_device_partition, \
     close_if_possible, maybe_multipart_byteranges_to_document_iters, \
     multipart_byteranges_to_document_iters, parse_content_type, \
     parse_content_range, csv_append, list_from_csv, Spliterator, quote, \
-    RESERVED, config_true_value
+    RESERVED, config_true_value, md5
 from swift.common.wsgi import make_subrequest
@@ -604,7 +603,7 @@ class SegmentedIterable(object):
            seg_hash = None
            if seg_resp.etag and not seg_req.headers.get('Range'):
                # Only calculate the MD5 if it we can use it to validate
-                seg_hash = hashlib.md5()
+                seg_hash = md5(usedforsecurity=False)
            document_iters = maybe_multipart_byteranges_to_document_iters(
                seg_resp.app_iter,
diff --git a/swift/common/ring/ring.py b/swift/common/ring/ring.py
index 43eac5df8f..99370fd284 100644
--- a/swift/common/ring/ring.py
+++ b/swift/common/ring/ring.py
@@ -22,7 +22,6 @@ from os.path import getmtime
 import struct
 from time import time
 import os
-from hashlib import md5
 from itertools import chain, count
 from tempfile import NamedTemporaryFile
 import sys
@@ -32,7 +31,7 @@ import six
 from six.moves import range
 from swift.common.exceptions import RingLoadError
-from swift.common.utils import hash_path, validate_configuration
+from swift.common.utils import hash_path, validate_configuration, md5
 from swift.common.ring.utils import tiers_for_dev
@@ -53,7 +52,7 @@ class RingReader(object):
        self._buffer = b''
        self.size = 0
        self.raw_size = 0
-        self._md5 = md5()
+        self._md5 = md5(usedforsecurity=False)
        self._decomp = zlib.decompressobj(32 + zlib.MAX_WBITS)
    @property
@@ -538,7 +537,8 @@ class Ring(object):
            (d['region'], d['zone'], d['ip']) for d in primary_nodes)
        parts = len(self._replica2part2dev_id[0])
-        part_hash = md5(str(part).encode('ascii')).digest()
+        part_hash = md5(str(part).encode('ascii'),
+                        usedforsecurity=False).digest()
        start = struct.unpack_from('>I', part_hash)[0] >> self._part_shift
        inc = int(parts / 65536) or 1
        # Multiple loops for execution speed; the checks and bookkeeping get
diff --git a/swift/common/utils.py b/swift/common/utils.py
index ee61835eb9..908096e00d 100644
--- a/swift/common/utils.py
+++ b/swift/common/utils.py
@@ -40,7 +40,7 @@ import uuid
 import functools
 import platform
 import email.parser
-from hashlib import md5, sha1
+from hashlib import sha1
 from random import random, shuffle
 from contextlib import contextmanager, closing
 import ctypes
@@ -674,7 +674,10 @@ class StrAnonymizer(str):
        if not self:
            return self
        else:
-            h = getattr(hashlib, self.method)()
+            if self.method == 'md5':
+                h = md5(usedforsecurity=False)
+            else:
+                h = getattr(hashlib, self.method)()
            if self.salt:
                h.update(six.b(self.salt))
            h.update(six.b(self))
@@ -2735,10 +2738,10 @@ def hash_path(account, container=None, object=None, raw_digest=False):
                     else object.encode('utf8'))
    if raw_digest:
        return md5(HASH_PATH_PREFIX + b'/' + b'/'.join(paths)
-                   + HASH_PATH_SUFFIX).digest()
+                   + HASH_PATH_SUFFIX, usedforsecurity=False).digest()
    else:
        return md5(HASH_PATH_PREFIX + b'/' + b'/'.join(paths)
-                   + HASH_PATH_SUFFIX).hexdigest()
+                   + HASH_PATH_SUFFIX, usedforsecurity=False).hexdigest()
 def get_zero_indexed_base_string(base, index):
@@ -4858,6 +4861,31 @@ def get_md5_socket():
     return md5_sockfd
+try:
+    _test_md5 = hashlib.md5(usedforsecurity=False)  # nosec
+
+    def md5(string=b'', usedforsecurity=True):
+        """Return an md5 hashlib object using usedforsecurity parameter
+
+        For python distributions that support the usedforsecurity keyword
+        parameter, this passes the parameter through as expected.
+        See https://bugs.python.org/issue9216
+        """
+        return hashlib.md5(string, usedforsecurity=usedforsecurity)  # nosec
+except TypeError:
+    def md5(string=b'', usedforsecurity=True):
+        """Return an md5 hashlib object without usedforsecurity parameter
+
+        For python distributions that do not yet support this keyword
+        parameter, we drop the parameter
+        """
+        return hashlib.md5(string)  # nosec
+
+
+def md5_factory():
+    return md5(usedforsecurity=False)
+
+
 class ShardRange(object):
     """
     A ShardRange encapsulates sharding state related to a container including
@@ -4999,7 +5027,8 @@ class ShardRange(object):
        if not isinstance(parent_container, bytes):
            parent_container = parent_container.encode('utf-8')
        return "%s-%s-%s-%s" % (root_container,
-                                hashlib.md5(parent_container).hexdigest(),
+                                md5(parent_container,
+                                    usedforsecurity=False).hexdigest(),
                                cls._to_timestamp(timestamp).internal,
                                index)
@@ -5583,7 +5612,7 @@ def md5_hash_for_file(fname):
    :returns: MD5 checksum, hex encoded
    """
    with open(fname, 'rb') as f:
-        md5sum = md5()
+        md5sum = md5(usedforsecurity=False)
        for block in iter(lambda: f.read(MD5_BLOCK_READ_BYTES), b''):
            md5sum.update(block)
    return md5sum.hexdigest()
diff --git a/swift/obj/diskfile.py b/swift/obj/diskfile.py
index a1287a2477..0bb157d866 100644
--- a/swift/obj/diskfile.py
+++ b/swift/obj/diskfile.py
@@ -40,7 +40,6 @@ import os
 import re
 import time
 import uuid
-from hashlib import md5
 import logging
 import traceback
 import xattr
@@ -66,7 +65,8 @@ from swift.common.utils import mkdirs, Timestamp, \
     config_true_value, listdir, split_path, remove_file, \
     get_md5_socket, F_SETPIPE_SZ, decode_timestamps, encode_timestamps, \
     MD5_OF_EMPTY_STRING, link_fd_to_path, \
-    O_TMPFILE, makedirs_count, replace_partition_in_path, remove_directory
+    O_TMPFILE, makedirs_count, replace_partition_in_path, remove_directory, \
+    md5, md5_factory
 from swift.common.splice import splice, tee
 from swift.common.exceptions import DiskFileQuarantined, DiskFileNotExist, \
     DiskFileCollision, DiskFileNoSpace, DiskFileDeviceUnavailable, \
@@ -222,14 +222,16 @@ def read_metadata(fd, add_missing_checksum=False):
        # exist. This is fine; it just means that this object predates the
        # introduction of metadata checksums.
        if add_missing_checksum:
-            new_checksum = md5(metadata).hexdigest().encode('ascii')
+            new_checksum = (md5(metadata, usedforsecurity=False)
+                            .hexdigest().encode('ascii'))
            try:
                xattr.setxattr(fd, METADATA_CHECKSUM_KEY, new_checksum)
            except (IOError, OSError) as e:
                logging.error("Error adding metadata: %s" % e)
    if metadata_checksum:
-        computed_checksum = md5(metadata).hexdigest().encode('ascii')
+        computed_checksum = (md5(metadata, usedforsecurity=False)
+                             .hexdigest().encode('ascii'))
        if metadata_checksum != computed_checksum:
            raise DiskFileBadMetadataChecksum(
                "Metadata checksum mismatch for %s: "
@@ -254,7 +256,8 @@ def write_metadata(fd, metadata, xattr_size=65536):
    :param metadata: metadata to write
    """
    metastr = pickle.dumps(_encode_metadata(metadata), PICKLE_PROTOCOL)
-    metastr_md5 = md5(metastr).hexdigest().encode('ascii')
+    metastr_md5 = (
+        md5(metastr, usedforsecurity=False).hexdigest().encode('ascii'))
    key = 0
    try:
        while metastr:
@@ -1113,11 +1116,11 @@ class BaseDiskFileManager(object):
        :param policy: storage policy used
        """
        if six.PY2:
-            hashes = defaultdict(md5)
+            hashes = defaultdict(md5_factory)
        else:
            class shim(object):
                def __init__(self):
-                    self.md5 = md5()
+                    self.md5 = md5(usedforsecurity=False)
                def update(self, s):
                    if isinstance(s, str):
@@ -1686,7 +1689,7 @@ class BaseDiskFileWriter(object):
        self._fd = None
        self._tmppath = None
        self._size = size
-        self._chunks_etag = md5()
+        self._chunks_etag = md5(usedforsecurity=False)
        self._bytes_per_sync = bytes_per_sync
        self._diskfile = diskfile
        self.next_part_power = next_part_power
@@ -2003,7 +2006,7 @@ class BaseDiskFileReader(object):
    def _init_checks(self):
        if self._fp.tell() == 0:
            self._started_at_0 = True
-            self._iter_etag = md5()
+            self._iter_etag = md5(usedforsecurity=False)
    def _update_checks(self, chunk):
        if self._iter_etag:
diff --git a/swift/obj/expirer.py b/swift/obj/expirer.py
index 6f506b1d0f..1c8f9bb958 100644
--- a/swift/obj/expirer.py
+++ b/swift/obj/expirer.py
@@ -20,7 +20,6 @@ from time import time
 from os.path import join
 from swift import gettext_ as _
 from collections import defaultdict, deque
-import hashlib
 from eventlet import sleep, Timeout
 from eventlet.greenpool import GreenPool
@@ -30,7 +29,7 @@ from swift.common.daemon import Daemon
 from swift.common.internal_client import InternalClient, UnexpectedResponse
 from swift.common.utils import get_logger, dump_recon_cache, split_path, \
     Timestamp, config_true_value, normalize_delete_at_timestamp, \
-    RateLimitedIterator
+    RateLimitedIterator, md5
 from swift.common.http import HTTP_NOT_FOUND, HTTP_CONFLICT, \
     HTTP_PRECONDITION_FAILED
 from swift.common.swob import wsgi_quote, str_to_wsgi
@@ -218,7 +217,8 @@ class ObjectExpirer(Daemon):
        if not isinstance(name, bytes):
            name = name.encode('utf8')
        # md5 is only used for shuffling mod
-        return int(hashlib.md5(name).hexdigest(), 16) % divisor
+        return int(md5(
+            name, usedforsecurity=False).hexdigest(), 16) % divisor
    def iter_task_accounts_to_expire(self):
        """
diff --git a/swift/obj/mem_diskfile.py b/swift/obj/mem_diskfile.py
index 6fdf33d7cf..fa72372fe1 100644
--- a/swift/obj/mem_diskfile.py
+++ b/swift/obj/mem_diskfile.py
@@ -17,7 +17,6 @@
 import io
 import time
-import hashlib
 from contextlib import contextmanager
 from eventlet import Timeout
@@ -27,6 +26,7 @@ from swift.common.exceptions import DiskFileQuarantined, DiskFileNotExist, \
     DiskFileCollision, DiskFileDeleted, DiskFileNotOpen
 from swift.common.request_helpers import is_sys_meta
 from swift.common.swob import multi_range_iterator
+from swift.common.utils import md5
 from swift.obj.diskfile import DATAFILE_SYSTEM_META, RESERVED_DATAFILE_META
@@ -103,7 +103,7 @@ class DiskFileWriter(object):
        self._name = name
        self._fp = None
        self._upload_size = 0
-        self._chunks_etag = hashlib.md5()
+        self._chunks_etag = md5(usedforsecurity=False)
    def open(self):
        """
@@ -197,7 +197,7 @@ class DiskFileReader(object):
        self._read_to_eof = False
        if self._fp.tell() == 0:
            self._started_at_0 = True
-            self._iter_etag = hashlib.md5()
+            self._iter_etag = md5(usedforsecurity=False)
        while True:
            chunk = self._fp.read()
            if chunk:
diff --git a/swift/obj/server.py b/swift/obj/server.py
index 73d9205020..52d6b139e9 100644
--- a/swift/obj/server.py
+++ b/swift/obj/server.py
@@ -26,7 +26,6 @@ import traceback
 import socket
 import math
 from swift import gettext_ as _
-from hashlib import md5
 from eventlet import sleep, wsgi, Timeout, tpool
 from eventlet.greenthread import spawn
@@ -37,7 +36,7 @@ from swift.common.utils import public, get_logger, \
     get_expirer_container, parse_mime_headers, \
     iter_multipart_mime_documents, extract_swift_bytes, safe_json_loads, \
     config_auto_int_value, split_path, get_redirect_data, \
-    normalize_timestamp
+    normalize_timestamp, md5
 from swift.common.bufferedhttp import http_connect
 from swift.common.constraints import check_object_creation, \
     valid_timestamp, check_utf8, AUTO_CREATE_ACCOUNT_PREFIX
@@ -583,7 +582,7 @@ class ObjectController(BaseStorageServer):
        footer_md5 = footer_hdrs.get('Content-MD5')
        if not footer_md5:
            raise HTTPBadRequest(body="no Content-MD5 in footer")
-        if footer_md5 != md5(footer_body).hexdigest():
+        if footer_md5 != md5(footer_body, usedforsecurity=False).hexdigest():
            raise HTTPUnprocessableEntity(body="footer MD5 mismatch")
        try:
diff --git a/swift/proxy/controllers/obj.py b/swift/proxy/controllers/obj.py
index 67b9a8b760..549245a44c 100644
--- a/swift/proxy/controllers/obj.py
+++ b/swift/proxy/controllers/obj.py
@@ -35,7 +35,6 @@ import mimetypes
 import time
 import math
 import random
-from hashlib import md5
 import sys
 from greenlet import GreenletExit
@@ -49,7 +48,8 @@ from swift.common.utils import (
     GreenAsyncPile, GreenthreadSafeIterator, Timestamp, WatchdogTimeout,
     normalize_delete_at_timestamp, public, get_expirer_container,
     document_iters_to_http_response_body, parse_content_range,
-    quorum_size, reiterate, close_if_possible, safe_json_loads)
+    quorum_size, reiterate, close_if_possible, safe_json_loads, md5,
+    md5_factory)
 from swift.common.bufferedhttp import http_connect
 from swift.common.constraints import check_metadata, check_object_creation
 from swift.common import constraints
@@ -1784,7 +1784,8 @@ class MIMEPutter(Putter):
            self._start_object_data()
        footer_body = json.dumps(footer_metadata).encode('ascii')
-        footer_md5 = md5(footer_body).hexdigest().encode('ascii')
+        footer_md5 = md5(
+            footer_body, usedforsecurity=False).hexdigest().encode('ascii')
        tail_boundary = (b"--%s" % (self.mime_boundary,))
        if not self.multiphase:
@@ -3178,7 +3179,7 @@ class ECObjectController(BaseObjectController):
        bytes_transferred = 0
        chunk_transform = chunk_transformer(policy)
        chunk_transform.send(None)
-        frag_hashers = collections.defaultdict(md5)
+        frag_hashers = collections.defaultdict(md5_factory)
        def send_chunk(chunk):
            # Note: there's two different hashers in here. etag_hasher is
@@ -3411,7 +3412,7 @@ class ECObjectController(BaseObjectController):
        # the same as the request body sent proxy -> object, we
        # can't rely on the object-server to do the etag checking - so we have to do it here.
-        etag_hasher = md5()
+        etag_hasher = md5(usedforsecurity=False)
        min_conns = policy.quorum
        putters = self._get_put_connections(
diff --git a/test/functional/s3api/test_multi_upload.py b/test/functional/s3api/test_multi_upload.py
index b2aa006a28..3319e8b1ac 100644
--- a/test/functional/s3api/test_multi_upload.py
+++ b/test/functional/s3api/test_multi_upload.py
@@ -23,13 +23,13 @@ import boto
 # pylint: disable-msg=E0611,F0401
 from distutils.version import StrictVersion
-from hashlib import md5
 from six.moves import zip, zip_longest
 import test.functional as tf
 from swift.common.middleware.s3api.etree import fromstring, tostring, \
     Element, SubElement
 from swift.common.middleware.s3api.utils import mktime
+from swift.common.utils import md5
 from test.functional.s3api import S3ApiBase
 from test.functional.s3api.s3_test_client import Connection
@@ -180,7 +180,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
        # Upload Part
        key, upload_id = uploads[0]
        content = b'a' * self.min_segment_size
-        etag = md5(content).hexdigest()
+        etag = md5(content, usedforsecurity=False).hexdigest()
        status, headers, body = \
            self._upload_part(bucket, key, upload_id, content)
        self.assertEqual(status, 200)
@@ -196,7 +196,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
        src_bucket = 'bucket2'
        src_obj = 'obj3'
        src_content = b'b' * self.min_segment_size
-        etag = md5(src_content).hexdigest()
+        etag = md5(src_content, usedforsecurity=False).hexdigest()
        # prepare src obj
        self.conn.make_request('PUT', src_bucket)
@@ -312,7 +312,8 @@ class TestS3ApiMultiUpload(S3ApiBase):
        concatted_etags = b''.join(
            etag.strip('"').encode('ascii') for etag in etags)
        exp_etag = '"%s-%s"' % (
-            md5(binascii.unhexlify(concatted_etags)).hexdigest(), len(etags))
+            md5(binascii.unhexlify(concatted_etags),
+                usedforsecurity=False).hexdigest(), len(etags))
        etag = elem.find('ETag').text
        self.assertEqual(etag, exp_etag)
@@ -324,7 +325,8 @@ class TestS3ApiMultiUpload(S3ApiBase):
        self.assertEqual(headers['content-type'], 'foo/bar')
        self.assertEqual(headers['x-amz-meta-baz'], 'quux')
-        swift_etag = '"%s"' % md5(concatted_etags).hexdigest()
+        swift_etag = '"%s"' % md5(
+            concatted_etags, usedforsecurity=False).hexdigest()
        # TODO: GET via swift api, check against swift_etag
        # Should be safe to retry
@@ -375,7 +377,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
        self.assertIsNotNone(last_modified)
        exp_content = b'a' * self.min_segment_size
-        etag = md5(exp_content).hexdigest()
+        etag = md5(exp_content, usedforsecurity=False).hexdigest()
        self.assertEqual(resp_etag, etag)
        # Also check that the etag is correct in part listings
@@ -858,7 +860,9 @@ class TestS3ApiMultiUpload(S3ApiBase):
        src_content = b'y' * (self.min_segment_size // 2) + b'z' * \
            self.min_segment_size
        src_range = 'bytes=0-%d' % (self.min_segment_size - 1)
-        etag = md5(src_content[:self.min_segment_size]).hexdigest()
+        etag = md5(
+            src_content[:self.min_segment_size],
+            usedforsecurity=False).hexdigest()
        # prepare src obj
        self.conn.make_request('PUT', src_bucket)
@@ -951,7 +955,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
        src_obj = 'obj4'
        src_content = b'y' * (self.min_segment_size // 2) + b'z' * \
            self.min_segment_size
-        etags = [md5(src_content).hexdigest()]
+        etags = [md5(src_content, usedforsecurity=False).hexdigest()]
        # prepare null-version src obj
        self.conn.make_request('PUT', src_bucket)
@@ -969,7 +973,7 @@ class TestS3ApiMultiUpload(S3ApiBase):
        src_obj2 = 'obj5'
        src_content2 = b'stub'
-        etags.append(md5(src_content2).hexdigest())
+        etags.append(md5(src_content2, usedforsecurity=False).hexdigest())
        # prepare src obj w/ real version
        self.conn.make_request('PUT', src_bucket, src_obj2, body=src_content2)
@@ -1098,7 +1102,7 @@ class TestS3ApiMultiUploadSigV4(TestS3ApiMultiUpload):
        # Complete Multipart Upload
        key, upload_id = uploads[0]
-        etags = [md5(content).hexdigest()]
+        etags = [md5(content, usedforsecurity=False).hexdigest()]
        xml = self._gen_comp_xml(etags)
        status, headers, body = \
            self._complete_multi_upload(bucket, key, upload_id, xml)
diff --git a/test/functional/s3api/test_object.py b/test/functional/s3api/test_object.py
index 8079c157bd..6e09c3af52 100644
--- a/test/functional/s3api/test_object.py
+++ b/test/functional/s3api/test_object.py
@@ -24,13 +24,13 @@ from distutils.version import StrictVersion
 import email.parser
 from email.utils import formatdate, parsedate
 from time import mktime
-from hashlib import md5
 import six
 from six.moves.urllib.parse import quote
 import test.functional as tf
 from swift.common.middleware.s3api.etree import fromstring
+from swift.common.utils import md5
 from test.functional.s3api import S3ApiBase
 from test.functional.s3api.s3_test_client import Connection
@@ -61,7 +61,7 @@ class TestS3ApiObject(S3ApiBase):
    def test_object(self):
        obj = 'object name with %-sign'
        content = b'abc123'
-        etag = md5(content).hexdigest()
+        etag = md5(content, usedforsecurity=False).hexdigest()
        # PUT Object
        status, headers, body = \
@@ -252,7 +252,7 @@ class TestS3ApiObject(S3ApiBase):
    def test_put_object_content_encoding(self):
        obj = 'object'
-        etag = md5().hexdigest()
+        etag = md5(usedforsecurity=False).hexdigest()
        headers = {'Content-Encoding': 'gzip'}
        status, headers, body = \
            self.conn.make_request('PUT', self.bucket, obj, headers)
@@ -267,7 +267,7 @@ class TestS3ApiObject(S3ApiBase):
    def test_put_object_content_md5(self):
        obj = 'object'
        content = b'abcdefghij'
-        etag = md5(content).hexdigest()
+        etag = md5(content, usedforsecurity=False).hexdigest()
        headers = {'Content-MD5': calculate_md5(content)}
        status, headers, body = \
            self.conn.make_request('PUT', self.bucket, obj, headers, content)
@@ -278,7 +278,7 @@ class TestS3ApiObject(S3ApiBase):
    def test_put_object_content_type(self):
        obj = 'object'
        content = b'abcdefghij'
-        etag = md5(content).hexdigest()
+        etag = md5(content, usedforsecurity=False).hexdigest()
        headers = {'Content-Type': 'text/plain'}
        status, headers, body = \
            self.conn.make_request('PUT', self.bucket, obj, headers, content)
@@ -320,7 +320,7 @@ class TestS3ApiObject(S3ApiBase):
    def test_put_object_expect(self):
        obj = 'object'
        content = b'abcdefghij'
-        etag = md5(content).hexdigest()
+        etag = md5(content, usedforsecurity=False).hexdigest()
        headers = {'Expect': '100-continue'}
        status, headers, body = \
            self.conn.make_request('PUT', self.bucket, obj, headers, content)
@@ -333,7 +333,7 @@ class TestS3ApiObject(S3ApiBase):
        expected_headers = req_headers
        obj = 'object'
        content = b'abcdefghij'
-        etag = md5(content).hexdigest()
+        etag = md5(content, usedforsecurity=False).hexdigest()
        status, headers, body = \
            self.conn.make_request('PUT', self.bucket, obj,
                                   req_headers, content)
@@ -389,7 +389,7 @@ class TestS3ApiObject(S3ApiBase):
    def test_put_object_storage_class(self):
        obj = 'object'
        content = b'abcdefghij'
-        etag = md5(content).hexdigest()
+        etag = md5(content, usedforsecurity=False).hexdigest()
        headers = {'X-Amz-Storage-Class': 'STANDARD'}
        status, headers, body = \
            self.conn.make_request('PUT', self.bucket, obj, headers, content)
@@ -435,7 +435,7 @@ class TestS3ApiObject(S3ApiBase):
    def test_put_object_copy_source(self):
        obj = 'object'
        content = b'abcdefghij'
-        etag = md5(content).hexdigest()
+        etag = md5(content, usedforsecurity=False).hexdigest()
        self.conn.make_request('PUT', self.bucket, obj, body=content)
        dst_bucket = 'dst-bucket'
@@ -521,7 +521,7 @@ class TestS3ApiObject(S3ApiBase):
        obj = 'object'
        dst_bucket = 'dst-bucket'
        dst_obj = 'dst_object'
-        etag = md5().hexdigest()
+        etag = md5(usedforsecurity=False).hexdigest()
        self.conn.make_request('PUT', self.bucket, obj)
        self.conn.make_request('PUT', dst_bucket)
@@ -541,7 +541,7 @@ class TestS3ApiObject(S3ApiBase):
        obj = 'object'
        dst_bucket = 'dst-bucket'
        dst_obj = 'dst_object'
-        etag = md5().hexdigest()
+        etag = md5(usedforsecurity=False).hexdigest()
        self.conn.make_request('PUT', self.bucket, obj)
        self.conn.make_request('PUT', dst_bucket)
@@ -561,7 +561,7 @@ class TestS3ApiObject(S3ApiBase):
        obj = 'object'
        dst_bucket = 'dst-bucket'
        dst_obj = 'dst_object'
-        etag = md5().hexdigest()
+        etag = md5(usedforsecurity=False).hexdigest()
        self.conn.make_request('PUT', self.bucket, obj)
        self.conn.make_request('PUT', dst_bucket)
@@ -580,7 +580,7 @@ class TestS3ApiObject(S3ApiBase):
        obj = 'object'
        dst_bucket = 'dst-bucket'
        dst_obj = 'dst_object'
-        etag = md5().hexdigest()
+        etag = md5(usedforsecurity=False).hexdigest()
        self.conn.make_request('PUT', self.bucket, obj)
        self.conn.make_request('PUT', dst_bucket)
diff --git a/test/functional/s3api/utils.py b/test/functional/s3api/utils.py
index 5a979fdcb8..e8fde39541 100644
--- a/test/functional/s3api/utils.py
+++ b/test/functional/s3api/utils.py
@@ -14,8 +14,8 @@
 # limitations under the License.
 from base64 import b64encode
-from hashlib import md5
 from swift.common.middleware.s3api.etree import fromstring
+from swift.common.utils import md5
 def get_error_code(body):
@@ -29,4 +29,5 @@ def get_error_msg(body):
 def calculate_md5(body):
-    return b64encode(md5(body).digest()).strip().decode('ascii')
+    return b64encode(
+        md5(body, usedforsecurity=False).digest()).strip().decode('ascii')
diff --git a/test/functional/swift_test_client.py b/test/functional/swift_test_client.py
index b5ae465612..7b119d8a0a 100644
--- a/test/functional/swift_test_client.py
+++ b/test/functional/swift_test_client.py
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import hashlib
 import io
 import json
 import os
@@ -33,7 +32,7 @@ from swiftclient import get_auth
 from swift.common import constraints
 from swift.common.http import is_success
 from swift.common.swob import str_to_wsgi, wsgi_to_str
-from swift.common.utils import config_true_value
+from swift.common.utils import config_true_value, md5
 from test import safe_repr
@@ -851,7 +850,7 @@ class File(Base):
        if isinstance(data, bytes):
            data = io.BytesIO(data)
-        checksum = hashlib.md5()
+        checksum = md5(usedforsecurity=False)
        buff = data.read(block_size)
        while buff:
            checksum.update(buff)
@@ -1058,7 +1057,7 @@ class File(Base):
            raise ResponseError(self.conn.response, 'GET',
                                self.conn.make_path(self.path))
-        checksum = hashlib.md5()
+        checksum = md5(usedforsecurity=False)
        scratch = self.conn.response.read(8192)
        while len(scratch) > 0:
diff --git a/test/functional/test_object.py b/test/functional/test_object.py
index 6145d4a98e..768de19c05 100644
--- a/test/functional/test_object.py
+++ b/test/functional/test_object.py
@@ -16,7 +16,6 @@
 # limitations under the License.
 import datetime
-import hashlib
 import json
 import unittest
 from uuid import uuid4
@@ -29,6 +28,7 @@ from six.moves import range
 from test.functional import check_response, retry, requires_acls, \
     requires_policies, SkipTest, requires_bulk
 import test.functional as tf
+from swift.common.utils import md5
 def setUpModule():
@@ -1741,7 +1741,7 @@ class TestObject(unittest.TestCase):
        expect_quoted = tf.cluster_info.get('etag_quoter', {}).get(
            'enable_by_default', False)
-        expected_etag = hashlib.md5(b'test').hexdigest()
+        expected_etag = md5(b'test', usedforsecurity=False).hexdigest()
        if expect_quoted:
            expected_etag = '"%s"' % expected_etag
        self.assertEqual(resp.headers['etag'], expected_etag)
diff --git a/test/functional/test_object_versioning.py b/test/functional/test_object_versioning.py
index d7db187c08..a6ac9027d4 100644
--- a/test/functional/test_object_versioning.py
+++ b/test/functional/test_object_versioning.py
@@ -21,13 +21,12 @@ import time
 import six
 from copy import deepcopy
-from hashlib import md5
 from six.moves.urllib.parse import quote, unquote
 import test.functional as tf
 from swift.common.swob import normalize_etag
-from swift.common.utils import MD5_OF_EMPTY_STRING, config_true_value
+from swift.common.utils import MD5_OF_EMPTY_STRING, config_true_value, md5
 from swift.common.middleware.versioned_writes.object_versioning import \
     DELETE_MARKER_CONTENT_TYPE
@@ -338,7 +337,9 @@ class TestObjectVersioning(TestObjectVersioningBase):
        obj = self.env.unversioned_container.file(oname)
        resp = obj.write(body, return_resp=True)
        etag = resp.getheader('etag')
-        self.assertEqual(md5(body).hexdigest(), normalize_etag(etag))
+        self.assertEqual(
+            md5(body, usedforsecurity=False).hexdigest(),
+            normalize_etag(etag))
        # un-versioned object is cool with with if-match
        self.assertEqual(body, obj.read(hdrs={'if-match': etag}))
@@ -569,7 +570,7 @@ class TestObjectVersioning(TestObjectVersioningBase):
            'name': obj_name,
            'content_type': version['content_type'],
            'version_id': version['version_id'],
-            'hash': md5(version['body']).hexdigest(),
+            'hash': md5(version['body'], usedforsecurity=False).hexdigest(),
            'bytes': len(version['body'],)
        } for version in reversed(versions)]
        for item, is_latest in zip(expected, (True, False, False)):
@@ -1263,14 +1264,14 @@ class TestContainerOperations(TestObjectVersioningBase):
        # v1
        resp = obj.write(b'version1', hdrs={
            'Content-Type': 'text/jibberish11',
-            'ETag': md5(b'version1').hexdigest(),
+            'ETag': md5(b'version1', usedforsecurity=False).hexdigest(),
        }, return_resp=True)
        obj1_v1['id'] = resp.getheader('x-object-version-id')
        # v2
        resp = obj.write(b'version2', hdrs={
            'Content-Type': 'text/jibberish12',
-            'ETag': md5(b'version2').hexdigest(),
+            'ETag': md5(b'version2', usedforsecurity=False).hexdigest(),
        }, return_resp=True)
        obj1_v2 = {}
        obj1_v2['name'] = obj1_v1['name']
@@ -1279,7 +1280,7 @@ class TestContainerOperations(TestObjectVersioningBase):
        # v3
        resp = obj.write(b'version3', hdrs={
            'Content-Type': 'text/jibberish13',
-            'ETag': md5(b'version3').hexdigest(),
+            'ETag': md5(b'version3', usedforsecurity=False).hexdigest(),
        }, return_resp=True)
        obj1_v3 = {}
        obj1_v3['name'] = obj1_v1['name']
@@ -1333,20 +1334,20 @@ class TestContainerOperations(TestObjectVersioningBase):
        obj = self.env.unversioned_container.file(objs[0])
        obj.write(b'data', hdrs={
            'Content-Type': 'text/jibberish11',
-            'ETag': md5(b'data').hexdigest(),
+            'ETag': md5(b'data', usedforsecurity=False).hexdigest(),
        })
        obj.delete()
        obj = self.env.unversioned_container.file(objs[1])
        obj.write(b'first', hdrs={
            'Content-Type': 'text/blah-blah-blah',
-            'ETag': md5(b'first').hexdigest(),
+            'ETag': md5(b'first', usedforsecurity=False).hexdigest(),
        })
        obj = self.env.unversioned_container.file(objs[2])
        obj.write(b'second', hdrs={
            'Content-Type': 'text/plain',
-            'ETag': md5(b'second').hexdigest(),
+            'ETag': md5(b'second', usedforsecurity=False).hexdigest(),
        })
        return objs
@@ -1385,21 +1386,21 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj1_v3['name'],
            'bytes': 8,
            'content_type': 'text/jibberish13',
-            'hash': md5(b'version3').hexdigest(),
+            'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v3['id'],
        }, {
            'name': obj1_v2['name'],
            'bytes': 8,
            'content_type': 'text/jibberish12',
-            'hash': md5(b'version2').hexdigest(),
+            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v2['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish11',
-            'hash': md5(b'version1').hexdigest(),
+            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v1['id'],
        }])
@@ -1418,21 +1419,21 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish11',
-            'hash': md5(b'version1').hexdigest(),
+            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v1['id'],
        }, {
            'name': obj1_v2['name'],
            'bytes': 8,
            'content_type': 'text/jibberish12',
-            'hash': md5(b'version2').hexdigest(),
+            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v2['id'],
        }, {
            'name': obj1_v3['name'],
            'bytes': 8,
            'content_type': 'text/jibberish13',
-            'hash': md5(b'version3').hexdigest(),
+            'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v3['id'],
        }, {
@@ -1481,21 +1482,21 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj1_v3['name'],
            'bytes': 8,
            'content_type': 'text/jibberish13',
-            'hash': md5(b'version3').hexdigest(),
+            'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v3['id'],
        }, {
            'name': obj1_v2['name'],
            'bytes': 8,
            'content_type': 'text/jibberish12',
-            'hash': md5(b'version2').hexdigest(),
+            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v2['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish11',
-            'hash': md5(b'version1').hexdigest(),
+            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v1['id'],
        }])
@@ -1516,21 +1517,21 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish11',
-            'hash': md5(b'version1').hexdigest(),
+            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v1['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish12',
-            'hash': md5(b'version2').hexdigest(),
+            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v2['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish13',
-            'hash': md5(b'version3').hexdigest(),
+            'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v3['id'],
        }, {
@@ -1601,7 +1602,7 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj1_v3['name'],
            'bytes': 8,
            'content_type': 'text/jibberish13',
-            'hash': md5(b'version3').hexdigest(),
+            'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v3['id'],
        }])
@@ -1623,14 +1624,14 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj1_v2['name'],
            'bytes': 8,
            'content_type': 'text/jibberish12',
-            'hash': md5(b'version2').hexdigest(),
+            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v2['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish11',
-            'hash': md5(b'version1').hexdigest(),
+            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v1['id'],
        }])
@@ -1690,14 +1691,14 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj1_v2['name'],
            'bytes': 8,
            'content_type': 'text/jibberish12',
-            'hash': md5(b'version2').hexdigest(),
+            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v2['id'],
        }, {
            'name': obj1_v1['name'],
            'bytes': 8,
            'content_type': 'text/jibberish11',
-            'hash': md5(b'version1').hexdigest(),
+            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj1_v1['id'],
        }])
@@ -2052,7 +2053,7 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj2,
            'bytes': 5,
            'content_type': 'text/blah-blah-blah',
-            'hash': md5(b'first').hexdigest(),
+            'hash': md5(b'first', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': 'null',
        }
@@ -2060,7 +2061,7 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj3,
            'bytes': 6,
            'content_type': 'text/plain',
-            'hash': md5(b'second').hexdigest(),
+            'hash': md5(b'second', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': 'null',
        }
@@ -2112,14 +2113,14 @@ class TestContainerOperations(TestObjectVersioningBase):
        # v1
        resp = obj.write(b'version1', hdrs={
            'Content-Type': 'text/jibberish11',
-            'ETag': md5(b'version1').hexdigest(),
+            'ETag': md5(b'version1', usedforsecurity=False).hexdigest(),
        }, return_resp=True)
        obj_v1 = resp.getheader('x-object-version-id')
        # v2
        resp = obj.write(b'version2', hdrs={
            'Content-Type': 'text/jibberish12',
-            'ETag': md5(b'version2').hexdigest(),
+            'ETag': md5(b'version2', usedforsecurity=False).hexdigest(),
        }, return_resp=True)
        obj_v2 = resp.getheader('x-object-version-id')
@@ -2130,7 +2131,7 @@ class TestContainerOperations(TestObjectVersioningBase):
        resp = obj.write(b'version4', hdrs={
            'Content-Type': 'text/jibberish14',
-            'ETag': md5(b'version4').hexdigest(),
+            'ETag': md5(b'version4', usedforsecurity=False).hexdigest(),
        }, return_resp=True)
        obj_v4 = resp.getheader('x-object-version-id')
@@ -2143,7 +2144,7 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish14',
-            'hash': md5(b'version4').hexdigest(),
+            'hash': md5(b'version4', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': obj_v4,
        }, {
@@ -2157,14 +2158,14 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish12',
-            'hash': md5(b'version2').hexdigest(),
+            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj_v2,
        }, {
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish11',
-            'hash': md5(b'version1').hexdigest(),
+            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj_v1,
        }])
@@ -2175,7 +2176,7 @@ class TestContainerOperations(TestObjectVersioningBase):
        # v5 - non-versioned
        obj.write(b'version5', hdrs={
            'Content-Type': 'text/jibberish15',
-            'ETag': md5(b'version5').hexdigest(),
+            'ETag': md5(b'version5', usedforsecurity=False).hexdigest(),
        })
        listing_parms = {'format': 'json', 'versions': None}
@@ -2187,14 +2188,14 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish15',
-            'hash': md5(b'version5').hexdigest(),
+            'hash': md5(b'version5', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': 'null',
        }, {
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish14',
-            'hash': md5(b'version4').hexdigest(),
+            'hash': md5(b'version4', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj_v4,
        }, {
@@ -2208,14 +2209,14 @@ class TestContainerOperations(TestObjectVersioningBase):
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish12',
-            'hash': md5(b'version2').hexdigest(),
+            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj_v2,
        }, {
            'name': obj.name,
            'bytes': 8,
            'content_type': 'text/jibberish11',
-            'hash': md5(b'version1').hexdigest(),
+            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': False,
            'version_id': obj_v1,
        }])
@@ -2496,19 +2497,19 @@ class TestVersionsLocationWithVersioning(TestObjectVersioningBase):
        # v1
        obj.write(b'version1', hdrs={
            'Content-Type': 'text/jibberish11',
-            'ETag': md5(b'version1').hexdigest(),
+            'ETag': md5(b'version1', usedforsecurity=False).hexdigest(),
        })
        # v2
        obj.write(b'version2', hdrs={
            'Content-Type': 'text/jibberish12',
-            'ETag': md5(b'version2').hexdigest(),
+            'ETag': md5(b'version2', usedforsecurity=False).hexdigest(),
        })
        # v3
        obj.write(b'version3', hdrs={
            'Content-Type': 'text/jibberish13',
-            'ETag': md5(b'version3').hexdigest(),
+            'ETag': md5(b'version3', usedforsecurity=False).hexdigest(),
        })
        return obj
@@ -2526,7 +2527,7 @@ class TestVersionsLocationWithVersioning(TestObjectVersioningBase):
            'name': obj_name,
            'bytes': 8,
            'content_type': 'text/jibberish13',
-            'hash': md5(b'version3').hexdigest(),
+            'hash': md5(b'version3', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': 'null'
        }])
@@ -2543,13 +2544,13 @@ class TestVersionsLocationWithVersioning(TestObjectVersioningBase):
        self.assertEqual(prev_versions, [{
            'bytes': 8,
            'content_type': 'text/jibberish11',
-            'hash': md5(b'version1').hexdigest(),
+            'hash': md5(b'version1', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': 'null',
        }, {
            'bytes': 8,
            'content_type': 'text/jibberish12',
-            'hash': md5(b'version2').hexdigest(),
+            'hash': md5(b'version2', usedforsecurity=False).hexdigest(),
            'is_latest': True,
            'version_id': 'null'
        }])
diff --git a/test/functional/test_slo.py b/test/functional/test_slo.py
index 8003a2d70a..178e12680c 100644
--- a/test/functional/test_slo.py
+++ b/test/functional/test_slo.py
@@ -16,7 +16,6 @@
 import base64
 import email.parser
-import hashlib
 import itertools
 import json
 from copy import deepcopy
@@ -24,6 +23,7 @@ from copy import deepcopy
 import six
 from swift.common.swob import normalize_etag
+from swift.common.utils import md5
 import test.functional as tf
 from test.functional import cluster_info, SkipTest
@@ -124,9 +124,9 @@ class TestSloEnv(BaseEnv):
        cd_json = json.dumps([
            seg_info['seg_c'], seg_info['seg_d']]).encode('ascii')
        file_item.write(cd_json, parms={'multipart-manifest': 'put'})
-        cd_etag = hashlib.md5((
+        cd_etag = md5((
            seg_info['seg_c']['etag'] + seg_info['seg_d']['etag']
-        ).encode('ascii')).hexdigest()
+        ).encode('ascii'), usedforsecurity=False).hexdigest()
        file_item = cls.container.file("manifest-bcd-submanifest")
        file_item.write(
@@ -137,8 +137,9 @@ class TestSloEnv(BaseEnv):
             'path': '/%s/%s' % (cls.container.name,
                                 'manifest-cd')}]).encode('ascii'),
            parms={'multipart-manifest': 'put'})
-        bcd_submanifest_etag = hashlib.md5((
-            seg_info['seg_b']['etag'] + cd_etag).encode('ascii')).hexdigest()
+        bcd_submanifest_etag = md5((
+            seg_info['seg_b']['etag'] + cd_etag).encode('ascii'),
+            usedforsecurity=False).hexdigest()
        file_item = cls.container.file("manifest-abcde-submanifest")
        file_item.write(
@@ -152,9 +153,10 @@ class TestSloEnv(BaseEnv):
                                 'manifest-bcd-submanifest')},
             seg_info['seg_e']]).encode('ascii'),
            parms={'multipart-manifest': 'put'})
-        abcde_submanifest_etag = hashlib.md5((
+        abcde_submanifest_etag = md5((
            seg_info['seg_a']['etag'] + bcd_submanifest_etag +
-            seg_info['seg_e']['etag']).encode('ascii')).hexdigest()
+            seg_info['seg_e']['etag']).encode('ascii'),
+            usedforsecurity=False).hexdigest()
        abcde_submanifest_size = (seg_info['seg_a']['size_bytes'] +
                                  seg_info['seg_b']['size_bytes'] +
                                  seg_info['seg_c']['size_bytes'] +
@@ -180,11 +182,11 @@ class TestSloEnv(BaseEnv):
                                 'manifest-abcde-submanifest'),
             'range': '3145727-3145728'}]).encode('ascii'),  # 'cd'
            parms={'multipart-manifest': 'put'})
-        ranged_manifest_etag = hashlib.md5((
+        ranged_manifest_etag = md5((
            abcde_submanifest_etag + ':3145727-4194304;' +
            abcde_submanifest_etag + ':524288-1572863;' +
            abcde_submanifest_etag + ':3145727-3145728;'
-        ).encode('ascii')).hexdigest()
+        ).encode('ascii'), usedforsecurity=False).hexdigest()
        ranged_manifest_size = 2 * 1024 * 1024 + 4
        file_item = cls.container.file("ranged-submanifest")
@@ -263,12 +265,13 @@ class TestSlo(Base):
                "Expected slo_enabled to be True/False, got %r" %
                (self.env.slo_enabled,))
-        manifest_abcde_hash = hashlib.md5()
+        manifest_abcde_hash = md5(usedforsecurity=False)
        for letter in (b'a', b'b', b'c', b'd'):
-            manifest_abcde_hash.update(hashlib.md5(
-                letter * 1024 * 1024).hexdigest().encode('ascii'))
-        manifest_abcde_hash.update(hashlib.md5(
-            b'e').hexdigest().encode('ascii'))
+            manifest_abcde_hash.update(
+                md5(letter * 1024 * 1024, usedforsecurity=False)
+                .hexdigest().encode('ascii'))
+        manifest_abcde_hash.update(
+            md5(b'e', usedforsecurity=False).hexdigest().encode('ascii'))
        self.manifest_abcde_etag = manifest_abcde_hash.hexdigest()
    def test_slo_get_simple_manifest(self):
@@ -501,7 +504,7 @@ class TestSlo(Base):
    def test_slo_etag_is_quote_wrapped_hash_of_etags_submanifests(self):
        def hd(x):
-            return hashlib.md5(x).hexdigest().encode('ascii')
+            return md5(x, usedforsecurity=False).hexdigest().encode('ascii')
        expected_etag = hd(hd(b'a' * 1024 * 1024) +
                           hd(hd(b'b' * 1024 * 1024) +
@@ -534,7 +537,9 @@ class TestSlo(Base):
        file_item.write(
            json.dumps([{
                'size_bytes': 1024 * 1024 - 1,
-                'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
+                'etag': md5(
+                    b'a' * 1024 * 1024,
+                    usedforsecurity=False).hexdigest(),
                'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
            }]).encode('ascii'),
            parms={'multipart-manifest': 'put'})
@@ -549,7 +554,8 @@ class TestSlo(Base):
        file_item.write(
            json.dumps([{
                'size_bytes': 1024 * 1024,
-                'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
+                'etag': md5(b'a' * 1024 * 1024,
+                            usedforsecurity=False).hexdigest(),
                'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
            }]).encode('ascii'),
            parms={'multipart-manifest': 'put'},
@@ -559,8 +565,8 @@ class TestSlo(Base):
    def test_slo_client_etag(self):
        file_item = self.env.container.file("manifest-a-b-etag")
-        etag_a = hashlib.md5(b'a' * 1024 * 1024).hexdigest()
-        etag_b = hashlib.md5(b'b' * 1024 * 1024).hexdigest()
+        etag_a = md5(b'a' * 1024 * 1024, usedforsecurity=False).hexdigest()
+        etag_b = md5(b'b' * 1024 * 1024, usedforsecurity=False).hexdigest()
        file_item.write(
            json.dumps([{
                'size_bytes': 1024 * 1024,
@@ -571,7 +577,8 @@ class TestSlo(Base):
                'path': '/%s/%s' % (self.env.container.name, 'seg_b'),
            }]).encode('ascii'),
            parms={'multipart-manifest': 'put'},
-            hdrs={'Etag': hashlib.md5((etag_a + etag_b).encode()).hexdigest()})
+            hdrs={'Etag': md5((etag_a + etag_b).encode(),
+                              usedforsecurity=False).hexdigest()})
        self.assert_status(201)
    def test_slo_unspecified_etag(self):
@@ -590,7 +597,8 @@ class TestSlo(Base):
        file_item.write(
            json.dumps([{
                'size_bytes': None,
-                'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
+                'etag': md5(b'a' * 1024 * 1024,
+                            usedforsecurity=False).hexdigest(),
                'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
            }]).encode('ascii'),
            parms={'multipart-manifest': 'put'})
@@ -622,7 +630,8 @@ class TestSlo(Base):
        file_item = self.env.container.file("manifest-a-missing-size")
        file_item.write(
            json.dumps([{
-                'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
+                'etag': md5(b'a' * 1024 * 1024,
+                            usedforsecurity=False).hexdigest(),
                'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
            }]).encode('ascii'),
            parms={'multipart-manifest': 'put'})
@@ -642,7 +651,8 @@ class TestSlo(Base):
        try:
            file_item.write(
                json.dumps([{
-                    'teag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
+                    'teag': md5(b'a' * 1024 * 1024,
+                                usedforsecurity=False).hexdigest(),
                    'size_bytes': 1024 * 1024,
                    'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
                }]).encode('ascii'),
@@ -657,7 +667,8 @@ class TestSlo(Base):
        try:
            file_item.write(
                json.dumps([{
-                    'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
+                    'etag': md5(b'a' * 1024 * 1024,
+                                usedforsecurity=False).hexdigest(),
                    'siz_bytes': 1024 * 1024,
                    'path': '/%s/%s' % (self.env.container.name, 'seg_a'),
                }]).encode('ascii'),
@@ -673,13 +684,16 @@ class TestSlo(Base):
        file_item.write(
            json.dumps([
                {'size_bytes': 1024 * 1024,
-                 'etag': hashlib.md5(b'a' * 1024 * 1024).hexdigest(),
+                 'etag': md5(b'a' * 1024 * 1024,
+                             usedforsecurity=False).hexdigest(),
                 'path': '/%s/%s' % (self.env.container.name, 'seg_a')},
                {'size_bytes': 1024 * 1024,
-                 'etag': hashlib.md5(b'b' * 1024 * 1024).hexdigest(),
+                 'etag': md5(b'b' * 1024 * 1024,
+                             usedforsecurity=False).hexdigest(),
                 'path': '/%s/%s' % (self.env.container.name, 'seg_b')},
                {'size_bytes': 1024 * 1024,
-                 'etag': hashlib.md5(b'c' * 1024 * 1024).hexdigest(),
+                 'etag': md5(b'c' * 1024 * 1024,
+                             usedforsecurity=False).hexdigest(),
                 'path': '/%s/%s' % (self.env.container.name, 'seg_c')},
            ]).encode('ascii'),
            parms={'multipart-manifest': 'put'})
@@ -722,7 +736,7 @@ class TestSlo(Base):
        source.initialize(parms={'multipart-manifest': 'get'})
        source_contents = source.read(parms={'multipart-manifest': 'get'})
        source_json = json.loads(source_contents)
-        manifest_etag = hashlib.md5(source_contents).hexdigest()
+        manifest_etag = md5(source_contents, usedforsecurity=False).hexdigest()
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            manifest_etag = '"%s"' % manifest_etag
        self.assertEqual(manifest_etag, source.etag)
@@ -798,7 +812,7 @@ class TestSlo(Base):
        source.initialize(parms={'multipart-manifest': 'get'})
        source_contents = source.read(parms={'multipart-manifest': 'get'})
        source_json = json.loads(source_contents)
-        manifest_etag = hashlib.md5(source_contents).hexdigest()
+        manifest_etag = md5(source_contents, usedforsecurity=False).hexdigest()
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            manifest_etag = '"%s"' % manifest_etag
        self.assertEqual(manifest_etag, source.etag)
@@ -1091,16 +1105,18 @@ class TestSlo(Base):
        self.assertEqual(len(value), 2)
        self.assertEqual(value[0]['bytes'], 1024 * 1024)
-        self.assertEqual(value[0]['hash'],
-                         hashlib.md5(b'd' * 1024 * 1024).hexdigest())
+        self.assertEqual(
+            value[0]['hash'],
+            md5(b'd' * 1024 * 1024, usedforsecurity=False).hexdigest())
        expected_name = '/%s/seg_d' % self.env.container.name
        if six.PY2:
            expected_name = expected_name.decode("utf-8")
        self.assertEqual(value[0]['name'], expected_name)
        self.assertEqual(value[1]['bytes'], 1024 * 1024)
-        self.assertEqual(value[1]['hash'],
-                         hashlib.md5(b'b' * 1024 * 1024).hexdigest())
+        self.assertEqual(
+            value[1]['hash'],
+            md5(b'b' * 1024 * 1024, usedforsecurity=False).hexdigest())
        expected_name = '/%s/seg_b' % self.env.container.name
        if six.PY2:
            expected_name = expected_name.decode("utf-8")
@@ -1110,7 +1126,8 @@ class TestSlo(Base):
        manifest = self.env.container.file("manifest-db")
        got_body = manifest.read(parms={'multipart-manifest': 'get',
                                        'format': 'raw'})
-        self.assert_etag(hashlib.md5(got_body).hexdigest())
+        self.assert_etag(
+            md5(got_body, usedforsecurity=False).hexdigest())
        # raw format should have the actual manifest object content-type
        self.assertEqual('application/octet-stream', manifest.content_type)
@@ -1124,15 +1141,17 @@ class TestSlo(Base):
            set(value[0].keys()), set(('size_bytes', 'etag', 'path')))
        self.assertEqual(len(value), 2)
        self.assertEqual(value[0]['size_bytes'], 1024 * 1024)
-        self.assertEqual(value[0]['etag'],
-                         hashlib.md5(b'd' * 1024 * 1024).hexdigest())
+        self.assertEqual(
+            value[0]['etag'],
+            md5(b'd' * 1024 * 1024, usedforsecurity=False).hexdigest())
        expected_name = '/%s/seg_d' % self.env.container.name
        if six.PY2:
            expected_name = expected_name.decode("utf-8")
        self.assertEqual(value[0]['path'], expected_name)
        self.assertEqual(value[1]['size_bytes'], 1024 * 1024)
-        self.assertEqual(value[1]['etag'],
-                         hashlib.md5(b'b' * 1024 * 1024).hexdigest())
+        self.assertEqual(
+            value[1]['etag'],
+            md5(b'b' * 1024 * 1024, usedforsecurity=False).hexdigest())
        expected_name = '/%s/seg_b' % self.env.container.name
        if six.PY2:
            expected_name = expected_name.decode("utf-8")
diff --git a/test/functional/test_symlink.py b/test/functional/test_symlink.py
index 1b6ec820f9..372d2b3a83 100755
--- a/test/functional/test_symlink.py
+++ b/test/functional/test_symlink.py
@@ -26,7 +26,7 @@ from uuid import uuid4
 from swift.common.http import is_success
 from swift.common.swob import normalize_etag
-from swift.common.utils import json, MD5_OF_EMPTY_STRING
+from swift.common.utils import json, MD5_OF_EMPTY_STRING, md5
 from swift.common.middleware.slo import SloGetContext
 from test.functional import check_response, retry, requires_acls, \
     cluster_info, SkipTest
@@ -1798,11 +1798,13 @@ class TestSymlinkToSloSegments(Base):
            self.fail('Failed to find manifest file in container listing')
    def test_slo_etag_is_hash_of_etags(self):
-        expected_hash = hashlib.md5()
-        expected_hash.update(hashlib.md5(
-            b'a' * 1024 * 1024).hexdigest().encode('ascii'))
-        expected_hash.update(hashlib.md5(
-            b'b' * 1024 * 1024).hexdigest().encode('ascii'))
+        expected_hash = md5(usedforsecurity=False)
+        expected_hash.update((
+            md5(b'a' * 1024 * 1024, usedforsecurity=False)
+            .hexdigest().encode('ascii')))
+        expected_hash.update((
+            md5(b'b' * 1024 * 1024, usedforsecurity=False)
+            .hexdigest().encode('ascii')))
        expected_etag = expected_hash.hexdigest()
        file_item = self.env.container.file('manifest-linkto-ab')
@@ -1823,7 +1825,7 @@ class TestSymlinkToSloSegments(Base):
        source = self.env.container.file("manifest-linkto-ab")
        source_contents = source.read(parms={'multipart-manifest': 'get'})
        source_json = json.loads(source_contents)
-        manifest_etag = hashlib.md5(source_contents).hexdigest()
+        manifest_etag = md5(source_contents, usedforsecurity=False).hexdigest()
        if tf.cluster_info.get('etag_quoter', {}).get('enable_by_default'):
            manifest_etag = '"%s"' % manifest_etag
diff --git a/test/functional/tests.py b/test/functional/tests.py
index 51b4c663f8..ff3cf5c934 100644
--- a/test/functional/tests.py
+++ b/test/functional/tests.py
@@ -15,7 +15,6 @@
 # limitations under the License.
 from datetime import datetime
-import hashlib
 import io
 import locale
 import random
@@ -28,6 +27,7 @@ from copy import deepcopy
 import eventlet
 from swift.common.http import is_success, is_client_error
 from swift.common.swob import normalize_etag
+from swift.common.utils import md5
 from email.utils import parsedate
 if six.PY2:
@@ -1371,7 +1371,8 @@ class TestFile(Base):
            'x-object-meta-fruit': 'Banana',
            'accept-ranges': 'bytes',
            'content-type': 'application/test',
-            'etag': hashlib.md5(obj_data).hexdigest(),
+            'etag': md5(
+                obj_data, usedforsecurity=False).hexdigest(),
            'last-modified': mock.ANY,
            'date': mock.ANY,
            'x-delete-at': mock.ANY,
@@ -1538,7 +1539,7 @@ class TestFile(Base):
        self.assertTrue(dest_cont.create())
        expected_body = data[100:201]
-        expected_etag = hashlib.md5(expected_body)
+        expected_etag = md5(expected_body, usedforsecurity=False)
        # copy both from within and across containers
        for cont in (self.env.container, dest_cont):
            # copy both with and without initial slash
diff --git a/test/probe/common.py b/test/probe/common.py
index 03557843aa..9277198f7b 100644
--- a/test/probe/common.py
+++ b/test/probe/common.py
@@ -24,7 +24,6 @@ from textwrap import dedent
 from time import sleep, time
 from collections import defaultdict
 import unittest
-from hashlib import md5
 from uuid import uuid4
 import shutil
 from six.moves.http_client import HTTPConnection
@@ -34,7 +33,8 @@ from swiftclient import get_auth, head_account, client
 from swift.common import internal_client, direct_client
 from swift.common.direct_client import DirectClientException
 from swift.common.ring import Ring
-from swift.common.utils import readconf, renamer, rsync_module_interpolation
+from swift.common.utils import readconf, renamer, \
+    rsync_module_interpolation, md5
 from swift.common.manager import Manager
 from swift.common.storage_policy import POLICIES, EC_POLICY, REPL_POLICY
 from swift.obj.diskfile import get_data_dir
@@ -297,7 +297,7 @@ class Body(object):
    def __init__(self, total=3.5 * 2 ** 20):
        self.length = int(total)
-        self.hasher = md5()
+        self.hasher = md5(usedforsecurity=False)
        self.read_amount = 0
        self.chunk = uuid4().hex.encode('ascii') * 2 ** 10
        self.buff = b''
diff --git a/test/probe/test_container_merge_policy_index.py b/test/probe/test_container_merge_policy_index.py
index c8b1c6fcd7..4413805901 100644
--- a/test/probe/test_container_merge_policy_index.py
+++ b/test/probe/test_container_merge_policy_index.py
@@ -12,7 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from hashlib import md5 import time import uuid import random @@ -23,6 +22,7 @@ from swift.common.internal_client import InternalClient from swift.common import utils, direct_client from swift.common.storage_policy import POLICIES from swift.common.http import HTTP_NOT_FOUND +from swift.common.utils import md5 from swift.container.reconciler import MISPLACED_OBJECTS_ACCOUNT from test.probe.brain import BrainSplitter, InternalBrainSplitter from swift.common.request_helpers import get_reserved_name @@ -266,7 +266,7 @@ class TestContainerMergePolicyIndex(ReplProbeTest): part_name = self.get_object_name('manifest_part_%0.2d' % i) manifest_entry = { "path": "/%s/%s" % (self.container_name, part_name), - "etag": md5(body).hexdigest(), + "etag": md5(body, usedforsecurity=False).hexdigest(), "size_bytes": len(body), } self.brain.client.put_object(self.container_name, part_name, {}, diff --git a/test/probe/test_object_handoff.py b/test/probe/test_object_handoff.py index 568237a2ac..f4dc21ab3d 100644 --- a/test/probe/test_object_handoff.py +++ b/test/probe/test_object_handoff.py @@ -19,7 +19,6 @@ from __future__ import print_function from unittest import main from uuid import uuid4 import random -from hashlib import md5 from collections import defaultdict import os import socket @@ -30,6 +29,7 @@ from swiftclient import client from swift.common import direct_client from swift.common.exceptions import ClientException from swift.common.manager import Manager +from swift.common.utils import md5 from test.probe.common import (kill_server, start_server, ReplProbeTest, ECProbeTest, Body) @@ -373,7 +373,7 @@ class TestECObjectHandoff(ECProbeTest): container_name, object_name, resp_chunk_size=64 * 2 ** 10) - resp_checksum = md5() + resp_checksum = md5(usedforsecurity=False) for chunk in body: resp_checksum.update(chunk) return resp_checksum.hexdigest() diff --git a/test/probe/test_reconstructor_rebuild.py b/test/probe/test_reconstructor_rebuild.py index 37ffddacda..60fe89bf4e 100644 --- a/test/probe/test_reconstructor_rebuild.py +++ b/test/probe/test_reconstructor_rebuild.py @@ -17,7 +17,6 @@ import errno import json from contextlib import contextmanager -from hashlib import md5 import unittest import uuid import shutil @@ -27,6 +26,7 @@ import time import six from swift.common.direct_client import DirectClientException +from swift.common.utils import md5 from test.probe.common import ECProbeTest from swift.common import direct_client @@ -40,7 +40,7 @@ class Body(object): def __init__(self, total=3.5 * 2 ** 20): self.total = int(total) - self.hasher = md5() + self.hasher = md5(usedforsecurity=False) self.size = 0 self.chunk = b'test' * 16 * 2 ** 10 @@ -118,7 +118,7 @@ class TestReconstructorRebuild(ECProbeTest): self.container_name, self.object_name, resp_chunk_size=64 * 2 ** 10) - resp_checksum = md5() + resp_checksum = md5(usedforsecurity=False) for chunk in body: resp_checksum.update(chunk) return headers, resp_checksum.hexdigest() @@ -140,7 +140,7 @@ class TestReconstructorRebuild(ECProbeTest): headers, data = direct_client.direct_get_object( node, part, acc, con, obj, headers=req_headers, resp_chunk_size=64 * 2 ** 20) - hasher = md5() + hasher = md5(usedforsecurity=False) for chunk in data: hasher.update(chunk) return headers, hasher.hexdigest() diff --git a/test/probe/test_reconstructor_revert.py b/test/probe/test_reconstructor_revert.py index 572793ddb1..18bcb74d58 100644 --- a/test/probe/test_reconstructor_revert.py +++ b/test/probe/test_reconstructor_revert.py @@ -14,7 +14,6 @@ # See the License 
for the specific language governing permissions and # limitations under the License. -from hashlib import md5 import itertools import unittest import uuid @@ -27,6 +26,7 @@ from test.probe.common import ECProbeTest, Body from swift.common import direct_client from swift.common.storage_policy import EC_POLICY from swift.common.manager import Manager +from swift.common.utils import md5 from swift.obj import reconstructor from swiftclient import client @@ -49,7 +49,7 @@ class TestReconstructorRevert(ECProbeTest): self.container_name, self.object_name, resp_chunk_size=64 * 2 ** 10) - resp_checksum = md5() + resp_checksum = md5(usedforsecurity=False) for chunk in body: resp_checksum.update(chunk) return resp_checksum.hexdigest() @@ -60,7 +60,7 @@ class TestReconstructorRevert(ECProbeTest): node, part, self.account, self.container_name, self.object_name, headers=req_headers, resp_chunk_size=64 * 2 ** 20) - hasher = md5() + hasher = md5(usedforsecurity=False) for chunk in data: hasher.update(chunk) return hasher.hexdigest() diff --git a/test/probe/test_sharder.py b/test/probe/test_sharder.py index ae02d6546c..9b3c4c70bd 100644 --- a/test/probe/test_sharder.py +++ b/test/probe/test_sharder.py @@ -12,7 +12,6 @@ # implied. # See the License for the specific language governing permissions and # limitations under the License. -import hashlib import json import os import shutil @@ -27,7 +26,7 @@ from swift.common import direct_client, utils from swift.common.manager import Manager from swift.common.memcached import MemcacheRing from swift.common.utils import ShardRange, parse_db_filename, get_db_files, \ - quorum_size, config_true_value, Timestamp + quorum_size, config_true_value, Timestamp, md5 from swift.container.backend import ContainerBroker, UNSHARDED, SHARDING from swift.container.sharder import CleavingContext from swiftclient import client, get_auth, ClientException @@ -2082,7 +2081,8 @@ class TestContainerSharding(BaseTestContainerSharding): shard_broker.merge_items( [{'name': name, 'created_at': Timestamp.now().internal, 'size': 0, 'content_type': 'text/plain', - 'etag': hashlib.md5().hexdigest(), 'deleted': deleted, + 'etag': md5(usedforsecurity=False).hexdigest(), + 'deleted': deleted, 'storage_policy_index': shard_broker.storage_policy_index}]) return shard_nodes[0] diff --git a/test/s3api/test_versioning.py b/test/s3api/test_versioning.py index 7f2364dd5b..ec06594fa8 100644 --- a/test/s3api/test_versioning.py +++ b/test/s3api/test_versioning.py @@ -14,13 +14,13 @@ # limitations under the License. 
import time -import hashlib from collections import defaultdict from botocore.exceptions import ClientError import six from swift.common.header_key_dict import HeaderKeyDict +from swift.common.utils import md5 from test.s3api import BaseS3TestCase @@ -123,7 +123,7 @@ class TestObjectVersioning(BaseS3TestCase): def test_upload_fileobj_versioned(self): obj_data = self.create_name('some-data').encode('ascii') - obj_etag = hashlib.md5(obj_data).hexdigest() + obj_etag = md5(obj_data, usedforsecurity=False).hexdigest() obj_name = self.create_name('versioned-obj') self.client.upload_fileobj(six.BytesIO(obj_data), self.bucket_name, obj_name) @@ -157,7 +157,7 @@ class TestObjectVersioning(BaseS3TestCase): # overwrite the object new_obj_data = self.create_name('some-new-data').encode('ascii') - new_obj_etag = hashlib.md5(new_obj_data).hexdigest() + new_obj_etag = md5(new_obj_data, usedforsecurity=False).hexdigest() self.client.upload_fileobj(six.BytesIO(new_obj_data), self.bucket_name, obj_name) @@ -199,7 +199,7 @@ class TestObjectVersioning(BaseS3TestCase): obj_name = self.create_name('versioned-obj') for i in range(3): obj_data = self.create_name('some-data-%s' % i).encode('ascii') - etags.insert(0, hashlib.md5(obj_data).hexdigest()) + etags.insert(0, md5(obj_data, usedforsecurity=False).hexdigest()) self.client.upload_fileobj(six.BytesIO(obj_data), self.bucket_name, obj_name) @@ -319,7 +319,7 @@ class TestObjectVersioning(BaseS3TestCase): obj_name = self.create_name('versioned-obj') for i in range(3): obj_data = self.create_name('some-data-%s' % i).encode('ascii') - etags.insert(0, hashlib.md5(obj_data).hexdigest()) + etags.insert(0, md5(obj_data, usedforsecurity=False).hexdigest()) self.client.upload_fileobj(six.BytesIO(obj_data), self.bucket_name, obj_name) # and make a delete marker @@ -490,7 +490,7 @@ class TestObjectVersioning(BaseS3TestCase): for i in range(3): obj_data = self.create_name('some-data-%s' % i).encode('ascii') # TODO: pull etag from response instead - etags.insert(0, hashlib.md5(obj_data).hexdigest()) + etags.insert(0, md5(obj_data, usedforsecurity=False).hexdigest()) self.client.upload_fileobj( six.BytesIO(obj_data), self.bucket_name, obj_name) @@ -571,12 +571,14 @@ class TestObjectVersioning(BaseS3TestCase): etags = [] for i in range(3): obj_data = self.create_name('some-data-%s' % i).encode('ascii') - etags.insert(0, '"%s"' % hashlib.md5(obj_data).hexdigest()) + etags.insert(0, '"%s"' % md5( + obj_data, usedforsecurity=False).hexdigest()) self.client.upload_fileobj( six.BytesIO(obj_data), self.bucket_name, obj01_name) for i in range(3): obj_data = self.create_name('some-data-%s' % i).encode('ascii') - etags.insert(0, '"%s"' % hashlib.md5(obj_data).hexdigest()) + etags.insert(0, '"%s"' % md5( + obj_data, usedforsecurity=False).hexdigest()) self.client.upload_fileobj( six.BytesIO(obj_data), self.bucket_name, obj00_name) resp = self.client.list_object_versions(Bucket=self.bucket_name) @@ -653,7 +655,8 @@ class TestObjectVersioning(BaseS3TestCase): obj_name = self.create_name('versioned-obj') for i in range(3): obj_data = self.create_name('some-data-%s' % i).encode('ascii') - etags[obj_name].insert(0, hashlib.md5(obj_data).hexdigest()) + etags[obj_name].insert(0, md5( + obj_data, usedforsecurity=False).hexdigest()) self.client.upload_fileobj( six.BytesIO(obj_data), self.bucket_name, obj_name) @@ -708,7 +711,8 @@ class TestObjectVersioning(BaseS3TestCase): obj_name = self.create_name('versioned-obj') for i in range(3): obj_data = self.create_name('some-data-%s' % 
i).encode('ascii') - etags.insert(0, hashlib.md5(obj_data).hexdigest()) + etags.insert(0, md5( + obj_data, usedforsecurity=False).hexdigest()) self.client.upload_fileobj( six.BytesIO(obj_data), self.bucket_name, obj_name) diff --git a/test/unit/__init__.py b/test/unit/__init__.py index a9df23645b..0ac52e9b1d 100644 --- a/test/unit/__init__.py +++ b/test/unit/__init__.py @@ -23,7 +23,6 @@ import logging.handlers import sys from contextlib import contextmanager, closing from collections import defaultdict, Iterable -from hashlib import md5 import itertools from numbers import Number from tempfile import NamedTemporaryFile @@ -48,7 +47,7 @@ from six.moves.http_client import HTTPException from swift.common import storage_policy, swob, utils from swift.common.storage_policy import (StoragePolicy, ECStoragePolicy, VALID_EC_TYPES) -from swift.common.utils import Timestamp, NOTICE +from swift.common.utils import Timestamp, NOTICE, md5 from test import get_config from swift.common.header_key_dict import HeaderKeyDict from swift.common.ring import Ring, RingData, RingBuilder @@ -65,7 +64,7 @@ class SkipTest(unittest.SkipTest): pass -EMPTY_ETAG = md5().hexdigest() +EMPTY_ETAG = md5(usedforsecurity=False).hexdigest() # try not to import this module from swift if not os.path.basename(sys.argv[0]).startswith('swift'): @@ -970,7 +969,8 @@ def fake_http_connect(*code_iter, **kwargs): etag = self.etag if not etag: if isinstance(self.body, bytes): - etag = '"' + md5(self.body).hexdigest() + '"' + etag = ('"' + md5( + self.body, usedforsecurity=False).hexdigest() + '"') else: etag = '"68b329da9893e34099c7d8ad5cb9c940"' @@ -1262,7 +1262,7 @@ def make_ec_object_stub(test_body, policy, timestamp): test_body = test_body or ( b'test' * segment_size)[:-random.randint(1, 1000)] timestamp = timestamp or utils.Timestamp.now() - etag = md5(test_body).hexdigest() + etag = md5(test_body, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(policy, test_body) return { diff --git a/test/unit/account/test_backend.py b/test/unit/account/test_backend.py index 3556a1ad0d..a284bc246e 100644 --- a/test/unit/account/test_backend.py +++ b/test/unit/account/test_backend.py @@ -16,7 +16,6 @@ """ Tests for swift.account.backend """ from collections import defaultdict -import hashlib import json import unittest import pickle @@ -40,6 +39,7 @@ from test.unit import patch_policies, with_tempdir, make_timestamp_iter from swift.common.db import DatabaseConnectionError from swift.common.request_helpers import get_reserved_name from swift.common.storage_policy import StoragePolicy, POLICIES +from swift.common.utils import md5 from test.unit.common import test_db @@ -821,10 +821,10 @@ class TestAccountBroker(unittest.TestCase): POLICIES.default.idx) text = '%s-%s' % ('a', "%s-%s-%s-%s" % ( Timestamp(1).internal, Timestamp(0).internal, 0, 0)) - hasha = hashlib.md5(text.encode('ascii')).digest() + hasha = md5(text.encode('ascii'), usedforsecurity=False).digest() text = '%s-%s' % ('b', "%s-%s-%s-%s" % ( Timestamp(2).internal, Timestamp(0).internal, 0, 0)) - hashb = hashlib.md5(text.encode('ascii')).digest() + hashb = md5(text.encode('ascii'), usedforsecurity=False).digest() hashc = ''.join(('%02x' % (ord(a) ^ ord(b) if six.PY2 else a ^ b) for a, b in zip(hasha, hashb))) self.assertEqual(broker.get_info()['hash'], hashc) @@ -833,7 +833,7 @@ class TestAccountBroker(unittest.TestCase): POLICIES.default.idx) text = '%s-%s' % ('b', "%s-%s-%s-%s" % ( Timestamp(3).internal, Timestamp(0).internal, 0, 0)) - hashb = 
hashlib.md5(text.encode('ascii')).digest() + hashb = md5(text.encode('ascii'), usedforsecurity=False).digest() hashc = ''.join(('%02x' % (ord(a) ^ ord(b) if six.PY2 else a ^ b) for a, b in zip(hasha, hashb))) self.assertEqual(broker.get_info()['hash'], hashc) diff --git a/test/unit/common/middleware/crypto/crypto_helpers.py b/test/unit/common/middleware/crypto/crypto_helpers.py index 085f9544cd..333ca0faf6 100644 --- a/test/unit/common/middleware/crypto/crypto_helpers.py +++ b/test/unit/common/middleware/crypto/crypto_helpers.py @@ -13,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. import base64 -import hashlib from swift.common.exceptions import UnknownSecretIdError from swift.common.middleware.crypto.crypto_utils import Crypto +from swift.common.utils import md5 def fetch_crypto_keys(key_id=None): @@ -41,7 +41,7 @@ def fetch_crypto_keys(key_id=None): def md5hex(s): - return hashlib.md5(s).hexdigest() + return md5(s, usedforsecurity=False).hexdigest() def encrypt(val, key=None, iv=None, ctxt=None): diff --git a/test/unit/common/middleware/helpers.py b/test/unit/common/middleware/helpers.py index ffac87bff4..e07c2a7b38 100644 --- a/test/unit/common/middleware/helpers.py +++ b/test/unit/common/middleware/helpers.py @@ -16,14 +16,13 @@ # This stuff can't live in test/unit/__init__.py due to its swob dependency. from collections import defaultdict, namedtuple -from hashlib import md5 from six.moves.urllib import parse from swift.common import swob from swift.common.header_key_dict import HeaderKeyDict from swift.common.request_helpers import is_user_meta, \ is_object_transient_sysmeta, resolve_etag_is_at_header from swift.common.swob import HTTPNotImplemented -from swift.common.utils import split_path +from swift.common.utils import split_path, md5 from test.unit import FakeLogger, FakeRing @@ -159,7 +158,7 @@ class FakeSwift(object): footers = HeaderKeyDict() env['swift.callback.update_footers'](footers) req.headers.update(footers) - etag = md5(req_body).hexdigest() + etag = md5(req_body, usedforsecurity=False).hexdigest() headers.setdefault('Etag', etag) headers.setdefault('Content-Length', len(req_body)) diff --git a/test/unit/common/middleware/s3api/test_acl.py b/test/unit/common/middleware/s3api/test_acl.py index b645763240..f194097dea 100644 --- a/test/unit/common/middleware/s3api/test_acl.py +++ b/test/unit/common/middleware/s3api/test_acl.py @@ -18,13 +18,13 @@ import unittest import mock from io import BytesIO -from hashlib import md5 from swift.common.swob import Request, HTTPAccepted from swift.common.middleware.s3api.etree import fromstring, tostring, \ Element, SubElement, XMLNS_XSI from swift.common.middleware.s3api.s3response import InvalidArgument from swift.common.middleware.s3api.acl_utils import handle_acl_header +from swift.common.utils import md5 from test.unit.common.middleware.s3api import S3ApiTestCase from test.unit.common.middleware.s3api.helpers import UnreadableInput @@ -133,7 +133,8 @@ class TestS3ApiAcl(S3ApiTestCase): def _test_put_no_body(self, use_content_length=False, use_transfer_encoding=False, string_to_md5=b''): - content_md5 = base64.b64encode(md5(string_to_md5).digest()).strip() + content_md5 = base64.b64encode( + md5(string_to_md5, usedforsecurity=False).digest()).strip() with UnreadableInput(self) as fake_input: req = Request.blank( '/bucket?acl', diff --git a/test/unit/common/middleware/s3api/test_multi_delete.py b/test/unit/common/middleware/s3api/test_multi_delete.py index 
02ee5d32fa..bcab0ac2fe 100644 --- a/test/unit/common/middleware/s3api/test_multi_delete.py +++ b/test/unit/common/middleware/s3api/test_multi_delete.py @@ -17,7 +17,6 @@ import base64 import json import unittest from datetime import datetime -from hashlib import md5 import mock from swift.common import swob @@ -28,6 +27,7 @@ from test.unit.common.middleware.s3api import S3ApiTestCase from test.unit.common.middleware.s3api.helpers import UnreadableInput from swift.common.middleware.s3api.etree import fromstring, tostring, \ Element, SubElement +from swift.common.utils import md5 from test.unit.common.middleware.s3api.test_s3_acl import s3acl @@ -47,7 +47,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase): obj = SubElement(elem, 'Object') SubElement(obj, 'Key').text = 'object' body = tostring(elem, use_s3ns=False) - content_md5 = base64.b64encode(md5(body).digest()).strip() + content_md5 = base64.b64encode( + md5(body, usedforsecurity=False).digest()).strip() req = Request.blank('/bucket/object?delete', environ={'REQUEST_METHOD': 'POST'}, @@ -84,7 +85,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase): obj = SubElement(elem, 'Object') SubElement(obj, 'Key').text = key body = tostring(elem, use_s3ns=False) - content_md5 = base64.b64encode(md5(body).digest()).strip() + content_md5 = base64.b64encode( + md5(body, usedforsecurity=False).digest()).strip() req = Request.blank('/bucket?delete', environ={'REQUEST_METHOD': 'POST'}, @@ -138,7 +140,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase): obj = SubElement(elem, 'Object') SubElement(obj, 'Key').text = key body = tostring(elem, use_s3ns=False) - content_md5 = base64.b64encode(md5(body).digest()).strip() + content_md5 = base64.b64encode( + md5(body, usedforsecurity=False).digest()).strip() req = Request.blank('/bucket?delete', environ={'REQUEST_METHOD': 'POST'}, @@ -186,7 +189,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase): obj = SubElement(elem, 'Object') SubElement(obj, 'Key').text = key body = tostring(elem, use_s3ns=False) - content_md5 = base64.b64encode(md5(body).digest()).strip() + content_md5 = base64.b64encode( + md5(body, usedforsecurity=False).digest()).strip() req = Request.blank('/bucket?delete', environ={'REQUEST_METHOD': 'POST'}, @@ -213,7 +217,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase): obj = SubElement(elem, 'Object') SubElement(obj, 'Key') body = tostring(elem, use_s3ns=False) - content_md5 = base64.b64encode(md5(body).digest()).strip() + content_md5 = base64.b64encode( + md5(body, usedforsecurity=False).digest()).strip() req = Request.blank('/bucket?delete', environ={'REQUEST_METHOD': 'POST'}, @@ -262,7 +267,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase): if version: SubElement(obj, 'VersionId').text = version body = tostring(elem, use_s3ns=False) - content_md5 = base64.b64encode(md5(body).digest()).strip() + content_md5 = base64.b64encode( + md5(body, usedforsecurity=False).digest()).strip() req = Request.blank('/bucket?delete', environ={'REQUEST_METHOD': 'POST'}, @@ -319,7 +325,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase): if ts: SubElement(obj, 'VersionId').text = ts.normal body = tostring(elem, use_s3ns=False) - content_md5 = base64.b64encode(md5(body).digest()).strip() + content_md5 = base64.b64encode( + md5(body, usedforsecurity=False).digest()).strip() req = Request.blank('/bucket?delete', environ={'REQUEST_METHOD': 'POST'}, @@ -392,7 +399,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase): obj = SubElement(elem, 'Object') SubElement(obj, 'Key').text = name body = tostring(elem, use_s3ns=False) - content_md5 = 
base64.b64encode(md5(body).digest()).strip() + content_md5 = (base64.b64encode( + md5(body, usedforsecurity=False).digest()).strip()) req = Request.blank('/bucket?delete', environ={'REQUEST_METHOD': 'POST'}, @@ -414,7 +422,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase): obj = SubElement(elem, 'Object') SubElement(obj, 'Key').text = 'x' * 1000 + str(i) body = tostring(elem, use_s3ns=False) - content_md5 = base64.b64encode(md5(body).digest()).strip() + content_md5 = (base64.b64encode( + md5(body, usedforsecurity=False).digest()).strip()) req = Request.blank('/bucket?delete', environ={'REQUEST_METHOD': 'POST'}, @@ -435,7 +444,8 @@ class TestS3ApiMultiDelete(S3ApiTestCase): obj = SubElement(elem, 'Object') SubElement(obj, 'Key').text = 'Key1' body = tostring(elem, use_s3ns=False) - content_md5 = base64.b64encode(md5(body).digest()).strip() + content_md5 = (base64.b64encode( + md5(body, usedforsecurity=False).digest()).strip()) req = Request.blank('/bucket?delete', environ={'REQUEST_METHOD': 'POST'}, @@ -460,7 +470,9 @@ class TestS3ApiMultiDelete(S3ApiTestCase): obj = SubElement(elem, 'Object') SubElement(obj, 'Key').text = key body = tostring(elem, use_s3ns=False) - content_md5 = base64.b64encode(md5(body).digest()).strip() + content_md5 = ( + base64.b64encode(md5(body, usedforsecurity=False).digest()) + .strip()) req = Request.blank('/bucket?delete', environ={'REQUEST_METHOD': 'POST'}, @@ -502,7 +514,9 @@ class TestS3ApiMultiDelete(S3ApiTestCase): def _test_no_body(self, use_content_length=False, use_transfer_encoding=False, string_to_md5=b''): - content_md5 = base64.b64encode(md5(string_to_md5).digest()).strip() + content_md5 = (base64.b64encode( + md5(string_to_md5, usedforsecurity=False).digest()) + .strip()) with UnreadableInput(self) as fake_input: req = Request.blank( '/bucket?delete', diff --git a/test/unit/common/middleware/s3api/test_multi_upload.py b/test/unit/common/middleware/s3api/test_multi_upload.py index 3a0dc4c1eb..78c1b982e8 100644 --- a/test/unit/common/middleware/s3api/test_multi_upload.py +++ b/test/unit/common/middleware/s3api/test_multi_upload.py @@ -15,7 +15,6 @@ import base64 import binascii -import hashlib from mock import patch import os import time @@ -24,7 +23,7 @@ from six.moves.urllib.parse import quote, quote_plus from swift.common import swob from swift.common.swob import Request -from swift.common.utils import json +from swift.common.utils import json, md5 from test.unit import FakeMemcache, patch_policies from test.unit.common.middleware.s3api import S3ApiTestCase @@ -70,9 +69,9 @@ MULTIPARTS_TEMPLATE = \ ('subdir/object/Z/2', '2014-05-07T19:47:58.592270', 'fedcba9876543210', 41)) -S3_ETAG = '"%s-2"' % hashlib.md5(binascii.a2b_hex( +S3_ETAG = '"%s-2"' % md5(binascii.a2b_hex( '0123456789abcdef0123456789abcdef' - 'fedcba9876543210fedcba9876543210')).hexdigest() + 'fedcba9876543210fedcba9876543210'), usedforsecurity=False).hexdigest() class TestS3ApiMultiUpload(S3ApiTestCase): @@ -826,8 +825,8 @@ class TestS3ApiMultiUpload(S3ApiTestCase): self.assertEqual(self._get_error_code(body), 'NoSuchBucket') def test_object_multipart_upload_complete(self): - content_md5 = base64.b64encode(hashlib.md5( - XML.encode('ascii')).digest()) + content_md5 = base64.b64encode(md5( + XML.encode('ascii'), usedforsecurity=False).digest()) req = Request.blank('/bucket/object?uploadId=X', environ={'REQUEST_METHOD': 'POST'}, headers={'Authorization': 'AWS test:tester:hmac', @@ -863,8 +862,8 @@ class TestS3ApiMultiUpload(S3ApiTestCase): 
self.assertEqual(headers.get('X-Object-Sysmeta-S3Api-Upload-Id'), 'X') def test_object_multipart_upload_retry_complete(self): - content_md5 = base64.b64encode(hashlib.md5( - XML.encode('ascii')).digest()) + content_md5 = base64.b64encode(md5( + XML.encode('ascii'), usedforsecurity=False).digest()) self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X', swob.HTTPNotFound, {}, None) recent_ts = S3Timestamp.now(delta=-1000000).internal # 10s ago @@ -899,8 +898,8 @@ class TestS3ApiMultiUpload(S3ApiTestCase): ]) def test_object_multipart_upload_retry_complete_etag_mismatch(self): - content_md5 = base64.b64encode(hashlib.md5( - XML.encode('ascii')).digest()) + content_md5 = base64.b64encode(md5( + XML.encode('ascii'), usedforsecurity=False).digest()) self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X', swob.HTTPNotFound, {}, None) recent_ts = S3Timestamp.now(delta=-1000000).internal @@ -948,8 +947,8 @@ class TestS3ApiMultiUpload(S3ApiTestCase): self.assertEqual(headers.get('X-Object-Sysmeta-S3Api-Upload-Id'), 'X') def test_object_multipart_upload_retry_complete_upload_id_mismatch(self): - content_md5 = base64.b64encode(hashlib.md5( - XML.encode('ascii')).digest()) + content_md5 = base64.b64encode(md5( + XML.encode('ascii'), usedforsecurity=False).digest()) self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/object/X', swob.HTTPNotFound, {}, None) recent_ts = S3Timestamp.now(delta=-1000000).internal @@ -982,8 +981,9 @@ class TestS3ApiMultiUpload(S3ApiTestCase): ]) def test_object_multipart_upload_invalid_md5(self): - bad_md5 = base64.b64encode(hashlib.md5( - XML.encode('ascii') + b'some junk').digest()) + bad_md5 = base64.b64encode(md5( + XML.encode('ascii') + b'some junk', usedforsecurity=False) + .digest()) req = Request.blank('/bucket/object?uploadId=X', environ={'REQUEST_METHOD': 'POST'}, headers={'Authorization': 'AWS test:tester:hmac', @@ -1413,8 +1413,9 @@ class TestS3ApiMultiUpload(S3ApiTestCase): self.assertEqual(status.split()[0], '200') elem = fromstring(body, 'CompleteMultipartUploadResult') self.assertNotIn('Etag', headers) - expected_etag = '"%s-3"' % hashlib.md5(binascii.unhexlify(''.join( - x['hash'] for x in object_list))).hexdigest() + expected_etag = ('"%s-3"' % md5(binascii.unhexlify(''.join( + x['hash'] for x in object_list)), usedforsecurity=False) + .hexdigest()) self.assertEqual(elem.find('ETag').text, expected_etag) self.assertEqual(self.swift.calls, [ @@ -2217,7 +2218,7 @@ class TestS3ApiMultiUpload(S3ApiTestCase): def _test_no_body(self, use_content_length=False, use_transfer_encoding=False, string_to_md5=b''): - raw_md5 = hashlib.md5(string_to_md5).digest() + raw_md5 = md5(string_to_md5, usedforsecurity=False).digest() content_md5 = base64.b64encode(raw_md5).strip() with UnreadableInput(self) as fake_input: req = Request.blank( diff --git a/test/unit/common/middleware/s3api/test_obj.py b/test/unit/common/middleware/s3api/test_obj.py index d9ff1f229d..a85e44de36 100644 --- a/test/unit/common/middleware/s3api/test_obj.py +++ b/test/unit/common/middleware/s3api/test_obj.py @@ -16,7 +16,7 @@ import binascii import unittest from datetime import datetime -import hashlib +from hashlib import sha256 import os from os.path import join import time @@ -36,6 +36,7 @@ from swift.common.middleware.s3api.etree import fromstring from swift.common.middleware.s3api.utils import mktime, S3Timestamp from swift.common.middleware.versioned_writes.object_versioning import \ DELETE_MARKER_CONTENT_TYPE +from swift.common.utils import md5 class 
TestS3ApiObj(S3ApiTestCase): @@ -44,7 +45,7 @@ class TestS3ApiObj(S3ApiTestCase): super(TestS3ApiObj, self).setUp() self.object_body = b'hello' - self.etag = hashlib.md5(self.object_body).hexdigest() + self.etag = md5(self.object_body, usedforsecurity=False).hexdigest() self.last_modified = 'Fri, 01 Apr 2014 12:00:00 GMT' self.response_headers = {'Content-Type': 'text/html', @@ -643,7 +644,7 @@ class TestS3ApiObj(S3ApiTestCase): @s3acl def test_object_PUT_v4(self): - body_sha = hashlib.sha256(self.object_body).hexdigest() + body_sha = sha256(self.object_body).hexdigest() req = Request.blank( '/bucket/object', environ={'REQUEST_METHOD': 'PUT'}, diff --git a/test/unit/common/middleware/s3api/test_s3api.py b/test/unit/common/middleware/s3api/test_s3api.py index 82c7ff100e..e89c791175 100644 --- a/test/unit/common/middleware/s3api/test_s3api.py +++ b/test/unit/common/middleware/s3api/test_s3api.py @@ -18,7 +18,6 @@ import unittest from mock import patch, MagicMock import calendar from datetime import datetime -import hashlib import mock import requests import json @@ -29,6 +28,7 @@ import swift.common.middleware.s3api from swift.common.middleware.keystoneauth import KeystoneAuth from swift.common import swob, utils from swift.common.swob import Request +from swift.common.utils import md5 from keystonemiddleware.auth_token import AuthProtocol from keystoneauth1.access import AccessInfoV2 @@ -177,7 +177,7 @@ class TestS3ApiMiddleware(S3ApiTestCase): def verify(hash, path, headers): s = canonical_string(path, headers) - self.assertEqual(hash, hashlib.md5(s).hexdigest()) + self.assertEqual(hash, md5(s, usedforsecurity=False).hexdigest()) verify('6dd08c75e42190a1ce9468d1fd2eb787', '/bucket/object', {'Content-Type': 'text/plain', 'X-Amz-Something': 'test', @@ -563,7 +563,7 @@ class TestS3ApiMiddleware(S3ApiTestCase): self.assertEqual(self._get_error_code(body), 'InvalidDigest') def test_object_create_bad_md5_too_short(self): - too_short_digest = hashlib.md5(b'hey').digest()[:-1] + too_short_digest = md5(b'hey', usedforsecurity=False).digest()[:-1] md5_str = base64.b64encode(too_short_digest).strip() if not six.PY2: md5_str = md5_str.decode('ascii') @@ -577,7 +577,8 @@ class TestS3ApiMiddleware(S3ApiTestCase): self.assertEqual(self._get_error_code(body), 'InvalidDigest') def test_object_create_bad_md5_too_long(self): - too_long_digest = hashlib.md5(b'hey').digest() + b'suffix' + too_long_digest = md5( + b'hey', usedforsecurity=False).digest() + b'suffix' md5_str = base64.b64encode(too_long_digest).strip() if not six.PY2: md5_str = md5_str.decode('ascii') diff --git a/test/unit/common/middleware/s3api/test_s3request.py b/test/unit/common/middleware/s3api/test_s3request.py index 853ff1a8f6..bad3a9edeb 100644 --- a/test/unit/common/middleware/s3api/test_s3request.py +++ b/test/unit/common/middleware/s3api/test_s3request.py @@ -32,6 +32,7 @@ from swift.common.middleware.s3api.s3request import S3Request, \ from swift.common.middleware.s3api.s3response import InvalidArgument, \ NoSuchBucket, InternalError, \ AccessDenied, SignatureDoesNotMatch, RequestTimeTooSkewed +from swift.common.utils import md5, md5_factory from test.unit import DebugLogger @@ -823,8 +824,9 @@ class TestRequest(S3ApiTestCase): class TestHashingInput(S3ApiTestCase): def test_good(self): raw = b'123456789' - wrapped = HashingInput(BytesIO(raw), 9, hashlib.md5, - hashlib.md5(raw).hexdigest()) + wrapped = HashingInput( + BytesIO(raw), 9, md5_factory, + md5(raw, usedforsecurity=False).hexdigest()) self.assertEqual(b'1234', 
wrapped.read(4)) self.assertEqual(b'56', wrapped.read(2)) # trying to read past the end gets us whatever's left @@ -848,8 +850,9 @@ class TestHashingInput(S3ApiTestCase): def test_too_long(self): raw = b'123456789' - wrapped = HashingInput(BytesIO(raw), 8, hashlib.md5, - hashlib.md5(raw).hexdigest()) + wrapped = HashingInput( + BytesIO(raw), 8, md5_factory, + md5(raw, usedforsecurity=False).hexdigest()) self.assertEqual(b'1234', wrapped.read(4)) self.assertEqual(b'56', wrapped.read(2)) # even though the hash matches, there was more data than we expected @@ -861,8 +864,9 @@ class TestHashingInput(S3ApiTestCase): def test_too_short(self): raw = b'123456789' - wrapped = HashingInput(BytesIO(raw), 10, hashlib.md5, - hashlib.md5(raw).hexdigest()) + wrapped = HashingInput( + BytesIO(raw), 10, md5_factory, + md5(raw, usedforsecurity=False).hexdigest()) self.assertEqual(b'1234', wrapped.read(4)) self.assertEqual(b'56', wrapped.read(2)) # even though the hash matches, there was more data than we expected @@ -873,8 +877,9 @@ class TestHashingInput(S3ApiTestCase): def test_bad_hash(self): raw = b'123456789' - wrapped = HashingInput(BytesIO(raw), 9, hashlib.sha256, - hashlib.md5(raw).hexdigest()) + wrapped = HashingInput( + BytesIO(raw), 9, hashlib.sha256, + md5(raw, usedforsecurity=False).hexdigest()) self.assertEqual(b'1234', wrapped.read(4)) self.assertEqual(b'5678', wrapped.read(4)) with self.assertRaises(swob.HTTPException) as raised: diff --git a/test/unit/common/middleware/test_copy.py b/test/unit/common/middleware/test_copy.py index e1e5317cde..09ca2527ef 100644 --- a/test/unit/common/middleware/test_copy.py +++ b/test/unit/common/middleware/test_copy.py @@ -16,14 +16,13 @@ import mock import unittest -from hashlib import md5 from six.moves import urllib from swift.common import swob from swift.common.middleware import copy from swift.common.storage_policy import POLICIES from swift.common.swob import Request, HTTPException -from swift.common.utils import closing_if_possible +from swift.common.utils import closing_if_possible, md5 from test.unit import patch_policies, debug_logger, FakeRing from test.unit.common.middleware.helpers import FakeSwift from test.unit.proxy.controllers.test_obj import set_http_connect, \ @@ -1386,7 +1385,7 @@ class TestServerSideCopyMiddlewareWithEC(unittest.TestCase): def _test_invalid_ranges(self, method, real_body, segment_size, req_range): # make a request with range starts from more than real size. - body_etag = md5(real_body).hexdigest() + body_etag = md5(real_body, usedforsecurity=False).hexdigest() req = swob.Request.blank( '/v1/a/c/o', method=method, headers={'Destination': 'c1/o', diff --git a/test/unit/common/middleware/test_dlo.py b/test/unit/common/middleware/test_dlo.py index 276d887289..e48d2932d2 100644 --- a/test/unit/common/middleware/test_dlo.py +++ b/test/unit/common/middleware/test_dlo.py @@ -14,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import hashlib import json import mock import shutil @@ -26,7 +25,7 @@ import unittest from swift.common import swob from swift.common.header_key_dict import HeaderKeyDict from swift.common.middleware import dlo -from swift.common.utils import closing_if_possible +from swift.common.utils import closing_if_possible, md5 from test.unit.common.middleware.helpers import FakeSwift @@ -36,7 +35,7 @@ LIMIT = 'swift.common.constraints.CONTAINER_LISTING_LIMIT' def md5hex(s): if not isinstance(s, bytes): s = s.encode('utf-8') - return hashlib.md5(s).hexdigest() + return md5(s, usedforsecurity=False).hexdigest() class DloTestCase(unittest.TestCase): @@ -738,7 +737,8 @@ class TestDloGetManifest(DloTestCase): status, headers, body = self.call_dlo(req) headers = HeaderKeyDict(headers) self.assertEqual(headers["Etag"], - '"' + hashlib.md5(b"abcdef").hexdigest() + '"') + '"' + md5(b"abcdef", + usedforsecurity=False).hexdigest() + '"') def test_object_prefix_quoting(self): self.app.register( diff --git a/test/unit/common/middleware/test_object_versioning.py b/test/unit/common/middleware/test_object_versioning.py index 97a06f72fe..691d93fbce 100644 --- a/test/unit/common/middleware/test_object_versioning.py +++ b/test/unit/common/middleware/test_object_versioning.py @@ -19,7 +19,6 @@ import os import time import mock import unittest -from hashlib import md5 import six from six.moves import urllib from swift.common import swob, utils @@ -33,6 +32,7 @@ from swift.common.middleware.versioned_writes.object_versioning import \ SYSMETA_VERSIONS_SYMLINK, DELETE_MARKER_CONTENT_TYPE from swift.common.request_helpers import get_reserved_name from swift.common.storage_policy import StoragePolicy +from swift.common.utils import md5 from swift.proxy.controllers.base import get_cache_key from test.unit import patch_policies, FakeMemcache, make_timestamp_iter from test.unit.common.middleware.helpers import FakeSwift @@ -580,7 +580,8 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase): '/v1/a/c/o', method='PUT', body=put_body, headers={'Content-Type': 'text/plain', 'ETag': md5( - put_body.encode('utf8')).hexdigest(), + put_body.encode('utf8'), + usedforsecurity=False).hexdigest(), 'Content-Length': len(put_body)}, environ={'swift.cache': self.cache_version_on, 'swift.trans_id': 'fake_trans_id'}) @@ -607,7 +608,7 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase): TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path('c', 'o', '9999998765.99999'), 'x-object-sysmeta-symlink-target-etag': md5( - put_body.encode('utf8')).hexdigest(), + put_body.encode('utf8'), usedforsecurity=False).hexdigest(), 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), } symlink_put_headers = self.app._calls[-1].headers @@ -757,7 +758,9 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase): req = Request.blank( '/v1/a/c/o', method='PUT', body=put_body, headers={'Content-Type': 'text/plain', - 'ETag': md5(put_body.encode('utf8')).hexdigest(), + 'ETag': md5( + put_body.encode('utf8'), + usedforsecurity=False).hexdigest(), 'Content-Length': len(put_body)}, environ={'swift.cache': self.cache_version_on, 'swift.trans_id': 'fake_trans_id'}) @@ -784,7 +787,7 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase): TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path('c', 'o', '9999998765.99999'), 'x-object-sysmeta-symlink-target-etag': md5( - put_body.encode('utf8')).hexdigest(), + put_body.encode('utf8'), usedforsecurity=False).hexdigest(), 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), } 
symlink_put_headers = self.app._calls[-1].headers @@ -842,7 +845,7 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase): TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path('c', 'o', '9999998765.99999'), 'x-object-sysmeta-symlink-target-etag': md5( - put_body.encode('utf8')).hexdigest(), + put_body.encode('utf8'), usedforsecurity=False).hexdigest(), 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), } for k, v in expected_headers.items(): @@ -904,7 +907,7 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase): TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path('c', 'o', '9999998765.99999'), 'x-object-sysmeta-symlink-target-etag': md5( - put_body.encode('utf8')).hexdigest(), + put_body.encode('utf8'), usedforsecurity=False).hexdigest(), 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), } for k, v in expected_headers.items(): @@ -984,7 +987,7 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase): TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path('c', 'o', '9999998765.99999'), 'x-object-sysmeta-symlink-target-etag': md5( - put_body.encode('utf8')).hexdigest(), + put_body.encode('utf8'), usedforsecurity=False).hexdigest(), 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), } for k, v in expected_headers.items(): @@ -1013,7 +1016,8 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase): '/v1/a/c/o', method='PUT', body=put_body, headers={'Content-Type': 'text/plain', 'ETag': md5( - put_body.encode('utf8')).hexdigest(), + put_body.encode('utf8'), + usedforsecurity=False).hexdigest(), 'Content-Length': len(put_body)}, environ={'swift.cache': self.cache_version_on, 'swift.trans_id': 'fake_trans_id'}) @@ -1042,7 +1046,7 @@ class ObjectVersioningTestCase(ObjectVersioningBaseTestCase): TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path('c', 'o', '9999998765.99999'), 'x-object-sysmeta-symlink-target-etag': md5( - put_body.encode('utf8')).hexdigest(), + put_body.encode('utf8'), usedforsecurity=False).hexdigest(), 'x-object-sysmeta-symlink-target-bytes': str(len(put_body)), } symlink_put_headers = self.app._calls[-1].headers @@ -1196,7 +1200,9 @@ class ObjectVersioningTestDisabled(ObjectVersioningBaseTestCase): req = Request.blank( '/v1/a/c/o', method='PUT', body=put_body, headers={'Content-Type': 'text/plain', - 'ETag': md5(put_body.encode('utf8')).hexdigest(), + 'ETag': md5( + put_body.encode('utf8'), + usedforsecurity=False).hexdigest(), 'Content-Length': len(put_body)}, environ={'swift.cache': self.cache_version_off, 'swift.trans_id': 'fake_trans_id'}) @@ -1283,7 +1289,9 @@ class ObjectVersioningTestDisabled(ObjectVersioningBaseTestCase): req = Request.blank( '/v1/a/c/o', method='PUT', body=put_body, headers={'Content-Type': 'text/plain', - 'ETag': md5(put_body.encode('utf8')).hexdigest(), + 'ETag': md5( + put_body.encode('utf8'), + usedforsecurity=False).hexdigest(), 'Content-Length': len(put_body)}, environ={'swift.cache': self.cache_version_off, 'swift.trans_id': 'fake_trans_id'}) @@ -1580,7 +1588,7 @@ class ObjectVersioningTestCopy(ObjectVersioningBaseTestCase): TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path('c', 'o', '9999998765.99999'), 'x-object-sysmeta-symlink-target-etag': md5( - src_body.encode('utf8')).hexdigest(), + src_body.encode('utf8'), usedforsecurity=False).hexdigest(), 'x-object-sysmeta-symlink-target-bytes': str(len(src_body)), } symlink_put_headers = self.app._calls[-1].headers @@ -1633,7 +1641,7 @@ class ObjectVersioningTestCopy(ObjectVersioningBaseTestCase): TGT_OBJ_SYSMETA_SYMLINK_HDR: 
self.build_symlink_path('c', 'o', '9999998765.99999'), 'x-object-sysmeta-symlink-target-etag': md5( - src_body.encode('utf8')).hexdigest(), + src_body.encode('utf8'), usedforsecurity=False).hexdigest(), 'x-object-sysmeta-symlink-target-bytes': str(len(src_body)), } symlink_put_headers = self.app._calls[-1].headers @@ -1680,7 +1688,7 @@ class ObjectVersioningTestCopy(ObjectVersioningBaseTestCase): TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path('c', 'o', '9999998765.99999'), 'x-object-sysmeta-symlink-target-etag': md5( - src_body.encode('utf8')).hexdigest(), + src_body.encode('utf8'), usedforsecurity=False).hexdigest(), 'x-object-sysmeta-symlink-target-bytes': str(len(src_body)), } symlink_put_headers = self.app._calls[-1].headers @@ -1730,7 +1738,7 @@ class ObjectVersioningTestCopy(ObjectVersioningBaseTestCase): TGT_OBJ_SYSMETA_SYMLINK_HDR: self.build_symlink_path('c', 'o', '9999998765.99999'), 'x-object-sysmeta-symlink-target-etag': md5( - src_body.encode('utf8')).hexdigest(), + src_body.encode('utf8'), usedforsecurity=False).hexdigest(), 'x-object-sysmeta-symlink-target-bytes': str(len(src_body)), } symlink_put_headers = self.app._calls[-1].headers @@ -1792,7 +1800,7 @@ class ObjectVersioningTestVersionAPI(ObjectVersioningBaseTestCase): timestamp = next(self.ts) version_path = '%s?symlink=get' % self.build_versions_path( obj='o', version=(~timestamp).normal) - etag = md5(b'old-version-etag').hexdigest() + etag = md5(b'old-version-etag', usedforsecurity=False).hexdigest() self.app.register('HEAD', version_path, swob.HTTPNoContent, { 'Content-Length': 10, 'Content-Type': 'application/old-version', @@ -2128,7 +2136,7 @@ class ObjectVersioningVersionAPIWhileDisabled(ObjectVersioningBaseTestCase): timestamp = next(self.ts) version_path = '%s?symlink=get' % self.build_versions_path( obj='o', version=(~timestamp).normal) - etag = md5(b'old-version-etag').hexdigest() + etag = md5(b'old-version-etag', usedforsecurity=False).hexdigest() self.app.register('HEAD', version_path, swob.HTTPNoContent, { 'Content-Length': 10, 'Content-Type': 'application/old-version', diff --git a/test/unit/common/middleware/test_slo.py b/test/unit/common/middleware/test_slo.py index dd6ad48123..d02c755854 100644 --- a/test/unit/common/middleware/test_slo.py +++ b/test/unit/common/middleware/test_slo.py @@ -15,7 +15,6 @@ # limitations under the License. 
import base64 -import hashlib import json import time import unittest @@ -32,7 +31,7 @@ from swift.common.swob import Request, HTTPException, str_to_wsgi, \ bytes_to_wsgi from swift.common.utils import quote, closing_if_possible, close_if_possible, \ parse_content_type, iter_multipart_mime_documents, parse_mime_headers, \ - Timestamp, get_expirer_container + Timestamp, get_expirer_container, md5 from test.unit.common.middleware.helpers import FakeSwift @@ -57,7 +56,7 @@ def fake_start_response(*args, **kwargs): def md5hex(s): if not isinstance(s, bytes): s = s.encode('ascii') - return hashlib.md5(s).hexdigest() + return md5(s, usedforsecurity=False).hexdigest() class SloTestCase(unittest.TestCase): @@ -3004,7 +3003,7 @@ class TestSloGetManifest(SloTestCase): def test_get_segment_with_non_ascii_path(self): segment_body = u"a møøse once bit my sister".encode("utf-8") - segment_etag = hashlib.md5(segment_body).hexdigest() + segment_etag = md5(segment_body, usedforsecurity=False).hexdigest() if six.PY2: path = u'/v1/AUTH_test/ünicode/öbject-segment'.encode('utf-8') else: diff --git a/test/unit/common/ring/test_ring.py b/test/unit/common/ring/test_ring.py index 8ebe0f9437..feffc4a6bb 100644 --- a/test/unit/common/ring/test_ring.py +++ b/test/unit/common/ring/test_ring.py @@ -16,7 +16,6 @@ import array import collections import six.moves.cPickle as pickle -import hashlib import os import unittest import stat @@ -30,9 +29,9 @@ import copy import mock from six.moves import range - from swift.common import ring, utils from swift.common.ring import utils as ring_utils +from swift.common.utils import md5 class TestRingBase(unittest.TestCase): @@ -236,7 +235,7 @@ class TestRing(TestRingBase): self.assertIsNone(self.ring.version) with open(self.testgz, 'rb') as fp: - expected_md5 = hashlib.md5() + expected_md5 = md5(usedforsecurity=False) expected_size = 0 for chunk in iter(lambda: fp.read(2 ** 16), b''): expected_md5.update(chunk) diff --git a/test/unit/common/test_direct_client.py b/test/unit/common/test_direct_client.py index faf2887cf6..8705595980 100644 --- a/test/unit/common/test_direct_client.py +++ b/test/unit/common/test_direct_client.py @@ -18,7 +18,6 @@ import json import unittest import os from contextlib import contextmanager -from hashlib import md5 import time import pickle @@ -29,7 +28,7 @@ from swift.common import direct_client from swift.common.direct_client import DirectClientException from swift.common.exceptions import ClientException from swift.common.header_key_dict import HeaderKeyDict -from swift.common.utils import Timestamp, quote +from swift.common.utils import Timestamp, quote, md5 from swift.common.swob import RESPONSE_REASONS from swift.common.storage_policy import POLICIES from six.moves.http_client import HTTPException @@ -81,7 +80,7 @@ class FakeConn(object): def send(self, data): if not self.etag: - self.etag = md5() + self.etag = md5(usedforsecurity=False) self.etag.update(data) @@ -546,7 +545,9 @@ class TestDirectClient(unittest.TestCase): self.assertEqual(conn.req_headers['User-Agent'], 'my UA') self.assertTrue('x-timestamp' in conn.req_headers) self.assertEqual('bar', conn.req_headers.get('x-foo')) - self.assertEqual(md5(body).hexdigest(), conn.etag.hexdigest()) + self.assertEqual( + md5(body, usedforsecurity=False).hexdigest(), + conn.etag.hexdigest()) self.assertIsNone(rv) def test_direct_put_container_chunked(self): @@ -568,8 +569,9 @@ class TestDirectClient(unittest.TestCase): self.assertEqual('bar', conn.req_headers.get('x-foo')) 
self.assertNotIn('Content-Length', conn.req_headers) expected_sent = b'%0x\r\n%s\r\n0\r\n\r\n' % (len(body), body) - self.assertEqual(md5(expected_sent).hexdigest(), - conn.etag.hexdigest()) + self.assertEqual( + md5(expected_sent, usedforsecurity=False).hexdigest(), + conn.etag.hexdigest()) self.assertIsNone(rv) def test_direct_put_container_fail(self): @@ -849,7 +851,9 @@ class TestDirectClient(unittest.TestCase): self.assertEqual(conn.port, self.node['port']) self.assertEqual(conn.method, 'PUT') self.assertEqual(conn.path, self.obj_path) - self.assertEqual(md5(b'123456').hexdigest(), resp) + self.assertEqual( + md5(b'123456', usedforsecurity=False).hexdigest(), + resp) def test_direct_put_object_fail(self): contents = io.BytesIO(b'123456') @@ -876,7 +880,10 @@ class TestDirectClient(unittest.TestCase): self.assertEqual(conn.port, self.node['port']) self.assertEqual(conn.method, 'PUT') self.assertEqual(conn.path, self.obj_path) - self.assertEqual(md5(b'6\r\n123456\r\n0\r\n\r\n').hexdigest(), resp) + self.assertEqual( + md5(b'6\r\n123456\r\n0\r\n\r\n', + usedforsecurity=False).hexdigest(), + resp) def test_direct_put_object_args(self): # One test to cover all missing checks @@ -891,7 +898,9 @@ class TestDirectClient(unittest.TestCase): self.assertEqual(self.obj_path, conn.path) self.assertEqual(conn.req_headers['Content-Length'], '0') self.assertEqual(conn.req_headers['Content-Type'], 'Text') - self.assertEqual(md5(b'0\r\n\r\n').hexdigest(), resp) + self.assertEqual( + md5(b'0\r\n\r\n', usedforsecurity=False).hexdigest(), + resp) def test_direct_put_object_header_content_length(self): contents = io.BytesIO(b'123456') @@ -906,7 +915,9 @@ class TestDirectClient(unittest.TestCase): self.assertEqual(conn.port, self.node['port']) self.assertEqual('PUT', conn.method) self.assertEqual(conn.req_headers['Content-length'], '6') - self.assertEqual(md5(b'123456').hexdigest(), resp) + self.assertEqual( + md5(b'123456', usedforsecurity=False).hexdigest(), + resp) def test_retry(self): headers = HeaderKeyDict({'key': 'value'}) diff --git a/test/unit/common/test_memcached.py b/test/unit/common/test_memcached.py index 7ae9e63066..61f9bf062c 100644 --- a/test/unit/common/test_memcached.py +++ b/test/unit/common/test_memcached.py @@ -18,7 +18,6 @@ from collections import defaultdict import errno -from hashlib import md5 import io import logging import six @@ -34,6 +33,7 @@ from eventlet import GreenPool, sleep, Queue from eventlet.pools import Pool from swift.common import memcached +from swift.common.utils import md5 from mock import patch, MagicMock from test.unit import debug_logger @@ -337,7 +337,8 @@ class TestMemcached(unittest.TestCase): mock = MockMemcached() memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool( [(mock, mock)] * 2) - cache_key = md5(b'some_key').hexdigest().encode('ascii') + cache_key = md5(b'some_key', + usedforsecurity=False).hexdigest().encode('ascii') memcache_client.set('some_key', [1, 2, 3]) self.assertEqual(memcache_client.get('some_key'), [1, 2, 3]) @@ -443,7 +444,8 @@ class TestMemcached(unittest.TestCase): mock = MockMemcached() memcache_client._client_cache['1.2.3.4:11211'] = MockedMemcachePool( [(mock, mock)] * 2) - cache_key = md5(b'some_key').hexdigest().encode('ascii') + cache_key = md5(b'some_key', + usedforsecurity=False).hexdigest().encode('ascii') memcache_client.incr('some_key', delta=5, time=55) self.assertEqual(memcache_client.get('some_key'), b'5') @@ -653,7 +655,7 @@ class TestMemcached(unittest.TestCase): 
memcache_client.get_multi(('some_key2', 'some_key1'), 'multi_key'), [[4, 5, 6], [1, 2, 3]]) for key in (b'some_key1', b'some_key2'): - key = md5(key).hexdigest().encode('ascii') + key = md5(key, usedforsecurity=False).hexdigest().encode('ascii') self.assertIn(key, mock.cache) _junk, cache_timeout, _junk = mock.cache[key] self.assertEqual(cache_timeout, b'0') @@ -662,7 +664,7 @@ class TestMemcached(unittest.TestCase): {'some_key1': [1, 2, 3], 'some_key2': [4, 5, 6]}, 'multi_key', time=20) for key in (b'some_key1', b'some_key2'): - key = md5(key).hexdigest().encode('ascii') + key = md5(key, usedforsecurity=False).hexdigest().encode('ascii') _junk, cache_timeout, _junk = mock.cache[key] self.assertEqual(cache_timeout, b'20') @@ -672,7 +674,7 @@ class TestMemcached(unittest.TestCase): {'some_key1': [1, 2, 3], 'some_key2': [4, 5, 6]}, 'multi_key', time=fortydays) for key in (b'some_key1', b'some_key2'): - key = md5(key).hexdigest().encode('ascii') + key = md5(key, usedforsecurity=False).hexdigest().encode('ascii') _junk, cache_timeout, _junk = mock.cache[key] self.assertAlmostEqual(float(cache_timeout), esttimeout, delta=1) self.assertEqual(memcache_client.get_multi( @@ -709,14 +711,15 @@ class TestMemcached(unittest.TestCase): memcache_client.get_multi(('some_key1', 'some_key0'), 'multi_key'), [[4, 5, 6], [1, 2, 3]]) for key in (b'some_key0', b'some_key1'): - key = md5(key).hexdigest().encode('ascii') + key = md5(key, usedforsecurity=False).hexdigest().encode('ascii') self.assertIn(key, mock1.cache) _junk, cache_timeout, _junk = mock1.cache[key] self.assertEqual(cache_timeout, b'0') memcache_client.set('some_key0', [7, 8, 9]) self.assertEqual(memcache_client.get('some_key0'), [7, 8, 9]) - key = md5(b'some_key0').hexdigest().encode('ascii') + key = md5(b'some_key0', + usedforsecurity=False).hexdigest().encode('ascii') self.assertIn(key, mock2.cache) # Delete 'some_key0' with server_key='multi_key' diff --git a/test/unit/common/test_utils.py b/test/unit/common/test_utils.py index 69976438d5..dfdefd1d84 100644 --- a/test/unit/common/test_utils.py +++ b/test/unit/common/test_utils.py @@ -74,7 +74,7 @@ from swift.common.exceptions import Timeout, MessageTimeout, \ MimeInvalid from swift.common import utils from swift.common.utils import is_valid_ip, is_valid_ipv4, is_valid_ipv6, \ - set_swift_dir + set_swift_dir, md5 from swift.common.container_sync_realms import ContainerSyncRealms from swift.common.header_key_dict import HeaderKeyDict from swift.common.storage_policy import POLICIES, reload_storage_policies @@ -1024,6 +1024,13 @@ class TestUtils(unittest.TestCase): def setUp(self): utils.HASH_PATH_SUFFIX = b'endcap' utils.HASH_PATH_PREFIX = b'startcap' + self.md5_test_data = "Openstack forever".encode('utf-8') + try: + self.md5_digest = hashlib.md5(self.md5_test_data).hexdigest() + self.fips_enabled = False + except ValueError: + self.md5_digest = '0d6dc3c588ae71a04ce9a6beebbbba06' + self.fips_enabled = True def test_get_zero_indexed_base_string(self): self.assertEqual(utils.get_zero_indexed_base_string('something', 0), @@ -4501,6 +4508,79 @@ cluster_dfw1 = http://dfw1.host/v1/ self.assertEqual(msg, b'READY=1') self.assertNotIn('NOTIFY_SOCKET', os.environ) + def test_md5_with_data(self): + if not self.fips_enabled: + digest = md5(self.md5_test_data).hexdigest() + self.assertEqual(digest, self.md5_digest) + else: + # on a FIPS enabled system, this throws a ValueError: + # [digital envelope routines: EVP_DigestInit_ex] disabled for FIPS + self.assertRaises(ValueError, md5, self.md5_test_data) + + 
if not self.fips_enabled: + digest = md5(self.md5_test_data, usedforsecurity=True).hexdigest() + self.assertEqual(digest, self.md5_digest) + else: + self.assertRaises( + ValueError, md5, self.md5_test_data, usedforsecurity=True) + + digest = md5(self.md5_test_data, usedforsecurity=False).hexdigest() + self.assertEqual(digest, self.md5_digest) + + def test_md5_without_data(self): + if not self.fips_enabled: + test_md5 = md5() + test_md5.update(self.md5_test_data) + digest = test_md5.hexdigest() + self.assertEqual(digest, self.md5_digest) + else: + self.assertRaises(ValueError, md5) + + if not self.fips_enabled: + test_md5 = md5(usedforsecurity=True) + test_md5.update(self.md5_test_data) + digest = test_md5.hexdigest() + self.assertEqual(digest, self.md5_digest) + else: + self.assertRaises(ValueError, md5, usedforsecurity=True) + + test_md5 = md5(usedforsecurity=False) + test_md5.update(self.md5_test_data) + digest = test_md5.hexdigest() + self.assertEqual(digest, self.md5_digest) + + @unittest.skipIf(sys.version_info.major == 2, + "hashlib.md5 does not raise TypeError here in py2") + def test_string_data_raises_type_error(self): + if not self.fips_enabled: + self.assertRaises(TypeError, hashlib.md5, u'foo') + self.assertRaises(TypeError, md5, u'foo') + self.assertRaises( + TypeError, md5, u'foo', usedforsecurity=True) + else: + self.assertRaises(ValueError, hashlib.md5, u'foo') + self.assertRaises(ValueError, md5, u'foo') + self.assertRaises( + ValueError, md5, u'foo', usedforsecurity=True) + + self.assertRaises( + TypeError, md5, u'foo', usedforsecurity=False) + + def test_none_data_raises_type_error(self): + if not self.fips_enabled: + self.assertRaises(TypeError, hashlib.md5, None) + self.assertRaises(TypeError, md5, None) + self.assertRaises( + TypeError, md5, None, usedforsecurity=True) + else: + self.assertRaises(ValueError, hashlib.md5, None) + self.assertRaises(ValueError, md5, None) + self.assertRaises( + ValueError, md5, None, usedforsecurity=True) + + self.assertRaises( + TypeError, md5, None, usedforsecurity=False) + class ResellerConfReader(unittest.TestCase): @@ -8180,7 +8260,7 @@ class TestShardRange(unittest.TestCase): def test_make_path(self): ts = utils.Timestamp.now() actual = utils.ShardRange.make_path('a', 'root', 'parent', ts, 0) - parent_hash = hashlib.md5(b'parent').hexdigest() + parent_hash = md5(b'parent', usedforsecurity=False).hexdigest() self.assertEqual('a/root-%s-%s-0' % (parent_hash, ts.internal), actual) actual = utils.ShardRange.make_path('a', 'root', 'parent', ts, 3) self.assertEqual('a/root-%s-%s-3' % (parent_hash, ts.internal), actual) diff --git a/test/unit/container/test_backend.py b/test/unit/container/test_backend.py index 2555107181..a76c276f3d 100644 --- a/test/unit/container/test_backend.py +++ b/test/unit/container/test_backend.py @@ -17,7 +17,6 @@ import base64 import errno import os -import hashlib import inspect import unittest from time import sleep, time @@ -40,7 +39,7 @@ from swift.container.backend import ContainerBroker, \ from swift.common.db import DatabaseAlreadyExists, GreenDBConnection from swift.common.request_helpers import get_reserved_name from swift.common.utils import Timestamp, encode_timestamps, hash_path, \ - ShardRange, make_db_file_path + ShardRange, make_db_file_path, md5 from swift.common.storage_policy import POLICIES import mock @@ -3161,7 +3160,7 @@ class TestContainerBroker(unittest.TestCase): def md5_str(s): if not isinstance(s, bytes): s = s.encode('utf8') - return hashlib.md5(s).hexdigest() + return md5(s, 
usedforsecurity=False).hexdigest() broker = ContainerBroker(':memory:', account='a', container='c') broker.initialize(Timestamp('1').internal, 0) diff --git a/test/unit/container/test_sharder.py b/test/unit/container/test_sharder.py index 359f23caee..8e93811cbb 100644 --- a/test/unit/container/test_sharder.py +++ b/test/unit/container/test_sharder.py @@ -12,7 +12,6 @@ # implied. # See the License for the specific language governing permissions and # limitations under the License. -import hashlib import json import random @@ -42,7 +41,7 @@ from swift.container.sharder import ContainerSharder, sharding_enabled, \ CleavingContext, DEFAULT_SHARD_SHRINK_POINT, \ DEFAULT_SHARD_CONTAINER_THRESHOLD from swift.common.utils import ShardRange, Timestamp, hash_path, \ - encode_timestamps, parse_db_filename, quorum_size, Everything + encode_timestamps, parse_db_filename, quorum_size, Everything, md5 from test import annotate_failure from test.unit import debug_logger, FakeRing, \ @@ -65,7 +64,8 @@ class BaseTestSharder(unittest.TestCase): def _make_broker(self, account='a', container='c', epoch=None, device='sda', part=0, hash_=None): - hash_ = hash_ or hashlib.md5(container.encode('utf-8')).hexdigest() + hash_ = hash_ or md5( + container.encode('utf-8'), usedforsecurity=False).hexdigest() datadir = os.path.join( self.tempdir, device, 'containers', str(part), hash_[-3:], hash_) if epoch: diff --git a/test/unit/obj/common.py b/test/unit/obj/common.py index a640138011..fd83427da7 100644 --- a/test/unit/obj/common.py +++ b/test/unit/obj/common.py @@ -12,7 +12,6 @@ # implied. # See the License for the specific language governing permissions and # limitations under the License. -import hashlib import os import shutil import tempfile @@ -20,7 +19,7 @@ import unittest from swift.common import utils from swift.common.storage_policy import POLICIES -from swift.common.utils import Timestamp +from swift.common.utils import Timestamp, md5 def write_diskfile(df, timestamp, data=b'test data', frag_index=None, @@ -31,7 +30,7 @@ def write_diskfile(df, timestamp, data=b'test data', frag_index=None, with df.create() as writer: writer.write(data) metadata = { - 'ETag': hashlib.md5(data).hexdigest(), + 'ETag': md5(data, usedforsecurity=False).hexdigest(), 'X-Timestamp': timestamp.internal, 'Content-Length': str(len(data)), } diff --git a/test/unit/obj/test_auditor.py b/test/unit/obj/test_auditor.py index 71c56c4681..2c7fe3dd94 100644 --- a/test/unit/obj/test_auditor.py +++ b/test/unit/obj/test_auditor.py @@ -23,7 +23,6 @@ import time import string import xattr from shutil import rmtree -from hashlib import md5 from tempfile import mkdtemp import textwrap from os.path import dirname, basename @@ -35,7 +34,7 @@ from swift.obj.diskfile import ( DiskFileManager, ECDiskFileManager, AuditLocation, clear_auditor_status, get_auditor_status, HASH_FILE, HASH_INVALIDATIONS_FILE) from swift.common.utils import ( - mkdirs, normalize_timestamp, Timestamp, readconf) + mkdirs, normalize_timestamp, Timestamp, readconf, md5) from swift.common.storage_policy import ( ECStoragePolicy, StoragePolicy, POLICIES, EC_POLICY) from test.unit.obj.common import write_diskfile @@ -160,7 +159,7 @@ class TestAuditor(unittest.TestCase): data = b'0' * 1024 if disk_file.policy.policy_type == EC_POLICY: data = disk_file.policy.pyeclib_driver.encode(data)[0] - etag = md5() + etag = md5(usedforsecurity=False) with disk_file.create() as writer: writer.write(data) etag.update(data) @@ -198,7 +197,7 @@ class TestAuditor(unittest.TestCase): # simulate a PUT now = 
time.time() data = b'boots and cats and ' * 1024 - hasher = md5() + hasher = md5(usedforsecurity=False) with disk_file.create() as writer: writer.write(data) hasher.update(data) @@ -257,14 +256,16 @@ class TestAuditor(unittest.TestCase): checksum = xattr.getxattr( file_path, "user.swift.metadata_checksum") - self.assertEqual(checksum, - md5(metadata).hexdigest().encode('ascii')) + self.assertEqual( + checksum, + (md5(metadata, usedforsecurity=False).hexdigest() + .encode('ascii'))) def test_object_audit_diff_data(self): auditor_worker = auditor.AuditorWorker(self.conf, self.logger, self.rcache, self.devices) data = b'0' * 1024 - etag = md5() + etag = md5(usedforsecurity=False) timestamp = str(normalize_timestamp(time.time())) with self.disk_file.create() as writer: writer.write(data) @@ -287,7 +288,7 @@ class TestAuditor(unittest.TestCase): AuditLocation(self.disk_file._datadir, 'sda', '0', policy=POLICIES.legacy)) self.assertEqual(auditor_worker.quarantines, pre_quarantines) - etag = md5(b'1' + b'0' * 1023).hexdigest() + etag = md5(b'1' + b'0' * 1023, usedforsecurity=False).hexdigest() metadata['ETag'] = etag with self.disk_file.create() as writer: @@ -305,7 +306,7 @@ class TestAuditor(unittest.TestCase): def do_test(data): # create diskfile and set ETag and content-length to match the data - etag = md5(data).hexdigest() + etag = md5(data, usedforsecurity=False).hexdigest() timestamp = str(normalize_timestamp(time.time())) with disk_file.create() as writer: writer.write(data) @@ -515,7 +516,7 @@ class TestAuditor(unittest.TestCase): policy=self.disk_file.policy) data = b'VERIFY' - etag = md5() + etag = md5(usedforsecurity=False) timestamp = str(normalize_timestamp(time.time())) with self.disk_file.create() as writer: writer.write(data) @@ -593,7 +594,7 @@ class TestAuditor(unittest.TestCase): timestamp = str(normalize_timestamp(time.time())) pre_errors = auditor_worker.errors data = b'0' * 1024 - etag = md5() + etag = md5(usedforsecurity=False) with self.disk_file.create() as writer: writer.write(data) etag.update(data) @@ -622,7 +623,7 @@ class TestAuditor(unittest.TestCase): with df.create() as writer: writer.write(data) metadata = { - 'ETag': md5(data).hexdigest(), + 'ETag': md5(data, usedforsecurity=False).hexdigest(), 'X-Timestamp': timestamp, 'Content-Length': str(os.fstat(writer._fd).st_size), } @@ -648,7 +649,7 @@ class TestAuditor(unittest.TestCase): with df.create() as writer: writer.write(data) metadata = { - 'ETag': md5(data).hexdigest(), + 'ETag': md5(data, usedforsecurity=False).hexdigest(), 'X-Timestamp': timestamp, 'Content-Length': str(os.fstat(writer._fd).st_size), } @@ -696,7 +697,7 @@ class TestAuditor(unittest.TestCase): with self.disk_file.create() as writer: writer.write(data) metadata = { - 'ETag': md5(data).hexdigest(), + 'ETag': md5(data, usedforsecurity=False).hexdigest(), 'X-Timestamp': ts.normal, 'Content-Length': str(os.fstat(writer._fd).st_size), } @@ -768,7 +769,7 @@ class TestAuditor(unittest.TestCase): # pretend that we logged (and reset counters) just now auditor_worker.last_logged = time.time() data = b'0' * 1024 - etag = md5() + etag = md5(usedforsecurity=False) with self.disk_file.create() as writer: writer.write(data) etag.update(data) @@ -792,7 +793,7 @@ class TestAuditor(unittest.TestCase): timestamp = str(normalize_timestamp(time.time())) pre_quarantines = auditor_worker.quarantines data = b'0' * 10 - etag = md5() + etag = md5(usedforsecurity=False) with self.disk_file.create() as writer: writer.write(data) etag.update(data) @@ -808,7 +809,7 @@ 
class TestAuditor(unittest.TestCase): self.disk_file = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'ob', policy=POLICIES.legacy) data = b'1' * 10 - etag = md5() + etag = md5(usedforsecurity=False) with self.disk_file.create() as writer: writer.write(data) etag.update(data) @@ -828,7 +829,7 @@ class TestAuditor(unittest.TestCase): self.auditor = auditor.ObjectAuditor(self.conf) self.auditor.log_time = 0 data = b'0' * 1024 - etag = md5() + etag = md5(usedforsecurity=False) with self.disk_file.create() as writer: writer.write(data) etag.update(data) @@ -841,7 +842,7 @@ class TestAuditor(unittest.TestCase): } writer.put(metadata) writer.commit(Timestamp(timestamp)) - etag = md5() + etag = md5(usedforsecurity=False) etag.update(b'1' + b'0' * 1023) etag = etag.hexdigest() metadata['ETag'] = etag @@ -863,7 +864,7 @@ class TestAuditor(unittest.TestCase): timestamp = Timestamp.now() self.auditor = auditor.ObjectAuditor(self.conf) self.auditor.log_time = 0 - etag = md5() + etag = md5(usedforsecurity=False) with self.disk_file.create() as writer: etag = etag.hexdigest() metadata = { @@ -873,7 +874,7 @@ class TestAuditor(unittest.TestCase): } writer.put(metadata) writer.commit(Timestamp(timestamp)) - etag = md5() + etag = md5(usedforsecurity=False) etag = etag.hexdigest() metadata['ETag'] = etag write_metadata(writer._fd, metadata) @@ -1424,7 +1425,7 @@ class TestAuditor(unittest.TestCase): ts = Timestamp(time.time()) with self.disk_file.create() as writer: metadata = { - 'ETag': md5(b'').hexdigest(), + 'ETag': md5(b'', usedforsecurity=False).hexdigest(), 'X-Timestamp': ts.normal, 'Content-Length': str(os.fstat(writer._fd).st_size), } diff --git a/test/unit/obj/test_diskfile.py b/test/unit/obj/test_diskfile.py index 778a578932..a1d1c8c25e 100644 --- a/test/unit/obj/test_diskfile.py +++ b/test/unit/obj/test_diskfile.py @@ -35,7 +35,6 @@ from random import shuffle, randint from shutil import rmtree from time import time from tempfile import mkdtemp -from hashlib import md5 as _md5 from contextlib import closing, contextmanager from gzip import GzipFile import pyeclib.ec_iface @@ -50,7 +49,7 @@ from test.unit import (mock as unit_mock, temptree, mock_check_drive, from swift.obj import diskfile from swift.common import utils from swift.common.utils import hash_path, mkdirs, Timestamp, \ - encode_timestamps, O_TMPFILE + encode_timestamps, O_TMPFILE, md5 as _md5 from swift.common import ring from swift.common.splice import splice from swift.common.exceptions import DiskFileNotExist, DiskFileQuarantined, \ @@ -76,7 +75,7 @@ class md5(object): def __init__(self, s=b''): if not isinstance(s, bytes): s = s.encode('ascii') - self.md = _md5(s) + self.md = _md5(s, usedforsecurity=False) def update(self, s=b''): if not isinstance(s, bytes): diff --git a/test/unit/obj/test_reconstructor.py b/test/unit/obj/test_reconstructor.py index a350a77681..c9682395a3 100644 --- a/test/unit/obj/test_reconstructor.py +++ b/test/unit/obj/test_reconstructor.py @@ -16,7 +16,6 @@ import itertools import json import unittest import os -from hashlib import md5 import mock import six import six.moves.cPickle as pickle @@ -37,7 +36,7 @@ from six.moves.urllib.parse import unquote from swift.common import utils from swift.common.exceptions import DiskFileError from swift.common.header_key_dict import HeaderKeyDict -from swift.common.utils import dump_recon_cache +from swift.common.utils import dump_recon_cache, md5 from swift.obj import diskfile, reconstructor as object_reconstructor from swift.common import ring from 
swift.common.storage_policy import (StoragePolicy, ECStoragePolicy, @@ -4414,7 +4413,7 @@ class TestObjectReconstructor(BaseTestObjectReconstructor): metadata = { 'X-Timestamp': ts.internal, 'Content-Length': len(test_data), - 'Etag': md5(test_data).hexdigest(), + 'Etag': md5(test_data, usedforsecurity=False).hexdigest(), 'X-Object-Sysmeta-Ec-Frag-Index': frag_index, } writer.put(metadata) @@ -4553,7 +4552,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): node['backend_index'] = self.policy.get_backend_index(node['index']) test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) broken_body = ec_archive_bodies.pop(1) @@ -4583,8 +4582,8 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): self.assertEqual(0, df.content_length) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual(md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) self.assertEqual(len(part_nodes) - 1, len(called_headers), 'Expected %d calls, got %r' % (len(part_nodes) - 1, called_headers)) @@ -4617,7 +4616,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): node['backend_index'] = self.policy.get_backend_index(node['index']) test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) broken_body = ec_archive_bodies.pop(4) @@ -4641,8 +4640,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): job, node, dict(self.obj_metadata)) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) def test_reconstruct_fa_error_with_invalid_header(self): job = { @@ -4654,7 +4654,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): node['backend_index'] = self.policy.get_backend_index(node['index']) test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) broken_body = ec_archive_bodies.pop(4) @@ -4687,8 +4687,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): fixed_body = b''.join(df.reader()) # ... 
this bad response should be ignored like any other failure self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) def test_reconstruct_parity_fa_with_data_node_failure(self): job = { @@ -4702,7 +4703,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): # make up some data (trim some amount to make it unaligned with # segment size) test_data = (b'rebuild' * self.policy.ec_segment_size)[:-454] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) # the scheme is 10+4, so this gets a parity node broken_body = ec_archive_bodies.pop(-4) @@ -4724,8 +4725,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): job, node, dict(self.obj_metadata)) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) def test_reconstruct_fa_exceptions_fails(self): job = { @@ -4792,7 +4794,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): node['backend_index'] = self.policy.get_backend_index(node['index']) test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) # bad response @@ -4826,8 +4828,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): job, node, self.obj_metadata) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) # no error and warning self.assertFalse(self.logger.get_lines_for_level('error')) @@ -4843,7 +4846,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): node['backend_index'] = self.policy.get_backend_index(node['index']) test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) broken_body = ec_archive_bodies.pop(1) @@ -4865,8 +4868,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): job, node, dict(self.obj_metadata)) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) # one newer etag won't spoil the bunch new_index = random.randint(0, self.policy.ec_ndata - 1) @@ -4881,8 +4885,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): job, node, dict(self.obj_metadata)) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, 
usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) # no error and warning self.assertFalse(self.logger.get_lines_for_level('error')) @@ -4898,7 +4903,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): node['backend_index'] = self.policy.get_backend_index(node['index']) test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) broken_body = ec_archive_bodies.pop(1) @@ -4917,8 +4922,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): job, node, dict(self.obj_metadata)) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) # a response at same timestamp but different etag won't spoil the bunch # N.B. (FIXME). if we choose the first response as garbage, the @@ -4937,8 +4943,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): job, node, dict(self.obj_metadata)) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) # expect an error log but no warnings error_log_lines = self.logger.get_lines_for_level('error') @@ -4968,7 +4975,8 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): archive_bodies = encode_frag_archive_bodies(self.policy, body) # pop the index to the destination node archive_bodies.pop(1) - key = (md5(body).hexdigest(), next(ts).internal, bool(i % 2)) + key = (md5(body, usedforsecurity=False).hexdigest(), + next(ts).internal, bool(i % 2)) ec_archive_dict[key] = archive_bodies responses = list() @@ -5041,7 +5049,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): node['backend_index'] = self.policy.get_backend_index(node['index']) test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) # instead of popping the broken body, we'll just leave it in the list @@ -5062,8 +5070,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): job, node, self.obj_metadata) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) # no error, no warning self.assertFalse(self.logger.get_lines_for_level('error')) @@ -5100,7 +5109,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): node['backend_index'] = self.policy.get_backend_index(node['index']) test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) broken_body = ec_archive_bodies.pop(1) @@ -5122,8 +5131,9 @@ class 
TestReconstructFragmentArchive(BaseTestObjectReconstructor): job, node, self.obj_metadata) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) # no error and warning self.assertFalse(self.logger.get_lines_for_level('error')) @@ -5150,7 +5160,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): node['backend_index'] = self.policy.get_backend_index(node['index']) test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) broken_body = ec_archive_bodies.pop(1) @@ -5180,8 +5190,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): job, node, self.obj_metadata) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) # no errors self.assertFalse(self.logger.get_lines_for_level('error')) @@ -5222,7 +5233,7 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): node['backend_index'] = self.policy.get_backend_index(node['index']) test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) broken_body = ec_archive_bodies.pop(1) @@ -5247,8 +5258,9 @@ class TestReconstructFragmentArchive(BaseTestObjectReconstructor): job, node, self.obj_metadata) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) # no errors self.assertFalse(self.logger.get_lines_for_level('error')) @@ -5305,7 +5317,7 @@ class TestObjectReconstructorECDuplicationFactor(TestObjectReconstructor): } test_data = (b'rebuild' * self.policy.ec_segment_size)[:-777] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = encode_frag_archive_bodies(self.policy, test_data) broken_body = ec_archive_bodies.pop(index) @@ -5343,8 +5355,9 @@ class TestObjectReconstructorECDuplicationFactor(TestObjectReconstructor): job, node, metadata) fixed_body = b''.join(df.reader()) self.assertEqual(len(fixed_body), len(broken_body)) - self.assertEqual(md5(fixed_body).hexdigest(), - md5(broken_body).hexdigest()) + self.assertEqual( + md5(fixed_body, usedforsecurity=False).hexdigest(), + md5(broken_body, usedforsecurity=False).hexdigest()) for called_header in called_headers: called_header = HeaderKeyDict(called_header) self.assertIn('Content-Length', called_header) diff --git a/test/unit/obj/test_server.py b/test/unit/obj/test_server.py index 4cac67d95a..f04e9523c0 100644 --- a/test/unit/obj/test_server.py +++ b/test/unit/obj/test_server.py @@ -31,7 +31,6 @@ import random from shutil import rmtree from time import gmtime, strftime, time, struct_time from tempfile import mkdtemp -from hashlib import md5 from 
collections import defaultdict from contextlib import contextmanager from textwrap import dedent @@ -53,7 +52,7 @@ from swift.common import utils, bufferedhttp from swift.common.header_key_dict import HeaderKeyDict from swift.common.utils import hash_path, mkdirs, normalize_timestamp, \ NullLogger, storage_directory, public, replication, encode_timestamps, \ - Timestamp + Timestamp, md5 from swift.common import constraints from swift.common.request_helpers import get_reserved_name from swift.common.swob import Request, WsgiBytesIO @@ -261,7 +260,7 @@ class TestObjectController(unittest.TestCase): req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, headers=headers) req.body = b'VERIFY' - etag = '"%s"' % md5(b'VERIFY').hexdigest() + etag = '"%s"' % md5(b'VERIFY', usedforsecurity=False).hexdigest() resp = req.get_response(self.object_controller) self.assertEqual(resp.status_int, 201) self.assertEqual(dict(resp.headers), { @@ -1725,9 +1724,10 @@ class TestObjectController(unittest.TestCase): 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'}, environ={'REQUEST_METHOD': 'PUT'}) - obj_etag = md5(b"obj data").hexdigest() + obj_etag = md5(b"obj data", usedforsecurity=False).hexdigest() footer_meta = json.dumps({"Etag": obj_etag}).encode('ascii') - footer_meta_cksum = md5(footer_meta).hexdigest().encode('ascii') + footer_meta_cksum = md5( + footer_meta, usedforsecurity=False).hexdigest().encode('ascii') req.body = b"\r\n".join(( b"--boundary", @@ -1773,11 +1773,12 @@ class TestObjectController(unittest.TestCase): '/sda1/p/a/c/o', headers=headers, environ={'REQUEST_METHOD': 'PUT'}) - obj_etag = md5(b"obj data").hexdigest() + obj_etag = md5(b"obj data", usedforsecurity=False).hexdigest() footers = {'Etag': obj_etag} footers.update(override_footers) footer_meta = json.dumps(footers).encode('ascii') - footer_meta_cksum = md5(footer_meta).hexdigest().encode('ascii') + footer_meta_cksum = md5( + footer_meta, usedforsecurity=False).hexdigest().encode('ascii') req.body = b"\r\n".join(( b"--boundary", @@ -1863,9 +1864,10 @@ class TestObjectController(unittest.TestCase): 'X-Backend-Obj-Multipart-Mime-Boundary': 'boundary'}, environ={'REQUEST_METHOD': 'PUT'}) - footers = {"Etag": md5(b"green").hexdigest()} + footers = {"Etag": md5(b"green", usedforsecurity=False).hexdigest()} footer_meta = json.dumps(footers).encode('ascii') - footer_meta_cksum = md5(footer_meta).hexdigest().encode('ascii') + footer_meta_cksum = md5( + footer_meta, usedforsecurity=False).hexdigest().encode('ascii') req.body = b"\r\n".join(( b"--boundary", @@ -1899,7 +1901,8 @@ class TestObjectController(unittest.TestCase): 'X-Object-Meta-X': 'Y', 'X-Object-Sysmeta-X': 'Y', }).encode('ascii') - footer_meta_cksum = md5(footer_meta).hexdigest().encode('ascii') + footer_meta_cksum = md5( + footer_meta, usedforsecurity=False).hexdigest().encode('ascii') req.body = b"\r\n".join(( b"--boundary", @@ -1937,7 +1940,7 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'PUT'}) footer_meta = json.dumps({ - "Etag": md5(b"obj data").hexdigest() + "Etag": md5(b"obj data", usedforsecurity=False).hexdigest() }).encode('ascii') req.body = b"\r\n".join(( @@ -1967,10 +1970,11 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'PUT'}) footer_meta = json.dumps({ - "Etag": md5(b"obj data").hexdigest() + "Etag": md5(b"obj data", usedforsecurity=False).hexdigest() }).encode('ascii') bad_footer_meta_cksum = \ - md5(footer_meta + b"bad").hexdigest().encode('ascii') + md5(footer_meta + b"bad", + 
usedforsecurity=False).hexdigest().encode('ascii') req.body = b"\r\n".join(( b"--boundary", @@ -1999,7 +2003,8 @@ class TestObjectController(unittest.TestCase): environ={'REQUEST_METHOD': 'PUT'}) footer_meta = b"{{{[[{{[{[[{[{[[{{{[{{{{[[{{[{[" - footer_meta_cksum = md5(footer_meta).hexdigest().encode('ascii') + footer_meta_cksum = md5( + footer_meta, usedforsecurity=False).hexdigest().encode('ascii') req.body = b"\r\n".join(( b"--boundary", @@ -2030,7 +2035,8 @@ class TestObjectController(unittest.TestCase): footer_meta = json.dumps({ 'X-Object-Meta-Mint': 'pepper' }).encode('ascii') - footer_meta_cksum = md5(footer_meta).hexdigest().encode('ascii') + footer_meta_cksum = md5( + footer_meta, usedforsecurity=False).hexdigest().encode('ascii') req.body = b"\r\n".join(( b"--boundary", @@ -3921,7 +3927,7 @@ class TestObjectController(unittest.TestCase): policy=POLICIES.legacy) disk_file.open() file_name = os.path.basename(disk_file._data_file) - etag = md5() + etag = md5(usedforsecurity=False) etag.update(b'VERIF') etag = etag.hexdigest() metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o', @@ -3983,7 +3989,7 @@ class TestObjectController(unittest.TestCase): policy=POLICIES.legacy) disk_file.open(timestamp) file_name = os.path.basename(disk_file._data_file) - etag = md5() + etag = md5(usedforsecurity=False) etag.update(b'VERIF') etag = etag.hexdigest() metadata = {'X-Timestamp': timestamp, 'name': '/a/c/o', @@ -4314,7 +4320,7 @@ class TestObjectController(unittest.TestCase): self.assertEqual(path, '/sda1/p/a/c/o') expected = { 'X-Size': len(b'test1'), - 'X-Etag': md5(b'test1').hexdigest(), + 'X-Etag': md5(b'test1', usedforsecurity=False).hexdigest(), 'X-Content-Type': 'text/plain', 'X-Timestamp': create_timestamp, } @@ -4354,7 +4360,7 @@ class TestObjectController(unittest.TestCase): self.assertEqual(path, '/sda1/p/a/c/o') expected = { 'X-Size': len(b'test2'), - 'X-Etag': md5(b'test2').hexdigest(), + 'X-Etag': md5(b'test2', usedforsecurity=False).hexdigest(), 'X-Content-Type': 'text/html', 'X-Timestamp': offset_timestamp, } @@ -4393,7 +4399,7 @@ class TestObjectController(unittest.TestCase): self.assertEqual(path, '/sda1/p/a/c/o') expected = { 'X-Size': len(b'test3'), - 'X-Etag': md5(b'test3').hexdigest(), + 'X-Etag': md5(b'test3', usedforsecurity=False).hexdigest(), 'X-Content-Type': 'text/enriched', 'X-Timestamp': overwrite_timestamp, } @@ -4562,7 +4568,7 @@ class TestObjectController(unittest.TestCase): return False def my_hash_path(*args): - return md5(b'collide').hexdigest() + return md5(b'collide', usedforsecurity=False).hexdigest() with mock.patch("swift.obj.diskfile.hash_path", my_hash_path): with mock.patch("swift.obj.server.check_object_creation", @@ -7677,10 +7683,11 @@ class TestObjectController(unittest.TestCase): test_data = b'obj data' footer_meta = { "X-Object-Sysmeta-Ec-Frag-Index": "7", - "Etag": md5(test_data).hexdigest(), + "Etag": md5(test_data, usedforsecurity=False).hexdigest(), } footer_json = json.dumps(footer_meta).encode('ascii') - footer_meta_cksum = md5(footer_json).hexdigest().encode('ascii') + footer_meta_cksum = md5( + footer_json, usedforsecurity=False).hexdigest().encode('ascii') test_doc = b"\r\n".join(( b"--boundary123", b"X-Document: object body", @@ -7921,10 +7928,11 @@ class TestObjectServer(unittest.TestCase): test_data = encode_frag_archive_bodies(POLICIES[1], b'obj data')[0] footer_meta = { "X-Object-Sysmeta-Ec-Frag-Index": "2", - "Etag": md5(test_data).hexdigest(), + "Etag": md5(test_data, usedforsecurity=False).hexdigest(), } footer_json = 
json.dumps(footer_meta).encode('ascii') - footer_meta_cksum = md5(footer_json).hexdigest().encode('ascii') + footer_meta_cksum = md5( + footer_json, usedforsecurity=False).hexdigest().encode('ascii') test_doc = test_doc or b"\r\n".join(( b"--boundary123", b"X-Document: object body", @@ -8193,10 +8201,11 @@ class TestObjectServer(unittest.TestCase): # make footer doc footer_meta = { "X-Object-Sysmeta-Ec-Frag-Index": "2", - "Etag": md5(test_data).hexdigest(), + "Etag": md5(test_data, usedforsecurity=False).hexdigest(), } footer_json = json.dumps(footer_meta).encode('ascii') - footer_meta_cksum = md5(footer_json).hexdigest().encode('ascii') + footer_meta_cksum = md5( + footer_json, usedforsecurity=False).hexdigest().encode('ascii') # send most of the footer doc footer_doc = b"\r\n".join(( diff --git a/test/unit/proxy/controllers/test_obj.py b/test/unit/proxy/controllers/test_obj.py index 6a64345457..1ec6265f00 100644 --- a/test/unit/proxy/controllers/test_obj.py +++ b/test/unit/proxy/controllers/test_obj.py @@ -23,7 +23,6 @@ import unittest from collections import defaultdict from contextlib import contextmanager import json -from hashlib import md5 import mock from eventlet import Timeout, sleep @@ -41,7 +40,7 @@ else: import swift from swift.common import utils, swob, exceptions from swift.common.exceptions import ChunkWriteTimeout -from swift.common.utils import Timestamp, list_from_csv +from swift.common.utils import Timestamp, list_from_csv, md5 from swift.proxy import server as proxy_server from swift.proxy.controllers import obj from swift.proxy.controllers.base import \ @@ -143,7 +142,8 @@ def make_footers_callback(body=None): crypto_etag = '20242af0cd21dd7195a10483eb7472c9' etag_crypto_meta = \ '{"cipher": "AES_CTR_256", "iv": "sD+PSw/DfqYwpsVGSo0GEw=="}' - etag = md5(body).hexdigest() if body is not None else None + etag = md5(body, + usedforsecurity=False).hexdigest() if body is not None else None footers_to_add = { 'X-Object-Sysmeta-Container-Update-Override-Etag': cont_etag, 'X-Object-Sysmeta-Crypto-Etag': crypto_etag, @@ -1078,7 +1078,7 @@ class TestReplicatedObjController(CommonObjectControllerMixin, body=test_body) if chunked: req.headers['Transfer-Encoding'] = 'chunked' - etag = md5(test_body).hexdigest() + etag = md5(test_body, usedforsecurity=False).hexdigest() req.headers['Etag'] = etag put_requests = defaultdict( @@ -2188,7 +2188,8 @@ class ECObjectControllerMixin(CommonObjectControllerMixin): self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['etag'], obj1['etag']) - self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj1['etag']) # expect a request to all primaries plus one handoff self.assertEqual(self.replicas() + 1, len(log)) @@ -2524,7 +2525,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): def test_GET_with_slow_primaries(self): segment_size = self.policy.ec_segment_size test_data = (b'test' * segment_size)[:-743] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = self._make_ec_archive_bodies(test_data) ts = self.ts() headers = [] @@ -2562,7 +2563,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): def test_GET_with_some_slow_primaries(self): segment_size = self.policy.ec_segment_size test_data = (b'test' * segment_size)[:-289] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = 
self._make_ec_archive_bodies(test_data) ts = self.ts() headers = [] @@ -2649,7 +2650,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): def test_GET_with_slow_nodes_and_failures(self): segment_size = self.policy.ec_segment_size test_data = (b'test' * segment_size)[:-289] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = self._make_ec_archive_bodies(test_data) ts = self.ts() headers = [] @@ -2703,7 +2704,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): def test_GET_with_one_slow_frag_lane(self): segment_size = self.policy.ec_segment_size test_data = (b'test' * segment_size)[:-454] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = self._make_ec_archive_bodies(test_data) ts = self.ts() headers = [] @@ -2748,7 +2749,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): def test_GET_with_concurrent_ec_extra_requests(self): segment_size = self.policy.ec_segment_size test_data = (b'test' * segment_size)[:-454] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = self._make_ec_archive_bodies(test_data) ts = self.ts() headers = [] @@ -2825,7 +2826,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): def test_GET_with_frags_swapped_around(self): segment_size = self.policy.ec_segment_size test_data = (b'test' * segment_size)[:-657] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = self._make_ec_archive_bodies(test_data) _part, primary_nodes = self.obj_ring.get_nodes('a', 'c', 'o') @@ -2909,7 +2910,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['etag'], obj1['etag']) - self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj1['etag']) collected_responses = defaultdict(list) for conn in log: @@ -2964,7 +2966,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): obj1['etag']: {True}, obj2['etag']: {False}, }, closed_conn) - self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj2['etag']) self.assertEqual({True}, {conn.closed for conn in log}) collected_responses = defaultdict(set) @@ -3011,7 +3014,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['etag'], obj2['etag']) - self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj2['etag']) collected_responses = defaultdict(set) for conn in log: @@ -3075,7 +3079,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['etag'], obj2['etag']) - self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj2['etag']) collected_responses = defaultdict(set) for conn in log: @@ -3195,7 +3200,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['etag'], obj1['etag']) - self.assertEqual(md5(resp.body).hexdigest(), 
obj1['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj1['etag']) # Expect a maximum of one request to each primary plus one extra # request to node 1. Actual value could be less if the extra request @@ -3489,7 +3495,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['etag'], obj1['etag']) - self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj1['etag']) self.assertGreaterEqual(len(log), self.policy.ec_ndata) collected_durables = [] @@ -3534,7 +3541,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['etag'], obj1['etag']) - self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj1['etag']) collected_durables = [] for conn in log: @@ -3625,7 +3633,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['etag'], obj1['etag']) - self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj1['etag']) # Quorum of non-durables for a different object won't # prevent us hunting down the durable object @@ -3670,7 +3679,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['etag'], obj1['etag']) - self.assertEqual(md5(resp.body).hexdigest(), obj1['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj1['etag']) def test_GET_with_missing_durables_and_older_durables(self): # scenario: non-durable frags of newer obj1 obscure all durable frags @@ -3725,7 +3735,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['etag'], obj2['etag']) - self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj2['etag']) # max: proxy will GET all non-durable obj1 frags and then 10 obj frags self.assertLessEqual(len(log), self.replicas() + self.policy.ec_ndata) # min: proxy will GET 10 non-durable obj1 frags and then 10 obj frags @@ -3770,7 +3781,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['etag'], obj3['etag']) - self.assertEqual(md5(resp.body).hexdigest(), obj3['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj3['etag']) self.assertGreaterEqual(len(log), self.policy.ec_ndata + 1) self.assertLessEqual(len(log), (self.policy.ec_ndata * 2) + 1) @@ -3817,7 +3829,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 200) self.assertEqual(resp.headers['etag'], obj2['etag']) - self.assertEqual(md5(resp.body).hexdigest(), obj2['etag']) + self.assertEqual(md5( + resp.body, usedforsecurity=False).hexdigest(), obj2['etag']) # max: proxy will GET all non-durable obj1 frags and then 10 obj2 frags self.assertLessEqual(len(log), self.replicas() + self.policy.ec_ndata) # min: proxy will GET 10 non-durable obj1 frags and then 10 obj2 frags @@ -4055,10 +4068,10 @@ class TestECObjController(ECObjectControllerMixin, 
unittest.TestCase): segment_size = self.policy.ec_segment_size frag_size = self.policy.fragment_size new_data = (b'test' * segment_size)[:-492] - new_etag = md5(new_data).hexdigest() + new_etag = md5(new_data, usedforsecurity=False).hexdigest() new_archives = self._make_ec_archive_bodies(new_data) old_data = (b'junk' * segment_size)[:-492] - old_etag = md5(old_data).hexdigest() + old_etag = md5(old_data, usedforsecurity=False).hexdigest() old_archives = self._make_ec_archive_bodies(old_data) frag_archive_size = len(new_archives[0]) @@ -4135,8 +4148,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): # N.B. the object data *length* here is different test_data2 = (b'blah1' * segment_size)[:-333] - etag1 = md5(test_data1).hexdigest() - etag2 = md5(test_data2).hexdigest() + etag1 = md5(test_data1, usedforsecurity=False).hexdigest() + etag2 = md5(test_data2, usedforsecurity=False).hexdigest() ec_archive_bodies1 = self._make_ec_archive_bodies(test_data1) ec_archive_bodies2 = self._make_ec_archive_bodies(test_data2) @@ -4161,7 +4174,9 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): headers=headers): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 200) - self.assertEqual(md5(resp.body).hexdigest(), etag1) + self.assertEqual( + md5(resp.body, usedforsecurity=False).hexdigest(), + etag1) # sanity check responses2 responses = responses2[:self.policy.ec_ndata] @@ -4170,7 +4185,9 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): headers=headers): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 200) - self.assertEqual(md5(resp.body).hexdigest(), etag2) + self.assertEqual( + md5(resp.body, usedforsecurity=False).hexdigest(), + etag2) # now mix the responses a bit mix_index = random.randint(0, self.policy.ec_ndata - 1) @@ -4199,7 +4216,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): def test_GET_read_timeout(self): segment_size = self.policy.ec_segment_size test_data = (b'test' * segment_size)[:-333] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = self._make_ec_archive_bodies(test_data) headers = {'X-Object-Sysmeta-Ec-Etag': etag} self.app.recoverable_node_timeout = 0.01 @@ -4219,7 +4236,9 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): self.assertEqual(resp.status_int, 200) # do this inside the fake http context manager, it'll try to # resume but won't be able to give us all the right bytes - self.assertNotEqual(md5(resp.body).hexdigest(), etag) + self.assertNotEqual( + md5(resp.body, usedforsecurity=False).hexdigest(), + etag) error_lines = self.logger.get_lines_for_level('error') nparity = self.policy.ec_nparity self.assertGreater(len(error_lines), nparity) @@ -4233,7 +4252,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): def test_GET_read_timeout_resume(self): segment_size = self.policy.ec_segment_size test_data = (b'test' * segment_size)[:-333] - etag = md5(test_data).hexdigest() + etag = md5(test_data, usedforsecurity=False).hexdigest() ec_archive_bodies = self._make_ec_archive_bodies(test_data) headers = { 'X-Object-Sysmeta-Ec-Etag': etag, @@ -4255,7 +4274,9 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase): headers=headers): resp = req.get_response(self.app) self.assertEqual(resp.status_int, 200) - self.assertEqual(md5(resp.body).hexdigest(), etag) + self.assertEqual( + md5(resp.body, 
usedforsecurity=False).hexdigest(),
+            etag)
         error_lines = self.logger.get_lines_for_level('error')
         self.assertEqual(1, len(error_lines))
         self.assertIn('retrying', error_lines[0])
@@ -4303,8 +4324,8 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
         segment_size = self.policy.ec_segment_size
         test_data2 = (b'blah1' * segment_size)[:-333]
         test_data1 = (b'test' * segment_size)[:-333]
-        etag2 = md5(test_data2).hexdigest()
-        etag1 = md5(test_data1).hexdigest()
+        etag2 = md5(test_data2, usedforsecurity=False).hexdigest()
+        etag1 = md5(test_data1, usedforsecurity=False).hexdigest()
         ec_archive_bodies2 = self._make_ec_archive_bodies(test_data2)
         ec_archive_bodies1 = self._make_ec_archive_bodies(test_data1)
         headers2 = {'X-Object-Sysmeta-Ec-Etag': etag2,
@@ -4382,7 +4403,9 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
                               headers=headers) as log:
             resp = req.get_response(self.app)
         self.assertEqual(resp.status_int, 200)
-        self.assertEqual(md5(resp.body).hexdigest(), etag1)
+        self.assertEqual(
+            md5(resp.body, usedforsecurity=False).hexdigest(),
+            etag1)
         error_lines = self.logger.get_lines_for_level('error')
         self.assertEqual(2, len(error_lines))
         for line in error_lines:
@@ -4439,7 +4462,7 @@ class TestECObjController(ECObjectControllerMixin, unittest.TestCase):
     def _test_invalid_ranges(self, method, real_body, segment_size,
                              req_range):
         # make a request with range starts from more than real size.
-        body_etag = md5(real_body).hexdigest()
+        body_etag = md5(real_body, usedforsecurity=False).hexdigest()
         req = swift.common.swob.Request.blank(
             '/v1/a/c/o', method=method,
             headers={'Destination': 'c1/o',
@@ -4643,7 +4666,9 @@ class TestECDuplicationObjController(

         self.assertEqual(resp.status_int, 200)
         self.assertEqual(resp.headers['etag'], obj['etag'])
-        self.assertEqual(md5(resp.body).hexdigest(), obj['etag'])
+        self.assertEqual(
+            md5(resp.body, usedforsecurity=False).hexdigest(),
+            obj['etag'])

         collected_responses = defaultdict(set)
         for conn in log:
@@ -4819,7 +4844,9 @@ class TestECDuplicationObjController(

         self.assertEqual(resp.status_int, 200)
         self.assertEqual(resp.headers['etag'], obj2['etag'])
-        self.assertEqual(md5(resp.body).hexdigest(), obj2['etag'])
+        self.assertEqual(
+            md5(resp.body, usedforsecurity=False).hexdigest(),
+            obj2['etag'])

         collected_responses = defaultdict(set)
         for conn in log:
@@ -4891,7 +4918,9 @@ class TestECDuplicationObjController(

         self.assertEqual(resp.status_int, 200)
         self.assertEqual(resp.headers['etag'], obj2['etag'])
-        self.assertEqual(md5(resp.body).hexdigest(), obj2['etag'])
+        self.assertEqual(
+            md5(resp.body, usedforsecurity=False).hexdigest(),
+            obj2['etag'])

         collected_responses = defaultdict(set)
         for conn in log:
@@ -5414,7 +5443,7 @@ class TestECObjControllerMimePutter(BaseObjectControllerMixin,
         env = {'swift.callback.update_footers': footers_callback}
         req = swift.common.swob.Request.blank(
             '/v1/a/c/o', method='PUT', environ=env)
-        etag = md5(test_body).hexdigest()
+        etag = md5(test_body, usedforsecurity=False).hexdigest()
         size = len(test_body)
         req.body = test_body
         if chunked:
@@ -5515,7 +5544,7 @@ class TestECObjControllerMimePutter(BaseObjectControllerMixin,
             'X-Object-Sysmeta-Ec-Etag': etag,
             'X-Backend-Container-Update-Override-Etag': etag,
             'X-Object-Sysmeta-Ec-Segment-Size': str(segment_size),
-            'Etag': md5(obj_payload).hexdigest()})
+            'Etag': md5(obj_payload, usedforsecurity=False).hexdigest()})
         for header, value in expected.items():
             self.assertEqual(footer_metadata[header], value)

@@ -5547,7 +5576,7 @@ class TestECObjControllerMimePutter(BaseObjectControllerMixin,
         # trailing metadata
         segment_size = self.policy.ec_segment_size
         test_body = (b'asdf' * segment_size)[:-10]
-        etag = md5(test_body).hexdigest()
+        etag = md5(test_body, usedforsecurity=False).hexdigest()
         size = len(test_body)
         codes = [201] * self.replicas()
         resp_headers = {
@@ -5623,7 +5652,9 @@ class TestECObjControllerMimePutter(BaseObjectControllerMixin,
                 'X-Object-Sysmeta-Ec-Content-Length': str(size),
                 'X-Object-Sysmeta-Ec-Etag': etag,
                 'X-Object-Sysmeta-Ec-Segment-Size': str(segment_size),
-                'Etag': md5(obj_part.get_payload(decode=True)).hexdigest()}
+                'Etag': md5(
+                    obj_part.get_payload(decode=True),
+                    usedforsecurity=False).hexdigest()}
             expected.update(expect_added)
             for header, value in expected.items():
                 self.assertIn(header, footer_metadata)
diff --git a/test/unit/proxy/test_server.py b/test/unit/proxy/test_server.py
index d6ce2f5936..4345eab30b 100644
--- a/test/unit/proxy/test_server.py
+++ b/test/unit/proxy/test_server.py
@@ -30,7 +30,6 @@ from shutil import rmtree, copyfile, move
 import gc
 import time
 from textwrap import dedent
-from hashlib import md5
 import collections
 from pyeclib.ec_iface import ECDriverError
 from tempfile import mkdtemp, NamedTemporaryFile
@@ -71,7 +70,7 @@ from swift.common.exceptions import ChunkReadTimeout, DiskFileNotExist, \
 from swift.common import utils, constraints
 from swift.common.utils import hash_path, storage_directory, \
     parse_content_type, parse_mime_headers, \
-    iter_multipart_mime_documents, public, mkdirs, NullLogger
+    iter_multipart_mime_documents, public, mkdirs, NullLogger, md5
 from swift.common.wsgi import loadapp, ConfigString, SwiftHttpProtocol
 from swift.proxy.controllers import base as proxy_base
 from swift.proxy.controllers.base import get_cache_key, cors_validation, \
@@ -2152,8 +2151,8 @@ class BaseTestObjectController(object):
         self.put_container(policy.name, container_name)

         obj = b'this object has an etag and is otherwise unimportant'
-        etag = md5(obj).hexdigest()
-        not_etag = md5(obj + b"blahblah").hexdigest()
+        etag = md5(obj, usedforsecurity=False).hexdigest()
+        not_etag = md5(obj + b"blahblah", usedforsecurity=False).hexdigest()

         prolis = _test_sockets[0]
         prosrv = _test_servers[0]
@@ -5687,7 +5686,8 @@ class TestReplicatedObjectController(
         self.assertEqual(headers[:len(exp)], exp)

         # request with both If-None-Match and Range
-        etag = md5(b"abcdefghij").hexdigest().encode('ascii')
+        etag = md5(b"abcdefghij",
+                   usedforsecurity=False).hexdigest().encode('ascii')
         sock = connect_tcp(('localhost', prolis.getsockname()[1]))
         fd = sock.makefile('rwb')
         fd.write(b'GET /v1/a/con/o HTTP/1.1\r\n' +
@@ -6562,7 +6562,8 @@ class BaseTestECObjectController(BaseTestObjectController):
                  'Content-Length: %d\r\n'
                  'X-Storage-Token: t\r\n'
                  'Content-Type: application/octet-stream\r\n'
-                 '\r\n' % (self.ec_policy.name, md5(obj).hexdigest(),
+                 '\r\n' % (self.ec_policy.name,
+                           md5(obj, usedforsecurity=False).hexdigest(),
                            len(obj))).encode('ascii'))
         fd.write(obj)
         fd.flush()
@@ -6597,7 +6598,7 @@ class BaseTestECObjectController(BaseTestObjectController):

         self.assertEqual(
             lmeta['x-object-sysmeta-ec-etag'],
-            md5(obj).hexdigest())
+            md5(obj, usedforsecurity=False).hexdigest())
         self.assertEqual(
             lmeta['x-object-sysmeta-ec-content-length'],
             str(len(obj)))
@@ -6609,7 +6610,7 @@ class BaseTestECObjectController(BaseTestObjectController):
             '%s 2+1' % DEFAULT_TEST_EC_TYPE)
         self.assertEqual(
             lmeta['etag'],
-            md5(contents).hexdigest())
+            md5(contents, usedforsecurity=False).hexdigest())

         # check presence for a durable data file for the timestamp
         durable_file = (
@@ -6738,7 +6739,8 @@ class BaseTestECObjectController(BaseTestObjectController):
                  'X-Storage-Token: t\r\n'
                  'Content-Type: application/octet-stream\r\n'
                  '\r\n' % (self.ec_policy.name,
-                           md5(b'something else').hexdigest(),
+                           md5(b'something else',
+                               usedforsecurity=False).hexdigest(),
                            len(obj))).encode('ascii'))
         fd.write(obj)
         fd.flush()
@@ -6769,8 +6771,8 @@ class BaseTestECObjectController(BaseTestObjectController):
             self.ec_policy.object_ring.replica_count - self.ec_policy.ec_ndata)
         countdown = [count]

-        def busted_md5_constructor(initial_str=b""):
-            hasher = md5(initial_str)
+        def busted_md5_constructor(initial_str=b"", usedforsecurity=True):
+            hasher = md5(initial_str, usedforsecurity=usedforsecurity)
             if countdown[0] > 0:
                 hasher.update(b'wrong')
                 countdown[0] -= 1
@@ -6790,7 +6792,8 @@ class BaseTestECObjectController(BaseTestObjectController):
                  'Content-Length: %d\r\n'
                  'X-Storage-Token: t\r\n'
                  'Content-Type: application/octet-stream\r\n'
-                 '\r\n' % (self.ec_policy.name, md5(obj).hexdigest(),
+                 '\r\n' % (self.ec_policy.name,
+                           md5(obj, usedforsecurity=False).hexdigest(),
                            len(obj))).encode('ascii'))
         fd.write(obj)
         fd.flush()
@@ -6830,9 +6833,9 @@ class BaseTestECObjectController(BaseTestObjectController):
     def test_PUT_ec_fragment_quorum_archive_etag_mismatch(self):
         self.put_container("ec", "ec-con")

-        def busted_md5_constructor(initial_str=""):
-            hasher = md5(initial_str)
-            hasher.update('wrong')
+        def busted_md5_constructor(initial_str=b"", usedforsecurity=True):
+            hasher = md5(initial_str, usedforsecurity=usedforsecurity)
+            hasher.update(b'wrong')
             return hasher

         obj = b'uvarovite-esurience-cerated-symphysic'
@@ -6848,7 +6851,7 @@ class BaseTestECObjectController(BaseTestObjectController):
         commit_confirmation = \
             'swift.proxy.controllers.obj.MIMEPutter.send_commit_confirmation'
         diskfile_md5 = 'swift.obj.diskfile.md5'
-        mem_diskfile_md5 = 'swift.obj.mem_diskfile.hashlib.md5'
+        mem_diskfile_md5 = 'swift.obj.mem_diskfile.md5'

         with mock.patch(diskfile_md5, busted_md5_constructor), \
                 mock.patch(mem_diskfile_md5, busted_md5_constructor), \
@@ -6861,7 +6864,7 @@ class BaseTestECObjectController(BaseTestObjectController):
                  'Content-Length: %d\r\n'
                  'X-Storage-Token: t\r\n'
                  'Content-Type: application/octet-stream\r\n'
-                 '\r\n' % (md5(obj).hexdigest(),
+                 '\r\n' % (md5(obj, usedforsecurity=False).hexdigest(),
                            len(obj))).encode('ascii'))
         fd.write(obj)
         fd.flush()
@@ -6917,7 +6920,7 @@ class BaseTestECObjectController(BaseTestObjectController):
                  'Content-Length: %d\r\n'
                  'X-Storage-Token: t\r\n'
                  'Content-Type: application/octet-stream\r\n'
-                 '\r\n' % (md5(obj).hexdigest(),
+                 '\r\n' % (md5(obj, usedforsecurity=False).hexdigest(),
                            len(obj))).encode('ascii'))
         fd.write(obj)
         fd.flush()
@@ -6958,7 +6961,8 @@ class BaseTestECObjectController(BaseTestObjectController):
                  'Content-Length: %d\r\n'
                  'X-Storage-Token: t\r\n'
                  'Content-Type: application/octet-stream\r\n'
-                 '\r\n' % (self.ec_policy.name, md5(obj).hexdigest(),
+                 '\r\n' % (self.ec_policy.name,
+                           md5(obj, usedforsecurity=False).hexdigest(),
                            len(obj))).encode('ascii'))
         fd.write(obj)
         fd.flush()
@@ -6976,7 +6980,8 @@ class BaseTestECObjectController(BaseTestObjectController):
                  'Content-Length: %d\r\n'
                  'X-Storage-Token: t\r\n'
                  'Content-Type: application/octet-stream\r\n'
-                 '\r\n' % (self.ec_policy.name, md5(obj).hexdigest(),
+                 '\r\n' % (self.ec_policy.name,
+                           md5(obj, usedforsecurity=False).hexdigest(),
                            len(obj))).encode('ascii'))
         fd.write(obj)
         fd.flush()
@@ -7022,7 +7027,9 @@ class BaseTestECObjectController(BaseTestObjectController):

         headers = parse_headers_string(headers)
         self.assertEqual(str(len(obj)), headers['Content-Length'])
-        self.assertEqual(md5(obj).hexdigest(), headers['Etag'])
+        self.assertEqual(
+            md5(obj, usedforsecurity=False).hexdigest(),
+            headers['Etag'])
         self.assertEqual('chartreuse', headers['X-Object-Meta-Color'])

         gotten_obj = b''
@@ -7084,7 +7091,9 @@ class BaseTestECObjectController(BaseTestObjectController):

         headers = parse_headers_string(headers)
         self.assertEqual(str(len(obj)), headers['Content-Length'])
-        self.assertEqual(md5(obj).hexdigest(), headers['Etag'])
+        self.assertEqual(
+            md5(obj, usedforsecurity=False).hexdigest(),
+            headers['Etag'])

         gotten_obj = b''
         while True:
@@ -7165,7 +7174,9 @@ class BaseTestECObjectController(BaseTestObjectController):

         headers = parse_headers_string(headers)
         self.assertEqual(str(len(obj)), headers['Content-Length'])
-        self.assertEqual(md5(obj).hexdigest(), headers['Etag'])
+        self.assertEqual(
+            md5(obj, usedforsecurity=False).hexdigest(),
+            headers['Etag'])

         gotten_obj = b''
         try:
@@ -7221,7 +7232,9 @@ class BaseTestECObjectController(BaseTestObjectController):

         headers = parse_headers_string(headers)
         self.assertEqual(str(len(obj)), headers['Content-Length'])
-        self.assertEqual(md5(obj).hexdigest(), headers['Etag'])
+        self.assertEqual(
+            md5(obj, usedforsecurity=False).hexdigest(),
+            headers['Etag'])
         self.assertEqual('chartreuse', headers['X-Object-Meta-Color'])

         error_lines = prosrv.logger.get_lines_for_level('error')
@@ -7310,7 +7323,8 @@ class BaseTestECObjectController(BaseTestObjectController):

         obj = b'abCD' * 10
         extra_trans_data = [
-            'Etag: "%s"\r\n' % md5(obj).hexdigest(),
+            'Etag: "%s"\r\n' % md5(
+                obj, usedforsecurity=False).hexdigest(),
             'Content-Length: %d\r\n' % len(obj),
             'Content-Type: application/octet-stream\r\n',
             '\r\n%s' % obj.decode('ascii')
@@ -8214,7 +8228,7 @@ class TestObjectECRangedGET(unittest.TestCase):
             str(s) for s in range(431)).encode('ascii')
         assert seg_size * 4 > len(cls.obj) > seg_size * 3, \
             "object is wrong number of segments"
-        cls.obj_etag = md5(cls.obj).hexdigest()
+        cls.obj_etag = md5(cls.obj, usedforsecurity=False).hexdigest()

         cls.tiny_obj = b'tiny, tiny object'
         assert len(cls.tiny_obj) < seg_size, "tiny_obj too large"