Merge "Update hacking for Python3"

Authored by Zuul on 2020-04-09 15:05:28 +00:00, committed by Gerrit Code Review
commit 3cceec2ee5
47 changed files with 316 additions and 311 deletions


@@ -150,7 +150,7 @@ pygments_style = 'sphinx'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-#html_static_path = ['_static']
+# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
@@ -159,8 +159,8 @@ if 'SOURCE_DATE_EPOCH' in os.environ:
now = float(os.environ.get('SOURCE_DATE_EPOCH'))
html_last_updated_fmt = datetime.datetime.utcfromtimestamp(now).isoformat()
else:
-git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local",
-"-n1"]
+git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'",
+"--date=local", "-n1"]
try:
html_last_updated_fmt = subprocess.Popen(
git_cmd, stdout=subprocess.PIPE).communicate()[0]


@@ -21,12 +21,10 @@ enum-compat==0.0.2
eventlet==0.25.0
extras==1.0.0
fixtures==3.0.0
-flake8==2.5.5
future==0.16.0
gitdb2==2.0.3
GitPython==2.1.8
greenlet==0.3.2
-hacking==0.11.0
idna==2.6
imagesize==1.0.0
iso8601==0.1.12
@@ -56,12 +54,10 @@ oslo.serialization==2.25.0
oslo.utils==3.36.0
PasteDeploy==1.3.3
pbr==3.1.1
-pep8==1.5.7
prettytable==0.7.2
pycparser==2.18
pyeclib==1.3.1
pykmip==0.7.0
-pyflakes==0.8.1
Pygments==2.2.0
pyparsing==2.2.0
pyperclip==1.6.0


@@ -158,8 +158,8 @@ html_theme = 'openstackdocs'
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
-# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
+# the docs. This file should be a Windows icon file (.ico) being 16x16 or
+# 32x32 pixels large.
#
# html_favicon = None


@@ -382,7 +382,7 @@ class AccountReaper(Daemon):
self.stats_return_codes.get(err.http_status // 100, 0) + 1
self.logger.increment(
'return_codes.%d' % (err.http_status // 100,))
-except (Timeout, socket.error) as err:
+except (Timeout, socket.error):
self.logger.error(
'Timeout Exception with %(ip)s:%(port)s/%(device)s',
node)
@@ -433,7 +433,7 @@ class AccountReaper(Daemon):
self.stats_return_codes.get(err.http_status // 100, 0) + 1
self.logger.increment(
'return_codes.%d' % (err.http_status // 100,))
-except (Timeout, socket.error) as err:
+except (Timeout, socket.error):
self.logger.error(
'Timeout Exception with %(ip)s:%(port)s/%(device)s',
node)
@@ -509,7 +509,7 @@ class AccountReaper(Daemon):
self.stats_return_codes.get(err.http_status // 100, 0) + 1
self.logger.increment(
'return_codes.%d' % (err.http_status // 100,))
-except (Timeout, socket.error) as err:
+except (Timeout, socket.error):
failures += 1
self.logger.increment('objects_failures')
self.logger.error(
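The ``as err`` bindings dropped here (and in several files below) were never used inside their handlers, which the pyflakes pulled in by the new hacking release reports as an unused local variable (F841). Removing the binding does not change which exceptions are caught. A minimal sketch of the pattern, using a hypothetical ``ping()`` helper rather than reaper code::

    import socket


    def ping(sock):
        """Hypothetical helper, only to illustrate the pattern."""
        try:
            sock.sendall(b'ping')
        except socket.error:  # was: "except socket.error as err:", err unused
            # The same exceptions are still caught; only the unused name
            # flagged by pyflakes is gone. Bind a name only if the handler
            # actually reads it.
            return False
        return True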


@@ -1164,7 +1164,7 @@ swift-ring-builder <builder_file> rebalance [options]
@staticmethod
def dispersion():
-"""
+r"""
swift-ring-builder <builder_file> dispersion <search_filter> [options]
Output report on dispersion.


@@ -937,8 +937,7 @@ class SimpleClient(object):
elif self.attempts > retries:
raise ClientException('Raise too many retries',
http_status=err.getcode())
-except (socket.error, httplib.HTTPException, urllib2.URLError) \
-as err:
+except (socket.error, httplib.HTTPException, urllib2.URLError):
if self.attempts > retries:
raise
sleep(backoff)


@@ -614,7 +614,7 @@ class Server(object):
'%(signal)s') %
{'server': self.server, 'pid': pid, 'signal': sig})
safe_kill(pid, sig, 'swift-%s' % self.server)
-except InvalidPidFileException as e:
+except InvalidPidFileException:
if kwargs.get('verbose'):
print(_('Removing pid file %(pid_file)s with wrong pid '
'%(pid)d') % {'pid_file': pid_file, 'pid': pid})


@@ -48,7 +48,7 @@ from swift.common.swob import Request, HTTPBadRequest
FORBIDDEN_CHARS = "\'\"`<>"
MAX_LENGTH = 255
-FORBIDDEN_REGEXP = "/\./|/\.\./|/\.$|/\.\.$"
+FORBIDDEN_REGEXP = r"/\./|/\.\./|/\.$|/\.\.$"
class NameCheckMiddleware(object):
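The raw-string prefix added above is the most common change in this commit: under Python 3, string literals containing unrecognized escapes such as ``\.`` or ``\d`` trigger an invalid-escape-sequence warning, which the pycodestyle shipped with hacking 2.0 reports (W605). The prefix changes nothing at runtime, because the escape was already passed through to the regex engine unchanged. A minimal illustrative sketch of the equivalence::

    import re

    OLD = "/\./|/\.\./|/\.$|/\.\.$"    # non-raw literal: warned about on Python 3
    NEW = r"/\./|/\.\./|/\.$|/\.\.$"   # raw literal: same characters, no warning

    # Both literals contain exactly the same characters, so the compiled
    # pattern and the middleware behaviour are unchanged.
    assert OLD == NEW
    assert re.search(NEW, '/container/./object')
    assert not re.search(NEW, '/container/object')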


@@ -19,7 +19,7 @@ from swift.common.utils import public
from swift.common.middleware.s3api.exception import ACLError
from swift.common.middleware.s3api.controllers.base import Controller
-from swift.common.middleware.s3api.s3response import HTTPOk, S3NotImplemented, \
+from swift.common.middleware.s3api.s3response import HTTPOk, S3NotImplemented,\
MalformedACLError, UnexpectedContent, MissingSecurityHeader
from swift.common.middleware.s3api.etree import Element, SubElement, tostring
from swift.common.middleware.s3api.acl_utils import swift_acl_translate, \


@@ -18,7 +18,7 @@ from swift.common.utils import public
from swift.common.middleware.s3api.controllers.base import Controller, \
bucket_operation
from swift.common.middleware.s3api.etree import Element, tostring
-from swift.common.middleware.s3api.s3response import HTTPOk, S3NotImplemented, \
+from swift.common.middleware.s3api.s3response import HTTPOk, S3NotImplemented,\
NoLoggingStatusForKey


@@ -82,16 +82,16 @@ Multiple Reseller Prefix Items
The reseller prefix specifies which parts of the account namespace this
middleware is responsible for managing authentication and authorization.
-By default, the prefix is 'AUTH' so accounts and tokens are prefixed
-by 'AUTH\_'. When a request's token and/or path start with 'AUTH\_', this
+By default, the prefix is ``AUTH`` so accounts and tokens are prefixed
+by ``AUTH_``. When a request's token and/or path start with ``AUTH_``, this
middleware knows it is responsible.
We allow the reseller prefix to be a list. In tempauth, the first item
in the list is used as the prefix for tokens and user groups. The
other prefixes provide alternate accounts that user's can access. For
-example if the reseller prefix list is 'AUTH, OTHER', a user with
-admin access to 'AUTH_account' also has admin access to
-'OTHER_account'.
+example if the reseller prefix list is ``AUTH, OTHER``, a user with
+admin access to ``AUTH_account`` also has admin access to
+``OTHER_account``.
Required Group
^^^^^^^^^^^^^^
@@ -112,7 +112,7 @@ derived from the token are appended to the roles derived from
The ``X-Service-Token`` is useful when combined with multiple reseller
prefix items. In the following configuration, accounts prefixed
-``SERVICE\_`` are only accessible if ``X-Auth-Token`` is from the end-user
+``SERVICE_`` are only accessible if ``X-Auth-Token`` is from the end-user
and ``X-Service-Token`` is from the ``glance`` user::
[filter:tempauth]


@@ -323,7 +323,7 @@ class HTMLViewer(object):
if not fulldirs:
stats.strip_dirs()
stats.sort_stats(sort)
-nfl_filter_esc = nfl_filter.replace('(', '\(').replace(')', '\)')
+nfl_filter_esc = nfl_filter.replace(r'(', r'\(').replace(r')', r'\)')
amount = [nfl_filter_esc, limit] if nfl_filter_esc else [limit]
profile_html = self.generate_stats_html(stats, self.app_path,
profile_id, *amount)
@@ -371,7 +371,7 @@ class HTMLViewer(object):
if len(log_files) == 0:
raise NotFoundException(_('no log file found'))
try:
-nfl_esc = nfl_filter.replace('(', '\(').replace(')', '\)')
+nfl_esc = nfl_filter.replace(r'(', r'\(').replace(r')', r'\)')
# remove the slash that is intentionally added in the URL
# to avoid failure of filtering stats data.
if nfl_esc.startswith('/'):
@@ -454,15 +454,15 @@ class HTMLViewer(object):
fmt = '<span id="L%d" rel="#L%d">%' + max_width\
+ 'd|<code>%s</code></span>'
for line in lines:
-l = html_escape(line)
+el = html_escape(line)
i = i + 1
if i == lineno:
fmt2 = '<span id="L%d" style="background-color: \
rgb(127,255,127)">%' + max_width +\
'd|<code>%s</code></span>'
-data.append(fmt2 % (i, i, l))
+data.append(fmt2 % (i, i, el))
else:
-data.append(fmt % (i, i, i, l))
+data.append(fmt % (i, i, i, el))
data = ''.join(data)
except Exception:
return _('Can not access the file %s.') % file_path


@@ -79,7 +79,7 @@ rebuilding of the composite ring.
The ``id`` of each component RingBuilder is therefore stored in metadata of
the composite and used to check for the component ordering when the same
-composite ring is re-composed. RingBuilder ``id``\s are normally assigned
+composite ring is re-composed. RingBuilder ``id``\\s are normally assigned
when a RingBuilder instance is first saved. Older RingBuilder instances
loaded from file may not have an ``id`` assigned and will need to be saved
before they can be used as components of a composite ring. This can be
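Docstrings need the same escape hygiene as regex literals: ``\s`` inside an ordinary docstring is an invalid escape under Python 3, so this module doubles the backslash, while the ``swift-ring-builder dispersion`` docstring earlier in the diff takes the other route and becomes a raw (``r"""``) docstring. Either fix leaves the rendered text unchanged, as this small sketch (hypothetical functions, not Swift code) shows::

    def doubled():
        """RingBuilder ``id``\\s are normally assigned when saved."""


    def raw():
        r"""RingBuilder ``id``\s are normally assigned when saved."""


    # Both docstrings contain exactly the same characters.
    assert doubled.__doc__ == raw.__doc__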


@@ -199,7 +199,7 @@ def is_valid_hostname(hostname):
if hostname.endswith('.'):
# strip exactly one dot from the right, if present
hostname = hostname[:-1]
-allowed = re.compile("(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
+allowed = re.compile(r"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
return all(allowed.match(x) for x in hostname.split("."))


@@ -403,7 +403,7 @@ class BaseStoragePolicy(object):
(self.diskfile_module, self.name, err))
try:
dfm_cls.check_policy(self)
-except ValueError as err:
+except ValueError:
raise PolicyError(
'Invalid diskfile_module %s for policy %s:%s (%s)' %
(self.diskfile_module, int(self), self.name, self.policy_type))


@@ -186,7 +186,7 @@ F_SETPIPE_SZ = getattr(fcntl, 'F_SETPIPE_SZ', 1031)
O_TMPFILE = getattr(os, 'O_TMPFILE', 0o20000000 | os.O_DIRECTORY)
# Used by the parse_socket_string() function to validate IPv6 addresses
-IPV6_RE = re.compile("^\[(?P<address>.*)\](:(?P<port>[0-9]+))?$")
+IPV6_RE = re.compile(r"^\[(?P<address>.*)\](:(?P<port>[0-9]+))?$")
MD5_OF_EMPTY_STRING = 'd41d8cd98f00b204e9800998ecf8427e'
RESERVED_BYTE = b'\x00'
@@ -3506,7 +3506,7 @@ def affinity_key_function(affinity_str):
pieces = [s.strip() for s in affinity_str.split(',')]
for piece in pieces:
# matches r<number>=<number> or r<number>z<number>=<number>
-match = re.match("r(\d+)(?:z(\d+))?=(\d+)$", piece)
+match = re.match(r"r(\d+)(?:z(\d+))?=(\d+)$", piece)
if match:
region, zone, priority = match.groups()
region = int(region)
@@ -3559,7 +3559,7 @@ def affinity_locality_predicate(write_affinity_str):
pieces = [s.strip() for s in affinity_str.split(',')]
for piece in pieces:
# matches r<number> or r<number>z<number>
-match = re.match("r(\d+)(?:z(\d+))?$", piece)
+match = re.match(r"r(\d+)(?:z(\d+))?$", piece)
if match:
region, zone = match.groups()
region = int(region)


@@ -1302,7 +1302,7 @@ def run_wsgi(conf_path, app_section, *args, **kwargs):
os.getpid(), orig_server_pid)
try:
got_pid = os.read(read_fd, 30)
-except Exception as e:
+except Exception:
logger.warning('Unexpected exception while reading from '
'pipe:', exc_info=True)
else:


@@ -664,7 +664,7 @@ class ContainerSync(Daemon):
self.container_failures += 1
self.logger.increment('failures')
return False
-except (Exception, Timeout) as err:
+except (Exception, Timeout):
self.logger.exception(
_('ERROR Syncing %(db_file)s %(row)s'),
{'db_file': str(broker), 'row': row})


@@ -3,7 +3,7 @@
# process, which may cause wedges in the gate later.
# Hacking already pins down pep8, pyflakes and flake8
-hacking>=0.11.0,<0.12 # Apache-2.0
+hacking>=2.0,<2.1.0 # Apache-2.0
coverage>=3.6 # Apache-2.0
nose>=1.3.7 # LGPL
nosexcover>=1.0.10 # BSD


@@ -248,7 +248,7 @@ def _in_process_setup_ring(swift_conf, conf_src_dir, testdir):
try:
ring_file_src = _in_process_find_conf_file(conf_src_dir, ring_file_src,
use_sample=False)
-except InProcessException as e:
+except InProcessException:
if policy_specified:
raise InProcessException('Failed to find ring file %s'
% ring_file_src)
@@ -883,8 +883,8 @@ def setup_package():
# and we'll skip everything later
if 'service_prefix' in config:
swift_test_service_prefix = utils.append_underscore(
config['service_prefix'])
if swift_test_auth_version == "1":


@@ -104,7 +104,7 @@ class Connection(object):
# 404 means NoSuchBucket, NoSuchKey, or NoSuchUpload
if e.status != 404:
raise
-except Exception as e:
+except Exception:
exceptions.append(''.join(
traceback.format_exception(*sys.exc_info())))
if exceptions:
@@ -186,7 +186,7 @@ def tear_down_s3(conn):
# 404 means NoSuchBucket, NoSuchKey, or NoSuchUpload
if e.response['ResponseMetadata']['HTTPStatusCode'] != 404:
raise
-except Exception as e:
+except Exception:
exceptions.append(''.join(
traceback.format_exception(*sys.exc_info())))
if exceptions:


@@ -16,8 +16,8 @@
import unittest
import os
import test.functional as tf
-from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \
-SubElement
+from swift.common.middleware.s3api.etree import fromstring, tostring, \
+Element, SubElement
from test.functional.s3api import S3ApiBase
from test.functional.s3api.s3_test_client import Connection


@@ -27,8 +27,8 @@ from hashlib import md5
from six.moves import zip, zip_longest
import test.functional as tf
-from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \
-SubElement
+from swift.common.middleware.s3api.etree import fromstring, tostring, \
+Element, SubElement
from swift.common.middleware.s3api.utils import mktime
from test.functional.s3api import S3ApiBase


@@ -260,12 +260,12 @@ class TestAccount(Base):
def testListingLimit(self):
limit = load_constraint('account_listing_limit')
-for l in (1, 100, limit / 2, limit - 1, limit, limit + 1, limit * 2):
-p = {'limit': l}
-if l <= limit:
+for lim in (1, 100, limit / 2, limit - 1, limit, limit + 1, limit * 2):
+p = {'limit': lim}
+if lim <= limit:
self.assertLessEqual(len(self.env.account.containers(parms=p)),
-l)
+lim)
self.assert_status(200)
else:
self.assertRaises(ResponseError,
@@ -578,10 +578,10 @@ class TestContainer(Base):
def testContainerNameLimit(self):
limit = load_constraint('max_container_name_length')
-for l in (limit - 100, limit - 10, limit - 1, limit,
+for lim in (limit - 100, limit - 10, limit - 1, limit,
limit + 1, limit + 10, limit + 100):
-cont = self.env.account.container('a' * l)
-if l <= limit:
+cont = self.env.account.container('a' * lim)
+if lim <= limit:
self.assertTrue(cont.create())
self.assert_status((201, 202))
else:
@@ -1949,10 +1949,10 @@ class TestFile(Base):
def testNameLimit(self):
limit = load_constraint('max_object_name_length')
-for l in (1, 10, limit // 2, limit - 1, limit, limit + 1, limit * 2):
-file_item = self.env.container.file('a' * l)
-if l <= limit:
+for lim in (1, 10, limit // 2, limit - 1, limit, limit + 1, limit * 2):
+file_item = self.env.container.file('a' * lim)
+if lim <= limit:
self.assertTrue(file_item.write())
self.assert_status(201)
else:
@@ -2946,31 +2946,31 @@ class TestServiceToken(unittest.TestCase):
self.dbg = dbg
def do_request(self, url, token, parsed, conn, service_token=''):
if self.use_service_account:
path = self._service_account(parsed.path)
else:
path = parsed.path
if self.container:
path += '/%s' % self.container
if self.obj:
path += '/%s' % self.obj
headers = {}
if self.body:
headers.update({'Content-Length': len(self.body)})
if self.x_auth_token == self.SET_TO_USERS_TOKEN:
headers.update({'X-Auth-Token': token})
elif self.x_auth_token == self.SET_TO_SERVICE_TOKEN:
headers.update({'X-Auth-Token': service_token})
if self.x_service_token == self.SET_TO_USERS_TOKEN:
headers.update({'X-Service-Token': token})
elif self.x_service_token == self.SET_TO_SERVICE_TOKEN:
headers.update({'X-Service-Token': service_token})
if self.dbg:
print('DEBUG: conn.request: method:%s path:%s'
' body:%s headers:%s' % (self.method, path, self.body,
headers))
conn.request(self.method, path, self.body, headers=headers)
return check_response(conn)
def _service_account(self, path):
parts = path.split('/', 3)


@@ -171,9 +171,9 @@ class TestAccountReaper(ReplProbeTest):
# define reapers which are supposed to operate 3 seconds later
account_reapers = []
for conf_file in self.configs['account-server'].values():
conf = utils.readconf(conf_file, 'account-reaper')
conf['delay_reaping'] = '3'
account_reapers.append(reaper.AccountReaper(conf))
self.assertTrue(account_reapers)


@@ -98,7 +98,7 @@ class TestContainerMergePolicyIndex(ReplProbeTest):
self.object_name,
headers={'X-Backend-Storage-Policy-Index':
policy_index})
-except direct_client.ClientException as err:
+except direct_client.ClientException:
continue
orig_policy_index = policy_index
break


@@ -29,7 +29,7 @@ from test.probe.common import ReplProbeTest
from swift.common.request_helpers import get_reserved_name
from swift.common.utils import readconf
-EXCLUDE_FILES = re.compile('^(hashes\.(pkl|invalid)|lock(-\d+)?)$')
+EXCLUDE_FILES = re.compile(r'^(hashes\.(pkl|invalid)|lock(-\d+)?)$')
def collect_info(path_list):


@@ -438,7 +438,7 @@ aliases = %s
self.recon_instance.quarantine_check(hosts)
output = stdout.getvalue()
-r = re.compile("\[quarantined_(.*)\](.*)")
+r = re.compile(r"\[quarantined_(.*)\](.*)")
for line in output.splitlines():
m = r.match(line)
if m:
@@ -473,7 +473,7 @@ aliases = %s
self.recon_instance.async_check(hosts)
output = stdout.getvalue()
-r = re.compile("\[async_pending(.*)\](.*)")
+r = re.compile(r"\[async_pending(.*)\](.*)")
lines = output.splitlines()
self.assertTrue(lines)
for line in lines:
@@ -514,7 +514,7 @@ aliases = %s
self.recon_instance.umount_check(hosts)
output = stdout.getvalue()
-r = re.compile("^Not mounted:|Device errors: .*")
+r = re.compile(r"^Not mounted:|Device errors: .*")
lines = output.splitlines()
self.assertTrue(lines)
for line in lines:
@@ -548,7 +548,7 @@ aliases = %s
self.recon_instance.driveaudit_check(hosts)
output = stdout.getvalue()
-r = re.compile("\[drive_audit_errors(.*)\](.*)")
+r = re.compile(r"\[drive_audit_errors(.*)\](.*)")
lines = output.splitlines()
self.assertTrue(lines)
for line in lines:


@@ -2192,7 +2192,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin):
with mock.patch("sys.stdout", mock_stdout):
with mock.patch("sys.stderr", mock_stderr):
self.assertSystemExit(EXIT_SUCCESS, ringbuilder.main, argv)
-ring_not_found_re = re.compile("Ring file .*\.ring\.gz not found")
+ring_not_found_re = re.compile(r"Ring file .*\.ring\.gz not found")
self.assertTrue(ring_not_found_re.findall(mock_stdout.getvalue()))
# write ring file
@@ -2204,7 +2204,9 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin):
with mock.patch("sys.stdout", mock_stdout):
with mock.patch("sys.stderr", mock_stderr):
self.assertSystemExit(EXIT_SUCCESS, ringbuilder.main, argv)
-ring_up_to_date_re = re.compile("Ring file .*\.ring\.gz is up-to-date")
+ring_up_to_date_re = re.compile(
+r"Ring file .*\.ring\.gz is up-to-date"
+)
self.assertTrue(ring_up_to_date_re.findall(mock_stdout.getvalue()))
# change builder (set weight)
@@ -2216,7 +2218,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin):
with mock.patch("sys.stdout", mock_stdout):
with mock.patch("sys.stderr", mock_stderr):
self.assertSystemExit(EXIT_SUCCESS, ringbuilder.main, argv)
-ring_obsolete_re = re.compile("Ring file .*\.ring\.gz is obsolete")
+ring_obsolete_re = re.compile(r"Ring file .*\.ring\.gz is obsolete")
self.assertTrue(ring_obsolete_re.findall(mock_stdout.getvalue()))
# write ring file
@@ -2238,7 +2240,7 @@ class TestCommands(unittest.TestCase, RunSwiftRingBuilderMixin):
with mock.patch("sys.stdout", mock_stdout):
with mock.patch("sys.stderr", mock_stderr):
self.assertSystemExit(EXIT_SUCCESS, ringbuilder.main, argv)
-ring_invalid_re = re.compile("Ring file .*\.ring\.gz is invalid")
+ring_invalid_re = re.compile(r"Ring file .*\.ring\.gz is invalid")
self.assertTrue(ring_invalid_re.findall(mock_stdout.getvalue()))
def test_default_no_device_ring_without_exception(self):


@@ -26,8 +26,8 @@ from swift.common.swob import Request
from test.unit import make_timestamp_iter
from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.helpers import UnreadableInput
-from swift.common.middleware.s3api.etree import fromstring, tostring, Element, \
-SubElement
+from swift.common.middleware.s3api.etree import fromstring, tostring, \
+Element, SubElement
from test.unit.common.middleware.s3api.test_s3_acl import s3acl


@@ -97,7 +97,7 @@ class TestS3ApiObj(S3ApiTestCase):
unexpected_headers.append((key, val))
if unexpected_headers:
self.fail('unexpected headers: %r' % unexpected_headers)
self.assertEqual(headers['etag'],
'"%s"' % self.response_headers['etag'])


@@ -39,8 +39,8 @@ def operator_roles(test_auth):
def get_account_for_tenant(test_auth, tenant_id):
"""Convenience function reduces unit test churn"""
return '%s%s' % (test_auth.reseller_prefixes[0], tenant_id)
def get_identity_headers(status='Confirmed', tenant_id='1',


@@ -30,7 +30,7 @@ from swift.common import utils
MAX_LENGTH = 255
FORBIDDEN_CHARS = '\'\"<>`'
-FORBIDDEN_REGEXP = "/\./|/\.\./|/\.$|/\.\.$"
+FORBIDDEN_REGEXP = r"/\./|/\.\./|/\.$|/\.\.$"
class FakeApp(object):
@@ -94,7 +94,7 @@ class TestNameCheckMiddleware(unittest.TestCase):
self.assertEqual(resp.status_int, 400)
def test_invalid_regexp(self):
-for s in ['/.', '/..', '/./foo', '/../foo']:
+for s in [r'/.', r'/..', r'/./foo', r'/../foo']:
path = '/V1.0/' + s
resp = Request.blank(
path, environ={'REQUEST_METHOD': 'PUT'}).get_response(
@@ -107,7 +107,7 @@ class TestNameCheckMiddleware(unittest.TestCase):
self.assertEqual(resp.status_int, 400)
def test_valid_regexp(self):
-for s in ['/...', '/.\.', '/foo']:
+for s in [r'/...', r'/.\.', r'/foo']:
path = '/V1.0/' + s
resp = Request.blank(
path, environ={'REQUEST_METHOD': 'PUT'}).get_response(
@@ -137,7 +137,7 @@ class TestSwiftInfo(unittest.TestCase):
def test_registered_configured_options(self):
conf = {'maximum_length': 512,
'forbidden_chars': '\'\"`',
-'forbidden_regexp': "/\./|/\.\./|/\.$"}
+'forbidden_regexp': r"/\./|/\.\./|/\.$"}
name_check.filter_factory(conf)(FakeApp())
swift_info = utils.get_swift_info()
self.assertTrue('name_check' in swift_info)
@@ -145,7 +145,7 @@ class TestSwiftInfo(unittest.TestCase):
self.assertEqual(set(swift_info['name_check'].get('forbidden_chars')),
set('\'\"`'))
self.assertEqual(swift_info['name_check'].get('forbidden_regexp'),
-"/\./|/\.\./|/\.$")
+r"/\./|/\.\./|/\.$")
if __name__ == '__main__':
unittest.main()


@@ -2281,7 +2281,6 @@ class ObjectVersioningTestContainerOperations(ObjectVersioningBaseTestCase):
'name': 'unexpected-symlink',
'hash': 'd41d8cd98f00b204e9800998ecf8427e',
'last_modified': '2019-07-26T15:09:54.518990',
-'content_type': 'application/foo',
'symlink_bytes': 9,
'symlink_path': '/v1/a/tgt_container/tgt_obj',
'symlink_etag': 'e55cedc11adb39c404b7365f7d6291fa',


@@ -400,8 +400,8 @@ class TestProxyLogging(unittest.TestCase):
with mock.patch('time.time',
mock.MagicMock(
side_effect=[10000000.0, 10000000.5, 10000001.0])):
resp = app(req.environ, start_response)
resp_body = b''.join(resp)
# exhaust generator
[x for x in resp]
log_parts = self._log_parts(app)


@@ -386,9 +386,9 @@ class TestRingBuilder(unittest.TestCase):
def test_shuffled_gather(self):
if self._shuffled_gather_helper() and \
self._shuffled_gather_helper():
raise AssertionError('It is highly likely the ring is no '
'longer shuffling the set of partitions '
'to reassign on a rebalance.')
def _shuffled_gather_helper(self):
rb = ring.RingBuilder(8, 3, 1)


@@ -996,7 +996,7 @@ class TestCooperativeRingBuilder(BaseTestCompositeBuilder):
for p in range(before.parts):
if ({uniqueness(dev) for dev in before._devs_for_part(p)} !=
{uniqueness(dev) for dev in after._devs_for_part(p)}):
moved_parts.add(p)
return moved_parts
def num_parts_can_move(self, builder):


@@ -512,7 +512,7 @@ class TestInternalClient(unittest.TestCase):
def do_test(resp_status):
client = InternalClient(resp_status)
-with self.assertRaises(internal_client.UnexpectedResponse) as ctx, \
+with self.assertRaises(internal_client.UnexpectedResponse) as ctx,\
mock.patch('swift.common.internal_client.sleep'):
# This is obvious strange tests to expect only 400 Bad Request
# but this test intended to avoid extra body drain if it's


@@ -5688,7 +5688,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
'some.counter')
self.assertStat('some-name.some.operation:4900.0|ms',
self.logger.timing, 'some.operation', 4.9 * 1000)
-self.assertStatMatches('some-name\.another\.operation:\d+\.\d+\|ms',
+self.assertStatMatches(r'some-name\.another\.operation:\d+\.\d+\|ms',
self.logger.timing_since, 'another.operation',
time.time())
self.assertStat('some-name.another.counter:42|c',
@@ -5703,7 +5703,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
self.assertStat('pfx.some.operation:4900.0|ms|@0.972',
self.logger.timing, 'some.operation', 4.9 * 1000,
sample_rate=0.972)
-self.assertStatMatches('pfx\.another\.op:\d+\.\d+\|ms|@0.972',
+self.assertStatMatches(r'pfx\.another\.op:\d+\.\d+\|ms|@0.972',
self.logger.timing_since, 'another.op',
time.time(), sample_rate=0.972)
self.assertStat('pfx.another.counter:3|c|@0.972',
@@ -5719,7 +5719,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
self.assertStat('some.operation:4900.0|ms|@0.939',
self.logger.timing, 'some.operation',
4.9 * 1000, 0.939)
-self.assertStatMatches('another\.op:\d+\.\d+\|ms|@0.939',
+self.assertStatMatches(r'another\.op:\d+\.\d+\|ms|@0.939',
self.logger.timing_since, 'another.op',
time.time(), 0.939)
self.assertStat('another.counter:3|c|@0.939',
@@ -5737,7 +5737,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
'some.counter')
self.assertStat('pfx.some.operation:4760.0|ms|@0.93',
self.logger.timing, 'some.operation', 4.76 * 1000)
-self.assertStatMatches('pfx\.another\.op:\d+\.\d+\|ms|@0.93',
+self.assertStatMatches(r'pfx\.another\.op:\d+\.\d+\|ms|@0.93',
self.logger.timing_since, 'another.op',
time.time())
self.assertStat('pfx.another.counter:3|c|@0.93',
@@ -5751,7 +5751,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
self.assertStat('pfx.some.operation:4900.0|ms|@0.9912',
self.logger.timing, 'some.operation', 4.9 * 1000,
sample_rate=0.9912)
-self.assertStatMatches('pfx\.another\.op:\d+\.\d+\|ms|@0.9912',
+self.assertStatMatches(r'pfx\.another\.op:\d+\.\d+\|ms|@0.9912',
self.logger.timing_since, 'another.op',
time.time(), sample_rate=0.9912)
self.assertStat('pfx.another.counter:3|c|@0.9912',
@@ -5767,7 +5767,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
self.assertStat('some.operation:4900.0|ms|@0.987654',
self.logger.timing, 'some.operation',
4.9 * 1000, 0.987654)
-self.assertStatMatches('another\.op:\d+\.\d+\|ms|@0.987654',
+self.assertStatMatches(r'another\.op:\d+\.\d+\|ms|@0.987654',
self.logger.timing_since, 'another.op',
time.time(), 0.987654)
self.assertStat('another.counter:3|c|@0.987654',
@@ -5787,7 +5787,7 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
self.assertStat('alpha.beta.pfx.some.operation:4760.0|ms',
self.logger.timing, 'some.operation', 4.76 * 1000)
self.assertStatMatches(
-'alpha\.beta\.pfx\.another\.op:\d+\.\d+\|ms',
+r'alpha\.beta\.pfx\.another\.op:\d+\.\d+\|ms',
self.logger.timing_since, 'another.op', time.time())
self.assertStat('alpha.beta.pfx.another.counter:3|c',
self.logger.update_stats, 'another.counter', 3)
@@ -5801,9 +5801,10 @@ class TestStatsdLoggingDelegation(unittest.TestCase):
self.assertStat('alpha.beta.some.operation:4900.0|ms|@0.9912',
self.logger.timing, 'some.operation', 4.9 * 1000,
sample_rate=0.9912)
-self.assertStatMatches('alpha\.beta\.another\.op:\d+\.\d+\|ms|@0.9912',
-self.logger.timing_since, 'another.op',
-time.time(), sample_rate=0.9912)
+self.assertStatMatches(
+r'alpha\.beta\.another\.op:\d+\.\d+\|ms|@0.9912',
+self.logger.timing_since, 'another.op',
+time.time(), sample_rate=0.9912)
self.assertStat('alpha.beta.another.counter:3|c|@0.9912',
self.logger.update_stats, 'another.counter', 3,
sample_rate=0.9912)


@@ -1262,7 +1262,7 @@ class TestContainerController(unittest.TestCase):
try:
with Timeout(3):
resp = req.get_response(self.controller)
-except BaseException as err:
+except BaseException:
got_exc = True
finally:
err = event.wait()
@@ -2337,7 +2337,7 @@ class TestContainerController(unittest.TestCase):
try:
with Timeout(3):
resp = req.get_response(self.controller)
-except BaseException as err:
+except BaseException:
got_exc = True
finally:
err = event.wait()


@@ -1837,11 +1837,11 @@ class TestSharder(BaseTestSharder):
merge_items_calls = []
with mock.patch('swift.container.backend.ContainerBroker.merge_items',
mock_merge_items), self._mock_sharder() as sharder:
sharder._replicate_object = mock.MagicMock(
side_effect=[(False, [False, True, True]),
(False, [False, False, True])])
sharder._audit_container = mock.MagicMock()
sharder._process_broker(broker, node, 99)
self.assertEqual(SHARDING, broker.get_db_state())
self.assertEqual(ShardRange.SHARDING,
@@ -1934,11 +1934,11 @@ class TestSharder(BaseTestSharder):
node = {'ip': '1.2.3.4', 'port': 6040, 'device': 'sda5', 'id': '2',
'index': 0}
with self._mock_sharder({'shard_replication_quorum': 3}) as sharder:
sharder._replicate_object = mock.MagicMock(
side_effect=[(False, [False, True, True]),
(False, [False, False, True])])
sharder._audit_container = mock.MagicMock()
sharder._process_broker(broker, node, 99)
# replication of first shard range fails - no more shards attempted
self.assertEqual(SHARDING, broker.get_db_state())
self.assertEqual(ShardRange.SHARDING,
@@ -1951,11 +1951,11 @@ class TestSharder(BaseTestSharder):
# and again with a chilled out quorom, so cleaving moves onto second
# shard range which fails to reach even chilled quorum
with self._mock_sharder({'shard_replication_quorum': 1}) as sharder:
sharder._replicate_object = mock.MagicMock(
side_effect=[(False, [False, False, True]),
(False, [False, False, False])])
sharder._audit_container = mock.MagicMock()
sharder._process_broker(broker, node, 99)
self.assertEqual(SHARDING, broker.get_db_state())
self.assertEqual(ShardRange.SHARDING,
broker.get_own_shard_range().state)
@@ -1973,10 +1973,10 @@ class TestSharder(BaseTestSharder):
shard_ranges[1].update_state(ShardRange.CLEAVED)
broker.merge_shard_ranges(shard_ranges[1])
with self._mock_sharder({'shard_replication_quorum': 1}) as sharder:
sharder._replicate_object = mock.MagicMock(
side_effect=[(False, [False, False, False])])
sharder._audit_container = mock.MagicMock()
sharder._process_broker(broker, node, 99)
self.assertEqual(SHARDING, broker.get_db_state())
self.assertEqual(ShardRange.SHARDING,
broker.get_own_shard_range().state)
@@ -1992,10 +1992,10 @@ class TestSharder(BaseTestSharder):
with self._mock_sharder(
{'shard_replication_quorum': 1,
'existing_shard_replication_quorum': 0}) as sharder:
sharder._replicate_object = mock.MagicMock(
side_effect=[(False, [])]) # maybe shard db was deleted
sharder._audit_container = mock.MagicMock()
sharder._process_broker(broker, node, 99)
self.assertEqual(SHARDING, broker.get_db_state())
self.assertEqual(ShardRange.SHARDING,
broker.get_own_shard_range().state)
@@ -2012,11 +2012,11 @@ class TestSharder(BaseTestSharder):
with self._mock_sharder(
{'shard_replication_quorum': 1,
'existing_shard_replication_quorum': 0}) as sharder:
sharder._replicate_object = mock.MagicMock(
side_effect=[(False, [False, False, False]),
(False, [False, True, False])])
sharder._audit_container = mock.MagicMock()
sharder._process_broker(broker, node, 99)
self.assertEqual(SHARDING, broker.get_db_state())
self.assertEqual(ShardRange.SHARDING,
broker.get_own_shard_range().state)
@@ -2033,10 +2033,10 @@ class TestSharder(BaseTestSharder):
with self._mock_sharder(
{'shard_replication_quorum': 99,
'existing_shard_replication_quorum': 99}) as sharder:
sharder._replicate_object = mock.MagicMock(
side_effect=[(False, [False, True, True])])
sharder._audit_container = mock.MagicMock()
sharder._process_broker(broker, node, 99)
self.assertEqual(SHARDING, broker.get_db_state())
self.assertEqual(ShardRange.SHARDING,
broker.get_own_shard_range().state)
@@ -2051,10 +2051,10 @@ class TestSharder(BaseTestSharder):
with self._mock_sharder(
{'shard_replication_quorum': 99,
'existing_shard_replication_quorum': 99}) as sharder:
sharder._replicate_object = mock.MagicMock(
side_effect=[(True, [True, True, True])])
sharder._audit_container = mock.MagicMock()
sharder._process_broker(broker, node, 99)
self.assertEqual(SHARDED, broker.get_db_state())
self.assertEqual(ShardRange.SHARDED,
broker.get_own_shard_range().state)


@@ -899,7 +899,7 @@ class TestAuditor(unittest.TestCase):
with mock.patch('swift.obj.diskfile.get_auditor_status',
mock_get_auditor_status):
self.auditor.run_audit(**kwargs)
quarantine_path = os.path.join(self.devices,
'sda', 'quarantined', 'objects')
self.assertTrue(os.path.isdir(quarantine_path))


@@ -5027,7 +5027,7 @@ class DiskFileMixin(BaseDiskFileTestMixin):
with open('/dev/null', 'w') as devnull:
exc_re = (r'tee\(\) failed: tried to move \d+ bytes, but only '
-'moved -?\d+')
+r'moved -?\d+')
try:
reader.zero_copy_send(devnull.fileno())
except Exception as e:


@@ -1913,8 +1913,8 @@ class TestWorkerReconstructor(unittest.TestCase):
return_value=now), \
mock.patch('swift.obj.reconstructor.os.getpid',
return_value='pid-1'):
reconstructor.final_recon_dump(
total, override_devices=override_devices)
with open(self.rcache) as f:
data = json.load(f)
self.assertEqual({


@@ -381,8 +381,8 @@ class TestReceiver(unittest.TestCase):
b':UPDATES: START', b':UPDATES: END'])
self.assertRegexpMatches(
b''.join(body_lines2),
-b"^:ERROR: 0 '0\.0[0-9]+ seconds: "
-b"/.+/sda1/objects/1/.lock-replication'$")
+br"^:ERROR: 0 '0\.0[0-9]+ seconds: "
+br"/.+/sda1/objects/1/.lock-replication'$")
def test_SSYNC_initial_path(self):
with mock.patch.object(
@@ -1168,13 +1168,35 @@
self.assertFalse(mock_wsgi_input.mock_socket.close.called)
def test_UPDATES_bad_subrequest_line(self):
+self.controller.logger = mock.MagicMock()
+req = swob.Request.blank(
+'/device/partition',
+environ={'REQUEST_METHOD': 'SSYNC'},
+body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
+':UPDATES: START\r\n'
+'bad_subrequest_line\r\n')
+resp = req.get_response(self.controller)
+self.assertEqual(
+self.body_lines(resp.body),
+[b':MISSING_CHECK: START', b':MISSING_CHECK: END',
+UNPACK_ERR])
+self.assertEqual(resp.status_int, 200)
+self.controller.logger.exception.assert_called_once_with(
+'None/device/partition EXCEPTION in ssync.Receiver')
+with mock.patch.object(
+self.controller, 'DELETE',
+return_value=swob.HTTPNoContent()):
self.controller.logger = mock.MagicMock()
req = swob.Request.blank(
'/device/partition',
environ={'REQUEST_METHOD': 'SSYNC'},
body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
':UPDATES: START\r\n'
-'bad_subrequest_line\r\n')
+'DELETE /a/c/o\r\n'
+'X-Timestamp: 1364456113.76334\r\n'
+'\r\n'
+'bad_subrequest_line2')
resp = req.get_response(self.controller)
self.assertEqual(
self.body_lines(resp.body),
@@ -1184,151 +1206,129 @@
self.controller.logger.exception.assert_called_once_with(
'None/device/partition EXCEPTION in ssync.Receiver')
-with mock.patch.object(
-self.controller, 'DELETE',
-return_value=swob.HTTPNoContent()):
-self.controller.logger = mock.MagicMock()
-req = swob.Request.blank(
-'/device/partition',
-environ={'REQUEST_METHOD': 'SSYNC'},
-body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
-':UPDATES: START\r\n'
-'DELETE /a/c/o\r\n'
-'X-Timestamp: 1364456113.76334\r\n'
-'\r\n'
-'bad_subrequest_line2')
-resp = req.get_response(self.controller)
-self.assertEqual(
-self.body_lines(resp.body),
-[b':MISSING_CHECK: START', b':MISSING_CHECK: END',
-UNPACK_ERR])
-self.assertEqual(resp.status_int, 200)
-self.controller.logger.exception.assert_called_once_with(
-'None/device/partition EXCEPTION in ssync.Receiver')
def test_UPDATES_no_headers(self):
self.controller.logger = mock.MagicMock()
req = swob.Request.blank(
'/device/partition',
environ={'REQUEST_METHOD': 'SSYNC'},
body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
':UPDATES: START\r\n'
'DELETE /a/c/o\r\n')
resp = req.get_response(self.controller)
self.assertEqual(
self.body_lines(resp.body),
[b':MISSING_CHECK: START', b':MISSING_CHECK: END',
b":ERROR: 0 'Got no headers for DELETE /a/c/o'"])
self.assertEqual(resp.status_int, 200)
self.controller.logger.exception.assert_called_once_with(
'None/device/partition EXCEPTION in ssync.Receiver')
def test_UPDATES_bad_headers(self):
self.controller.logger = mock.MagicMock()
req = swob.Request.blank(
'/device/partition',
environ={'REQUEST_METHOD': 'SSYNC'},
body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
':UPDATES: START\r\n'
'DELETE /a/c/o\r\n'
'Bad-Header Test\r\n')
resp = req.get_response(self.controller)
self.assertEqual(
self.body_lines(resp.body),
[b':MISSING_CHECK: START', b':MISSING_CHECK: END',
UNPACK_ERR])
self.assertEqual(resp.status_int, 200)
self.controller.logger.exception.assert_called_once_with(
'None/device/partition EXCEPTION in ssync.Receiver')
self.controller.logger = mock.MagicMock()
req = swob.Request.blank(
'/device/partition',
environ={'REQUEST_METHOD': 'SSYNC'},
body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
':UPDATES: START\r\n'
'DELETE /a/c/o\r\n'
'Good-Header: Test\r\n'
'Bad-Header Test\r\n')
resp = req.get_response(self.controller)
self.assertEqual(
self.body_lines(resp.body),
[b':MISSING_CHECK: START', b':MISSING_CHECK: END',
UNPACK_ERR])
self.assertEqual(resp.status_int, 200)
self.controller.logger.exception.assert_called_once_with(
'None/device/partition EXCEPTION in ssync.Receiver')
def test_UPDATES_bad_content_length(self):
self.controller.logger = mock.MagicMock()
req = swob.Request.blank(
'/device/partition',
environ={'REQUEST_METHOD': 'SSYNC'},
body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
':UPDATES: START\r\n'
'PUT /a/c/o\r\n'
'Content-Length: a\r\n\r\n')
resp = req.get_response(self.controller)
self.assertEqual(
self.body_lines(resp.body),
[b':MISSING_CHECK: START', b':MISSING_CHECK: END',
b':ERROR: 0 "invalid literal for int() with base 10: \'a\'"'])
self.assertEqual(resp.status_int, 200)
self.controller.logger.exception.assert_called_once_with( self.controller.logger.exception.assert_called_once_with(
'None/device/partition EXCEPTION in ssync.Receiver') 'None/device/partition EXCEPTION in ssync.Receiver')
def test_UPDATES_content_length_with_DELETE(self): def test_UPDATES_content_length_with_DELETE(self):
self.controller.logger = mock.MagicMock() self.controller.logger = mock.MagicMock()
req = swob.Request.blank( req = swob.Request.blank(
'/device/partition', '/device/partition',
environ={'REQUEST_METHOD': 'SSYNC'}, environ={'REQUEST_METHOD': 'SSYNC'},
body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n' body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
':UPDATES: START\r\n' ':UPDATES: START\r\n'
'DELETE /a/c/o\r\n' 'DELETE /a/c/o\r\n'
'Content-Length: 1\r\n\r\n') 'Content-Length: 1\r\n\r\n')
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEqual( self.assertEqual(
self.body_lines(resp.body), self.body_lines(resp.body),
[b':MISSING_CHECK: START', b':MISSING_CHECK: END', [b':MISSING_CHECK: START', b':MISSING_CHECK: END',
b":ERROR: 0 'DELETE subrequest with content-length /a/c/o'"]) b":ERROR: 0 'DELETE subrequest with content-length /a/c/o'"])
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.controller.logger.exception.assert_called_once_with( self.controller.logger.exception.assert_called_once_with(
'None/device/partition EXCEPTION in ssync.Receiver') 'None/device/partition EXCEPTION in ssync.Receiver')
def test_UPDATES_no_content_length_with_PUT(self): def test_UPDATES_no_content_length_with_PUT(self):
self.controller.logger = mock.MagicMock() self.controller.logger = mock.MagicMock()
req = swob.Request.blank( req = swob.Request.blank(
'/device/partition', '/device/partition',
environ={'REQUEST_METHOD': 'SSYNC'}, environ={'REQUEST_METHOD': 'SSYNC'},
body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n' body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
':UPDATES: START\r\n' ':UPDATES: START\r\n'
'PUT /a/c/o\r\n\r\n') 'PUT /a/c/o\r\n\r\n')
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEqual( self.assertEqual(
self.body_lines(resp.body), self.body_lines(resp.body),
[b':MISSING_CHECK: START', b':MISSING_CHECK: END', [b':MISSING_CHECK: START', b':MISSING_CHECK: END',
b":ERROR: 0 'No content-length sent for PUT /a/c/o'"]) b":ERROR: 0 'No content-length sent for PUT /a/c/o'"])
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.controller.logger.exception.assert_called_once_with( self.controller.logger.exception.assert_called_once_with(
'None/device/partition EXCEPTION in ssync.Receiver') 'None/device/partition EXCEPTION in ssync.Receiver')
def test_UPDATES_early_termination(self): def test_UPDATES_early_termination(self):
self.controller.logger = mock.MagicMock() self.controller.logger = mock.MagicMock()
req = swob.Request.blank( req = swob.Request.blank(
'/device/partition', '/device/partition',
environ={'REQUEST_METHOD': 'SSYNC'}, environ={'REQUEST_METHOD': 'SSYNC'},
body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n' body=':MISSING_CHECK: START\r\n:MISSING_CHECK: END\r\n'
':UPDATES: START\r\n' ':UPDATES: START\r\n'
'PUT /a/c/o\r\n' 'PUT /a/c/o\r\n'
'Content-Length: 1\r\n\r\n') 'Content-Length: 1\r\n\r\n')
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEqual( self.assertEqual(
self.body_lines(resp.body), self.body_lines(resp.body),
[b':MISSING_CHECK: START', b':MISSING_CHECK: END', [b':MISSING_CHECK: START', b':MISSING_CHECK: END',
b":ERROR: 0 'Early termination for PUT /a/c/o'"]) b":ERROR: 0 'Early termination for PUT /a/c/o'"])
self.assertEqual(resp.status_int, 200) self.assertEqual(resp.status_int, 200)
self.controller.logger.exception.assert_called_once_with( self.controller.logger.exception.assert_called_once_with(
'None/device/partition EXCEPTION in ssync.Receiver') 'None/device/partition EXCEPTION in ssync.Receiver')
def test_UPDATES_failures(self): def test_UPDATES_failures(self):
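Note on the bodies fed to the receiver in the tests above: they are all variations on the SSYNC framing the UPDATES handler expects, i.e. a :MISSING_CHECK: section, then an :UPDATES: section whose entries are a "METHOD /account/container/object" line, header lines, and a blank line, with PUTs followed by exactly Content-Length bytes of data. As a rough sketch only (the helper below is invented for illustration and is not part of this patch or of Swift's test suite), a well-formed body of that shape could be built like this:

# Illustrative helper only; not from the patch. Builds a request body shaped
# like the ones the receiver tests above construct inline.
def build_ssync_body(method='PUT', path='/a/c/o',
                     timestamp='1364456113.76334', data=b'X'):
    lines = [
        ':MISSING_CHECK: START',
        ':MISSING_CHECK: END',
        ':UPDATES: START',
        '%s %s' % (method, path),
        'X-Timestamp: %s' % timestamp,
    ]
    if method == 'PUT':
        # PUT subrequests carry a body of exactly Content-Length bytes.
        lines.append('Content-Length: %d' % len(data))
    framed = '\r\n'.join(lines) + '\r\n\r\n'
    if method == 'PUT':
        return framed.encode('ascii') + data
    return framed.encode('ascii')


print(build_ssync_body(method='DELETE', data=b''))

Each failure case above breaks one piece of that framing (a bad subrequest line, missing headers, a malformed header, a wrong or missing Content-Length, a truncated body) and asserts the :ERROR: line the receiver returns.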

View File

@ -2030,7 +2030,7 @@ class BaseTestObjectController(object):
self.app.update_request(req) self.app.update_request(req)
try: try:
res = method(req) res = method(req)
except HTTPException as res: except HTTPException as res: # noqa: F841
pass pass
self.assertEqual(res.status_int, expected) self.assertEqual(res.status_int, expected)
@ -2043,7 +2043,7 @@ class BaseTestObjectController(object):
self.app.update_request(req) self.app.update_request(req)
try: try:
res = method(req) res = method(req)
except HTTPException as res: except HTTPException as res: # noqa: F841
pass pass
self.assertEqual(res.status_int, expected) self.assertEqual(res.status_int, expected)
@ -3596,7 +3596,7 @@ class TestReplicatedObjectController(
self.app.update_request(req) self.app.update_request(req)
try: try:
res = controller.PUT(req) res = controller.PUT(req)
except HTTPException as res: except HTTPException as res: # noqa: F841
pass pass
expected = str(expected) expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected) self.assertEqual(res.status[:len(expected)], expected)
@ -3628,7 +3628,7 @@ class TestReplicatedObjectController(
self.app.update_request(req) self.app.update_request(req)
try: try:
res = controller.PUT(req) res = controller.PUT(req)
except HTTPException as res: except HTTPException as res: # noqa: F841
pass pass
expected = str(expected) expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected) self.assertEqual(res.status[:len(expected)], expected)
@ -3673,7 +3673,7 @@ class TestReplicatedObjectController(
self.app.update_request(req) self.app.update_request(req)
try: try:
res = controller.PUT(req) res = controller.PUT(req)
except HTTPException as res: except HTTPException as res: # noqa: F841
pass pass
expected = str(expected) expected = str(expected)
self.assertEqual(res.status[:len(str(expected))], self.assertEqual(res.status[:len(str(expected))],
@ -9935,7 +9935,7 @@ class TestContainerController(unittest.TestCase):
self.assertEqual(3, len(timestamps)) self.assertEqual(3, len(timestamps))
for timestamp in timestamps: for timestamp in timestamps:
self.assertEqual(timestamp, timestamps[0]) self.assertEqual(timestamp, timestamps[0])
self.assertTrue(re.match('[0-9]{10}\.[0-9]{5}', timestamp)) self.assertTrue(re.match(r'[0-9]{10}\.[0-9]{5}', timestamp))
def test_DELETE_backed_x_timestamp_header(self): def test_DELETE_backed_x_timestamp_header(self):
timestamps = [] timestamps = []
@ -9961,7 +9961,7 @@ class TestContainerController(unittest.TestCase):
self.assertEqual(3, len(timestamps)) self.assertEqual(3, len(timestamps))
for timestamp in timestamps: for timestamp in timestamps:
self.assertEqual(timestamp, timestamps[0]) self.assertEqual(timestamp, timestamps[0])
self.assertTrue(re.match('[0-9]{10}\.[0-9]{5}', timestamp)) self.assertTrue(re.match(r'[0-9]{10}\.[0-9]{5}', timestamp))
def test_node_read_timeout_retry_to_container(self): def test_node_read_timeout_retry_to_container(self):
with save_globals(): with save_globals():
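The edits in this file are two mechanical Python 3 / flake8 fixes: the "except HTTPException as res:" handlers gain "# noqa: F841" because pyflakes reports the exception target as assigned but never used when the handler body is just "pass" (the tests keep the binding because they read "res" after the try/except), and the timestamp regexes gain an "r" prefix so that "\." stays a regex escape rather than an invalid string escape, which Python 3.6+ deprecates and flake8 typically reports as W605. A self-contained sketch of both patterns follows; the names FakeHTTPException and flaky_call are invented for illustration and are not part of Swift or of this patch.

# Sketch only: demonstrates the noqa F841 pattern and the raw-string regex fix.
import re
import unittest


class FakeHTTPException(Exception):
    pass


def flaky_call():
    raise FakeHTTPException('boom')


class PatternSketch(unittest.TestCase):
    def test_unused_exception_target(self):
        # pyflakes reports F841 ("local variable 'err' is assigned to but
        # never used") for an exception target the handler never touches;
        # the patch keeps the binding and silences the check with noqa.
        try:
            flaky_call()
        except FakeHTTPException as err:  # noqa: F841
            pass

    def test_raw_string_regex(self):
        # The r prefix keeps '\.' a regex escape; in a plain str literal it
        # is an invalid escape sequence under Python 3 (DeprecationWarning,
        # usually surfaced by flake8 as W605).
        self.assertTrue(re.match(r'[0-9]{10}\.[0-9]{5}', '1364456113.76334'))


if __name__ == '__main__':
    unittest.main()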

tox.ini
View File

@ -127,7 +127,15 @@ commands = bandit -c bandit.yaml -r swift -n 5
# H404: multi line docstring should start without a leading new line # H404: multi line docstring should start without a leading new line
# H405: multi line docstring summary not separated with an empty line # H405: multi line docstring summary not separated with an empty line
# H501: Do not use self.__dict__ for string formatting # H501: Do not use self.__dict__ for string formatting
ignore = H101,H202,H301,H306,H404,H405,H501 # Disabled with going to hacking 2.0, needs further investigation and
# changes to enable:
# E305 expected 2 blank lines after class or function definition, found 1
# E402: module level import not at top of file
# E731 do not assign a lambda expression, use a def
# Swift team needs to decide if they want to enable either of these:
# W503: line break before binary operator
# W504: line break after binary operator
ignore = H101,H202,H301,H306,H404,H405,H501,W503,W504,E305,E402,E731
exclude = .venv,.tox,dist,*egg exclude = .venv,.tox,dist,*egg
filename = *.py,bin/* filename = *.py,bin/*
show-source = True show-source = True
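The comment block added above names each newly ignored check by code; purely as an illustration (none of the lines below come from the Swift tree), this is the kind of code those checks flag:

# Illustrative only: minimal triggers for the checks added to the ignore list.

GREETING = 'hello'

import os  # E402: module level import not at top of file


def helper():
    return os.getpid()
value = helper()  # E305: expected 2 blank lines after function definition


square = lambda x: x * x  # E731: do not assign a lambda expression, use a def

# W504: line break after binary operator
total = (value +
         square(2))
# W503: line break before binary operator
total = (value
         + square(2))

Since any wrapped binary expression breaks the line either before or after the operator, at most one of W503/W504 can ever be enforced, which is presumably why the comment leaves that choice to the team rather than ignoring one and enabling the other.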