Apply ruff

We disable all but the hacking checks but don't enable ruff yet. That
will be done separately so we can git-blame-ignore this file.

Change-Id: I70587777cb817893f154a603967106262495c419
Signed-off-by: Stephen Finucane <sfinucan@redhat.com>
This commit is contained in:
Stephen Finucane
2025-08-01 12:37:17 +01:00
parent 79a81e2dfc
commit aedef2e288
42 changed files with 2439 additions and 1775 deletions

View File

@@ -21,10 +21,7 @@ sys.path.insert(0, os.path.abspath('../..'))
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'openstackdocstheme'
]
extensions = ['sphinx.ext.autodoc', 'openstackdocstheme']
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/oslo.utils'
@@ -65,14 +62,17 @@ pygments_style = 'native'
html_theme = 'openstackdocs'
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
htmlhelp_basename = f'{project}doc'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index',
'%s.tex' % project,
'%s Documentation' % project,
'OpenStack Foundation', 'manual'),
(
'index',
f'{project}.tex',
f'{project} Documentation',
'OpenStack Foundation',
'manual',
),
]

View File

@@ -22,8 +22,9 @@ def flatten_dict_to_keypairs(d, separator=':'):
"""
for name, value in sorted(d.items()):
if isinstance(value, dict):
for subname, subvalue in flatten_dict_to_keypairs(value,
separator):
yield '{}{}{}'.format(name, separator, subname), subvalue
for subname, subvalue in flatten_dict_to_keypairs(
value, separator
):
yield f'{name}{separator}{subname}', subvalue
else:
yield name, value

View File

@@ -29,15 +29,16 @@ def safe_decode(text, incoming=None, errors='strict'):
representation of it.
:raises TypeError: If text is not an instance of str
"""
if not isinstance(text, (str, bytes)):
raise TypeError("%s can't be decoded" % type(text))
if not isinstance(text, str | bytes):
raise TypeError(f"{type(text)} can't be decoded")
if isinstance(text, str):
return text
if not incoming:
incoming = (getattr(sys.stdin, 'encoding', None) or
sys.getdefaultencoding())
incoming = (
getattr(sys.stdin, 'encoding', None) or sys.getdefaultencoding()
)
try:
return text.decode(incoming, errors)
@@ -57,8 +58,7 @@ def safe_decode(text, incoming=None, errors='strict'):
return text.decode('utf-8', errors)
def safe_encode(text, incoming=None,
encoding='utf-8', errors='strict'):
def safe_encode(text, incoming=None, encoding='utf-8', errors='strict'):
"""Encodes incoming text/bytes string using `encoding`.
If incoming is not specified, text is expected to be encoded with
@@ -75,12 +75,13 @@ def safe_encode(text, incoming=None,
    See also the to_utf8() function, which is simpler and doesn't depend on
    the locale encoding.
"""
if not isinstance(text, (str, bytes)):
raise TypeError("%s can't be encoded" % type(text))
if not isinstance(text, str | bytes):
raise TypeError(f"{type(text)} can't be encoded")
if not incoming:
incoming = (getattr(sys.stdin, 'encoding', None) or
sys.getdefaultencoding())
incoming = (
getattr(sys.stdin, 'encoding', None) or sys.getdefaultencoding()
)
# Avoid case issues in comparisons
if hasattr(incoming, 'lower'):
@@ -110,12 +111,14 @@ def to_utf8(text):
elif isinstance(text, str):
return text.encode('utf-8')
else:
raise TypeError("bytes or Unicode expected, got %s"
% type(text).__name__)
raise TypeError(
f"bytes or Unicode expected, got {type(text).__name__}"
)
@debtcollector.removals.remove(message='Use str(exc) instead',
category=DeprecationWarning)
@debtcollector.removals.remove(
message='Use str(exc) instead', category=DeprecationWarning
)
def exception_to_unicode(exc):
"""Get the message of an exception as a Unicode string.

View File

@@ -27,7 +27,8 @@ from oslo_utils import importutils
from oslo_utils import timeutils
debtcollector.deprecate(
"eventletutils module is deprecated and will be removed.")
"eventletutils module is deprecated and will be removed."
)
# These may or may not exist; so carefully import them if we can...
_eventlet = importutils.try_import('eventlet')
@@ -40,8 +41,18 @@ EVENTLET_AVAILABLE = all((_eventlet, _patcher))
# Taken from eventlet.py (v0.16.1) patcher code (it's not an accessible set
# for some reason...)
_ALL_PATCH = frozenset(['__builtin__', 'MySQLdb', 'os',
'psycopg', 'select', 'socket', 'thread', 'time'])
_ALL_PATCH = frozenset(
[
'__builtin__',
'MySQLdb',
'os',
'psycopg',
'select',
'socket',
'thread',
'time',
]
)
def fetch_current_thread_functor():
@@ -68,8 +79,9 @@ def fetch_current_thread_functor():
return threading.current_thread
def warn_eventlet_not_patched(expected_patched_modules=None,
what='this library'):
def warn_eventlet_not_patched(
expected_patched_modules=None, what='this library'
):
"""Warns if eventlet is being used without patching provided modules.
:param expected_patched_modules: list of modules to check to ensure that
@@ -99,8 +111,9 @@ def warn_eventlet_not_patched(expected_patched_modules=None,
expanded_patched_modules.update(_ALL_PATCH)
else:
if m not in _ALL_PATCH:
raise ValueError("Unknown module '%s' requested to check"
" if patched" % m)
raise ValueError(
f"Unknown module '{m}' requested to check if patched"
)
else:
expanded_patched_modules.add(m)
if EVENTLET_AVAILABLE:
@@ -125,12 +138,15 @@ def warn_eventlet_not_patched(expected_patched_modules=None,
if not _patcher.is_monkey_patched(m):
not_patched.append(m)
if not_patched:
warnings.warn("It is highly recommended that when eventlet"
" is used that the %s modules are monkey"
" patched when using %s (to avoid"
" spurious or unexpected lock-ups"
" and/or hangs)" % (not_patched, what),
RuntimeWarning, stacklevel=3)
warnings.warn(
"It is highly recommended that when eventlet"
f" is used that the {not_patched} modules are monkey"
f" patched when using {what} (to avoid"
" spurious or unexpected lock-ups"
" and/or hangs)",
RuntimeWarning,
stacklevel=3,
)
def is_monkey_patched(module):
@@ -150,6 +166,7 @@ class EventletEvent:
This wraps the eventlet.event.Event class to have the same API as
the standard threading.Event object.
"""
def __init__(self, *args, **kwargs):
super().__init__()
self.clear()
@@ -173,8 +190,9 @@ class EventletEvent:
with timeutils.StopWatch(timeout) as sw:
while True:
event = self._event
with _eventlet.timeout.Timeout(sw.leftover(return_none=True),
False):
with _eventlet.timeout.Timeout(
sw.leftover(return_none=True), False
):
event.wait()
if event is not self._event:
continue

View File

@@ -48,6 +48,7 @@ class CausedByException(Exception):
.. versionadded:: 2.4
"""
def __init__(self, message, cause=None):
super().__init__(message)
self.cause = cause
@@ -67,8 +68,10 @@ class CausedByException(Exception):
def pformat(self, indent=2, indent_text=" ", show_root_class=False):
"""Pretty formats a caused exception + any connected causes."""
if indent < 0:
raise ValueError("Provided 'indent' must be greater than"
" or equal to zero instead of %s" % indent)
raise ValueError(
"Provided 'indent' must be greater than"
f" or equal to zero instead of {indent}"
)
buf = io.StringIO()
if show_root_class:
buf.write(reflection.get_class_name(self, fully_qualified=False))
@@ -82,8 +85,9 @@ class CausedByException(Exception):
buf.write(os.linesep)
if isinstance(next_up, CausedByException):
buf.write(indent_text * active_indent)
buf.write(reflection.get_class_name(next_up,
fully_qualified=False))
buf.write(
reflection.get_class_name(next_up, fully_qualified=False)
)
buf.write(": ")
buf.write(next_up._get_message())
else:
@@ -181,6 +185,7 @@ class save_and_reraise_exception:
.. versionchanged:: 1.4
Added *logger* optional parameter.
"""
def __init__(self, reraise=True, logger=None):
self.reraise = reraise
if logger is None:
@@ -190,8 +195,10 @@ class save_and_reraise_exception:
def force_reraise(self):
if self.type_ is None and self.value is None:
raise RuntimeError("There is no (currently) captured exception"
" to force the reraising of")
raise RuntimeError(
"There is no (currently) captured exception"
" to force the reraising of"
)
try:
if self.value is None:
self.value = self.type_()
@@ -218,10 +225,12 @@ class save_and_reraise_exception:
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is not None:
if self.reraise:
self.logger.error('Original exception being dropped: %s',
traceback.format_exception(self.type_,
self.value,
self.tb))
self.logger.error(
'Original exception being dropped: %s',
traceback.format_exception(
self.type_, self.value, self.tb
),
)
return False
if self.reraise:
self.force_reraise()
@@ -266,7 +275,8 @@ def forever_retry_uncaught_exceptions(*args, **kwargs):
# changed, so time to log it again...
logging.exception(
'Unexpected exception occurred %d time(s)... '
'retrying.' % same_failure_count)
'retrying.' % same_failure_count
)
if not watch.has_started():
watch.start()
else:
@@ -274,6 +284,7 @@ def forever_retry_uncaught_exceptions(*args, **kwargs):
same_failure_count = 0
last_exc_message = this_exc_message
time.sleep(retry_delay)
return wrapper
# This is needed to handle when the decorator has args or the decorator
@@ -321,8 +332,9 @@ class exception_filter:
def __init__(self, should_ignore_ex):
self._should_ignore_ex = should_ignore_ex
if all(hasattr(should_ignore_ex, a)
for a in functools.WRAPPER_ASSIGNMENTS):
if all(
hasattr(should_ignore_ex, a) for a in functools.WRAPPER_ASSIGNMENTS
):
functools.update_wrapper(self, should_ignore_ex)
def __get__(self, obj, owner):

View File

@@ -66,6 +66,7 @@ class _UUIDSentinels:
from oslo_utils.fixture import uuidsentinel as uuids
from oslo_utils.fixture import keystoneidsentinel as keystids
...
foo = uuids.foo
do_a_thing(foo)
@@ -77,6 +78,7 @@ class _UUIDSentinels:
data = create_some_data_structure(keystids.bar, var1, var2, var3)
assert extract_bar(data) == keystids.bar
"""
def __init__(self, is_dashed=True):
self._sentinels = {}
self._lock = threading.Lock()
@@ -88,7 +90,8 @@ class _UUIDSentinels:
with self._lock:
if name not in self._sentinels:
self._sentinels[name] = uuidutils.generate_uuid(
dashed=self.is_dashed)
dashed=self.is_dashed
)
return self._sentinels[name]

View File

@@ -48,25 +48,38 @@ def main():
parser = argparse.ArgumentParser(
prog='oslo.utils.imageutils',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=(textwrap.dedent('''\
description=(
textwrap.dedent('''\
oslo.utils.imageutils image checking program.
* Exit code of 0 indicates image passes safety check
* Exit code of 1 indicates image fails safety check
''')),
epilog=f"Testing using oslo.utils version {oslo_utils_version}")
parser.add_argument('-v', '--verbose',
action='store_true',
help=("Print detailed information about the image in "
"KEY=VALUE format. Defaults to no output."))
parser.add_argument('-i', '--image',
action='store', required=True, metavar="IMG",
help="Path to an image you wish to inspect.")
''')
),
epilog=f"Testing using oslo.utils version {oslo_utils_version}",
)
parser.add_argument(
'-v',
'--verbose',
action='store_true',
help=(
"Print detailed information about the image in "
"KEY=VALUE format. Defaults to no output."
),
)
parser.add_argument(
'-i',
'--image',
action='store',
required=True,
metavar="IMG",
help="Path to an image you wish to inspect.",
)
args = parser.parse_args()
image = args.image
verbose = args.verbose
if not os.path.exists(image) or not os.path.isfile(image):
print('Image path %s provided does not exist' % image, file=sys.stderr)
print(f'Image path {image} provided does not exist', file=sys.stderr)
sys.exit(1)
inspector = format_inspector.detect_file_format(image)
@@ -77,7 +90,7 @@ def main():
safe = False
failure_reasons = []
for exc in e.failures.items():
failure_reasons.append("{}: {}".format(exc[0], exc[1]))
failure_reasons.append(f"{exc[0]}: {exc[1]}")
virtual_size = inspector.virtual_size
actual_size = inspector.actual_size
@@ -93,6 +106,6 @@ def main():
sys.exit(0)
if verbose:
print('FAILURE_REASONS=\'%s\'' % ','.join(failure_reasons))
print('FAILURE_REASONS=\'{}\''.format(','.join(failure_reasons)))
sys.exit(1)

View File

@@ -83,14 +83,16 @@ class CaptureRegion:
being presented.
"""
read_start = current_position - len(chunk)
if (read_start <= self.offset <= current_position or
self.offset <= read_start <= (self.offset + self.length)):
if (
read_start <= self.offset <= current_position
or self.offset <= read_start <= (self.offset + self.length)
):
if read_start < self.offset:
lead_gap = self.offset - read_start
else:
lead_gap = 0
self.data += chunk[lead_gap:]
self.data = self.data[:self.length]
self.data = self.data[: self.length]
class EndCaptureRegion(CaptureRegion):
@@ -103,6 +105,7 @@ class EndCaptureRegion(CaptureRegion):
:param offset: Byte offset from the end of the stream to capture (which
will also be the region length)
"""
def __init__(self, offset):
super().__init__(offset, offset)
# We don't want to indicate completeness until we have the data we
@@ -111,7 +114,7 @@ class EndCaptureRegion(CaptureRegion):
def capture(self, chunk, current_position):
self.data += chunk
self.data = self.data[0 - self.length:]
self.data = self.data[0 - self.length :]
self.offset = current_position - len(self.data)
@property
@@ -150,8 +153,12 @@ class SafetyCheck:
except SafetyViolation:
raise
except Exception as e:
LOG.error('Failed to run safety check %s on %s inspector: %s',
self.name, self, e)
LOG.error(
'Failed to run safety check %s on %s inspector: %s',
self.name,
self,
e,
)
raise SafetyViolation(_('Unexpected error'))
@classmethod
@@ -161,8 +168,11 @@ class SafetyCheck:
This should only be used if there is no meaningful checks that can
be done for a given format.
"""
return cls('null', lambda: None,
_('This file format has no meaningful safety check'))
return cls(
'null',
lambda: None,
_('This file format has no meaningful safety check'),
)
@classmethod
def banned(cls):
@@ -172,26 +182,32 @@ class SafetyCheck:
generally because they are unsupported by any of our users and/or
we are unable to check for safety.
"""
def fail():
raise SafetyViolation(_('This file format is not allowed'))
return cls('banned', fail, _('This file format is not allowed'))
class ImageFormatError(Exception):
"""An unrecoverable image format error that aborts the process."""
pass
class SafetyViolation(Exception):
"""Indicates a failure of a single safety violation."""
pass
class SafetyCheckFailed(Exception):
"""Indictes that one or more of a series of safety checks failed."""
def __init__(self, failures):
super().__init__(_('Safety checks failed: %s') % ','.join(
failures.keys()))
super().__init__(
_('Safety checks failed: %s') % ','.join(failures.keys())
)
self.failures = failures
@@ -224,7 +240,8 @@ class FileInspector(abc.ABC):
# Make sure we actively declare some safety check, even if it
# is a no-op.
raise RuntimeError(
'All inspectors must define at least one safety check')
'All inspectors must define at least one safety check'
)
def _trace(self, *args, **kwargs):
if self._tracing:
@@ -250,8 +267,10 @@ class FileInspector(abc.ABC):
def _capture(self, chunk, only=None):
if self._finished:
raise RuntimeError('Inspector has been marked finished, '
'no more data processing allowed')
raise RuntimeError(
'Inspector has been marked finished, '
'no more data processing allowed'
)
for name, region in self._capture_regions.items():
if only and name not in only:
continue
@@ -261,8 +280,11 @@ class FileInspector(abc.ABC):
def eat_chunk(self, chunk):
"""Call this to present chunks of the file to the inspector."""
pre_regions = set(self._capture_regions.values())
pre_complete = {region for region in self._capture_regions.values()
if region.complete}
pre_complete = {
region
for region in self._capture_regions.values()
if region.complete
}
# Increment our position-in-file counter
self._total_count += len(chunk)
@@ -278,11 +300,15 @@ class FileInspector(abc.ABC):
# which may require the current chunk.
new_regions = set(self._capture_regions.values()) - pre_regions
if new_regions:
self._capture(chunk, only=[self.region_name(r)
for r in new_regions])
self._capture(
chunk, only=[self.region_name(r) for r in new_regions]
)
post_complete = {region for region in self._capture_regions.values()
if region.complete}
post_complete = {
region
for region in self._capture_regions.values()
if region.complete
}
# Call the handler for any regions that are newly complete
for region in post_complete - pre_complete:
self.region_complete(self.region_name(region))
@@ -312,7 +338,7 @@ class FileInspector(abc.ABC):
"""Add a new CaptureRegion by name."""
if self.has_region(name):
# This is a bug, we tried to add the same region twice
raise ImageFormatError('Inspector re-added region %s' % name)
raise ImageFormatError(f'Inspector re-added region {name}')
self._capture_regions[name] = region
def has_region(self, name):
@@ -336,8 +362,10 @@ class FileInspector(abc.ABC):
def add_safety_check(self, check):
if not isinstance(check, SafetyCheck):
raise RuntimeError(_('Unable to add safety check of type %s') % (
type(check).__name__))
raise RuntimeError(
_('Unable to add safety check of type %s')
% (type(check).__name__)
)
if check.name in self._safety_checks:
raise RuntimeError(_('Duplicate check of name %s') % check.name)
self._safety_checks[check.name] = check
@@ -376,8 +404,10 @@ class FileInspector(abc.ABC):
This is a dict of region:sizeinbytes items that the inspector
uses to examine the file.
"""
return {name: len(region.data) for name, region in
self._capture_regions.items()}
return {
name: len(region.data)
for name, region in self._capture_regions.items()
}
@classmethod
def from_file(cls, filename):
@@ -410,11 +440,16 @@ class FileInspector(abc.ABC):
"""
if not self.complete:
raise ImageFormatError(
_('Incomplete file cannot be safety checked'))
_('Incomplete file cannot be safety checked')
)
if not self.format_match:
raise ImageFormatError(
_('Unable to safety check format %s '
'because content does not match') % self)
_(
'Unable to safety check format %s '
'because content does not match'
)
% self
)
failures = {}
for check in self._safety_checks.values():
try:
@@ -424,8 +459,12 @@ class FileInspector(abc.ABC):
except SafetyViolation as exc:
exc.check = check
failures[check.name] = exc
LOG.warning('Safety check %s on %s failed because %s',
check.name, self, exc)
LOG.warning(
'Safety check %s on %s failed because %s',
check.name,
self,
exc,
)
if failures:
raise SafetyCheckFailed(failures)
@@ -465,6 +504,7 @@ class QcowInspector(FileInspector):
This recognizes the (very) old v1 format but will raise a SafetyViolation
for it, as it should definitely not be in production use at this point.
"""
NAME = 'qcow2'
BF_OFFSET = 0x08
BF_OFFSET_LEN = 8
@@ -477,16 +517,27 @@ class QcowInspector(FileInspector):
self.qemu_header_info = {}
self.new_region('header', CaptureRegion(0, 512))
self.add_safety_check(
SafetyCheck('backing_file', self.check_backing_file))
SafetyCheck('backing_file', self.check_backing_file)
)
self.add_safety_check(SafetyCheck('data_file', self.check_data_file))
self.add_safety_check(
SafetyCheck('data_file', self.check_data_file))
self.add_safety_check(
SafetyCheck('unknown_features', self.check_unknown_features))
SafetyCheck('unknown_features', self.check_unknown_features)
)
def region_complete(self, region):
self.qemu_header_info = dict(zip(
('magic', 'version', 'bf_offset', 'bf_sz', 'cluster_bits', 'size'),
struct.unpack('>4sIQIIQ', self.region('header').data[:32])))
self.qemu_header_info = dict(
zip(
(
'magic',
'version',
'bf_offset',
'bf_sz',
'cluster_bits',
'size',
),
struct.unpack('>4sIQIIQ', self.region('header').data[:32]),
)
)
if not self.format_match:
self.qemu_header_info = {}
@@ -498,13 +549,14 @@ class QcowInspector(FileInspector):
def format_match(self):
if not self.region('header').complete:
return False
return self.qemu_header_info.get('magic') == b'QFI\xFB'
return self.qemu_header_info.get('magic') == b'QFI\xfb'
def check_backing_file(self):
bf_offset_bytes = self.region('header').data[
self.BF_OFFSET:self.BF_OFFSET + self.BF_OFFSET_LEN]
self.BF_OFFSET : self.BF_OFFSET + self.BF_OFFSET_LEN
]
# nonzero means "has a backing file"
bf_offset, = struct.unpack('>Q', bf_offset_bytes)
(bf_offset,) = struct.unpack('>Q', bf_offset_bytes)
if bf_offset != 0:
raise SafetyViolation('Image has a backing file')
@@ -518,7 +570,8 @@ class QcowInspector(FileInspector):
raise SafetyViolation('Unsupported qcow2 version')
i_features = self.region('header').data[
self.I_FEATURES:self.I_FEATURES + self.I_FEATURES_LEN]
self.I_FEATURES : self.I_FEATURES + self.I_FEATURES_LEN
]
# This is the maximum byte number we should expect any bits to be set
max_byte = self.I_FEATURES_MAX_BIT // 8
@@ -529,7 +582,7 @@ class QcowInspector(FileInspector):
if byte_num == max_byte:
# If we're in the max-allowed byte, allow any bits less than
# the maximum-known feature flag bit to be set
allow_mask = ((1 << (self.I_FEATURES_MAX_BIT % 8)) - 1)
allow_mask = (1 << (self.I_FEATURES_MAX_BIT % 8)) - 1
elif byte_num > max_byte:
# If we're above the byte with the maximum known feature flag
# bit, then we expect all zeroes
@@ -540,14 +593,18 @@ class QcowInspector(FileInspector):
allow_mask = 0xFF
if i_features[i] & ~allow_mask:
LOG.warning('Found unknown feature bit in byte %i: %s/%s',
byte_num, bin(i_features[byte_num] & ~allow_mask),
bin(allow_mask))
LOG.warning(
'Found unknown feature bit in byte %i: %s/%s',
byte_num,
bin(i_features[byte_num] & ~allow_mask),
bin(allow_mask),
)
raise SafetyViolation('Unknown QCOW2 features found')
def check_data_file(self):
i_features = self.region('header').data[
self.I_FEATURES:self.I_FEATURES + self.I_FEATURES_LEN]
self.I_FEATURES : self.I_FEATURES + self.I_FEATURES_LEN
]
# First byte of bitfield, which is i_features[7]
byte = self.I_FEATURES_LEN - 1 - self.I_FEATURES_DATAFILE_BIT // 8
@@ -588,6 +645,7 @@ class VHDInspector(FileInspector):
This should only require about 512 bytes of the beginning of the file
to determine the virtual size.
"""
NAME = 'vhd'
def _initialize(self):
@@ -675,6 +733,7 @@ class VHDXInspector(FileInspector):
actual VDS uint64.
"""
NAME = 'vhdx'
METAREGION = '8B7CA206-4790-4B9A-B8FE-575F050F886E'
VIRTUAL_DISK_SIZE = '2FA54224-CD1B-4876-B211-5DBED83BF4B8'
@@ -713,8 +772,9 @@ class VHDXInspector(FileInspector):
def _guid(buf):
"""Format a MSFT GUID from the 16-byte input buffer."""
guid_format = '<IHHBBBBBBBB'
return '%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X' % (
struct.unpack(guid_format, buf))
return '{:08X}-{:04X}-{:04X}-{:02X}{:02X}-{:02X}{:02X}{:02X}{:02X}{:02X}{:02X}'.format(
*struct.unpack(guid_format, buf)
)
def _find_meta_region(self):
# The region table entries start after a 16-byte table header
@@ -722,10 +782,14 @@ class VHDXInspector(FileInspector):
# Parse the region table header to find the number of regions
regi, cksum, count, reserved = struct.unpack(
'<IIII', self.region('header').data[:16])
'<IIII', self.region('header').data[:16]
)
if regi != 0x69676572:
raise ImageFormatError('Region signature not found at %x' % (
self.region('header').offset))
raise ImageFormatError(
'Region signature not found at {:x}'.format(
self.region('header').offset
)
)
if count >= 2048:
raise ImageFormatError('Region count is %i (limit 2047)' % count)
@@ -746,9 +810,11 @@ class VHDXInspector(FileInspector):
if guid == self.METAREGION:
# This entry is the metadata region entry
meta_offset, meta_len, meta_req = struct.unpack(
'<QII', entry[16:])
self._trace('Meta entry %i specifies offset: %x',
i, meta_offset)
'<QII', entry[16:]
)
self._trace(
'Meta entry %i specifies offset: %x', i, meta_offset
)
# NOTE(danms): The meta_len in the region descriptor is the
# entire size of the metadata table and data. This can be
# very large, so we should only capture the size required
@@ -770,7 +836,8 @@ class VHDXInspector(FileInspector):
sig, reserved, count = struct.unpack('<8sHH', meta_buffer[:12])
if sig != b'metadata':
raise ImageFormatError(
'Invalid signature for metadata region: %r' % sig)
f'Invalid signature for metadata region: {sig!r}'
)
entries_size = 32 + (count * 32)
if len(meta_buffer) < entries_size:
@@ -782,26 +849,28 @@ class VHDXInspector(FileInspector):
if count >= 2048:
raise ImageFormatError(
'Metadata item count is %i (limit 2047)' % count)
'Metadata item count is %i (limit 2047)' % count
)
for i in range(0, count):
entry_offset = 32 + (i * 32)
guid = self._guid(meta_buffer[entry_offset:entry_offset + 16])
guid = self._guid(meta_buffer[entry_offset : entry_offset + 16])
if guid == desired_guid:
# Found the item we are looking for by id.
# Stop our region from capturing
item_offset, item_length, _reserved = struct.unpack(
'<III',
meta_buffer[entry_offset + 16:entry_offset + 28])
item_length = min(item_length,
self.VHDX_METADATA_TABLE_MAX_SIZE)
'<III', meta_buffer[entry_offset + 16 : entry_offset + 28]
)
item_length = min(
item_length, self.VHDX_METADATA_TABLE_MAX_SIZE
)
self.region('metadata').length = len(meta_buffer)
self._trace('Found entry at offset %x', item_offset)
# Metadata item offset is from the beginning of the metadata
# region, not the file.
return CaptureRegion(
self.region('metadata').offset + item_offset,
item_length)
self.region('metadata').offset + item_offset, item_length
)
self._trace('Did not find guid %s', desired_guid)
return None
@@ -813,7 +882,7 @@ class VHDXInspector(FileInspector):
if not self.has_region('vds') or not self.region('vds').complete:
return 0
size, = struct.unpack('<Q', self.region('vds').data)
(size,) = struct.unpack('<Q', self.region('vds').data)
return size
@@ -858,7 +927,7 @@ class VMDKInspector(FileInspector):
# at 0x200 and 1MB - 1
DESC_OFFSET = 0x200
DESC_MAX_SIZE = (1 << 20) - 1
GD_AT_END = 0xffffffffffffffff
GD_AT_END = 0xFFFFFFFFFFFFFFFF
# This is the minimum amount of data we need to read to recognize and
# process a "Hosted Sparse Extent" header
MIN_SPARSE_HEADER = 64
@@ -870,23 +939,34 @@ class VMDKInspector(FileInspector):
# This is the header for "Hosted Sparse Extent" type files. It may
# or may not be used, depending on what kind of VMDK we are about to
# read.
self.new_region('header',
CaptureRegion(0, 512,
min_length=self.MIN_SPARSE_HEADER))
self.new_region(
'header', CaptureRegion(0, 512, min_length=self.MIN_SPARSE_HEADER)
)
# The descriptor starts from the beginning in the some of the older
# formats, but we do not know which one we are reading yet. This
# will be deleted and re-created if we are reading one of the formats
# that embeds it later.
self.new_region('descriptor',
CaptureRegion(0, self.DESC_MAX_SIZE, min_length=4))
self.add_safety_check(
SafetyCheck('descriptor', self.check_descriptor))
self.new_region(
'descriptor', CaptureRegion(0, self.DESC_MAX_SIZE, min_length=4)
)
self.add_safety_check(SafetyCheck('descriptor', self.check_descriptor))
def _parse_sparse_header(self, region, offset=0):
(sig, ver, _flags, _sectors, _grain, desc_sec, desc_num,
_numGTEsperGT, _rgdOffset, gdOffset) = struct.unpack(
(
sig,
ver,
_flags,
_sectors,
_grain,
desc_sec,
desc_num,
_numGTEsperGT,
_rgdOffset,
gdOffset,
) = struct.unpack(
'<4sIIQQQQIQQ',
self.region(region).data[offset:offset + self.MIN_SPARSE_HEADER])
self.region(region).data[offset : offset + self.MIN_SPARSE_HEADER],
)
return sig, ver, desc_sec, desc_num, gdOffset
def post_process(self):
@@ -898,8 +978,9 @@ class VMDKInspector(FileInspector):
if not self.has_region('header') or not self.region('header').complete:
return
sig, ver, desc_sec, desc_num, gdOffset = (
self._parse_sparse_header('header'))
sig, ver, desc_sec, desc_num, gdOffset = self._parse_sparse_header(
'header'
)
try:
is_text = True
@@ -919,7 +1000,7 @@ class VMDKInspector(FileInspector):
# support it.
self.delete_region('header')
return
raise ImageFormatError('Signature KDMV not found: %r' % sig)
raise ImageFormatError(f'Signature KDMV not found: {sig!r}')
if ver not in (1, 2, 3):
raise ImageFormatError('Unsupported format version %i' % ver)
@@ -944,8 +1025,9 @@ class VMDKInspector(FileInspector):
# embedded one.
if self.region('descriptor').offset == 0:
self.delete_region('descriptor')
self.new_region('descriptor',
CaptureRegion(desc_offset, desc_size))
self.new_region(
'descriptor', CaptureRegion(desc_offset, desc_size)
)
def region_complete(self, region_name):
if region_name == 'descriptor':
@@ -1005,7 +1087,8 @@ class VMDKInspector(FileInspector):
# If we have the descriptor, we definitely have the header
_sig, _ver, _flags, sectors, _grain, _desc_sec, _desc_num = (
struct.unpack('<IIIQQQQ', self.region('header').data[:44]))
struct.unpack('<IIIQQQQ', self.region('header').data[:44])
)
return sectors * 512
@@ -1046,8 +1129,9 @@ class VMDKInspector(FileInspector):
# Check all the extent lines for concerning content
for extent_line in extents:
if '/' in extent_line:
LOG.error('Extent line %r contains unsafe characters',
extent_line)
LOG.error(
'Extent line %r contains unsafe characters', extent_line
)
raise SafetyViolation(_('Invalid extent filenames found'))
if not extents:
@@ -1056,31 +1140,35 @@ class VMDKInspector(FileInspector):
def check_footer(self):
h_sig, h_ver, h_desc_sec, h_desc_num, h_goff = (
self._parse_sparse_header('header'))
self._parse_sparse_header('header')
)
f_sig, f_ver, f_desc_sec, f_desc_num, f_goff = (
self._parse_sparse_header('footer', 512))
self._parse_sparse_header('footer', 512)
)
if h_sig != f_sig:
raise SafetyViolation(
_('Header and footer signature do not match'))
_('Header and footer signature do not match')
)
if h_ver != f_ver:
raise SafetyViolation(_('Header and footer versions do not match'))
if h_desc_sec != f_desc_sec or h_desc_num != f_desc_num:
raise SafetyViolation(
_('Footer specifies a different descriptor than header'))
_('Footer specifies a different descriptor than header')
)
if f_goff == self.GD_AT_END:
raise SafetyViolation(_('Footer indicates another footer'))
pad = b'\x00' * 496
val, size, typ, zero = struct.unpack(
'<QII496s',
self.region('footer').data[:512])
'<QII496s', self.region('footer').data[:512]
)
if size != 0 or typ != self.MARKER_FOOTER or zero != pad:
raise SafetyViolation(_('Footer marker is invalid'))
val, size, typ, zero = struct.unpack(
'<QII496s',
self.region('footer').data[-512:])
'<QII496s', self.region('footer').data[-512:]
)
if val != 0 or size != 0 or typ != self.MARKER_EOS or zero != pad:
raise SafetyViolation(_('End-of-stream marker is invalid'))
@@ -1099,6 +1187,7 @@ class VDIInspector(FileInspector):
This only needs to store the first 512 bytes of the image.
"""
NAME = 'vdi'
def _initialize(self):
@@ -1110,8 +1199,10 @@ class VDIInspector(FileInspector):
if not self.region('header').complete:
return False
signature, = struct.unpack('<I', self.region('header').data[0x40:0x44])
return signature == 0xbeda107f
(signature,) = struct.unpack(
'<I', self.region('header').data[0x40:0x44]
)
return signature == 0xBEDA107F
@property
def virtual_size(self):
@@ -1120,7 +1211,7 @@ class VDIInspector(FileInspector):
if not self.format_match:
return 0
size, = struct.unpack('<Q', self.region('header').data[0x170:0x178])
(size,) = struct.unpack('<Q', self.region('header').data[0x170:0x178])
return size
@@ -1152,6 +1243,7 @@ class ISOInspector(FileInspector):
located at the beginning of the image, which contains the volume size.
"""
NAME = 'iso'
def _initialize(self):
@@ -1193,7 +1285,9 @@ class ISOInspector(FileInspector):
# can use the first 2 bytes which are the little endian part
# this is normally 2048 or 2KB but we need to check as it can be
# different according to the ISO 9660 standard.
logical_block_size, = struct.unpack('<H', logical_block_size_data[:2])
(logical_block_size,) = struct.unpack(
'<H', logical_block_size_data[:2]
)
# The volume space size is the total number of logical blocks
# and is stored at offset 80 and is 8 bytes long
# as with the logical block size the field is encoded in both
@@ -1201,7 +1295,7 @@ class ISOInspector(FileInspector):
volume_space_size_data = self.region('header').data[80:88]
# given the encoding we only need to read half the field so we
# can use the first 4 bytes which are the little endian part
volume_space_size, = struct.unpack('<L', volume_space_size_data[:4])
(volume_space_size,) = struct.unpack('<L', volume_space_size_data[:4])
# the virtual size is the volume space size * logical block size
return volume_space_size * logical_block_size
@@ -1239,7 +1333,7 @@ class GPTInspector(FileInspector):
# Media descriptor will basically always be "a fixed disk" for any of
# our purposes, not a floppy disk
media_desc = boot_sector[0x15]
return (num_fats == 2 and media_desc == self.MEDIA_TYPE_FDISK)
return num_fats == 2 and media_desc == self.MEDIA_TYPE_FDISK
@property
def format_match(self):
@@ -1248,7 +1342,7 @@ class GPTInspector(FileInspector):
# Check to see if this looks like a VBR from a FAT filesystem so we
# can exclude it
is_fat = self._check_for_fat()
mbr_sig, = struct.unpack('<H', self.region('mbr').data[510:512])
(mbr_sig,) = struct.unpack('<H', self.region('mbr').data[510:512])
return mbr_sig == self.MBR_SIGNATURE and not is_fat
def check_mbr_partitions(self):
@@ -1256,10 +1350,19 @@ class GPTInspector(FileInspector):
found_gpt = False
for i in range(4):
pte_start = self.MBR_PTE_START + (16 * i)
pte = self.region('mbr').data[pte_start:pte_start + 16]
(boot, starth, starts, startt, ostype,
endh, ehds, endt, startlba, sizelba) = struct.unpack(
'<B3BB3BII', pte)
pte = self.region('mbr').data[pte_start : pte_start + 16]
(
boot,
starth,
starts,
startt,
ostype,
endh,
ehds,
endt,
startlba,
sizelba,
) = struct.unpack('<B3BB3BII', pte)
if boot not in (0x00, 0x80):
raise SafetyViolation('MBR PTE %i has invalid boot flag' % i)
if ostype != 0:
@@ -1291,21 +1394,29 @@ class LUKSInspector(FileInspector):
@property
def format_match(self):
return self.region('header').data[:6] == b'LUKS\xBA\xBE'
return self.region('header').data[:6] == b'LUKS\xba\xbe'
@property
def header_items(self):
fields = struct.unpack('>6sh32s32s32sI',
self.region('header').data[:108])
names = ['magic', 'version', 'cipher_alg', 'cipher_mode', 'hash',
'payload_offset']
fields = struct.unpack(
'>6sh32s32s32sI', self.region('header').data[:108]
)
names = [
'magic',
'version',
'cipher_alg',
'cipher_mode',
'hash',
'payload_offset',
]
return dict(zip(names, fields))
def check_version(self):
header = self.header_items
if header['version'] != 1:
raise SafetyViolation(
'LUKS version %i is not supported' % header['version'])
'LUKS version %i is not supported' % header['version']
)
@property
def virtual_size(self):
@@ -1335,20 +1446,25 @@ class InspectWrapper:
hole if used improperly, but may be used to limit
the detected formats to some smaller scope.
"""
def __init__(self, source, expected_format=None, allowed_formats=None):
self._source = source
self._expected_format = expected_format
self._errored_inspectors = set()
self._inspectors = {v() for k, v in ALL_FORMATS.items()
if not allowed_formats or k in allowed_formats}
self._inspectors = {
v()
for k, v in ALL_FORMATS.items()
if not allowed_formats or k in allowed_formats
}
self._finished = False
def __iter__(self):
return self
def _process_chunk(self, chunk):
for inspector in [i for i in self._inspectors
if i not in self._errored_inspectors]:
for inspector in [
i for i in self._inspectors if i not in self._errored_inspectors
]:
try:
inspector.eat_chunk(chunk)
except Exception as e:
@@ -1362,20 +1478,26 @@ class InspectWrapper:
# If we are expecting to parse a specific format, we do
# not need to log scary messages about the other formats
# failing to parse the data as expected.
LOG.debug('Format inspector for %s does not match, '
'excluding from consideration (%s)',
inspector.NAME, e)
LOG.debug(
'Format inspector for %s does not match, '
'excluding from consideration (%s)',
inspector.NAME,
e,
)
self._errored_inspectors.add(inspector)
else:
# If we are expecting a format, have read enough data to
# satisfy that format's inspector, and no match is detected,
# abort the stream immediately to save having to read the
# entire thing before we signal the mismatch.
if (inspector.NAME == self._expected_format and
inspector.complete and not inspector.format_match):
if (
inspector.NAME == self._expected_format
and inspector.complete
and not inspector.format_match
):
raise ImageFormatError(
'Content does not match expected format %r' % (
inspector.NAME))
f'Content does not match expected format {inspector.NAME!r}'
)
def __next__(self):
try:
@@ -1432,7 +1554,8 @@ class InspectWrapper:
return [x for x in self._inspectors if str(x) == 'raw']
except IndexError:
raise ImageFormatError(
'Content does not match any allowed format')
'Content does not match any allowed format'
)
return matches
@property
@@ -1454,8 +1577,11 @@ class InspectWrapper:
# Multiple format matches mean that not only can we not return a
# decision here, but also means that there may be something
# nefarious going on (i.e. hiding one header in another).
raise ImageFormatError('Multiple formats detected: %s' % ','.join(
str(i) for i in matches))
raise ImageFormatError(
'Multiple formats detected: {}'.format(
','.join(str(i) for i in matches)
)
)
else:
try:
# The expected outcome of this is a single match of something
@@ -1463,7 +1589,8 @@ class InspectWrapper:
return matches[0]
except IndexError:
raise ImageFormatError(
'Content does not match any allowed format')
'Content does not match any allowed format'
)
ALL_FORMATS = {

View File

@@ -46,12 +46,20 @@ class QemuImgInfo:
However 'human' format support will be dropped in next cycle and only
'json' format will be supported. Prefer to use 'json' instead of 'human'.
"""
BACKING_FILE_RE = re.compile((r"^(.*?)\s*\(actual\s+path\s*:"
r"\s+(.*?)\)\s*$"), re.I)
BACKING_FILE_RE = re.compile(
(
r"^(.*?)\s*\(actual\s+path\s*:"
r"\s+(.*?)\)\s*$"
),
re.I,
)
TOP_LEVEL_RE = re.compile(r"^([\w\d\s\_\-]+):(.*)$")
SIZE_RE = re.compile(r"([0-9]+[eE][-+][0-9]+|\d*\.?\d+)"
r"\s*(\w+)?(\s*\(\s*(\d+)\s+bytes\s*\))?",
re.I)
SIZE_RE = re.compile(
r"([0-9]+[eE][-+][0-9]+|\d*\.?\d+)"
r"\s*(\w+)?(\s*\(\s*(\d+)\s+bytes\s*\))?",
re.I,
)
def __init__(self, cmd_output=None, format='human'):
if format == 'json':
@@ -72,7 +80,8 @@ class QemuImgInfo:
'The human format is deprecated and the format parameter '
'will be removed. Use explicitly json instead',
version="xena",
category=FutureWarning)
category=FutureWarning,
)
details = self._parse(cmd_output or '')
self.image = details.get('image')
self.backing_file = details.get('backing_file')
@@ -87,20 +96,20 @@ class QemuImgInfo:
def __str__(self):
lines = [
'image: %s' % self.image,
'file_format: %s' % self.file_format,
'virtual_size: %s' % self.virtual_size,
'disk_size: %s' % self.disk_size,
'cluster_size: %s' % self.cluster_size,
'backing_file: %s' % self.backing_file,
'backing_file_format: %s' % self.backing_file_format,
f'image: {self.image}',
f'file_format: {self.file_format}',
f'virtual_size: {self.virtual_size}',
f'disk_size: {self.disk_size}',
f'cluster_size: {self.cluster_size}',
f'backing_file: {self.backing_file}',
f'backing_file_format: {self.backing_file_format}',
]
if self.snapshots:
lines.append("snapshots: %s" % self.snapshots)
lines.append(f"snapshots: {self.snapshots}")
if self.encrypted:
lines.append("encrypted: %s" % self.encrypted)
lines.append(f"encrypted: {self.encrypted}")
if self.format_specific:
lines.append("format_specific: %s" % self.format_specific)
lines.append(f"format_specific: {self.format_specific}")
return "\n".join(lines)
def _canonicalize(self, field):
@@ -131,8 +140,8 @@ class QemuImgInfo:
if len(unit_of_measure) == 1 and unit_of_measure != 'B':
unit_of_measure += 'B'
return strutils.string_to_bytes(
'{}{}'.format(magnitude, unit_of_measure),
return_int=True)
f'{magnitude}{unit_of_measure}', return_int=True
)
def _extract_details(self, root_cmd, root_details, lines_after):
real_details = root_details
@@ -169,13 +178,15 @@ class QemuImgInfo:
if len(date_pieces) != 3:
break
lines_after.pop(0)
real_details.append({
'id': line_pieces[0],
'tag': line_pieces[1],
'vm_size': line_pieces[2],
'date': line_pieces[3],
'vm_clock': line_pieces[4] + " " + line_pieces[5],
})
real_details.append(
{
'id': line_pieces[0],
'tag': line_pieces[1],
'vm_size': line_pieces[2],
'date': line_pieces[3],
'vm_clock': line_pieces[4] + " " + line_pieces[5],
}
)
return real_details
def _parse(self, cmd_output):

View File

@@ -31,9 +31,9 @@ def import_class(import_str):
try:
return getattr(sys.modules[mod_str], class_str)
except AttributeError:
raise ImportError('Class %s cannot be found (%s)' %
(class_str,
traceback.format_exception(*sys.exc_info())))
raise ImportError(
f'Class {class_str} cannot be found ({traceback.format_exception(*sys.exc_info())})'
)
def import_object(import_str, *args, **kwargs):
@@ -57,7 +57,7 @@ def import_object_ns(name_space, import_str, *args, **kwargs):
Don't capture :exc:`ImportError` when instanciating the object, only
when importing the object class.
"""
import_value = "{}.{}".format(name_space, import_str)
import_value = f"{name_space}.{import_str}"
try:
cls = import_class(import_value)
except ImportError:
@@ -89,9 +89,9 @@ def import_versioned_module(module, version, submodule=None):
"""
# NOTE(gcb) Disallow parameter version include character '.'
if '.' in '%s' % version:
if '.' in f'{version}':
raise ValueError("Parameter version shouldn't include character '.'.")
module_str = '{}.v{}'.format(module, version)
module_str = f'{module}.v{version}'
if submodule:
module_str = '.'.join((module_str, submodule))
return import_module(module_str)
@@ -119,5 +119,6 @@ def import_any(module, *modules):
if imported_module:
return imported_module
raise ImportError('Unable to import any modules from the list %s' %
str(modules))
raise ImportError(
f'Unable to import any modules from the list {str(modules)}'
)

View File

@@ -169,8 +169,7 @@ def is_valid_cidr(address):
# Verify it here
ip_segment = address.split('/')
if (len(ip_segment) <= 1 or
ip_segment[1] == ''):
if len(ip_segment) <= 1 or ip_segment[1] == '':
return False
return True
@@ -217,12 +216,18 @@ def get_ipv6_addr_by_EUI64(prefix, mac):
prefix = netaddr.IPNetwork(prefix)
return netaddr.IPAddress(prefix.first + eui64 ^ (1 << 57))
except (ValueError, netaddr.AddrFormatError):
raise ValueError(_('Bad prefix or mac format for generating IPv6 '
'address by EUI-64: %(prefix)s, %(mac)s:')
% {'prefix': prefix, 'mac': mac})
raise ValueError(
_(
'Bad prefix or mac format for generating IPv6 '
'address by EUI-64: %(prefix)s, %(mac)s:'
)
% {'prefix': prefix, 'mac': mac}
)
except TypeError:
raise TypeError(_('Bad prefix type for generating IPv6 address by '
'EUI-64: %s') % prefix)
raise TypeError(
_('Bad prefix type for generating IPv6 address by EUI-64: %s')
% prefix
)
def get_mac_addr_by_ipv6(ipv6, dialect=netaddr.mac_unix_expanded):
@@ -242,16 +247,23 @@ def get_mac_addr_by_ipv6(ipv6, dialect=netaddr.mac_unix_expanded):
.. versionadded:: 4.3.0
"""
return netaddr.EUI(int(
# out of the lowest 8 bytes (byte positions 8-1)
# delete the middle 2 bytes (5-4, 0xff_fe)
# by shifting the highest 3 bytes to the right by 2 bytes (8-6 -> 6-4)
(((ipv6 & 0xff_ff_ff_00_00_00_00_00) >> 16) +
# adding the lowest 3 bytes as they are (3-1)
(ipv6 & 0xff_ff_ff)) ^
# then invert the universal/local bit
0x02_00_00_00_00_00),
dialect=dialect)
return netaddr.EUI(
int(
# out of the lowest 8 bytes (byte positions 8-1)
# delete the middle 2 bytes (5-4, 0xff_fe)
# by shifting the highest 3 bytes to the right by 2 bytes (8-6 -> 6-4)
(
((ipv6 & 0xFF_FF_FF_00_00_00_00_00) >> 16)
+
# adding the lowest 3 bytes as they are (3-1)
(ipv6 & 0xFF_FF_FF)
)
^
# then invert the universal/local bit
0x02_00_00_00_00_00
),
dialect=dialect,
)
def is_ipv6_enabled():
@@ -288,7 +300,7 @@ def escape_ipv6(address):
.. versionadded:: 3.29.0
"""
if is_valid_ipv6(address):
return "[%s]" % address
return f"[{address}]"
return address
@@ -332,7 +344,7 @@ def _is_int_in_range(value, start, end):
val = int(value)
except (ValueError, TypeError):
return False
return (start <= val <= end)
return start <= val <= end
def is_valid_port(port):
@@ -394,8 +406,7 @@ def get_my_ipv4():
def _get_my_ipv4_address():
"""Figure out the best ipv4
"""
"""Figure out the best ipv4"""
LOCALHOST = '127.0.0.1'
interface = None
@@ -407,12 +418,16 @@ def _get_my_ipv4_address():
interface = route_attrs[0]
break
else:
LOG.info('Could not determine default network interface, '
'using %s for IPv4 address', LOCALHOST)
LOG.info(
'Could not determine default network interface, '
'using %s for IPv4 address',
LOCALHOST,
)
return LOCALHOST
except FileNotFoundError:
LOG.info('IPv4 route table not found, '
'using %s for IPv4 address', LOCALHOST)
LOG.info(
'IPv4 route table not found, using %s for IPv4 address', LOCALHOST
)
return LOCALHOST
try:
@@ -420,13 +435,17 @@ def _get_my_ipv4_address():
v4addrs = [addr for addr in addrs if addr.family == socket.AF_INET]
return v4addrs[0].address
except (KeyError, IndexError):
LOG.info('Could not determine IPv4 address for interface %s, '
'using 127.0.0.1',
interface)
LOG.info(
'Could not determine IPv4 address for interface %s, '
'using 127.0.0.1',
interface,
)
except Exception as e:
LOG.info('Could not determine IPv4 address for '
'interface %(interface)s: %(error)s',
{'interface': interface, 'error': e})
LOG.info(
'Could not determine IPv4 address for '
'interface %(interface)s: %(error)s',
{'interface': interface, 'error': e},
)
return LOCALHOST
@@ -450,8 +469,7 @@ def get_my_ipv6():
def _get_my_ipv6_address():
"""Figure out the best IPv6 address
"""
"""Figure out the best IPv6 address"""
LOCALHOST = '::1'
interface = None
ZERO_ADDRESS = '00000000000000000000000000000000'
@@ -461,17 +479,23 @@ def _get_my_ipv6_address():
with open('/proc/net/ipv6_route') as routes:
for route in routes:
route_attrs = route.strip().split(' ')
if ((route_attrs[0], route_attrs[1]) == ZERO_PAIR and
(route_attrs[2], route_attrs[3]) == ZERO_PAIR):
if (route_attrs[0], route_attrs[1]) == ZERO_PAIR and (
route_attrs[2],
route_attrs[3],
) == ZERO_PAIR:
interface = route_attrs[-1]
break
else:
LOG.info('Could not determine default network interface, '
'using %s for IPv6 address', LOCALHOST)
LOG.info(
'Could not determine default network interface, '
'using %s for IPv6 address',
LOCALHOST,
)
return LOCALHOST
except FileNotFoundError:
LOG.info('IPv6 route table not found, '
'using %s for IPv6 address', LOCALHOST)
LOG.info(
'IPv6 route table not found, using %s for IPv6 address', LOCALHOST
)
return LOCALHOST
try:
@@ -479,13 +503,17 @@ def _get_my_ipv6_address():
v6addrs = [addr for addr in addrs if addr.family == socket.AF_INET6]
return v6addrs[0].address
except (KeyError, IndexError):
LOG.info('Could not determine IPv6 address for interface '
'%(interface)s, using %(address)s',
{'interface': interface, 'address': LOCALHOST})
LOG.info(
'Could not determine IPv6 address for interface '
'%(interface)s, using %(address)s',
{'interface': interface, 'address': LOCALHOST},
)
except Exception as e:
LOG.info('Could not determine IPv6 address for '
'interface %(interface)s: %(error)s',
{'interface': interface, 'error': e})
LOG.info(
'Could not determine IPv6 address for '
'interface %(interface)s: %(error)s',
{'interface': interface, 'error': e},
)
return LOCALHOST
@@ -511,7 +539,7 @@ class _ModifiedSplitResult(parse.SplitResult):
return dict(parse.parse_qsl(self.query))
else:
params = {}
for (key, value) in parse.parse_qsl(self.query):
for key, value in parse.parse_qsl(self.query):
if key in params:
if isinstance(params[key], list):
params[key].append(value)
@@ -533,19 +561,22 @@ def urlsplit(url, scheme='', allow_fragments=True):
The parameters are the same as urlparse.urlsplit.
"""
scheme, netloc, path, query, fragment = parse.urlsplit(
url, scheme, allow_fragments)
url, scheme, allow_fragments
)
if allow_fragments and '#' in path:
path, fragment = path.split('#', 1)
if '?' in path:
path, query = path.split('?', 1)
return _ModifiedSplitResult(scheme, netloc,
path, query, fragment)
return _ModifiedSplitResult(scheme, netloc, path, query, fragment)
def set_tcp_keepalive(sock, tcp_keepalive=True,
tcp_keepidle=None,
tcp_keepalive_interval=None,
tcp_keepalive_count=None):
def set_tcp_keepalive(
sock,
tcp_keepalive=True,
tcp_keepidle=None,
tcp_keepalive_interval=None,
tcp_keepalive_count=None,
):
"""Set values for tcp keepalive parameters
This function configures tcp keepalive parameters if users wish to do
@@ -575,22 +606,24 @@ def set_tcp_keepalive(sock, tcp_keepalive=True,
# Idle + Count * Interval effectively gives you the total timeout.
if tcp_keepidle is not None:
if hasattr(socket, 'TCP_KEEPIDLE'):
sock.setsockopt(socket.IPPROTO_TCP,
socket.TCP_KEEPIDLE,
tcp_keepidle)
sock.setsockopt(
socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, tcp_keepidle
)
else:
LOG.warning('tcp_keepidle not available on your system')
if tcp_keepalive_interval is not None:
if hasattr(socket, 'TCP_KEEPINTVL'):
sock.setsockopt(socket.IPPROTO_TCP,
socket.TCP_KEEPINTVL,
tcp_keepalive_interval)
sock.setsockopt(
socket.IPPROTO_TCP,
socket.TCP_KEEPINTVL,
tcp_keepalive_interval,
)
else:
LOG.warning('tcp_keepintvl not available on your system')
if tcp_keepalive_count is not None:
if hasattr(socket, 'TCP_KEEPCNT'):
sock.setsockopt(socket.IPPROTO_TCP,
socket.TCP_KEEPCNT,
tcp_keepalive_count)
sock.setsockopt(
socket.IPPROTO_TCP, socket.TCP_KEEPCNT, tcp_keepalive_count
)
else:
LOG.warning('tcp_keepcnt not available on your system')

View File

@@ -47,7 +47,7 @@ def get_members(obj, exclude_hidden=True):
.. versionadded:: 2.3
"""
for (name, value) in inspect.getmembers(obj):
for name, value in inspect.getmembers(obj):
if name.startswith("_") and exclude_hidden:
continue
yield (name, value)
@@ -55,8 +55,10 @@ def get_members(obj, exclude_hidden=True):
def get_member_names(obj, exclude_hidden=True):
"""Get all the member names for a object."""
return [name for (name, _obj) in
get_members(obj, exclude_hidden=exclude_hidden)]
return [
name
for (name, _obj) in get_members(obj, exclude_hidden=exclude_hidden)
]
def get_class_name(obj, fully_qualified=True, truncate_builtins=True):
@@ -86,13 +88,14 @@ def get_class_name(obj, fully_qualified=True, truncate_builtins=True):
if built_in:
return obj.__name__
if fully_qualified and hasattr(obj, '__module__'):
return '{}.{}'.format(obj.__module__, obj.__name__)
return f'{obj.__module__}.{obj.__name__}'
else:
return obj.__name__
def get_all_class_names(obj, up_to=object,
fully_qualified=True, truncate_builtins=True):
def get_all_class_names(
obj, up_to=object, fully_qualified=True, truncate_builtins=True
):
"""Get class names of object parent classes.
Iterate over all class names object is instance or subclass of,
@@ -103,9 +106,11 @@ def get_all_class_names(obj, up_to=object,
obj = type(obj)
for cls in obj.mro():
if issubclass(cls, up_to):
yield get_class_name(cls,
fully_qualified=fully_qualified,
truncate_builtins=truncate_builtins)
yield get_class_name(
cls,
fully_qualified=fully_qualified,
truncate_builtins=truncate_builtins,
)
def get_callable_name(function):
@@ -133,8 +138,11 @@ def get_callable_name(function):
if hasattr(function, 'im_class'):
# This is a unbound method, which exists only in python 2.x
im_class = function.im_class
parts = (im_class.__module__,
im_class.__name__, function.__name__)
parts = (
im_class.__module__,
im_class.__name__,
function.__name__,
)
else:
parts = (function.__module__, function.__name__)
else:
@@ -195,8 +203,9 @@ def get_callable_args(function, required_only=False):
sig = get_signature(function)
function_args = list(sig.parameters.keys())
for param_name, p in sig.parameters.items():
if (p.kind in (Parameter.VAR_POSITIONAL, Parameter.VAR_KEYWORD) or
(required_only and p.default is not Parameter.empty)):
if p.kind in (Parameter.VAR_POSITIONAL, Parameter.VAR_KEYWORD) or (
required_only and p.default is not Parameter.empty
):
function_args.remove(param_name)
return function_args

View File

@@ -27,8 +27,9 @@ import string as _string
import debtcollector.removals
@debtcollector.removals.remove(message='Use hashlib.md5 instead',
category=DeprecationWarning)
@debtcollector.removals.remove(
message='Use hashlib.md5 instead', category=DeprecationWarning
)
def md5(string=b'', usedforsecurity=True):
"""Return an md5 hashlib object using usedforsecurity parameter
@@ -61,12 +62,12 @@ def crypt_mksalt(method):
# to engourage more secure methods.
methods = {'SHA-512': '$6$', 'SHA-256': '$5$'}
if method not in methods:
raise ValueError('Unsupported method: %s' % method)
raise ValueError(f'Unsupported method: {method}')
salt_set = _string.ascii_letters + _string.digits + './'
return ''.join(
[methods[method]] +
[secrets.choice(salt_set) for c in range(16)])
[methods[method]] + [secrets.choice(salt_set) for c in range(16)]
)
def crypt_password(key, salt):

View File

@@ -22,40 +22,48 @@ import pyparsing
def _all_in(x, *y):
x = ast.literal_eval(x)
if not isinstance(x, list):
raise TypeError("<all-in> must compare with a list literal"
" string, EG \"%s\"" % (['aes', 'mmx'],))
raise TypeError(
"<all-in> must compare with a list literal"
" string, EG \"{}\"".format(['aes', 'mmx'])
)
return all(val in x for val in y)
def _range_in(x, *y):
x = ast.literal_eval(x)
if len(y) != 4:
raise TypeError("<range-in> operator has to be followed by 2 "
"space separated numeric value surrounded by "
"brackets \"range_in [ 10 20 ] \"")
raise TypeError(
"<range-in> operator has to be followed by 2 "
"space separated numeric value surrounded by "
"brackets \"range_in [ 10 20 ] \""
)
num_x = float(x)
num_y = float(y[1])
num_z = float(y[2])
if num_y > num_z:
raise TypeError("<range-in> operator's first argument has to be "
"smaller or equal to the second argument EG"
"\"range_in ( 10 20 ] \"")
raise TypeError(
"<range-in> operator's first argument has to be "
"smaller or equal to the second argument EG"
"\"range_in ( 10 20 ] \""
)
if y[0] == '[':
lower = num_x >= num_y
elif y[0] == '(':
lower = num_x > num_y
else:
raise TypeError("The first element should be an opening bracket "
"(\"(\" or \"[\")")
raise TypeError(
"The first element should be an opening bracket (\"(\" or \"[\")"
)
if y[3] == ']':
upper = num_x <= num_z
elif y[3] == ')':
upper = num_x < num_z
else:
raise TypeError("The last element should be a closing bracket "
"(\")\" or \"]\")")
raise TypeError(
"The last element should be a closing bracket (\")\" or \"]\")"
)
return lower and upper
@@ -90,49 +98,49 @@ op_methods = {
def make_grammar():
"""Creates the grammar to be used by a spec matcher.
The grammar created supports the following operations.
The grammar created supports the following operations.
Numerical values:
* ``= :`` equal to or greater than. This is equivalent to ``>=`` and is
supported for `legacy reasons
<http://docs.openstack.org/developer/nova/filter_scheduler.html#ComputeCapabilitiesFilter>`_
* ``!= :`` Float/integer value not equal
* ``<= :`` Float/integer value less than or equal
* ``< :`` Float/integer value less than
* ``== :`` Float/integer value equal
* ``>= :`` Float/integer value greater than or equal
* ``> :`` Float/integer value greater
Numerical values:
* ``= :`` equal to or greater than. This is equivalent to ``>=`` and is
supported for `legacy reasons
<http://docs.openstack.org/developer/nova/filter_scheduler.html#ComputeCapabilitiesFilter>`_
* ``!= :`` Float/integer value not equal
* ``<= :`` Float/integer value less than or equal
* ``< :`` Float/integer value less than
* ``== :`` Float/integer value equal
* ``>= :`` Float/integer value greater than or equal
* ``> :`` Float/integer value greater
String operations:
* ``s!= :`` Not equal
* ``s< :`` Less than
* ``s<= :`` Less than or equal
* ``s== :`` Equal
* ``s> :`` Greater than
* ``s>= :`` Greater than or equal
String operations:
* ``s!= :`` Not equal
* ``s< :`` Less than
* ``s<= :`` Less than or equal
* ``s== :`` Equal
* ``s> :`` Greater than
* ``s>= :`` Greater than or equal
Other operations:
* ``<all-in> :`` All items 'in' value
* ``<in> :`` Item 'in' value, like a substring in a string.
* ``<or> :`` Logical 'or'
* ``<range-in>:`` Range tester with customizable boundary conditions, tests
whether value is in the range, boundary condition could be
inclusve \'[\' or exclusive \'(\'.
Other operations:
* ``<all-in> :`` All items 'in' value
* ``<in> :`` Item 'in' value, like a substring in a string.
* ``<or> :`` Logical 'or'
* ``<range-in>:`` Range tester with customizable boundary conditions, tests
whether value is in the range, boundary condition could be
inclusve \'[\' or exclusive \'(\'.
If no operator is specified the default is ``s==`` (string equality comparison)
If no operator is specified the default is ``s==`` (string equality comparison)
Example operations:
* ``">= 60"`` Is the numerical value greater than or equal to 60
* ``"<or> spam <or> eggs"`` Does the value contain ``spam`` or ``eggs``
* ``"s== 2.1.0"`` Is the string value equal to ``2.1.0``
* ``"<in> gcc"`` Is the string ``gcc`` contained in the value string
* ``"<all-in> aes mmx"`` Are both ``aes`` and ``mmx`` in the value
* ``"<range-in> [ 10 20 ]"`` float(value) >= 10 and float(value) <= 20
* ``"<range-in> ( 10 20 ]"`` float(value) > 10 and float(value) <= 20
* ``"<range-in> ( 10 20 )"`` float(value) > 10 and float(value) < 20
Example operations:
* ``">= 60"`` Is the numerical value greater than or equal to 60
* ``"<or> spam <or> eggs"`` Does the value contain ``spam`` or ``eggs``
* ``"s== 2.1.0"`` Is the string value equal to ``2.1.0``
* ``"<in> gcc"`` Is the string ``gcc`` contained in the value string
* ``"<all-in> aes mmx"`` Are both ``aes`` and ``mmx`` in the value
* ``"<range-in> [ 10 20 ]"`` float(value) >= 10 and float(value) <= 20
* ``"<range-in> ( 10 20 ]"`` float(value) > 10 and float(value) <= 20
* ``"<range-in> ( 10 20 )"`` float(value) > 10 and float(value) < 20
:returns: A pyparsing.MatchFirst object. See
https://pythonhosted.org/pyparsing/ for details on pyparsing.
:returns: A pyparsing.MatchFirst object. See
https://pythonhosted.org/pyparsing/ for details on pyparsing.
"""
# This is apparently how pyparsing recommends to be used,
# as http://pyparsing.wikispaces.com/share/view/644825 states that
@@ -140,23 +148,34 @@ Example operations:
unary_ops = (
# Order matters here (so that '=' doesn't match before '==')
pyparsing.Literal("==") | pyparsing.Literal("=") |
pyparsing.Literal("!=") | pyparsing.Literal("<in>") |
pyparsing.Literal(">=") | pyparsing.Literal("<=") |
pyparsing.Literal(">") | pyparsing.Literal("<") |
pyparsing.Literal("s==") | pyparsing.Literal("s!=") |
pyparsing.Literal("==")
| pyparsing.Literal("=")
| pyparsing.Literal("!=")
| pyparsing.Literal("<in>")
| pyparsing.Literal(">=")
| pyparsing.Literal("<=")
| pyparsing.Literal(">")
| pyparsing.Literal("<")
| pyparsing.Literal("s==")
| pyparsing.Literal("s!=")
|
# Order matters here (so that '<' doesn't match before '<=')
pyparsing.Literal("s<=") | pyparsing.Literal("s<") |
pyparsing.Literal("s<=")
| pyparsing.Literal("s<")
|
# Order matters here (so that '>' doesn't match before '>=')
pyparsing.Literal("s>=") | pyparsing.Literal("s>"))
pyparsing.Literal("s>=")
| pyparsing.Literal("s>")
)
all_in_nary_op = pyparsing.Literal("<all-in>")
or_ = pyparsing.Literal("<or>")
range_in_binary_op = pyparsing.Literal("<range-in>")
# An atom is anything not an keyword followed by anything but whitespace
atom = ~(unary_ops | all_in_nary_op | or_ | range_in_binary_op) + \
pyparsing.Regex(r"\S+")
atom = ~(
unary_ops | all_in_nary_op | or_ | range_in_binary_op
) + pyparsing.Regex(r"\S+")
unary = unary_ops + atom
range_op = range_in_binary_op + atom + atom + atom + atom

View File

@@ -53,8 +53,10 @@ UNIT_PREFIX_EXPONENT = {
UNIT_SYSTEM_INFO = {
'IEC': (1024, re.compile(r'(^[-+]?\d*\.?\d+)([KMGTPEZYRQ]i?)?(b|bit|B)$')),
'SI': (1000, re.compile(r'(^[-+]?\d*\.?\d+)([kMGTPEZYRQ])?(b|bit|B)$')),
'mixed': (None, re.compile(
r'(^[-+]?\d*\.?\d+)([kKMGTPEZYRQ]i?)?(b|bit|B)$')),
'mixed': (
None,
re.compile(r'(^[-+]?\d*\.?\d+)([kKMGTPEZYRQ]i?)?(b|bit|B)$'),
),
}
TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes')
@@ -66,17 +68,43 @@ SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+")
# NOTE(flaper87): The following globals are used by `mask_password` and
# `mask_dict_password`. They must all be lowercase.
_SANITIZE_KEYS = ['adminpass', 'admin_pass', 'password', 'admin_password',
'auth_token', 'new_pass', 'auth_password', 'secret_uuid',
'secret', 'sys_pswd', 'token', 'configdrive',
'chappassword', 'encrypted_key', 'private_key',
'fernetkey', 'sslkey', 'passphrase',
'cephclusterfsid', 'octaviaheartbeatkey', 'rabbitcookie',
'cephmanilaclientkey', 'pacemakerremoteauthkey',
'designaterndckey', 'cephadminkey', 'heatauthencryptionkey',
'cephclientkey', 'keystonecredential',
'barbicansimplecryptokek', 'cephrgwkey', 'swifthashsuffix',
'migrationsshkey', 'cephmdskey', 'cephmonkey', 'chapsecret']
_SANITIZE_KEYS = [
'adminpass',
'admin_pass',
'password',
'admin_password',
'auth_token',
'new_pass',
'auth_password',
'secret_uuid',
'secret',
'sys_pswd',
'token',
'configdrive',
'chappassword',
'encrypted_key',
'private_key',
'fernetkey',
'sslkey',
'passphrase',
'cephclusterfsid',
'octaviaheartbeatkey',
'rabbitcookie',
'cephmanilaclientkey',
'pacemakerremoteauthkey',
'designaterndckey',
'cephadminkey',
'heatauthencryptionkey',
'cephclientkey',
'keystonecredential',
'barbicansimplecryptokek',
'cephrgwkey',
'swifthashsuffix',
'migrationsshkey',
'cephmdskey',
'cephmonkey',
'chapsecret',
]
# NOTE(ldbragst): Let's build a list of regex objects using the list of
# _SANITIZE_KEYS we already have. This way, we only have to add the new key
@@ -89,20 +117,24 @@ _SANITIZE_PATTERNS_WILDCARD = {}
# NOTE(amrith): Some regular expressions have only one parameter, some
# have two parameters. Use different lists of patterns here.
_FORMAT_PATTERNS_1 = [r'(%(key)s[0-9]*\s*[=]\s*)[^\s^\'^\"]+']
_FORMAT_PATTERNS_2 = [r'(%(key)s[0-9]*\s*[=]\s*[\"\'])[^\"\']*([\"\'])',
r'(%(key)s[0-9]*\s*[=]\s*[\"])[^\"]*([\"])',
r'(%(key)s[0-9]*\s*[=]\s*[\'])[^\']*([\'])',
r'(%(key)s[0-9]*\s+[\"\'])[^\"\']*([\"\'])',
r'([-]{2}%(key)s[0-9]*\s+)[^\'^\"^=^\s]+([\s]*)',
r'(<%(key)s[0-9]*>)[^<]*(</%(key)s[0-9]*>)',
r'([\"\']%(key)s[0-9]*[\"\']\s*:\s*[\"\'])[^\"\']*'
r'([\"\'])',
r'([\'"][^"\']*%(key)s[0-9]*[\'"]\s*:\s*u?[\'"])[^\"\']*'
r'([\'"])',
r'([\'"][^\'"]*%(key)s[0-9]*[\'"]\s*,\s*\'--?[A-z]+'
r'\'\s*,\s*u?[\'"])[^\"\']*([\'"])',
r'(%(key)s[0-9]*\s*--?[A-z]+\s*)\S+(\s*)']
_FORMAT_PATTERNS_WILDCARD = [r'([\'\"][^\"\']*%(key)s[0-9]*[\'\"]\s*:\s*u?[\'\"].*[\'\"])[^\"\']*([\'\"])'] # noqa: E501
_FORMAT_PATTERNS_2 = [
r'(%(key)s[0-9]*\s*[=]\s*[\"\'])[^\"\']*([\"\'])',
r'(%(key)s[0-9]*\s*[=]\s*[\"])[^\"]*([\"])',
r'(%(key)s[0-9]*\s*[=]\s*[\'])[^\']*([\'])',
r'(%(key)s[0-9]*\s+[\"\'])[^\"\']*([\"\'])',
r'([-]{2}%(key)s[0-9]*\s+)[^\'^\"^=^\s]+([\s]*)',
r'(<%(key)s[0-9]*>)[^<]*(</%(key)s[0-9]*>)',
r'([\"\']%(key)s[0-9]*[\"\']\s*:\s*[\"\'])[^\"\']*'
r'([\"\'])',
r'([\'"][^"\']*%(key)s[0-9]*[\'"]\s*:\s*u?[\'"])[^\"\']*'
r'([\'"])',
r'([\'"][^\'"]*%(key)s[0-9]*[\'"]\s*,\s*\'--?[A-z]+'
r'\'\s*,\s*u?[\'"])[^\"\']*([\'"])',
r'(%(key)s[0-9]*\s*--?[A-z]+\s*)\S+(\s*)',
]
_FORMAT_PATTERNS_WILDCARD = [
r'([\'\"][^\"\']*%(key)s[0-9]*[\'\"]\s*:\s*u?[\'\"].*[\'\"])[^\"\']*([\'\"])'
] # noqa: E501
# NOTE(dhellmann): Keep a separate list of patterns by key so we only
# need to apply the substitutions for keys we find using a quick "in"
@@ -168,10 +200,12 @@ def bool_from_string(subject, strict=False, default=False):
return False
elif strict:
acceptable = ', '.join(
"'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
msg = _("Unrecognized value '%(val)s', acceptable values are:"
" %(acceptable)s") % {'val': subject,
'acceptable': acceptable}
f"'{s}'" for s in sorted(TRUE_STRINGS + FALSE_STRINGS)
)
msg = _(
"Unrecognized value '%(val)s', acceptable values are:"
" %(acceptable)s"
) % {'val': subject, 'acceptable': acceptable}
raise ValueError(msg)
else:
return default
@@ -195,16 +229,58 @@ def string_to_bytes(text, unit_system='IEC', return_int=False):
The units supported for IEC / mixed::
Kb(it), Kib(it), Mb(it), Mib(it), Gb(it), Gib(it), Tb(it), Tib(it),
Pb(it), Pib(it), Eb(it), Eib(it), Zb(it), Zib(it), Yb(it), Yib(it),
(
Kb(it),
Kib(it),
Mb(it),
Mib(it),
Gb(it),
Gib(it),
Tb(it),
Tib(it),
)
(
Pb(it),
Pib(it),
Eb(it),
Eib(it),
Zb(it),
Zib(it),
Yb(it),
Yib(it),
)
Rb(it), Rib(it), Qb(it), Qib(it)
KB, KiB, MB, MiB, GB, GiB, TB, TiB, PB, PiB, EB, EiB, ZB, ZiB,
(
KB,
KiB,
MB,
MiB,
GB,
GiB,
TB,
TiB,
PB,
PiB,
EB,
EiB,
ZB,
ZiB,
)
YB, YiB, RB, RiB, QB, QiB
The units supported for SI ::
kb(it), Mb(it), Gb(it), Tb(it), Pb(it), Eb(it), Zb(it), Yb(it),
(
kb(it),
Mb(it),
Gb(it),
Tb(it),
Pb(it),
Eb(it),
Zb(it),
Yb(it),
)
Rb(it), Qb(it)
kB, MB, GB, TB, PB, EB, ZB, YB, RB, QB
@@ -284,8 +360,11 @@ def to_slug(value, incoming=None, errors="strict"):
# NOTE(aababilov): no need to use safe_(encode|decode) here:
# encodings are always "ascii", error handling is always "ignore"
# and types are always known (first: unicode; second: str)
value = unicodedata.normalize("NFKD", value).encode(
"ascii", "ignore").decode("ascii")
value = (
unicodedata.normalize("NFKD", value)
.encode("ascii", "ignore")
.decode("ascii")
)
value = SLUGIFY_STRIP_RE.sub("", value).strip().lower()
return SLUGIFY_HYPHENATE_RE.sub("-", value)
@@ -430,8 +509,7 @@ def mask_dict_password(dictionary, secret="***"): # nosec
"""
if not isinstance(dictionary, collections.abc.Mapping):
raise TypeError("Expected a Mapping, got %s instead."
% type(dictionary))
raise TypeError(f"Expected a Mapping, got {type(dictionary)} instead.")
out = {}
for k, v in dictionary.items():
if isinstance(v, collections.abc.Mapping):
@@ -492,15 +570,15 @@ def check_string_length(value, name=None, min_length=0, max_length=None):
length = len(value)
if length < min_length:
msg = _("%(name)s has %(length)s characters, less than "
"%(min_length)s.") % {'name': name, 'length': length,
'min_length': min_length}
msg = _(
"%(name)s has %(length)s characters, less than %(min_length)s."
) % {'name': name, 'length': length, 'min_length': min_length}
raise ValueError(msg)
if max_length and length > max_length:
msg = _("%(name)s has %(length)s characters, more than "
"%(max_length)s.") % {'name': name, 'length': length,
'max_length': max_length}
msg = _(
"%(name)s has %(length)s characters, more than %(max_length)s."
) % {'name': name, 'length': length, 'max_length': max_length}
raise ValueError(msg)
@@ -519,18 +597,21 @@ def validate_integer(value, name, min_value=None, max_value=None):
try:
value = int(str(value))
except (ValueError, UnicodeEncodeError):
msg = _('%(value_name)s must be an integer'
) % {'value_name': name}
msg = _('%(value_name)s must be an integer') % {'value_name': name}
raise ValueError(msg)
if min_value is not None and value < min_value:
msg = _('%(value_name)s must be >= %(min_value)d'
) % {'value_name': name, 'min_value': min_value}
msg = _('%(value_name)s must be >= %(min_value)d') % {
'value_name': name,
'min_value': min_value,
}
raise ValueError(msg)
if max_value is not None and value > max_value:
msg = _('%(value_name)s must be <= %(max_value)d'
) % {'value_name': name, 'max_value': max_value}
msg = _('%(value_name)s must be <= %(max_value)d') % {
'value_name': name,
'max_value': max_value,
}
raise ValueError(msg)
return value
@@ -561,24 +642,34 @@ def split_path(path, minsegs=1, maxsegs=None, rest_with_last=False):
if not maxsegs:
maxsegs = minsegs
if minsegs > maxsegs:
raise ValueError(_('minsegs > maxsegs: %(min)d > %(max)d)') %
{'min': minsegs, 'max': maxsegs})
raise ValueError(
_('minsegs > maxsegs: %(min)d > %(max)d)')
% {'min': minsegs, 'max': maxsegs}
)
if rest_with_last:
segs = path.split('/', maxsegs)
minsegs += 1
maxsegs += 1
count = len(segs)
if (segs[0] or count < minsegs or count > maxsegs or
'' in segs[1:minsegs]):
if (
segs[0]
or count < minsegs
or count > maxsegs
or '' in segs[1:minsegs]
):
raise ValueError(_('Invalid path: %s') % urllib.parse.quote(path))
else:
minsegs += 1
maxsegs += 1
segs = path.split('/', maxsegs)
count = len(segs)
if (segs[0] or count < minsegs or count > maxsegs + 1 or
'' in segs[1:minsegs] or
(count == maxsegs + 1 and segs[maxsegs])):
if (
segs[0]
or count < minsegs
or count > maxsegs + 1
or '' in segs[1:minsegs]
or (count == maxsegs + 1 and segs[maxsegs])
):
raise ValueError(_('Invalid path: %s') % urllib.parse.quote(path))
segs = segs[1:maxsegs]
segs.extend([None] * (maxsegs - 1 - len(segs)))
@@ -595,13 +686,12 @@ def split_by_commas(value):
# pyparsing is a slow import; defer loading until we need it
import pyparsing as pp
word = (
pp.QuotedString(quoteChar='"', escChar='\\') |
pp.Word(pp.printables, excludeChars='",')
word = pp.QuotedString(quoteChar='"', escChar='\\') | pp.Word(
pp.printables, excludeChars='",'
)
grammar = pp.stringStart + pp.delimitedList(word) + pp.stringEnd
try:
return list(grammar.parseString(value))
except pp.ParseException:
raise ValueError("Invalid value: %s" % value)
raise ValueError(f"Invalid value: {value}")

View File

@@ -24,7 +24,6 @@ _TRUE_VALUES = ('true', '1', 'yes')
class TestCase(testtools.TestCase):
"""Test case base class for all unit tests."""
def setUp(self):

View File

@@ -13,16 +13,16 @@
# under the License.
class FakeDriver():
class FakeDriver:
def __init__(self, first_arg=True):
self.first_arg = first_arg
class FakeDriver2():
class FakeDriver2:
def __init__(self, first_arg):
self.first_arg = first_arg
class FakeDriver3():
class FakeDriver3:
def __init__(self):
raise ImportError("ImportError occurs in __init__")

File diff suppressed because it is too large Load Diff

View File

@@ -26,7 +26,6 @@ load_tests = testscenarios.load_tests_apply_scenarios
class ImageUtilsHumanRawTestCase(test_base.BaseTestCase):
_image_name = [
('disk_config', dict(image_name='disk.config')),
]
@@ -36,47 +35,59 @@ class ImageUtilsHumanRawTestCase(test_base.BaseTestCase):
]
_virtual_size = [
('64M', dict(virtual_size='64M',
exp_virtual_size=67108864)),
('64M_with_byte_hint', dict(virtual_size='64M (67108844 bytes)',
exp_virtual_size=67108844)),
('64M_byte', dict(virtual_size='67108844',
exp_virtual_size=67108844)),
('64_MiB_with_byte_hint', dict(virtual_size='64 MiB (67108844 bytes)',
exp_virtual_size=67108844)),
('4.4M', dict(virtual_size='4.4M',
exp_virtual_size=4613735)),
('4.4M_with_byte_hint', dict(virtual_size='4.4M (4592640 bytes)',
exp_virtual_size=4592640)),
('4.4_MiB_with_byte_hint', dict(virtual_size='4.4 MiB (4592640 bytes)',
exp_virtual_size=4592640)),
('2K', dict(virtual_size='2K',
exp_virtual_size=2048)),
('2K_with_byte_hint', dict(virtual_size='2K (2048 bytes)',
exp_virtual_size=2048)),
('2_KiB_with_byte_hint', dict(virtual_size='2 KiB (2048 bytes)',
exp_virtual_size=2048)),
('1e+03_MiB', dict(virtual_size='1e+03 MiB',
exp_virtual_size=1048576000)),
('64M', dict(virtual_size='64M', exp_virtual_size=67108864)),
(
'64M_with_byte_hint',
dict(
virtual_size='64M (67108844 bytes)', exp_virtual_size=67108844
),
),
('64M_byte', dict(virtual_size='67108844', exp_virtual_size=67108844)),
(
'64_MiB_with_byte_hint',
dict(
virtual_size='64 MiB (67108844 bytes)',
exp_virtual_size=67108844,
),
),
('4.4M', dict(virtual_size='4.4M', exp_virtual_size=4613735)),
(
'4.4M_with_byte_hint',
dict(
virtual_size='4.4M (4592640 bytes)', exp_virtual_size=4592640
),
),
(
'4.4_MiB_with_byte_hint',
dict(
virtual_size='4.4 MiB (4592640 bytes)',
exp_virtual_size=4592640,
),
),
('2K', dict(virtual_size='2K', exp_virtual_size=2048)),
(
'2K_with_byte_hint',
dict(virtual_size='2K (2048 bytes)', exp_virtual_size=2048),
),
(
'2_KiB_with_byte_hint',
dict(virtual_size='2 KiB (2048 bytes)', exp_virtual_size=2048),
),
(
'1e+03_MiB',
dict(virtual_size='1e+03 MiB', exp_virtual_size=1048576000),
),
]
_disk_size = [
('96K', dict(disk_size='96K',
exp_disk_size=98304)),
('96_KiB', dict(disk_size='96 KiB',
exp_disk_size=98304)),
('96K_byte', dict(disk_size='98304',
exp_disk_size=98304)),
('98304_B', dict(disk_size='98304 B',
exp_disk_size=98304)),
('3.1G', dict(disk_size='3.1G',
exp_disk_size=3328599655)),
('3.1_GiB', dict(disk_size='3.1 GiB',
exp_disk_size=3328599655)),
('unavailable', dict(disk_size='unavailable',
exp_disk_size=0)),
('1e+03_MiB', dict(disk_size='1e+03 MiB',
exp_disk_size=1048576000)),
('96K', dict(disk_size='96K', exp_disk_size=98304)),
('96_KiB', dict(disk_size='96 KiB', exp_disk_size=98304)),
('96K_byte', dict(disk_size='98304', exp_disk_size=98304)),
('98304_B', dict(disk_size='98304 B', exp_disk_size=98304)),
('3.1G', dict(disk_size='3.1G', exp_disk_size=3328599655)),
('3.1_GiB', dict(disk_size='3.1 GiB', exp_disk_size=3328599655)),
('unavailable', dict(disk_size='unavailable', exp_disk_size=0)),
('1e+03_MiB', dict(disk_size='1e+03 MiB', exp_disk_size=1048576000)),
]
_garbage_before_snapshot = [
@@ -99,26 +110,33 @@ class ImageUtilsHumanRawTestCase(test_base.BaseTestCase):
cls._virtual_size,
cls._disk_size,
cls._garbage_before_snapshot,
cls._snapshot_count)
cls._snapshot_count,
)
def _initialize_img_info(self):
return ('image: %s' % self.image_name,
'file_format: %s' % self.file_format,
'virtual_size: %s' % self.virtual_size,
'disk_size: %s' % self.disk_size)
return (
f'image: {self.image_name}',
f'file_format: {self.file_format}',
f'virtual_size: {self.virtual_size}',
f'disk_size: {self.disk_size}',
)
def _insert_snapshots(self, img_info):
img_info = img_info + ('Snapshot list:',)
img_info = img_info + ('ID '
'TAG '
'VM SIZE '
'DATE '
'VM CLOCK',)
img_info = img_info + (
'ID '
'TAG '
'VM SIZE '
'DATE '
'VM CLOCK',
)
for i in range(self.snapshot_count):
img_info = img_info + ('%d '
'd9a9784a500742a7bb95627bb3aace38 '
'0 2012-08-20 10:52:46 '
'00:00:00.000' % (i + 1),)
img_info = img_info + (
'%d '
'd9a9784a500742a7bb95627bb3aace38 '
'0 2012-08-20 10:52:46 '
'00:00:00.000' % (i + 1),
)
return img_info
def _base_validation(self, image_info):
@@ -149,7 +167,6 @@ ImageUtilsHumanRawTestCase.generate_scenarios()
class ImageUtilsHumanQemuTestCase(ImageUtilsHumanRawTestCase):
_file_format = [
('qcow2', dict(file_format='qcow2')),
]
@@ -165,19 +182,29 @@ class ImageUtilsHumanQemuTestCase(ImageUtilsHumanRawTestCase):
_qcow2_backing_file = [
('no_backing_file', dict(backing_file=None)),
('backing_file_path',
dict(backing_file='/var/lib/nova/a328c7998805951a_2',
exp_backing_file='/var/lib/nova/a328c7998805951a_2')),
('backing_file_path_with_actual_path',
dict(backing_file='/var/lib/nova/a328c7998805951a_2 '
'(actual path: /b/3a988059e51a_2)',
exp_backing_file='/b/3a988059e51a_2')),
(
'backing_file_path',
dict(
backing_file='/var/lib/nova/a328c7998805951a_2',
exp_backing_file='/var/lib/nova/a328c7998805951a_2',
),
),
(
'backing_file_path_with_actual_path',
dict(
backing_file='/var/lib/nova/a328c7998805951a_2 '
'(actual path: /b/3a988059e51a_2)',
exp_backing_file='/b/3a988059e51a_2',
),
),
]
_qcow2_backing_file_format = [
('no_backing_file_format', dict(backing_file_format=None)),
('backing_file_format', dict(backing_file_format='qcow2',
exp_backing_file_format='qcow2')),
(
'backing_file_format',
dict(backing_file_format='qcow2', exp_backing_file_format='qcow2'),
),
]
@classmethod
@@ -192,20 +219,21 @@ class ImageUtilsHumanQemuTestCase(ImageUtilsHumanRawTestCase):
cls._qcow2_cluster_size,
cls._qcow2_encrypted,
cls._qcow2_backing_file,
cls._qcow2_backing_file_format)
cls._qcow2_backing_file_format,
)
@mock.patch("debtcollector.deprecate")
def test_qemu_img_info_human_format(self, mock_deprecate):
img_info = self._initialize_img_info()
img_info = img_info + ('cluster_size: %s' % self.cluster_size,)
img_info = img_info + (f'cluster_size: {self.cluster_size}',)
if self.backing_file is not None:
img_info = img_info + ('backing file: %s' %
self.backing_file,)
img_info = img_info + (f'backing file: {self.backing_file}',)
if self.backing_file_format is not None:
img_info = img_info + ('backing file format: %s' %
self.backing_file_format,)
img_info = img_info + (
f'backing file format: {self.backing_file_format}',
)
if self.encrypted is not None:
img_info = img_info + ('encrypted: %s' % self.encrypted,)
img_info = img_info + (f'encrypted: {self.encrypted}',)
if self.garbage_before_snapshot is True:
img_info = img_info + ('blah BLAH: bb',)
if self.snapshot_count is not None:
@@ -219,11 +247,12 @@ class ImageUtilsHumanQemuTestCase(ImageUtilsHumanRawTestCase):
self._base_validation(image_info)
self.assertEqual(image_info.cluster_size, self.exp_cluster_size)
if self.backing_file is not None:
self.assertEqual(image_info.backing_file,
self.exp_backing_file)
self.assertEqual(image_info.backing_file, self.exp_backing_file)
if self.backing_file_format is not None:
self.assertEqual(image_info.backing_file_format,
self.exp_backing_file_format)
self.assertEqual(
image_info.backing_file_format,
self.exp_backing_file_format,
)
if self.encrypted is not None:
self.assertEqual(image_info.encrypted, self.encrypted)
@@ -233,11 +262,17 @@ ImageUtilsHumanQemuTestCase.generate_scenarios()
class ImageUtilsBlankTestCase(test_base.BaseTestCase):
def test_qemu_img_info_blank(self):
example_output = '\n'.join(['image: None', 'file_format: None',
'virtual_size: None', 'disk_size: None',
'cluster_size: None',
'backing_file: None',
'backing_file_format: None'])
example_output = '\n'.join(
[
'image: None',
'file_format: None',
'virtual_size: None',
'disk_size: None',
'cluster_size: None',
'backing_file: None',
'backing_file_format: None',
]
)
image_info = imageutils.QemuImgInfo()
self.assertEqual(str(image_info), example_output)
self.assertEqual(len(image_info.snapshots), 0)

View File

@@ -20,19 +20,18 @@ from oslo_utils import dictutils as du
class DictUtilsTestCase(test_base.BaseTestCase):
def test_flatten_dict_to_keypairs(self):
data = {'a': 'A', 'b': 'B',
'nested': {'a': 'A', 'b': 'B'}}
data = {'a': 'A', 'b': 'B', 'nested': {'a': 'A', 'b': 'B'}}
pairs = list(du.flatten_dict_to_keypairs(data))
self.assertEqual([('a', 'A'), ('b', 'B'),
('nested:a', 'A'), ('nested:b', 'B')],
pairs)
self.assertEqual(
[('a', 'A'), ('b', 'B'), ('nested:a', 'A'), ('nested:b', 'B')],
pairs,
)
def test_flatten_dict_to_keypairs_with_separator(self):
data = {'a': 'A', 'b': 'B',
'nested': {'a': 'A', 'b': 'B'}}
data = {'a': 'A', 'b': 'B', 'nested': {'a': 'A', 'b': 'B'}}
pairs = list(du.flatten_dict_to_keypairs(data, separator='.'))
self.assertEqual([('a', 'A'), ('b', 'B'),
('nested.a', 'A'), ('nested.b', 'B')],
pairs)
self.assertEqual(
[('a', 'A'), ('b', 'B'), ('nested.a', 'A'), ('nested.b', 'B')],
pairs,
)

View File

@@ -120,14 +120,15 @@ class EventletUtilsTest(test_base.BaseTestCase):
self.assertNotIn(m, str(w.message))
def test_invalid_patch_check(self):
self.assertRaises(ValueError,
eventletutils.warn_eventlet_not_patched,
['blah.blah'])
self.assertRaises(
ValueError, eventletutils.warn_eventlet_not_patched, ['blah.blah']
)
@mock.patch('oslo_utils.eventletutils._eventlet')
def test_event_api_compat(self, mock_eventlet):
with mock.patch('oslo_utils.eventletutils.is_monkey_patched',
return_value=True):
with mock.patch(
'oslo_utils.eventletutils.is_monkey_patched', return_value=True
):
e_event = eventletutils.Event()
self.assertIsInstance(e_event, eventletutils.EventletEvent)
@@ -135,8 +136,11 @@ class EventletUtilsTest(test_base.BaseTestCase):
t_event_cls = threading.Event
self.assertIsInstance(t_event, t_event_cls)
public_methods = [m for m in dir(t_event) if not m.startswith("_") and
callable(getattr(t_event, m))]
public_methods = [
m
for m in dir(t_event)
if not m.startswith("_") and callable(getattr(t_event, m))
]
for method in public_methods:
self.assertTrue(hasattr(e_event, method))

View File

@@ -31,19 +31,20 @@ class Fail2(excutils.CausedByException):
class CausedByTest(test_base.BaseTestCase):
def test_caused_by_explicit(self):
e = self.assertRaises(Fail1,
excutils.raise_with_cause,
Fail1, "I was broken",
cause=Fail2("I have been broken"))
e = self.assertRaises(
Fail1,
excutils.raise_with_cause,
Fail1,
"I was broken",
cause=Fail2("I have been broken"),
)
self.assertIsInstance(e.cause, Fail2)
e_p = e.pformat()
self.assertIn("I have been broken", e_p)
self.assertIn("Fail2", e_p)
def test_caused_by_implicit(self):
def raises_chained():
try:
raise Fail2("I have been broken")
@@ -58,9 +59,7 @@ class CausedByTest(test_base.BaseTestCase):
class SaveAndReraiseTest(test_base.BaseTestCase):
def test_save_and_reraise_exception_forced(self):
def _force_reraise():
try:
raise OSError("I broke")
@@ -72,7 +71,6 @@ class SaveAndReraiseTest(test_base.BaseTestCase):
self.assertRaises(IOError, _force_reraise)
def test_save_and_reraise_exception_capture_reraise(self):
def _force_reraise():
try:
raise OSError("I broke")
@@ -131,8 +129,9 @@ class SaveAndReraiseTest(test_base.BaseTestCase):
ctxt.reraise = False
@mock.patch('logging.getLogger')
def test_save_and_reraise_exception_dropped_no_reraise(self,
get_logger_mock):
def test_save_and_reraise_exception_dropped_no_reraise(
self, get_logger_mock
):
logger = get_logger_mock()
e = None
msg = 'second exception'
@@ -161,7 +160,6 @@ class SaveAndReraiseTest(test_base.BaseTestCase):
class ForeverRetryUncaughtExceptionsTest(test_base.BaseTestCase):
def setUp(self):
super().setUp()
@@ -177,7 +175,6 @@ class ForeverRetryUncaughtExceptionsTest(test_base.BaseTestCase):
@mock.patch.object(logging, 'exception')
@mock.patch.object(timeutils, 'now')
def test_exc_retrier_1exc_gives_1log(self, mock_now, mock_log):
self._exceptions = [
Exception('unexpected %d' % 1),
]
@@ -190,9 +187,11 @@ class ForeverRetryUncaughtExceptionsTest(test_base.BaseTestCase):
mock_log.assert_called_once_with(
'Unexpected exception occurred %d time(s)... retrying.' % 1
)
mock_now.assert_has_calls([
mock.call(),
])
mock_now.assert_has_calls(
[
mock.call(),
]
)
@mock.patch.object(logging, 'exception')
@mock.patch.object(timeutils, 'now')
@@ -218,9 +217,13 @@ class ForeverRetryUncaughtExceptionsTest(test_base.BaseTestCase):
self.assertEqual([], self._exceptions)
self.assertEqual(10, len(mock_now.mock_calls))
self.assertEqual(1, len(mock_log.mock_calls))
mock_log.assert_has_calls([
mock.call('Unexpected exception occurred 1 time(s)... retrying.'),
])
mock_log.assert_has_calls(
[
mock.call(
'Unexpected exception occurred 1 time(s)... retrying.'
),
]
)
@mock.patch.object(logging, 'exception')
@mock.patch.object(timeutils, 'now')
@@ -248,10 +251,16 @@ class ForeverRetryUncaughtExceptionsTest(test_base.BaseTestCase):
self.assertEqual([], self._exceptions)
self.assertEqual(4, len(mock_now.mock_calls))
self.assertEqual(2, len(mock_log.mock_calls))
mock_log.assert_has_calls([
mock.call('Unexpected exception occurred 1 time(s)... retrying.'),
mock.call('Unexpected exception occurred 1 time(s)... retrying.'),
])
mock_log.assert_has_calls(
[
mock.call(
'Unexpected exception occurred 1 time(s)... retrying.'
),
mock.call(
'Unexpected exception occurred 1 time(s)... retrying.'
),
]
)
@mock.patch.object(logging, 'exception')
@mock.patch.object(timeutils, 'now')
@@ -298,10 +307,16 @@ class ForeverRetryUncaughtExceptionsTest(test_base.BaseTestCase):
self.assertEqual([], self._exceptions)
self.assertEqual(12, len(mock_now.mock_calls))
self.assertEqual(2, len(mock_log.mock_calls))
mock_log.assert_has_calls([
mock.call('Unexpected exception occurred 1 time(s)... retrying.'),
mock.call('Unexpected exception occurred 5 time(s)... retrying.'),
])
mock_log.assert_has_calls(
[
mock.call(
'Unexpected exception occurred 1 time(s)... retrying.'
),
mock.call(
'Unexpected exception occurred 5 time(s)... retrying.'
),
]
)
@mock.patch.object(logging, 'exception')
@mock.patch.object(timeutils, 'now')
@@ -343,15 +358,20 @@ class ForeverRetryUncaughtExceptionsTest(test_base.BaseTestCase):
self.assertEqual([], self._exceptions)
self.assertEqual(5, len(mock_now.mock_calls))
self.assertEqual(2, len(mock_log.mock_calls))
mock_log.assert_has_calls([
mock.call('Unexpected exception occurred 1 time(s)... retrying.'),
mock.call('Unexpected exception occurred 1 time(s)... retrying.'),
])
mock_log.assert_has_calls(
[
mock.call(
'Unexpected exception occurred 1 time(s)... retrying.'
),
mock.call(
'Unexpected exception occurred 1 time(s)... retrying.'
),
]
)
@mock.patch.object(logging, 'exception')
@mock.patch.object(timeutils, 'now')
def test_exc_retrier_mixed_4exc_2min_gives_2logs(self, mock_now, mock_log):
self._exceptions = [
Exception('unexpected 1'),
]
@@ -383,10 +403,16 @@ class ForeverRetryUncaughtExceptionsTest(test_base.BaseTestCase):
self.assertEqual([], self._exceptions)
self.assertEqual(5, len(mock_now.mock_calls))
self.assertEqual(2, len(mock_log.mock_calls))
mock_log.assert_has_calls([
mock.call('Unexpected exception occurred 1 time(s)... retrying.'),
mock.call('Unexpected exception occurred 1 time(s)... retrying.'),
])
mock_log.assert_has_calls(
[
mock.call(
'Unexpected exception occurred 1 time(s)... retrying.'
),
mock.call(
'Unexpected exception occurred 1 time(s)... retrying.'
),
]
)
@mock.patch.object(logging, 'exception')
@mock.patch.object(timeutils, 'now')
@@ -419,15 +445,22 @@ class ForeverRetryUncaughtExceptionsTest(test_base.BaseTestCase):
self.assertEqual([], self._exceptions)
self.assertEqual(7, len(mock_now.mock_calls))
self.assertEqual(3, len(mock_log.mock_calls))
mock_log.assert_has_calls([
mock.call('Unexpected exception occurred 1 time(s)... retrying.'),
mock.call('Unexpected exception occurred 2 time(s)... retrying.'),
mock.call('Unexpected exception occurred 1 time(s)... retrying.'),
])
mock_log.assert_has_calls(
[
mock.call(
'Unexpected exception occurred 1 time(s)... retrying.'
),
mock.call(
'Unexpected exception occurred 2 time(s)... retrying.'
),
mock.call(
'Unexpected exception occurred 1 time(s)... retrying.'
),
]
)
class ExceptionFilterTest(test_base.BaseTestCase):
def _make_filter_func(self, ignore_classes=AssertionError):
@excutils.exception_filter
def ignore_exceptions(ex):
@@ -496,9 +529,9 @@ class ExceptionFilterTest(test_base.BaseTestCase):
raise RuntimeError
except Exception as exc2:
self.assertIsNot(exc1, exc2)
raised = self.assertRaises(RuntimeError,
ignore_assertion_error,
exc1)
raised = self.assertRaises(
RuntimeError, ignore_assertion_error, exc1
)
self.assertIs(exc1, raised)
def test_raise_previous_after_filtered_func_call(self):

View File

@@ -36,11 +36,12 @@ class EnsureTree(test_base.BaseTestCase):
def test_ensure_tree(self):
tmpdir = tempfile.mkdtemp()
try:
testdir = '{}/foo/bar/baz'.format(tmpdir)
testdir = f'{tmpdir}/foo/bar/baz'
fileutils.ensure_tree(testdir, TEST_PERMISSIONS)
self.assertTrue(os.path.isdir(testdir))
self.assertEqual(os.stat(testdir).st_mode,
TEST_PERMISSIONS | stat.S_IFDIR)
self.assertEqual(
os.stat(testdir).st_mode, TEST_PERMISSIONS | stat.S_IFDIR
)
finally:
if os.path.exists(tmpdir):
shutil.rmtree(tmpdir)
@@ -115,8 +116,8 @@ class RemovePathOnError(test_base.BaseTestCase):
try:
with fileutils.remove_path_on_error(
tmpdir,
lambda path: fileutils.delete_if_exists(path, os.rmdir)):
tmpdir, lambda path: fileutils.delete_if_exists(path, os.rmdir)
):
raise Exception
except Exception:
self.assertFalse(os.path.exists(tmpdir))
@@ -144,7 +145,7 @@ class WriteToTempfileTestCase(test_base.BaseTestCase):
def test_file_with_not_existing_path(self):
random_dir = uuid.uuid4().hex
path = '/tmp/%s/test1' % random_dir
path = f'/tmp/{random_dir}/test1'
res = fileutils.write_to_tempfile(self.content, path=path)
self.assertTrue(os.path.exists(res))
(basepath, tmpfile) = os.path.split(res)
@@ -169,10 +170,10 @@ class WriteToTempfileTestCase(test_base.BaseTestCase):
def test_file_with_not_existing_path_and_not_default_suffix(self):
suffix = '.txt'
random_dir = uuid.uuid4().hex
path = '/tmp/%s/test2' % random_dir
res = fileutils.write_to_tempfile(self.content,
path=path,
suffix=suffix)
path = f'/tmp/{random_dir}/test2'
res = fileutils.write_to_tempfile(
self.content, path=path, suffix=suffix
)
self.assertTrue(os.path.exists(res))
(basepath, tmpfile) = os.path.split(res)
self.assertTrue(tmpfile.startswith('tmp'))
@@ -195,7 +196,6 @@ class WriteToTempfileTestCase(test_base.BaseTestCase):
class TestComputeFileChecksum(test_base.BaseTestCase):
def setUp(self):
super().setUp()
self.content = b'fake_content'
@@ -227,7 +227,8 @@ class TestComputeFileChecksum(test_base.BaseTestCase):
with mock.patch.object(time, "sleep") as sleep_mock:
actual_checksum = fileutils.compute_file_checksum(
path, read_chunksize=4)
path, read_chunksize=4
)
sleep_mock.assert_has_calls([mock.call(0)] * 3)
# Just to make sure that there were exactly 3 calls
@@ -242,8 +243,9 @@ class TestComputeFileChecksum(test_base.BaseTestCase):
expected_checksum = hashlib.sha512()
expected_checksum.update(self.content)
actual_checksum = fileutils.compute_file_checksum(path,
algorithm='sha512')
actual_checksum = fileutils.compute_file_checksum(
path, algorithm='sha512'
)
self.assertEqual(expected_checksum.hexdigest(), actual_checksum)
@@ -252,8 +254,9 @@ class TestComputeFileChecksum(test_base.BaseTestCase):
self.assertTrue(os.path.exists(path))
self.check_file_content(self.content, path)
self.assertRaises(ValueError, fileutils.compute_file_checksum,
path, algorithm='foo')
self.assertRaises(
ValueError, fileutils.compute_file_checksum, path, algorithm='foo'
)
def test_file_does_not_exist(self):
random_file_name = uuid.uuid4().hex
@@ -287,8 +290,9 @@ class LastBytesTestCase(test_base.BaseTestCase):
self.assertEqual(0, unread_bytes)
def test_non_exist_file(self):
self.assertRaises(IOError, fileutils.last_bytes,
'non_exist_file', 1000)
self.assertRaises(
IOError, fileutils.last_bytes, 'non_exist_file', 1000
)
class FileTypeTestCase(test_base.BaseTestCase):
@@ -296,10 +300,7 @@ class FileTypeTestCase(test_base.BaseTestCase):
def setUp(self):
super().setUp()
data = {
'name': 'test',
'website': 'example.com'
}
data = {'name': 'test', 'website': 'example.com'}
temp_dir = tempfile.mkdtemp()
self.json_file = tempfile.mktemp(dir=temp_dir)
self.yaml_file = tempfile.mktemp(dir=temp_dir)

View File

@@ -25,7 +25,6 @@ from oslo_utils import uuidutils
class TimeFixtureTest(test_base.BaseTestCase):
def test_set_time_override_using_default(self):
# When the fixture is used with its default constructor, the
# override_time is set to the current timestamp.
@@ -66,7 +65,6 @@ class TimeFixtureTest(test_base.BaseTestCase):
class UUIDSentinelsTest(test_base.BaseTestCase):
def test_different_sentinel(self):
uuid1 = uuids.foobar
uuid2 = uuids.barfoo

View File

@@ -22,7 +22,6 @@ from oslo_utils import importutils
class ImportUtilsTest(test_base.BaseTestCase):
# NOTE(jkoelker) There has GOT to be a way to test this. But mocking
# __import__ is the devil. Right now we just make
# sure we can import something from the stdlib
@@ -31,8 +30,9 @@ class ImportUtilsTest(test_base.BaseTestCase):
self.assertEqual(sys.modules['datetime'].datetime, dt)
def test_import_bad_class(self):
self.assertRaises(ImportError, importutils.import_class,
'lol.u_mad.brah')
self.assertRaises(
ImportError, importutils.import_class, 'lol.u_mad.brah'
)
def test_import_module(self):
dt = importutils.import_module('datetime')
@@ -43,69 +43,88 @@ class ImportUtilsTest(test_base.BaseTestCase):
self.assertEqual(obj.__class__.__name__, 'FakeDriver')
def test_import_object_optional_arg_present(self):
obj = importutils.import_object('oslo_utils.tests.fake.FakeDriver',
first_arg=False)
obj = importutils.import_object(
'oslo_utils.tests.fake.FakeDriver', first_arg=False
)
self.assertEqual(obj.__class__.__name__, 'FakeDriver')
def test_import_object_required_arg_not_present(self):
# arg 1 isn't optional here
self.assertRaises(TypeError, importutils.import_object,
'oslo_utils.tests.fake.FakeDriver2')
self.assertRaises(
TypeError,
importutils.import_object,
'oslo_utils.tests.fake.FakeDriver2',
)
def test_import_object_required_arg_present(self):
obj = importutils.import_object('oslo_utils.tests.fake.FakeDriver2',
first_arg=False)
obj = importutils.import_object(
'oslo_utils.tests.fake.FakeDriver2', first_arg=False
)
self.assertEqual(obj.__class__.__name__, 'FakeDriver2')
# namespace tests
def test_import_object_ns_optional_arg_not_present(self):
obj = importutils.import_object_ns('oslo_utils',
'tests.fake.FakeDriver')
obj = importutils.import_object_ns(
'oslo_utils', 'tests.fake.FakeDriver'
)
self.assertEqual(obj.__class__.__name__, 'FakeDriver')
def test_import_object_ns_optional_arg_present(self):
obj = importutils.import_object_ns('oslo_utils',
'tests.fake.FakeDriver',
first_arg=False)
obj = importutils.import_object_ns(
'oslo_utils', 'tests.fake.FakeDriver', first_arg=False
)
self.assertEqual(obj.__class__.__name__, 'FakeDriver')
def test_import_object_ns_required_arg_not_present(self):
# arg 1 isn't optional here
self.assertRaises(TypeError, importutils.import_object_ns,
'oslo_utils', 'tests.fake.FakeDriver2')
self.assertRaises(
TypeError,
importutils.import_object_ns,
'oslo_utils',
'tests.fake.FakeDriver2',
)
def test_import_object_ns_required_arg_present(self):
obj = importutils.import_object_ns('oslo_utils',
'tests.fake.FakeDriver2',
first_arg=False)
obj = importutils.import_object_ns(
'oslo_utils', 'tests.fake.FakeDriver2', first_arg=False
)
self.assertEqual(obj.__class__.__name__, 'FakeDriver2')
# namespace tests
def test_import_object_ns_full_optional_arg_not_present(self):
obj = importutils.import_object_ns('tests2',
'oslo_utils.tests.fake.FakeDriver')
obj = importutils.import_object_ns(
'tests2', 'oslo_utils.tests.fake.FakeDriver'
)
self.assertEqual(obj.__class__.__name__, 'FakeDriver')
def test_import_object_ns_full_optional_arg_present(self):
obj = importutils.import_object_ns('tests2',
'oslo_utils.tests.fake.FakeDriver',
first_arg=False)
obj = importutils.import_object_ns(
'tests2', 'oslo_utils.tests.fake.FakeDriver', first_arg=False
)
self.assertEqual(obj.__class__.__name__, 'FakeDriver')
def test_import_object_ns_full_required_arg_not_present(self):
# arg 1 isn't optional here
self.assertRaises(TypeError, importutils.import_object_ns,
'tests2', 'oslo_utils.tests.fake.FakeDriver2')
self.assertRaises(
TypeError,
importutils.import_object_ns,
'tests2',
'oslo_utils.tests.fake.FakeDriver2',
)
def test_import_object_ns_full_required_arg_present(self):
obj = importutils.import_object_ns('tests2',
'oslo_utils.tests.fake.FakeDriver2',
first_arg=False)
obj = importutils.import_object_ns(
'tests2', 'oslo_utils.tests.fake.FakeDriver2', first_arg=False
)
self.assertEqual(obj.__class__.__name__, 'FakeDriver2')
def test_import_object_ns_raise_import_error_in_init(self):
self.assertRaises(ImportError, importutils.import_object_ns,
'tests2', 'oslo_utils.tests.fake.FakeDriver3')
self.assertRaises(
ImportError,
importutils.import_object_ns,
'tests2',
'oslo_utils.tests.fake.FakeDriver3',
)
def test_import_object(self):
dt = importutils.import_object('datetime.time')
@@ -120,20 +139,30 @@ class ImportUtilsTest(test_base.BaseTestCase):
v2 = importutils.import_versioned_module('oslo_utils.tests.fake', 2)
self.assertEqual(sys.modules['oslo_utils.tests.fake.v2'], v2)
dummpy = importutils.import_versioned_module('oslo_utils.tests.fake',
2, 'dummpy')
self.assertEqual(sys.modules['oslo_utils.tests.fake.v2.dummpy'],
dummpy)
dummpy = importutils.import_versioned_module(
'oslo_utils.tests.fake', 2, 'dummpy'
)
self.assertEqual(
sys.modules['oslo_utils.tests.fake.v2.dummpy'], dummpy
)
def test_import_versioned_module_wrong_version_parameter(self):
self.assertRaises(ValueError,
importutils.import_versioned_module,
'oslo_utils.tests.fake', "2.0", 'fake')
self.assertRaises(
ValueError,
importutils.import_versioned_module,
'oslo_utils.tests.fake',
"2.0",
'fake',
)
def test_import_versioned_module_error(self):
self.assertRaises(ImportError,
importutils.import_versioned_module,
'oslo_utils.tests.fake', 2, 'fake')
self.assertRaises(
ImportError,
importutils.import_versioned_module,
'oslo_utils.tests.fake',
2,
'fake',
)
def test_try_import(self):
dt = importutils.try_import('datetime')
@@ -144,8 +173,9 @@ class ImportUtilsTest(test_base.BaseTestCase):
self.assertIsNone(foo)
def test_import_any_none_found(self):
self.assertRaises(ImportError, importutils.import_any,
'foo.bar', 'foo.foo.bar')
self.assertRaises(
ImportError, importutils.import_any, 'foo.bar', 'foo.foo.bar'
)
def test_import_any_found(self):
dt = importutils.import_any('foo.bar', 'datetime')

View File

@@ -26,7 +26,6 @@ from oslo_utils import netutils
class NetworkUtilsTest(test_base.BaseTestCase):
def test_no_host(self):
result = netutils.urlsplit('http://')
self.assertEqual('', result.netloc)
@@ -35,24 +34,27 @@ class NetworkUtilsTest(test_base.BaseTestCase):
self.assertEqual('http', result.scheme)
def test_parse_host_port(self):
self.assertEqual(('server01', 80),
netutils.parse_host_port('server01:80'))
self.assertEqual(('server01', None),
netutils.parse_host_port('server01'))
self.assertEqual(('server01', 1234),
netutils.parse_host_port('server01',
default_port=1234))
self.assertEqual(('::1', 80),
netutils.parse_host_port('[::1]:80'))
self.assertEqual(('::1', None),
netutils.parse_host_port('[::1]'))
self.assertEqual(('::1', 1234),
netutils.parse_host_port('[::1]',
default_port=1234))
self.assertEqual(('2001:db8:85a3::8a2e:370:7334', 1234),
netutils.parse_host_port(
'2001:db8:85a3::8a2e:370:7334',
default_port=1234))
self.assertEqual(
('server01', 80), netutils.parse_host_port('server01:80')
)
self.assertEqual(
('server01', None), netutils.parse_host_port('server01')
)
self.assertEqual(
('server01', 1234),
netutils.parse_host_port('server01', default_port=1234),
)
self.assertEqual(('::1', 80), netutils.parse_host_port('[::1]:80'))
self.assertEqual(('::1', None), netutils.parse_host_port('[::1]'))
self.assertEqual(
('::1', 1234), netutils.parse_host_port('[::1]', default_port=1234)
)
self.assertEqual(
('2001:db8:85a3::8a2e:370:7334', 1234),
netutils.parse_host_port(
'2001:db8:85a3::8a2e:370:7334', default_port=1234
),
)
def test_urlsplit(self):
result = netutils.urlsplit('rpc://myhost?someparam#somefragment')
@@ -63,8 +65,8 @@ class NetworkUtilsTest(test_base.BaseTestCase):
self.assertEqual(result.fragment, 'somefragment')
result = netutils.urlsplit(
'rpc://myhost/mypath?someparam#somefragment',
allow_fragments=False)
'rpc://myhost/mypath?someparam#somefragment', allow_fragments=False
)
self.assertEqual(result.scheme, 'rpc')
self.assertEqual(result.netloc, 'myhost')
self.assertEqual(result.path, '/mypath')
@@ -73,7 +75,8 @@ class NetworkUtilsTest(test_base.BaseTestCase):
result = netutils.urlsplit(
'rpc://user:pass@myhost/mypath?someparam#somefragment',
allow_fragments=False)
allow_fragments=False,
)
self.assertEqual(result.scheme, 'rpc')
self.assertEqual(result.netloc, 'user:pass@myhost')
self.assertEqual(result.path, '/mypath')
@@ -130,23 +133,23 @@ class NetworkUtilsTest(test_base.BaseTestCase):
mock_sock = mock.Mock()
netutils.set_tcp_keepalive(mock_sock, True, 100, 10, 5)
calls = [
mock.call.setsockopt(socket.SOL_SOCKET,
socket.SO_KEEPALIVE, True),
mock.call.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, True),
]
if hasattr(socket, 'TCP_KEEPIDLE'):
calls += [
mock.call.setsockopt(socket.IPPROTO_TCP,
socket.TCP_KEEPIDLE, 100)
mock.call.setsockopt(
socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 100
)
]
if hasattr(socket, 'TCP_KEEPINTVL'):
calls += [
mock.call.setsockopt(socket.IPPROTO_TCP,
socket.TCP_KEEPINTVL, 10),
mock.call.setsockopt(
socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 10
),
]
if hasattr(socket, 'TCP_KEEPCNT'):
calls += [
mock.call.setsockopt(socket.IPPROTO_TCP,
socket.TCP_KEEPCNT, 5)
mock.call.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)
]
mock_sock.assert_has_calls(calls)
@@ -169,26 +172,14 @@ class NetworkUtilsTest(test_base.BaseTestCase):
mock_log.warning.assert_not_called()
mock_log.reset_mock()
self.assertFalse(
netutils.is_valid_ipv4('10', strict=True)
)
self.assertFalse(
netutils.is_valid_ipv4('10.10', strict=True)
)
self.assertFalse(
netutils.is_valid_ipv4('10.10.10', strict=True)
)
self.assertFalse(netutils.is_valid_ipv4('10', strict=True))
self.assertFalse(netutils.is_valid_ipv4('10.10', strict=True))
self.assertFalse(netutils.is_valid_ipv4('10.10.10', strict=True))
mock_log.warning.assert_not_called()
mock_log.reset_mock()
self.assertTrue(
netutils.is_valid_ipv4('10', strict=False)
)
self.assertTrue(
netutils.is_valid_ipv4('10.10', strict=False)
)
self.assertTrue(
netutils.is_valid_ipv4('10.10.10', strict=False)
)
self.assertTrue(netutils.is_valid_ipv4('10', strict=False))
self.assertTrue(netutils.is_valid_ipv4('10.10', strict=False))
self.assertTrue(netutils.is_valid_ipv4('10.10.10', strict=False))
mock_log.warning.assert_not_called()
mock_log.reset_mock()
@@ -199,16 +190,20 @@ class NetworkUtilsTest(test_base.BaseTestCase):
self.assertFalse(netutils.is_valid_ip('fe%80::1%eth0'))
self.assertFalse(netutils.is_valid_ipv6(
'1fff::a88:85a3::172.31.128.1'))
self.assertFalse(
netutils.is_valid_ipv6('1fff::a88:85a3::172.31.128.1')
)
self.assertFalse(netutils.is_valid_ipv6(''))
def test_get_noscope_ipv6(self):
self.assertEqual('2001:db8::ff00:42:8329',
netutils.get_noscope_ipv6('2001:db8::ff00:42:8329%1'))
self.assertEqual('ff02::5678',
netutils.get_noscope_ipv6('ff02::5678%eth0'))
self.assertEqual(
'2001:db8::ff00:42:8329',
netutils.get_noscope_ipv6('2001:db8::ff00:42:8329%1'),
)
self.assertEqual(
'ff02::5678', netutils.get_noscope_ipv6('ff02::5678%eth0')
)
self.assertEqual('fe80::1', netutils.get_noscope_ipv6('fe80::1%eth0'))
self.assertEqual('::1', netutils.get_noscope_ipv6('::1%eth0'))
self.assertEqual('::1', netutils.get_noscope_ipv6('::1'))
@@ -248,8 +243,11 @@ class NetworkUtilsTest(test_base.BaseTestCase):
self.assertTrue(netutils.is_valid_cidr('10.0.0.1/32'))
self.assertTrue(netutils.is_valid_cidr('0.0.0.0/0'))
self.assertTrue(netutils.is_valid_cidr('2600::/64'))
self.assertTrue(netutils.is_valid_cidr(
'0000:0000:0000:0000:0000:0000:0000:0001/32'))
self.assertTrue(
netutils.is_valid_cidr(
'0000:0000:0000:0000:0000:0000:0000:0001/32'
)
)
self.assertFalse(netutils.is_valid_cidr('10.0.0.1'))
self.assertFalse(netutils.is_valid_cidr('10.0.0.1/33'))
@@ -257,72 +255,107 @@ class NetworkUtilsTest(test_base.BaseTestCase):
def test_is_valid_ipv6_cidr(self):
self.assertTrue(netutils.is_valid_ipv6_cidr("2600::/64"))
self.assertTrue(netutils.is_valid_ipv6_cidr(
"abcd:ef01:2345:6789:abcd:ef01:192.168.254.254/48"))
self.assertTrue(netutils.is_valid_ipv6_cidr(
"0000:0000:0000:0000:0000:0000:0000:0001/32"))
self.assertTrue(netutils.is_valid_ipv6_cidr(
"0000:0000:0000:0000:0000:0000:0000:0001"))
self.assertTrue(
netutils.is_valid_ipv6_cidr(
"abcd:ef01:2345:6789:abcd:ef01:192.168.254.254/48"
)
)
self.assertTrue(
netutils.is_valid_ipv6_cidr(
"0000:0000:0000:0000:0000:0000:0000:0001/32"
)
)
self.assertTrue(
netutils.is_valid_ipv6_cidr(
"0000:0000:0000:0000:0000:0000:0000:0001"
)
)
self.assertFalse(netutils.is_valid_ipv6_cidr("foo"))
self.assertFalse(netutils.is_valid_ipv6_cidr("127.0.0.1"))
def test_valid_port(self):
valid_inputs = [0, '0', 1, '1', 2, '3', '5', 8, 13, 21,
'80', '3246', '65535']
valid_inputs = [
0,
'0',
1,
'1',
2,
'3',
'5',
8,
13,
21,
'80',
'3246',
'65535',
]
for input_str in valid_inputs:
self.assertTrue(netutils.is_valid_port(input_str))
def test_valid_port_fail(self):
invalid_inputs = ['-32768', '65536', 528491, '528491',
'528.491', 'thirty-seven', None]
invalid_inputs = [
'-32768',
'65536',
528491,
'528491',
'528.491',
'thirty-seven',
None,
]
for input_str in invalid_inputs:
self.assertFalse(netutils.is_valid_port(input_str))
def test_get_my_ipv4(self):
mock_sock = mock.Mock()
mock_sock.getsockname.return_value = ['1.2.3.4', '']
sock_attrs = {
'return_value.__enter__.return_value': mock_sock}
sock_attrs = {'return_value.__enter__.return_value': mock_sock}
with mock.patch('socket.socket', **sock_attrs):
addr = netutils.get_my_ipv4()
self.assertEqual(addr, '1.2.3.4')
def test_get_my_ipv4_disabled(self):
with (mock.patch('socket.socket', side_effect=socket.error()),
mock.patch('builtins.open', side_effect=FileNotFoundError())):
with (
mock.patch('socket.socket', side_effect=OSError()),
mock.patch('builtins.open', side_effect=FileNotFoundError()),
):
addr = netutils.get_my_ipv4()
self.assertEqual(addr, '127.0.0.1')
def test_get_my_ipv6(self):
mock_sock = mock.Mock()
mock_sock.getsockname.return_value = ['2001:db8::2', '', '', '']
sock_attrs = {
'return_value.__enter__.return_value': mock_sock}
sock_attrs = {'return_value.__enter__.return_value': mock_sock}
with mock.patch('socket.socket', **sock_attrs):
addr = netutils.get_my_ipv6()
self.assertEqual(addr, '2001:db8::2')
def test_get_my_ipv6_disabled(self):
with (mock.patch('socket.socket', side_effect=socket.error()),
mock.patch('builtins.open', side_effect=FileNotFoundError())):
with (
mock.patch('socket.socket', side_effect=OSError()),
mock.patch('builtins.open', side_effect=FileNotFoundError()),
):
addr = netutils.get_my_ipv6()
self.assertEqual(addr, '::1')
def test_is_int_in_range(self):
valid_inputs = [(1, -100, 100),
('1', -100, 100),
(100, -100, 100),
('100', -100, 100),
(-100, -100, 100),
('-100', -100, 100)]
valid_inputs = [
(1, -100, 100),
('1', -100, 100),
(100, -100, 100),
('100', -100, 100),
(-100, -100, 100),
('-100', -100, 100),
]
for input_value in valid_inputs:
self.assertTrue(netutils._is_int_in_range(*input_value))
def test_is_int_not_in_range(self):
invalid_inputs = [(None, 1, 100),
('ten', 1, 100),
(-1, 0, 255),
('None', 1, 100)]
invalid_inputs = [
(None, 1, 100),
('ten', 1, 100),
(-1, 0, 255),
('None', 1, 100),
]
for input_value in invalid_inputs:
self.assertFalse(netutils._is_int_in_range(*input_value))
@@ -365,56 +398,64 @@ class NetworkUtilsTest(test_base.BaseTestCase):
@mock.patch('builtins.open')
@mock.patch('psutil.net_if_addrs')
def test_get_my_ipv4_address_with_default_route(
self, mock_ifaddrs, mock_open):
self, mock_ifaddrs, mock_open
):
mock_open.return_value = io.StringIO(
"""Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT
eth0 00000000 01cc12ac 0003 0 0 600 00000000 0 0 0
eth0 00cc12ac 00000000 0001 0 0 600 00FFFFFF 0 0 0
eth1 00cd12ac 00000000 0001 0 0 600 00FFFFFF 0 0 0""") # noqa : E501
eth1 00cd12ac 00000000 0001 0 0 600 00FFFFFF 0 0 0"""
) # noqa : E501
addr = namedtuple('addr', ['family', 'address'])
mock_ifaddrs.return_value = {
'eth0': [
addr(family=socket.AF_INET, address='172.18.204.2'),
addr(family=socket.AF_INET6, address='2001:db8::2')
addr(family=socket.AF_INET6, address='2001:db8::2'),
],
'eth1': [
addr(family=socket.AF_INET, address='172.18.205.2'),
addr(family=socket.AF_INET6, address='2001:db8::1000::2')
]}
addr(family=socket.AF_INET6, address='2001:db8::1000::2'),
],
}
self.assertEqual('172.18.204.2', netutils._get_my_ipv4_address())
mock_open.assert_called_once_with('/proc/net/route')
@mock.patch('builtins.open')
@mock.patch('psutil.net_if_addrs')
def test_get_my_ipv6_address_with_default_route(
self, mock_ifaddrs, mock_open):
self, mock_ifaddrs, mock_open
):
mock_open.return_value = io.StringIO(
"""00000000000000000000000000000000 00 00000000000000000000000000000000 00 20010db8000000000000000000000001 00000000 00000000 00000000 08000000 eth0
20010db8000000000000000000000000 31 00000000000000000000000000000000 00 00000000000000000000000000000000 00000000 00000000 00000000 08000000 eth0
20010db8100000000000000000000000 31 00000000000000000000000000000000 00 00000000000000000000000000000000 00000000 00000000 00000000 08000000 eth1""") # noqa: E501
20010db8100000000000000000000000 31 00000000000000000000000000000000 00 00000000000000000000000000000000 00000000 00000000 00000000 08000000 eth1"""
) # noqa: E501
addr = namedtuple('addr', ['family', 'address'])
mock_ifaddrs.return_value = {
'eth0': [
addr(family=socket.AF_INET, address='172.18.204.2'),
addr(family=socket.AF_INET6, address='2001:db8::2')
addr(family=socket.AF_INET6, address='2001:db8::2'),
],
'eth1': [
addr(family=socket.AF_INET, address='172.18.205.2'),
addr(family=socket.AF_INET6, address='2001:db8::1000::2')
]}
addr(family=socket.AF_INET6, address='2001:db8::1000::2'),
],
}
self.assertEqual('2001:db8::2', netutils._get_my_ipv6_address())
mock_open.assert_called_once_with('/proc/net/ipv6_route')
@mock.patch('builtins.open')
@mock.patch('psutil.net_if_addrs')
def test_get_my_ipv4_address_without_default_route(
self, mock_ifaddrs, mock_open):
self, mock_ifaddrs, mock_open
):
mock_open.return_value = io.StringIO(
"""Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT
eth0 00cc12ac 00000000 0001 0 0 600 00FFFFFF 0 0 0
eth1 00cd12ac 00000000 0001 0 0 600 00FFFFFF 0 0 0""") # noqa : E501
eth1 00cd12ac 00000000 0001 0 0 600 00FFFFFF 0 0 0"""
) # noqa : E501
self.assertEqual('127.0.0.1', netutils._get_my_ipv4_address())
mock_open.assert_called_once_with('/proc/net/route')
@@ -423,10 +464,12 @@ eth1 00cd12ac 00000000 0001 0 0 600 00FFFFFF 0 0 0""") # noqa : E501
@mock.patch('builtins.open')
@mock.patch('psutil.net_if_addrs')
def test_get_my_ipv6_address_without_default_route(
self, mock_ifaddrs, mock_open):
self, mock_ifaddrs, mock_open
):
mock_open.return_value = io.StringIO(
"""20010db8000000000000000000000000 31 00000000000000000000000000000000 00 00000000000000000000000000000000 00000000 00000000 00000000 08000000 eth0
20010db8100000000000000000000000 31 00000000000000000000000000000000 00 00000000000000000000000000000000 00000000 00000000 00000000 08000000 eth1""") # noqa: E501
20010db8100000000000000000000000 31 00000000000000000000000000000000 00 00000000000000000000000000000000 00000000 00000000 00000000 08000000 eth1"""
) # noqa: E501
self.assertEqual('::1', netutils._get_my_ipv6_address())
mock_open.assert_called_once_with('/proc/net/ipv6_route')
@@ -437,39 +480,48 @@ class IPv6byEUI64TestCase(test_base.BaseTestCase):
"""Unit tests to generate IPv6 by EUI-64 operations."""
def test_generate_IPv6_by_EUI64(self):
addr = netutils.get_ipv6_addr_by_EUI64('2001:db8::',
'00:16:3e:33:44:55')
addr = netutils.get_ipv6_addr_by_EUI64(
'2001:db8::', '00:16:3e:33:44:55'
)
self.assertEqual('2001:db8::216:3eff:fe33:4455', addr.format())
def test_generate_IPv6_with_IPv4_prefix(self):
ipv4_prefix = '10.0.8'
mac = '00:16:3e:33:44:55'
self.assertRaises(ValueError, lambda:
netutils.get_ipv6_addr_by_EUI64(ipv4_prefix, mac))
self.assertRaises(
ValueError,
lambda: netutils.get_ipv6_addr_by_EUI64(ipv4_prefix, mac),
)
def test_generate_IPv6_with_bad_mac(self):
bad_mac = '00:16:3e:33:44:5Z'
prefix = '2001:db8::'
self.assertRaises(ValueError, lambda:
netutils.get_ipv6_addr_by_EUI64(prefix, bad_mac))
self.assertRaises(
ValueError,
lambda: netutils.get_ipv6_addr_by_EUI64(prefix, bad_mac),
)
def test_generate_IPv6_with_bad_prefix(self):
mac = '00:16:3e:33:44:55'
bad_prefix = 'bb'
self.assertRaises(ValueError, lambda:
netutils.get_ipv6_addr_by_EUI64(bad_prefix, mac))
self.assertRaises(
ValueError,
lambda: netutils.get_ipv6_addr_by_EUI64(bad_prefix, mac),
)
def test_generate_IPv6_with_error_prefix_type(self):
mac = '00:16:3e:33:44:55'
prefix = 123
self.assertRaises(TypeError, lambda:
netutils.get_ipv6_addr_by_EUI64(prefix, mac))
self.assertRaises(
TypeError, lambda: netutils.get_ipv6_addr_by_EUI64(prefix, mac)
)
def test_generate_IPv6_with_empty_prefix(self):
mac = '00:16:3e:33:44:55'
prefix = ''
self.assertRaises(ValueError, lambda:
netutils.get_ipv6_addr_by_EUI64(prefix, mac))
self.assertRaises(
ValueError, lambda: netutils.get_ipv6_addr_by_EUI64(prefix, mac)
)
class MACbyIPv6TestCase(test_base.BaseTestCase):
@@ -479,28 +531,32 @@ class MACbyIPv6TestCase(test_base.BaseTestCase):
self.assertEqual(
netaddr.EUI('00:16:3e:33:44:55'),
netutils.get_mac_addr_by_ipv6(
netaddr.IPAddress('2001:db8::216:3eff:fe33:4455')),
netaddr.IPAddress('2001:db8::216:3eff:fe33:4455')
),
)
def test_random_qemu_mac(self):
self.assertEqual(
netaddr.EUI('52:54:00:42:02:19'),
netutils.get_mac_addr_by_ipv6(
netaddr.IPAddress('fe80::5054:ff:fe42:219')),
netaddr.IPAddress('fe80::5054:ff:fe42:219')
),
)
def test_local(self):
self.assertEqual(
netaddr.EUI('02:00:00:00:00:00'),
netutils.get_mac_addr_by_ipv6(
netaddr.IPAddress('fe80::ff:fe00:0')),
netaddr.IPAddress('fe80::ff:fe00:0')
),
)
def test_universal(self):
self.assertEqual(
netaddr.EUI('00:00:00:00:00:00'),
netutils.get_mac_addr_by_ipv6(
netaddr.IPAddress('fe80::200:ff:fe00:0')),
netaddr.IPAddress('fe80::200:ff:fe00:0')
),
)
@@ -511,12 +567,12 @@ def mock_file_content(content):
class TestIsIPv6Enabled(test_base.BaseTestCase):
def setUp(self):
super().setUp()
def reset_detection_flag():
netutils._IS_IPV6_ENABLED = None
reset_detection_flag()
self.addCleanup(reset_detection_flag)
@@ -533,8 +589,7 @@ class TestIsIPv6Enabled(test_base.BaseTestCase):
self.assertFalse(enabled)
@mock.patch('os.path.exists', return_value=False)
@mock.patch('builtins.open',
side_effect=AssertionError('should not read'))
@mock.patch('builtins.open', side_effect=AssertionError('should not read'))
def test_disabled_non_exists(self, mock_open, exists):
enabled = netutils.is_ipv6_enabled()
self.assertFalse(enabled)
@@ -543,15 +598,17 @@ class TestIsIPv6Enabled(test_base.BaseTestCase):
def test_memoize_enabled(self, exists):
# Reset the flag to appear that we haven't looked for it yet.
netutils._IS_IPV6_ENABLED = None
with mock.patch('builtins.open',
return_value=mock_file_content('0')) as mock_open:
with mock.patch(
'builtins.open', return_value=mock_file_content('0')
) as mock_open:
enabled = netutils.is_ipv6_enabled()
self.assertTrue(mock_open.called)
self.assertTrue(netutils._IS_IPV6_ENABLED)
self.assertTrue(enabled)
# The second call should not use open again
with mock.patch('builtins.open',
side_effect=AssertionError('should not be called')):
with mock.patch(
'builtins.open', side_effect=AssertionError('should not be called')
):
enabled = netutils.is_ipv6_enabled()
self.assertTrue(enabled)
@@ -559,19 +616,18 @@ class TestIsIPv6Enabled(test_base.BaseTestCase):
def test_memoize_disabled(self, exists):
# Reset the flag to appear that we haven't looked for it yet.
netutils._IS_IPV6_ENABLED = None
with mock.patch('builtins.open',
return_value=mock_file_content('1')):
with mock.patch('builtins.open', return_value=mock_file_content('1')):
enabled = netutils.is_ipv6_enabled()
self.assertFalse(enabled)
# The second call should not use open again
with mock.patch('builtins.open',
side_effect=AssertionError('should not be called')):
with mock.patch(
'builtins.open', side_effect=AssertionError('should not be called')
):
enabled = netutils.is_ipv6_enabled()
self.assertFalse(enabled)
@mock.patch('os.path.exists', return_value=False)
@mock.patch('builtins.open',
side_effect=AssertionError('should not read'))
@mock.patch('builtins.open', side_effect=AssertionError('should not read'))
def test_memoize_not_exists(self, mock_open, exists):
# Reset the flag to appear that we haven't looked for it yet.
netutils._IS_IPV6_ENABLED = None

View File

@@ -20,12 +20,14 @@ from oslo_utils import reflection
RUNTIME_ERROR_CLASSES = [
'RuntimeError', 'Exception', 'BaseException', 'object',
'RuntimeError',
'Exception',
'BaseException',
'object',
]
def dummy_decorator(f):
@functools.wraps(f)
def wrapper(*args, **kwargs):
return f(*args, **kwargs)
@@ -54,7 +56,6 @@ class TestObject:
class Class:
def method(self, c, d):
pass
@@ -110,7 +111,6 @@ class MemberGetTest(test_base.BaseTestCase):
class CallbackEqualityTest(test_base.BaseTestCase):
def test_different_simple_callbacks(self):
def a():
pass
@@ -120,9 +120,7 @@ class CallbackEqualityTest(test_base.BaseTestCase):
self.assertFalse(reflection.is_same_callback(a, b))
def test_static_instance_callbacks(self):
class A:
@staticmethod
def b(a, b, c):
pass
@@ -133,7 +131,6 @@ class CallbackEqualityTest(test_base.BaseTestCase):
self.assertTrue(reflection.is_same_callback(a.b, b.b))
def test_different_instance_callbacks(self):
class A:
def b(self):
pass
@@ -161,7 +158,6 @@ class BoundMethodTest(test_base.BaseTestCase):
class GetCallableNameTest(test_base.BaseTestCase):
def test_mere_function(self):
name = reflection.get_callable_name(mere_function)
self.assertEqual('.'.join((__name__, 'mere_function')), name)
@@ -192,8 +188,9 @@ class GetCallableNameTest(test_base.BaseTestCase):
def test_callable_class_call(self):
name = reflection.get_callable_name(CallableClass().__call__)
self.assertEqual('.'.join((__name__, 'CallableClass',
'__call__')), name)
self.assertEqual(
'.'.join((__name__, 'CallableClass', '__call__')), name
)
class GetCallableNameTestExtended(test_base.BaseTestCase):
@@ -206,35 +203,47 @@ class GetCallableNameTestExtended(test_base.BaseTestCase):
def test_inner_callable_class(self):
obj = self.InnerCallableClass()
name = reflection.get_callable_name(obj.__call__)
expected_name = '.'.join((__name__, 'GetCallableNameTestExtended',
'InnerCallableClass', '__call__'))
expected_name = '.'.join(
(
__name__,
'GetCallableNameTestExtended',
'InnerCallableClass',
'__call__',
)
)
self.assertEqual(expected_name, name)
def test_inner_callable_function(self):
def a():
def b():
pass
return b
name = reflection.get_callable_name(a())
expected_name = '.'.join((__name__, 'GetCallableNameTestExtended',
'test_inner_callable_function', '<locals>',
'a', '<locals>', 'b'))
expected_name = '.'.join(
(
__name__,
'GetCallableNameTestExtended',
'test_inner_callable_function',
'<locals>',
'a',
'<locals>',
'b',
)
)
self.assertEqual(expected_name, name)
def test_inner_class(self):
obj = self.InnerCallableClass()
name = reflection.get_callable_name(obj)
expected_name = '.'.join((__name__,
'GetCallableNameTestExtended',
'InnerCallableClass'))
expected_name = '.'.join(
(__name__, 'GetCallableNameTestExtended', 'InnerCallableClass')
)
self.assertEqual(expected_name, name)
class GetCallableArgsTest(test_base.BaseTestCase):
def test_mere_function(self):
result = reflection.get_callable_args(mere_function)
self.assertEqual(['a', 'b'], result)
@@ -244,8 +253,9 @@ class GetCallableArgsTest(test_base.BaseTestCase):
self.assertEqual(['a', 'b', 'optional'], result)
def test_required_only(self):
result = reflection.get_callable_args(function_with_defs,
required_only=True)
result = reflection.get_callable_args(
function_with_defs, required_only=True
)
self.assertEqual(['a', 'b'], result)
def test_method(self):
@@ -272,12 +282,12 @@ class GetCallableArgsTest(test_base.BaseTestCase):
@dummy_decorator
def special_fun(x, y):
pass
result = reflection.get_callable_args(special_fun)
self.assertEqual(['x', 'y'], result)
class AcceptsKwargsTest(test_base.BaseTestCase):
def test_no_kwargs(self):
self.assertEqual(False, reflection.accepts_kwargs(mere_function))
@@ -286,7 +296,6 @@ class AcceptsKwargsTest(test_base.BaseTestCase):
class GetClassNameTest(test_base.BaseTestCase):
def test_std_exception(self):
name = reflection.get_class_name(RuntimeError)
self.assertEqual('RuntimeError', name)
@@ -312,36 +321,37 @@ class GetClassNameTest(test_base.BaseTestCase):
def test_class_method(self):
name = reflection.get_class_name(Class.class_method)
self.assertEqual('%s.Class' % __name__, name)
self.assertEqual(f'{__name__}.Class', name)
# test with fully_qualified=False
name = reflection.get_class_name(Class.class_method,
fully_qualified=False)
name = reflection.get_class_name(
Class.class_method, fully_qualified=False
)
self.assertEqual('Class', name)
def test_static_method(self):
self.assertRaises(TypeError, reflection.get_class_name,
Class.static_method)
self.assertRaises(
TypeError, reflection.get_class_name, Class.static_method
)
def test_unbound_method(self):
self.assertRaises(TypeError, reflection.get_class_name,
mere_function)
self.assertRaises(TypeError, reflection.get_class_name, mere_function)
def test_bound_method(self):
c = Class()
name = reflection.get_class_name(c.method)
self.assertEqual('%s.Class' % __name__, name)
self.assertEqual(f'{__name__}.Class', name)
# test with fully_qualified=False
name = reflection.get_class_name(c.method, fully_qualified=False)
self.assertEqual('Class', name)
class GetAllClassNamesTest(test_base.BaseTestCase):
def test_std_class(self):
names = list(reflection.get_all_class_names(RuntimeError))
self.assertEqual(RUNTIME_ERROR_CLASSES, names)
def test_std_class_up_to(self):
names = list(reflection.get_all_class_names(RuntimeError,
up_to=Exception))
names = list(
reflection.get_all_class_names(RuntimeError, up_to=Exception)
)
self.assertEqual(RUNTIME_ERROR_CLASSES[:-2], names)

View File

@@ -21,11 +21,10 @@ import testscenarios
from oslo_utils import secretutils
class SecretUtilsTest(testscenarios.TestWithScenarios,
test_base.BaseTestCase):
_gen_digest = lambda text: hmac.new(b'foo', text.encode('utf-8'),
digestmod=hashlib.sha1).digest()
class SecretUtilsTest(testscenarios.TestWithScenarios, test_base.BaseTestCase):
_gen_digest = lambda text: hmac.new(
b'foo', text.encode('utf-8'), digestmod=hashlib.sha1
).digest()
scenarios = [
('binary', {'converter': _gen_digest}),
('unicode', {'converter': lambda text: text}),
@@ -38,12 +37,14 @@ class SecretUtilsTest(testscenarios.TestWithScenarios,
digest = secretutils.md5(self._test_data).digest()
self.assertEqual(digest, self._md5_digest)
digest = secretutils.md5(self._test_data,
usedforsecurity=True).digest()
digest = secretutils.md5(
self._test_data, usedforsecurity=True
).digest()
self.assertEqual(digest, self._md5_digest)
digest = secretutils.md5(self._test_data,
usedforsecurity=False).digest()
digest = secretutils.md5(
self._test_data, usedforsecurity=False
).digest()
self.assertEqual(digest, self._md5_digest)
def test_md5_without_data(self):
@@ -66,17 +67,21 @@ class SecretUtilsTest(testscenarios.TestWithScenarios,
self.assertRaises(TypeError, hashlib.md5, 'foo')
self.assertRaises(TypeError, secretutils.md5, 'foo')
self.assertRaises(
TypeError, secretutils.md5, 'foo', usedforsecurity=True)
TypeError, secretutils.md5, 'foo', usedforsecurity=True
)
self.assertRaises(
TypeError, secretutils.md5, 'foo', usedforsecurity=False)
TypeError, secretutils.md5, 'foo', usedforsecurity=False
)
def test_none_data_raises_type_error(self):
self.assertRaises(TypeError, hashlib.md5, None)
self.assertRaises(TypeError, secretutils.md5, None)
self.assertRaises(
TypeError, secretutils.md5, None, usedforsecurity=True)
TypeError, secretutils.md5, None, usedforsecurity=True
)
self.assertRaises(
TypeError, secretutils.md5, None, usedforsecurity=False)
TypeError, secretutils.md5, None, usedforsecurity=False
)
def test_password_mksalt(self):
self.assertRaises(ValueError, secretutils.crypt_mksalt, 'MD5')
@@ -90,8 +95,10 @@ class SecretUtilsTest(testscenarios.TestWithScenarios,
def test_password_crypt(self):
self.assertEqual(
'$5$mysalt$fcnMdhaFpUmeWtGOgVuImueZGL1v0Q1kUVbV2NbFOX4',
secretutils.crypt_password('mytopsecret', '$5$mysalt$'))
secretutils.crypt_password('mytopsecret', '$5$mysalt$'),
)
self.assertEqual(
'$6$mysalt$jTEJ24XtvcWmav/sTQb1tYqmk1kBQD/sxcMIxEPUcie'
'J8L9AuCTWxYlxGz.XtIQYWspWkUXQz9zPIFTSKubP6.',
secretutils.crypt_password('mytopsecret', '$6$mysalt$'))
secretutils.crypt_password('mytopsecret', '$6$mysalt$'),
)

View File

@@ -21,609 +21,451 @@ class SpecsMatcherTestCase(test_base.BaseTestCase):
assertion(specs_matcher.match(value, req))
def test_specs_matches_simple(self):
self._do_specs_matcher_test(
value='1',
req='1',
matches=True)
self._do_specs_matcher_test(value='1', req='1', matches=True)
def test_specs_fails_string_vs_int(self):
# With no operator specified it is a string comparison test, therefore
# '1' does not equal '01'
self._do_specs_matcher_test(
value='01',
req='1',
matches=False)
self._do_specs_matcher_test(value='01', req='1', matches=False)
def test_specs_match_int_leading_zero(self):
# Show that numerical comparison works with leading zero
self._do_specs_matcher_test(
value='01',
req='== 1',
matches=True)
self._do_specs_matcher_test(value='01', req='== 1', matches=True)
def test_specs_fails_simple(self):
self._do_specs_matcher_test(
value='',
req='1',
matches=False)
self._do_specs_matcher_test(value='', req='1', matches=False)
def test_specs_fails_simple2(self):
self._do_specs_matcher_test(
value='3',
req='1',
matches=False)
self._do_specs_matcher_test(value='3', req='1', matches=False)
def test_specs_fails_simple3(self):
self._do_specs_matcher_test(
value='222',
req='2',
matches=False)
self._do_specs_matcher_test(value='222', req='2', matches=False)
def test_specs_fails_with_bogus_ops(self):
self._do_specs_matcher_test(
value='4',
req='! 2',
matches=False)
self._do_specs_matcher_test(value='4', req='! 2', matches=False)
def test_specs_matches_with_op_eq(self):
self._do_specs_matcher_test(
value='123',
req='= 123',
matches=True)
self._do_specs_matcher_test(value='123', req='= 123', matches=True)
def test_specs_matches_with_op_eq2(self):
self._do_specs_matcher_test(
value='124',
req='= 123',
matches=True)
self._do_specs_matcher_test(value='124', req='= 123', matches=True)
def test_specs_fails_with_op_eq(self):
self._do_specs_matcher_test(
value='34',
req='= 234',
matches=False)
self._do_specs_matcher_test(value='34', req='= 234', matches=False)
def test_specs_fails_with_op_eq3(self):
self._do_specs_matcher_test(
value='34',
req='=',
matches=False)
self._do_specs_matcher_test(value='34', req='=', matches=False)
def test_specs_matches_with_op_seq(self):
self._do_specs_matcher_test(
value='123',
req='s== 123',
matches=True)
self._do_specs_matcher_test(value='123', req='s== 123', matches=True)
def test_specs_fails_with_op_seq(self):
self._do_specs_matcher_test(
value='1234',
req='s== 123',
matches=False)
self._do_specs_matcher_test(value='1234', req='s== 123', matches=False)
def test_specs_matches_with_op_sneq(self):
self._do_specs_matcher_test(
value='1234',
req='s!= 123',
matches=True)
self._do_specs_matcher_test(value='1234', req='s!= 123', matches=True)
def test_specs_fails_with_op_sneq(self):
self._do_specs_matcher_test(
value='123',
req='s!= 123',
matches=False)
self._do_specs_matcher_test(value='123', req='s!= 123', matches=False)
def test_specs_matches_with_op_sge(self):
self._do_specs_matcher_test(
value='234',
req='s>= 1000',
matches=True)
self._do_specs_matcher_test(value='234', req='s>= 1000', matches=True)
def test_specs_matches_with_op_sge2(self):
self._do_specs_matcher_test(
value='234',
req='s>= 234',
matches=True)
self._do_specs_matcher_test(value='234', req='s>= 234', matches=True)
def test_specs_fails_with_op_sge(self):
self._do_specs_matcher_test(
value='1000',
req='s>= 234',
matches=False)
self._do_specs_matcher_test(value='1000', req='s>= 234', matches=False)
def test_specs_matches_with_op_sle(self):
self._do_specs_matcher_test(
value='1000',
req='s<= 1234',
matches=True)
self._do_specs_matcher_test(value='1000', req='s<= 1234', matches=True)
def test_specs_matches_with_op_sle2(self):
self._do_specs_matcher_test(
value='1234',
req='s<= 1234',
matches=True)
self._do_specs_matcher_test(value='1234', req='s<= 1234', matches=True)
def test_specs_fails_with_op_sle(self):
self._do_specs_matcher_test(
value='1234',
req='s<= 1000',
matches=False)
value='1234', req='s<= 1000', matches=False
)
def test_specs_matches_with_op_sl(self):
self._do_specs_matcher_test(
value='12',
req='s< 2',
matches=True)
self._do_specs_matcher_test(value='12', req='s< 2', matches=True)
def test_specs_fails_with_op_sl(self):
self._do_specs_matcher_test(
value='2',
req='s< 12',
matches=False)
self._do_specs_matcher_test(value='2', req='s< 12', matches=False)
def test_specs_fails_with_op_sl2(self):
self._do_specs_matcher_test(
value='12',
req='s< 12',
matches=False)
self._do_specs_matcher_test(value='12', req='s< 12', matches=False)
def test_specs_matches_with_op_sg(self):
self._do_specs_matcher_test(
value='2',
req='s> 12',
matches=True)
self._do_specs_matcher_test(value='2', req='s> 12', matches=True)
def test_specs_fails_with_op_sg(self):
self._do_specs_matcher_test(
value='12',
req='s> 2',
matches=False)
self._do_specs_matcher_test(value='12', req='s> 2', matches=False)
def test_specs_fails_with_op_sg2(self):
self._do_specs_matcher_test(
value='12',
req='s> 12',
matches=False)
self._do_specs_matcher_test(value='12', req='s> 12', matches=False)
def test_specs_matches_with_op_in(self):
self._do_specs_matcher_test(
value='12311321',
req='<in> 11',
matches=True)
value='12311321', req='<in> 11', matches=True
)
def test_specs_matches_with_op_in2(self):
self._do_specs_matcher_test(
value='12311321',
req='<in> 12311321',
matches=True)
value='12311321', req='<in> 12311321', matches=True
)
def test_specs_matches_with_op_in3(self):
self._do_specs_matcher_test(
value='12311321',
req='<in> 12311321 <in>',
matches=True)
value='12311321', req='<in> 12311321 <in>', matches=True
)
def test_specs_fails_with_op_in(self):
self._do_specs_matcher_test(
value='12310321',
req='<in> 11',
matches=False)
value='12310321', req='<in> 11', matches=False
)
def test_specs_fails_with_op_in2(self):
self._do_specs_matcher_test(
value='12310321',
req='<in> 11 <in>',
matches=False)
value='12310321', req='<in> 11 <in>', matches=False
)
def test_specs_matches_with_op_or(self):
self._do_specs_matcher_test(
value='12',
req='<or> 11 <or> 12',
matches=True)
value='12', req='<or> 11 <or> 12', matches=True
)
def test_specs_matches_with_op_or2(self):
self._do_specs_matcher_test(
value='12',
req='<or> 11 <or> 12 <or>',
matches=True)
value='12', req='<or> 11 <or> 12 <or>', matches=True
)
def test_specs_matches_with_op_or3(self):
self._do_specs_matcher_test(
value='12',
req='<or> 12',
matches=True)
self._do_specs_matcher_test(value='12', req='<or> 12', matches=True)
def test_specs_fails_with_op_or(self):
self._do_specs_matcher_test(
value='13',
req='<or> 11 <or> 12',
matches=False)
value='13', req='<or> 11 <or> 12', matches=False
)
def test_specs_fails_with_op_or2(self):
self._do_specs_matcher_test(
value='13',
req='<or> 11 <or> 12 <or>',
matches=False)
value='13', req='<or> 11 <or> 12 <or>', matches=False
)
def test_specs_fails_with_op_or3(self):
self._do_specs_matcher_test(
value='13',
req='<or> 11',
matches=False)
self._do_specs_matcher_test(value='13', req='<or> 11', matches=False)
def test_specs_matches_with_op_le(self):
self._do_specs_matcher_test(
value='2',
req='<= 10',
matches=True)
self._do_specs_matcher_test(value='2', req='<= 10', matches=True)
def test_specs_matches_with_op_le2(self):
self._do_specs_matcher_test(
value='10',
req='<= 10',
matches=True)
self._do_specs_matcher_test(value='10', req='<= 10', matches=True)
def test_specs_fails_with_op_le(self):
self._do_specs_matcher_test(
value='3',
req='<= 2',
matches=False)
self._do_specs_matcher_test(value='3', req='<= 2', matches=False)
def test_specs_matches_with_op_ge(self):
self._do_specs_matcher_test(
value='3',
req='>= 1',
matches=True)
self._do_specs_matcher_test(value='3', req='>= 1', matches=True)
def test_specs_matches_with_op_ge2(self):
self._do_specs_matcher_test(
value='3.0',
req='>= 3',
matches=True)
self._do_specs_matcher_test(value='3.0', req='>= 3', matches=True)
def test_specs_matches_with_op_g(self):
self._do_specs_matcher_test(
value='3',
req='> 1',
matches=True)
self._do_specs_matcher_test(value='3', req='> 1', matches=True)
def test_specs_matches_with_op_g2(self):
self._do_specs_matcher_test(
value='3',
req='> 3',
matches=False)
self._do_specs_matcher_test(value='3', req='> 3', matches=False)
def test_specs_matches_with_op_g3(self):
self._do_specs_matcher_test(
value='3.0',
req='> 2',
matches=True)
self._do_specs_matcher_test(value='3.0', req='> 2', matches=True)
def test_specs_matches_with_op_l(self):
self._do_specs_matcher_test(
value='3',
req='< 5',
matches=True)
self._do_specs_matcher_test(value='3', req='< 5', matches=True)
def test_specs_matches_with_op_l2(self):
self._do_specs_matcher_test(
value='3',
req='< 3',
matches=False)
self._do_specs_matcher_test(value='3', req='< 3', matches=False)
def test_specs_matches_with_op_l3(self):
self._do_specs_matcher_test(
value='1.0',
req='< 6',
matches=True)
self._do_specs_matcher_test(value='1.0', req='< 6', matches=True)
def test_specs_fails_with_op_ge(self):
self._do_specs_matcher_test(
value='2',
req='>= 3',
matches=False)
self._do_specs_matcher_test(value='2', req='>= 3', matches=False)
def test_specs_matches_with_op_ne(self):
self._do_specs_matcher_test(
value='3.2',
req='!= 3.1',
matches=True)
self._do_specs_matcher_test(value='3.2', req='!= 3.1', matches=True)
def test_specs_fails_with_op_ne(self):
self._do_specs_matcher_test(
value='3.2',
req='!= 3.2',
matches=False)
self._do_specs_matcher_test(value='3.2', req='!= 3.2', matches=False)
def test_specs_matches_with_op_eqeq(self):
self._do_specs_matcher_test(
value='3',
req='== 3',
matches=True)
self._do_specs_matcher_test(value='3', req='== 3', matches=True)
def test_specs_matches_with_op_eqeq2(self):
self._do_specs_matcher_test(
value='3.0',
req='== 3',
matches=True)
self._do_specs_matcher_test(value='3.0', req='== 3', matches=True)
def test_specs_fails_with_op_eqeq(self):
self._do_specs_matcher_test(
value='3.0',
req='== 3.1',
matches=False)
self._do_specs_matcher_test(value='3.0', req='== 3.1', matches=False)
def test_specs_matches_all_with_op_allin(self):
self._do_specs_matcher_test(
value=str(['aes', 'mmx', 'aux']),
req='<all-in> aes mmx',
matches=True)
matches=True,
)
def test_specs_matches_one_with_op_allin(self):
self._do_specs_matcher_test(
value=str(['aes', 'mmx', 'aux']),
req='<all-in> mmx',
matches=True)
value=str(['aes', 'mmx', 'aux']), req='<all-in> mmx', matches=True
)
def test_specs_fails_with_op_allin(self):
self._do_specs_matcher_test(
value=str(['aes', 'mmx', 'aux']),
req='<all-in> txt',
matches=False)
matches=False,
)
def test_specs_fails_all_with_op_allin(self):
self._do_specs_matcher_test(
value=str(['aes', 'mmx', 'aux']),
req='<all-in> txt 3dnow',
matches=False)
matches=False,
)
def test_specs_fails_match_one_with_op_allin(self):
self._do_specs_matcher_test(
value=str(['aes', 'mmx', 'aux']),
req='<all-in> txt aes',
matches=False)
matches=False,
)
def test_specs_fails_match_substr_single(self):
self._do_specs_matcher_test(
value=str(['X_X']),
req='<all-in> _',
matches=False)
value=str(['X_X']), req='<all-in> _', matches=False
)
def test_specs_fails_match_substr(self):
self._do_specs_matcher_test(
value=str(['X___X']),
req='<all-in> ___',
matches=False)
value=str(['X___X']), req='<all-in> ___', matches=False
)
def test_specs_fails_match_substr_reversed(self):
self._do_specs_matcher_test(
value=str(['aes', 'mmx', 'aux']),
req='<all-in> XaesX',
matches=False)
matches=False,
)
def test_specs_fails_onechar_with_op_allin(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value=str(['aes', 'mmx', 'aux']),
req='<all-in> e')
req='<all-in> e',
)
def test_specs_errors_list_with_op_allin(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value=['aes', 'mmx', 'aux'],
req='<all-in> aes')
req='<all-in> aes',
)
def test_specs_errors_str_with_op_allin(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value='aes',
req='<all-in> aes')
TypeError, specs_matcher.match, value='aes', req='<all-in> aes'
)
def test_specs_errors_dict_literal_with_op_allin(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value=str({'aes': 1}),
req='<all-in> aes')
req='<all-in> aes',
)
def test_specs_errors_bad_literal_with_op_allin(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value="^&*($",
req='<all-in> aes')
TypeError, specs_matcher.match, value="^&*($", req='<all-in> aes'
)
def test_specs_fails_not_enough_args_with_op_rangein(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value="23",
req='<range-in> [ 10 ]')
TypeError, specs_matcher.match, value="23", req='<range-in> [ 10 ]'
)
def test_specs_fails_no_brackets_with_op_rangein(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value="23",
req='<range-in> 10 20')
TypeError, specs_matcher.match, value="23", req='<range-in> 10 20'
)
def test_specs_fails_no_opening_bracket_with_op_rangein(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value="23",
req='<range-in> 10 20 ]')
req='<range-in> 10 20 ]',
)
def test_specs_fails_no_closing_bracket_with_op_rangein(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value="23",
req='<range-in> [ 10 20')
req='<range-in> [ 10 20',
)
def test_specs_fails_invalid_brackets_with_op_rangein(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value="23",
req='<range-in> { 10 20 }')
req='<range-in> { 10 20 }',
)
def test_specs_fails_not_opening_brackets_with_op_rangein(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value="23",
req='<range-in> ) 10 20 )')
req='<range-in> ) 10 20 )',
)
def test_specs_fails_not_closing_brackets_with_op_rangein(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value="23",
req='<range-in> ( 10 20 (')
req='<range-in> ( 10 20 (',
)
def test_specs_fails_reverse_brackets_with_op_rangein(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value="23",
req='<range-in> ) 10 20 (')
req='<range-in> ) 10 20 (',
)
def test_specs_fails_too_many_args_with_op_rangein(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value="23",
req='<range-in> [ 10 20 30 ]')
req='<range-in> [ 10 20 30 ]',
)
def test_specs_fails_bad_limits_with_op_rangein(self):
self.assertRaises(
TypeError,
specs_matcher.match,
value="23",
req='<range-in> [ 20 10 ]')
req='<range-in> [ 20 10 ]',
)
def test_specs_fails_match_beyond_scope_with_op_rangein_le(self):
self._do_specs_matcher_test(
matches=False,
value="23",
req='<range-in> [ 10 20 ]')
matches=False, value="23", req='<range-in> [ 10 20 ]'
)
def test_specs_fails_match_beyond_scope_with_op_rangein_lt(self):
self._do_specs_matcher_test(
matches=False,
value="23",
req='<range-in> [ 10 20 )')
matches=False, value="23", req='<range-in> [ 10 20 )'
)
def test_specs_fails_match_under_scope_with_op_rangein_ge(self):
self._do_specs_matcher_test(
matches=False,
value="5",
req='<range-in> [ 10 20 ]')
matches=False, value="5", req='<range-in> [ 10 20 ]'
)
def test_specs_fails_match_under_scope_with_op_rangein_gt(self):
self._do_specs_matcher_test(
matches=False,
value="5",
req='<range-in> ( 10 20 ]')
matches=False, value="5", req='<range-in> ( 10 20 ]'
)
def test_specs_fails_match_float_beyond_scope_with_op_rangein_le(self):
self._do_specs_matcher_test(
matches=False,
value="20.3",
req='<range-in> [ 10.1 20.2 ]')
matches=False, value="20.3", req='<range-in> [ 10.1 20.2 ]'
)
def test_specs_fails_match_float_beyond_scope_with_op_rangein_lt(self):
self._do_specs_matcher_test(
matches=False,
value="20.3",
req='<range-in> [ 10.1 20.2 )')
matches=False, value="20.3", req='<range-in> [ 10.1 20.2 )'
)
def test_specs_fails_match_float_under_scope_with_op_rangein_ge(self):
self._do_specs_matcher_test(
matches=False,
value="5.0",
req='<range-in> [ 5.1 20.2 ]')
matches=False, value="5.0", req='<range-in> [ 5.1 20.2 ]'
)
def test_specs_fails_match_float_under_scope_with_op_rangein_gt(self):
self._do_specs_matcher_test(
matches=False,
value="5.0",
req='<range-in> ( 5.1 20.2 ]')
matches=False, value="5.0", req='<range-in> ( 5.1 20.2 ]'
)
def test_specs_matches_int_lower_int_range_with_op_rangein_ge(self):
self._do_specs_matcher_test(
matches=True,
value="10",
req='<range-in> [ 10 20 ]')
matches=True, value="10", req='<range-in> [ 10 20 ]'
)
def test_specs_fails_matchesint_lower_int_range_with_op_rangein_gt(self):
self._do_specs_matcher_test(
matches=False,
value="10",
req='<range-in> ( 10 20 ]')
matches=False, value="10", req='<range-in> ( 10 20 ]'
)
def test_specs_matches_float_lower_float_range_with_op_rangein_ge(self):
self._do_specs_matcher_test(
matches=True,
value="10.1",
req='<range-in> [ 10.1 20 ]')
matches=True, value="10.1", req='<range-in> [ 10.1 20 ]'
)
def test_specs_fails_matche_float_lower_float_range_with_op_rangein_gt(
self):
self,
):
self._do_specs_matcher_test(
matches=False,
value="10.1",
req='<range-in> ( 10.1 20 ]')
matches=False, value="10.1", req='<range-in> ( 10.1 20 ]'
)
def test_specs_matches_int_with_int_range_with_op_rangein(self):
self._do_specs_matcher_test(
matches=True,
value="15",
req='<range-in> [ 10 20 ]')
matches=True, value="15", req='<range-in> [ 10 20 ]'
)
def test_specs_matches_float_with_int_limit_with_op_rangein(self):
self._do_specs_matcher_test(
matches=True,
value="15.5",
req='<range-in> [ 10 20 ]')
matches=True, value="15.5", req='<range-in> [ 10 20 ]'
)
def test_specs_matches_int_upper_int_range_with_op_rangein(self):
self._do_specs_matcher_test(
matches=True,
value="20",
req='<range-in> [ 10 20 ]')
matches=True, value="20", req='<range-in> [ 10 20 ]'
)
def test_specs_fails_matche_int_upper_int_range_with_op_rangein_lt(self):
self._do_specs_matcher_test(
matches=False,
value="20",
req='<range-in> [ 10 20 )')
matches=False, value="20", req='<range-in> [ 10 20 )'
)
def test_specs_matches_float_upper_mixed_range_with_op_rangein(self):
self._do_specs_matcher_test(
matches=True,
value="20.5",
req='<range-in> [ 10 20.5 ]')
matches=True, value="20.5", req='<range-in> [ 10 20.5 ]'
)
def test_specs_fails_matche_float_upper_mixed_range_with_op_rangein_lt(
self):
self,
):
self._do_specs_matcher_test(
matches=False,
value="20.5",
req='<range-in> [ 10 20.5 )')
matches=False, value="20.5", req='<range-in> [ 10 20.5 )'
)
def test_specs_matches_float_with_float_limit_with_op_rangein(self):
self._do_specs_matcher_test(
matches=True,
value="12.5",
req='<range-in> [ 10.1 20.1 ]')
matches=True, value="12.5", req='<range-in> [ 10.1 20.1 ]'
)
def test_specs_matches_only_one_with_op_rangein(self):
self._do_specs_matcher_test(
matches=True,
value="10.1",
req='<range-in> [ 10.1 10.1 ]')
matches=True, value="10.1", req='<range-in> [ 10.1 10.1 ]'
)

View File

@@ -29,7 +29,6 @@ load_tests = testscenarios.load_tests_apply_scenarios
class StrUtilsTest(test_base.BaseTestCase):
def test_bool_bool_from_string(self):
self.assertTrue(strutils.bool_from_string(True))
self.assertFalse(strutils.bool_from_string(False))
@@ -59,8 +58,9 @@ class StrUtilsTest(test_base.BaseTestCase):
self.assertFalse(strutils.bool_from_string(c('no')))
self.assertFalse(strutils.bool_from_string(c('0')))
self.assertFalse(strutils.bool_from_string(c('42')))
self.assertFalse(strutils.bool_from_string(c(
'This should not be True')))
self.assertFalse(
strutils.bool_from_string(c('This should not be True'))
)
self.assertFalse(strutils.bool_from_string(c('F')))
self.assertFalse(strutils.bool_from_string(c('f')))
self.assertFalse(strutils.bool_from_string(c('N')))
@@ -79,11 +79,14 @@ class StrUtilsTest(test_base.BaseTestCase):
self._test_bool_from_string(str)
self.assertFalse(strutils.bool_from_string('使用', strict=False))
exc = self.assertRaises(ValueError, strutils.bool_from_string,
'使用', strict=True)
expected_msg = ("Unrecognized value '使用', acceptable values are:"
" '0', '1', 'f', 'false', 'n', 'no', 'off', 'on',"
" 't', 'true', 'y', 'yes'")
exc = self.assertRaises(
ValueError, strutils.bool_from_string, '使用', strict=True
)
expected_msg = (
"Unrecognized value '使用', acceptable values are:"
" '0', '1', 'f', 'false', 'n', 'no', 'off', 'on',"
" 't', 'true', 'y', 'yes'"
)
self.assertEqual(expected_msg, str(exc))
def test_other_bool_from_string(self):
@@ -99,28 +102,37 @@ class StrUtilsTest(test_base.BaseTestCase):
def test_strict_bool_from_string(self):
# None isn't allowed in strict mode
exc = self.assertRaises(ValueError, strutils.bool_from_string, None,
strict=True)
expected_msg = ("Unrecognized value 'None', acceptable values are:"
" '0', '1', 'f', 'false', 'n', 'no', 'off', 'on',"
" 't', 'true', 'y', 'yes'")
exc = self.assertRaises(
ValueError, strutils.bool_from_string, None, strict=True
)
expected_msg = (
"Unrecognized value 'None', acceptable values are:"
" '0', '1', 'f', 'false', 'n', 'no', 'off', 'on',"
" 't', 'true', 'y', 'yes'"
)
self.assertEqual(expected_msg, str(exc))
# Unrecognized strings aren't allowed
self.assertFalse(strutils.bool_from_string('Other', strict=False))
exc = self.assertRaises(ValueError, strutils.bool_from_string, 'Other',
strict=True)
expected_msg = ("Unrecognized value 'Other', acceptable values are:"
" '0', '1', 'f', 'false', 'n', 'no', 'off', 'on',"
" 't', 'true', 'y', 'yes'")
exc = self.assertRaises(
ValueError, strutils.bool_from_string, 'Other', strict=True
)
expected_msg = (
"Unrecognized value 'Other', acceptable values are:"
" '0', '1', 'f', 'false', 'n', 'no', 'off', 'on',"
" 't', 'true', 'y', 'yes'"
)
self.assertEqual(expected_msg, str(exc))
# Unrecognized numbers aren't allowed
exc = self.assertRaises(ValueError, strutils.bool_from_string, 2,
strict=True)
expected_msg = ("Unrecognized value '2', acceptable values are:"
" '0', '1', 'f', 'false', 'n', 'no', 'off', 'on',"
" 't', 'true', 'y', 'yes'")
exc = self.assertRaises(
ValueError, strutils.bool_from_string, 2, strict=True
)
expected_msg = (
"Unrecognized value '2', acceptable values are:"
" '0', '1', 'f', 'false', 'n', 'no', 'off', 'on',"
" 't', 'true', 'y', 'yes'"
)
self.assertEqual(expected_msg, str(exc))
# False-like values are allowed
@@ -136,8 +148,9 @@ class StrUtilsTest(test_base.BaseTestCase):
# Avoid font-similarity issues (one looks like lowercase-el, zero like
# oh, etc...)
for char in ('O', 'o', 'L', 'l', 'I', 'i'):
self.assertRaises(ValueError, strutils.bool_from_string, char,
strict=True)
self.assertRaises(
ValueError, strutils.bool_from_string, char, strict=True
)
def test_int_from_bool_as_string(self):
self.assertEqual(1, strutils.int_from_bool_as_string(True))
@@ -163,20 +176,16 @@ class StrUtilsTest(test_base.BaseTestCase):
self.assertRaises(TypeError, to_slug, True)
self.assertEqual("hello", to_slug("hello"))
self.assertEqual("two-words", to_slug("Two Words"))
self.assertEqual("ma-any-spa-ce-es",
to_slug("Ma-any\t spa--ce- es"))
self.assertEqual("ma-any-spa-ce-es", to_slug("Ma-any\t spa--ce- es"))
self.assertEqual("excamation", to_slug("exc!amation!"))
self.assertEqual("ampserand", to_slug("&ampser$and"))
self.assertEqual("ju5tnum8er", to_slug("ju5tnum8er"))
self.assertEqual("strip-", to_slug(" strip - "))
self.assertEqual("perche",
to_slug(b"perch\xc3\xa9"))
self.assertEqual("strange",
to_slug("\x80strange", errors="ignore"))
self.assertEqual("perche", to_slug(b"perch\xc3\xa9"))
self.assertEqual("strange", to_slug("\x80strange", errors="ignore"))
class StringToBytesTest(test_base.BaseTestCase):
_unit_system = [
('si', dict(unit_system='SI')),
('iec', dict(unit_system='IEC')),
@@ -240,17 +249,18 @@ class StringToBytesTest(test_base.BaseTestCase):
@classmethod
def generate_scenarios(cls):
cls.scenarios = testscenarios.multiply_scenarios(cls._unit_system,
cls._sign,
cls._magnitude,
cls._unit_prefix,
cls._unit_suffix,
cls._return_int)
cls.scenarios = testscenarios.multiply_scenarios(
cls._unit_system,
cls._sign,
cls._magnitude,
cls._unit_prefix,
cls._unit_suffix,
cls._return_int,
)
def test_string_to_bytes(self):
def _get_quantity(sign, magnitude, unit_suffix):
res = float('{}{}'.format(sign, magnitude))
res = float(f'{sign}{magnitude}')
if unit_suffix in ['b', 'bit']:
res /= 8
return res
@@ -264,7 +274,7 @@ class StringToBytesTest(test_base.BaseTestCase):
if unit_prefix.endswith('i'):
res = getattr(units, unit_prefix)
else:
res = getattr(units, '%si' % unit_prefix)
res = getattr(units, f'{unit_prefix}i')
elif unit_system == 'mixed':
# Note: this will return 'i' units as power-of-two,
# and other units as power-of-ten. Additionally, for
@@ -275,21 +285,28 @@ class StringToBytesTest(test_base.BaseTestCase):
res = getattr(units, unit_prefix)
return res
text = ''.join([self.sign, self.magnitude, self.unit_prefix,
self.unit_suffix])
err_si = self.unit_system == 'SI' and (self.unit_prefix == 'K' or
self.unit_prefix.endswith('i'))
text = ''.join(
[self.sign, self.magnitude, self.unit_prefix, self.unit_suffix]
)
err_si = self.unit_system == 'SI' and (
self.unit_prefix == 'K' or self.unit_prefix.endswith('i')
)
err_iec = self.unit_system == 'IEC' and self.unit_prefix == 'k'
if getattr(self, 'assert_error', False) or err_si or err_iec:
self.assertRaises(ValueError, strutils.string_to_bytes,
text, unit_system=self.unit_system,
return_int=self.return_int)
self.assertRaises(
ValueError,
strutils.string_to_bytes,
text,
unit_system=self.unit_system,
return_int=self.return_int,
)
return
quantity = _get_quantity(self.sign, self.magnitude, self.unit_suffix)
constant = _get_constant(self.unit_prefix, self.unit_system)
expected = quantity * constant
actual = strutils.string_to_bytes(text, unit_system=self.unit_system,
return_int=self.return_int)
actual = strutils.string_to_bytes(
text, unit_system=self.unit_system, return_int=self.return_int
)
if self.return_int:
self.assertEqual(actual, int(math.ceil(expected)))
else:
@@ -300,7 +317,6 @@ StringToBytesTest.generate_scenarios()
class MaskPasswordTestCase(test_base.BaseTestCase):
def test_namespace_objects(self):
payload = """
Namespace(passcode='', username='', password='my"password',
@@ -313,7 +329,6 @@ class MaskPasswordTestCase(test_base.BaseTestCase):
self.assertEqual(expected, strutils.mask_password(payload))
def test_sanitize_keys(self):
lowered = [k.lower() for k in strutils._SANITIZE_KEYS]
message = "The _SANITIZE_KEYS must all be lowercase."
self.assertEqual(strutils._SANITIZE_KEYS, lowered, message)
@@ -574,53 +589,63 @@ class MaskPasswordTestCase(test_base.BaseTestCase):
def test_mask_password(self):
payload = "test = 'password' : 'aaaaaa'"
expected = "test = 'password' : '111'"
self.assertEqual(expected,
strutils.mask_password(payload, secret='111'))
self.assertEqual(
expected, strutils.mask_password(payload, secret='111')
)
payload = 'mysqld --password "aaaaaa"'
expected = 'mysqld --password "****"'
self.assertEqual(expected,
strutils.mask_password(payload, secret='****'))
self.assertEqual(
expected, strutils.mask_password(payload, secret='****')
)
payload = 'mysqld --password aaaaaa'
expected = 'mysqld --password ???'
self.assertEqual(expected,
strutils.mask_password(payload, secret='???'))
self.assertEqual(
expected, strutils.mask_password(payload, secret='???')
)
payload = 'mysqld --password = "aaaaaa"'
expected = 'mysqld --password = "****"'
self.assertEqual(expected,
strutils.mask_password(payload, secret='****'))
self.assertEqual(
expected, strutils.mask_password(payload, secret='****')
)
payload = "mysqld --password = 'aaaaaa'"
expected = "mysqld --password = '****'"
self.assertEqual(expected,
strutils.mask_password(payload, secret='****'))
self.assertEqual(
expected, strutils.mask_password(payload, secret='****')
)
payload = "mysqld --password = aaaaaa"
expected = "mysqld --password = ****"
self.assertEqual(expected,
strutils.mask_password(payload, secret='****'))
self.assertEqual(
expected, strutils.mask_password(payload, secret='****')
)
payload = "test = password = aaaaaa"
expected = "test = password = 111"
self.assertEqual(expected,
strutils.mask_password(payload, secret='111'))
self.assertEqual(
expected, strutils.mask_password(payload, secret='111')
)
payload = "test = password= aaaaaa"
expected = "test = password= 111"
self.assertEqual(expected,
strutils.mask_password(payload, secret='111'))
self.assertEqual(
expected, strutils.mask_password(payload, secret='111')
)
payload = "test = password =aaaaaa"
expected = "test = password =111"
self.assertEqual(expected,
strutils.mask_password(payload, secret='111'))
self.assertEqual(
expected, strutils.mask_password(payload, secret='111')
)
payload = "test = password=aaaaaa"
expected = "test = password=111"
self.assertEqual(expected,
strutils.mask_password(payload, secret='111'))
self.assertEqual(
expected, strutils.mask_password(payload, secret='111')
)
payload = 'test = "original_password" : "aaaaaaaaa"'
expected = 'test = "original_password" : "***"'
@@ -649,50 +674,56 @@ class MaskPasswordTestCase(test_base.BaseTestCase):
expected = """{'token':'***'}"""
self.assertEqual(expected, strutils.mask_password(payload))
payload = ("test = 'node.session.auth.password','-v','TL0EfN33',"
"'nomask'")
expected = ("test = 'node.session.auth.password','-v','***',"
"'nomask'")
payload = (
"test = 'node.session.auth.password','-v','TL0EfN33','nomask'"
)
expected = "test = 'node.session.auth.password','-v','***','nomask'"
self.assertEqual(expected, strutils.mask_password(payload))
payload = ("test = 'node.session.auth.password', '--password', "
"'TL0EfN33', 'nomask'")
expected = ("test = 'node.session.auth.password', '--password', "
"'***', 'nomask'")
payload = (
"test = 'node.session.auth.password', '--password', "
"'TL0EfN33', 'nomask'"
)
expected = (
"test = 'node.session.auth.password', '--password', "
"'***', 'nomask'"
)
self.assertEqual(expected, strutils.mask_password(payload))
payload = ("test = 'node.session.auth.password', '--password', "
"'TL0EfN33'")
expected = ("test = 'node.session.auth.password', '--password', "
"'***'")
payload = (
"test = 'node.session.auth.password', '--password', 'TL0EfN33'"
)
expected = "test = 'node.session.auth.password', '--password', '***'"
self.assertEqual(expected, strutils.mask_password(payload))
payload = "test = node.session.auth.password -v TL0EfN33 nomask"
expected = "test = node.session.auth.password -v *** nomask"
self.assertEqual(expected, strutils.mask_password(payload))
payload = ("test = node.session.auth.password --password TL0EfN33 "
"nomask")
expected = ("test = node.session.auth.password --password *** "
"nomask")
payload = (
"test = node.session.auth.password --password TL0EfN33 nomask"
)
expected = "test = node.session.auth.password --password *** nomask"
self.assertEqual(expected, strutils.mask_password(payload))
payload = ("test = node.session.auth.password --password TL0EfN33")
expected = ("test = node.session.auth.password --password ***")
payload = "test = node.session.auth.password --password TL0EfN33"
expected = "test = node.session.auth.password --password ***"
self.assertEqual(expected, strutils.mask_password(payload))
payload = "test = cmd --password my\xe9\x80\x80pass"
expected = ("test = cmd --password ***")
expected = "test = cmd --password ***"
self.assertEqual(expected, strutils.mask_password(payload))
class TestMapping(collections.abc.Mapping):
"""Test class for non-dict mappings"""
def __init__(self):
super().__init__()
self.data = {'password': 'shhh',
'foo': 'bar',
}
self.data = {
'password': 'shhh',
'foo': 'bar',
}
def __getitem__(self, key):
return self.data[key]
@@ -706,121 +737,135 @@ class TestMapping(collections.abc.Mapping):
class NestedMapping(TestMapping):
"""Test class that contains an instance of TestMapping"""
def __init__(self):
super().__init__()
self.data = {'nested': TestMapping()}
class MaskDictionaryPasswordTestCase(test_base.BaseTestCase):
def test_dictionary(self):
payload = {'password': 'TL0EfN33'}
expected = {'password': '***'}
self.assertEqual(expected,
strutils.mask_dict_password(payload))
self.assertEqual(expected, strutils.mask_dict_password(payload))
payload = {'password': 'TL0Ef"N33'}
expected = {'password': '***'}
self.assertEqual(expected,
strutils.mask_dict_password(payload))
self.assertEqual(expected, strutils.mask_dict_password(payload))
payload = {'user': 'admin', 'password': 'TL0EfN33'}
expected = {'user': 'admin', 'password': '***'}
self.assertEqual(expected,
strutils.mask_dict_password(payload))
self.assertEqual(expected, strutils.mask_dict_password(payload))
payload = {'strval': 'somestring',
'dictval': {'user': 'admin', 'password': 'TL0EfN33'}}
expected = {'strval': 'somestring',
'dictval': {'user': 'admin', 'password': '***'}}
self.assertEqual(expected,
strutils.mask_dict_password(payload))
payload = {
'strval': 'somestring',
'dictval': {'user': 'admin', 'password': 'TL0EfN33'},
}
expected = {
'strval': 'somestring',
'dictval': {'user': 'admin', 'password': '***'},
}
self.assertEqual(expected, strutils.mask_dict_password(payload))
payload = {'strval': '--password abc',
'dont_change': 'this is fine',
'dictval': {'user': 'admin', 'password': b'TL0EfN33'}}
expected = {'strval': '--password ***',
'dont_change': 'this is fine',
'dictval': {'user': 'admin', 'password': '***'}}
self.assertEqual(expected,
strutils.mask_dict_password(payload))
payload = {
'strval': '--password abc',
'dont_change': 'this is fine',
'dictval': {'user': 'admin', 'password': b'TL0EfN33'},
}
expected = {
'strval': '--password ***',
'dont_change': 'this is fine',
'dictval': {'user': 'admin', 'password': '***'},
}
self.assertEqual(expected, strutils.mask_dict_password(payload))
payload = {'ipmi_password': 'KeDrahishvowphyecMornEm0or('}
expected = {'ipmi_password': '***'}
self.assertEqual(expected,
strutils.mask_dict_password(payload))
self.assertEqual(expected, strutils.mask_dict_password(payload))
payload = {'passwords': {'KeystoneFernetKey1': 'c5FijjS'}}
expected = {'passwords': {'KeystoneFernetKey1': '***'}}
self.assertEqual(expected,
strutils.mask_dict_password(payload))
self.assertEqual(expected, strutils.mask_dict_password(payload))
payload = {'passwords': {'keystonecredential0': 'c5FijjS'}}
expected = {'passwords': {'keystonecredential0': '***'}}
self.assertEqual(expected,
strutils.mask_dict_password(payload))
self.assertEqual(expected, strutils.mask_dict_password(payload))
def test_do_no_harm(self):
payload = {}
expected = {}
self.assertEqual(expected,
strutils.mask_dict_password(payload))
self.assertEqual(expected, strutils.mask_dict_password(payload))
payload = {'somekey': 'somevalue',
'anotherkey': 'anothervalue'}
expected = {'somekey': 'somevalue',
'anotherkey': 'anothervalue'}
self.assertEqual(expected,
strutils.mask_dict_password(payload))
payload = {'somekey': 'somevalue', 'anotherkey': 'anothervalue'}
expected = {'somekey': 'somevalue', 'anotherkey': 'anothervalue'}
self.assertEqual(expected, strutils.mask_dict_password(payload))
def test_do_an_int(self):
payload = {}
payload[1] = 2
expected = payload.copy()
self.assertEqual(expected,
strutils.mask_dict_password(payload))
self.assertEqual(expected, strutils.mask_dict_password(payload))
def test_mask_values(self):
payload = {'somekey': 'test = cmd --password my\xe9\x80\x80pass'}
expected = {'somekey': 'test = cmd --password ***'}
self.assertEqual(expected,
strutils.mask_dict_password(payload))
self.assertEqual(expected, strutils.mask_dict_password(payload))
def test_other_non_str_values(self):
payload = {'password': 'DK0PK1AK3', 'bool': True,
'dict': {'cat': 'meow', 'password': "*aa38skdjf"},
'float': 0.1, 'int': 123, 'list': [1, 2], 'none': None,
'str': 'foo'}
expected = {'password': '***', 'bool': True,
'dict': {'cat': 'meow', 'password': '***'},
'float': 0.1, 'int': 123, 'list': [1, 2], 'none': None,
'str': 'foo'}
self.assertEqual(expected,
strutils.mask_dict_password(payload))
payload = {
'password': 'DK0PK1AK3',
'bool': True,
'dict': {'cat': 'meow', 'password': "*aa38skdjf"},
'float': 0.1,
'int': 123,
'list': [1, 2],
'none': None,
'str': 'foo',
}
expected = {
'password': '***',
'bool': True,
'dict': {'cat': 'meow', 'password': '***'},
'float': 0.1,
'int': 123,
'list': [1, 2],
'none': None,
'str': 'foo',
}
self.assertEqual(expected, strutils.mask_dict_password(payload))
def test_argument_untouched(self):
"""Make sure that the argument passed in is not modified"""
payload = {'password': 'DK0PK1AK3', 'bool': True,
'dict': {'cat': 'meow', 'password': "*aa38skdjf"},
'float': 0.1, 'int': 123, 'list': [1, 2], 'none': None,
'str': 'foo'}
payload = {
'password': 'DK0PK1AK3',
'bool': True,
'dict': {'cat': 'meow', 'password': "*aa38skdjf"},
'float': 0.1,
'int': 123,
'list': [1, 2],
'none': None,
'str': 'foo',
}
pristine = copy.deepcopy(payload)
# Send the payload into the function, to see if it gets modified
strutils.mask_dict_password(payload)
self.assertEqual(pristine, payload)
def test_non_dict(self):
expected = {'password': '***',
'foo': 'bar',
}
expected = {
'password': '***',
'foo': 'bar',
}
payload = TestMapping()
self.assertEqual(expected, strutils.mask_dict_password(payload))
def test_nested_non_dict(self):
expected = {'nested': {'password': '***',
'foo': 'bar',
}
}
expected = {
'nested': {
'password': '***',
'foo': 'bar',
}
}
payload = NestedMapping()
self.assertEqual(expected, strutils.mask_dict_password(payload))
@@ -842,7 +887,8 @@ class IsIntLikeTestCase(test_base.BaseTestCase):
self.assertFalse(strutils.is_int_like("...."))
self.assertFalse(strutils.is_int_like("1g"))
self.assertFalse(
strutils.is_int_like("0cc3346e-9fef-4445-abe6-5d2b2690ec64"))
strutils.is_int_like("0cc3346e-9fef-4445-abe6-5d2b2690ec64")
)
self.assertFalse(strutils.is_int_like("a1"))
# NOTE(viktors): 12e3 - is a float number
self.assertFalse(strutils.is_int_like("12e3"))
@@ -853,36 +899,44 @@ class IsIntLikeTestCase(test_base.BaseTestCase):
class StringLengthTestCase(test_base.BaseTestCase):
def test_check_string_length(self):
self.assertIsNone(strutils.check_string_length(
'test', 'name', max_length=255))
self.assertRaises(ValueError,
strutils.check_string_length,
'', 'name', min_length=1)
self.assertRaises(ValueError,
strutils.check_string_length,
'a' * 256, 'name', max_length=255)
self.assertRaises(TypeError,
strutils.check_string_length,
11, 'name', max_length=255)
self.assertRaises(TypeError,
strutils.check_string_length,
dict(), 'name', max_length=255)
self.assertIsNone(
strutils.check_string_length('test', 'name', max_length=255)
)
self.assertRaises(
ValueError, strutils.check_string_length, '', 'name', min_length=1
)
self.assertRaises(
ValueError,
strutils.check_string_length,
'a' * 256,
'name',
max_length=255,
)
self.assertRaises(
TypeError, strutils.check_string_length, 11, 'name', max_length=255
)
self.assertRaises(
TypeError,
strutils.check_string_length,
dict(),
'name',
max_length=255,
)
def test_check_string_length_noname(self):
self.assertIsNone(strutils.check_string_length(
'test', max_length=255))
self.assertRaises(ValueError,
strutils.check_string_length,
'', min_length=1)
self.assertRaises(ValueError,
strutils.check_string_length,
'a' * 256, max_length=255)
self.assertRaises(TypeError,
strutils.check_string_length,
11, max_length=255)
self.assertRaises(TypeError,
strutils.check_string_length,
dict(), max_length=255)
self.assertIsNone(strutils.check_string_length('test', max_length=255))
self.assertRaises(
ValueError, strutils.check_string_length, '', min_length=1
)
self.assertRaises(
ValueError, strutils.check_string_length, 'a' * 256, max_length=255
)
self.assertRaises(
TypeError, strutils.check_string_length, 11, max_length=255
)
self.assertRaises(
TypeError, strutils.check_string_length, dict(), max_length=255
)
class SplitPathTestCase(test_base.BaseTestCase):
@@ -906,10 +960,12 @@ class SplitPathTestCase(test_base.BaseTestCase):
self.assertEqual(strutils.split_path('/a/'), ['a'])
self.assertEqual(strutils.split_path('/a/c', 2), ['a', 'c'])
self.assertEqual(strutils.split_path('/a/c/o', 3), ['a', 'c', 'o'])
self.assertEqual(strutils.split_path('/a/c/o/r', 3, 3, True),
['a', 'c', 'o/r'])
self.assertEqual(strutils.split_path('/a/c', 2, 3, True),
['a', 'c', None])
self.assertEqual(
strutils.split_path('/a/c/o/r', 3, 3, True), ['a', 'c', 'o/r']
)
self.assertEqual(
strutils.split_path('/a/c', 2, 3, True), ['a', 'c', None]
)
self.assertEqual(strutils.split_path('/a/c/', 2), ['a', 'c'])
self.assertEqual(strutils.split_path('/a/c/', 2, 3), ['a', 'c', ''])
@@ -941,8 +997,10 @@ class SplitByCommas(test_base.BaseTestCase):
self.check(["a,b", "ac"], '"a,b",ac')
def test_with_backslash_inside_quoted(self):
self.check(['abc"', 'de', 'fg,h', 'klm\\', '"nop'],
r'"abc\"","de","fg,h","klm\\","\"nop"')
self.check(
['abc"', 'de', 'fg,h', 'klm\\', '"nop'],
r'"abc\"","de","fg,h","klm\\","\"nop"',
)
def test_with_backslash_inside_unquoted(self):
self.check([r'a\bc', 'de'], r'a\bc,de')
@@ -953,31 +1011,51 @@ class SplitByCommas(test_base.BaseTestCase):
@ddt.ddt
class ValidateIntegerTestCase(test_base.BaseTestCase):
@ddt.unpack
@ddt.data({"value": 42, "name": "answer", "output": 42},
{"value": "42", "name": "answer", "output": 42},
{"value": "7", "name": "lucky", "output": 7,
"min_value": 7, "max_value": 8},
{"value": 7, "name": "lucky", "output": 7,
"min_value": 6, "max_value": 7},
{"value": 300, "name": "Spartaaa!!!", "output": 300,
"min_value": 300},
{"value": "300", "name": "Spartaaa!!!", "output": 300,
"max_value": 300})
@ddt.data(
{"value": 42, "name": "answer", "output": 42},
{"value": "42", "name": "answer", "output": 42},
{
"value": "7",
"name": "lucky",
"output": 7,
"min_value": 7,
"max_value": 8,
},
{
"value": 7,
"name": "lucky",
"output": 7,
"min_value": 6,
"max_value": 7,
},
{"value": 300, "name": "Spartaaa!!!", "output": 300, "min_value": 300},
{
"value": "300",
"name": "Spartaaa!!!",
"output": 300,
"max_value": 300,
},
)
def test_valid_inputs(self, output, value, name, **kwargs):
self.assertEqual(strutils.validate_integer(value, name,
**kwargs), output)
self.assertEqual(
strutils.validate_integer(value, name, **kwargs), output
)
@ddt.unpack
@ddt.data({"value": "im-not-an-int", "name": ''},
{"value": 3.14, "name": "Pie"},
{"value": "299", "name": "Sparta no-show",
"min_value": 300, "max_value": 300},
{"value": 55, "name": "doing 55 in a 54",
"max_value": 54},
{"value": chr(129), "name": "UnicodeError",
"max_value": 1000})
@ddt.data(
{"value": "im-not-an-int", "name": ''},
{"value": 3.14, "name": "Pie"},
{
"value": "299",
"name": "Sparta no-show",
"min_value": 300,
"max_value": 300,
},
{"value": 55, "name": "doing 55 in a 54", "max_value": 54},
{"value": chr(129), "name": "UnicodeError", "max_value": 1000},
)
def test_invalid_inputs(self, value, name, **kwargs):
self.assertRaises(ValueError, strutils.validate_integer,
value, name, **kwargs)
self.assertRaises(
ValueError, strutils.validate_integer, value, name, **kwargs
)

View File

@@ -32,31 +32,34 @@ def monotonic_iter(start=0, incr=0.05):
class TimeUtilsTest(test_base.BaseTestCase):
def setUp(self):
super().setUp()
self.skynet_self_aware_time_str = '1997-08-29T06:14:00Z'
self.skynet_self_aware_time_ms_str = '1997-08-29T06:14:00.000123Z'
self.skynet_self_aware_time = datetime.datetime(1997, 8, 29, 6, 14, 0)
self.skynet_self_aware_ms_time = datetime.datetime(1997, 8, 29, 6, 14,
0, 123)
self.skynet_self_aware_ms_time = datetime.datetime(
1997, 8, 29, 6, 14, 0, 123
)
self.one_minute_before = datetime.datetime(1997, 8, 29, 6, 13, 0)
self.one_minute_after = datetime.datetime(1997, 8, 29, 6, 15, 0)
self.skynet_self_aware_time_perfect_str = '1997-08-29T06:14:00.000000'
self.skynet_self_aware_time_perfect = datetime.datetime(1997, 8, 29,
6, 14, 0)
self.skynet_self_aware_time_perfect = datetime.datetime(
1997, 8, 29, 6, 14, 0
)
self.addCleanup(timeutils.clear_time_override)
def test_parse_isotime(self):
expect = timeutils.parse_isotime(self.skynet_self_aware_time_str)
skynet_self_aware_time_utc = self.skynet_self_aware_time.replace(
tzinfo=iso8601.iso8601.UTC)
tzinfo=iso8601.iso8601.UTC
)
self.assertEqual(skynet_self_aware_time_utc, expect)
def test_parse_isotime_micro_second_precision(self):
expect = timeutils.parse_isotime(self.skynet_self_aware_time_ms_str)
skynet_self_aware_time_ms_utc = self.skynet_self_aware_ms_time.replace(
tzinfo=iso8601.iso8601.UTC)
tzinfo=iso8601.iso8601.UTC
)
self.assertEqual(skynet_self_aware_time_ms_utc, expect)
def test_parse_strtime(self):
@@ -79,25 +82,25 @@ class TimeUtilsTest(test_base.BaseTestCase):
def test_is_older_than_aware(self):
"""Tests sending is_older_than an 'aware' datetime."""
self._test_is_older_than(lambda x: x.replace(
tzinfo=iso8601.iso8601.UTC))
self._test_is_older_than(
lambda x: x.replace(tzinfo=iso8601.iso8601.UTC)
)
def test_is_older_than_aware_no_utc(self):
self._test_is_older_than(lambda x: x.replace(
tzinfo=iso8601.iso8601.FixedOffset(1, 0, 'foo')).replace(
hour=7))
self._test_is_older_than(
lambda x: x.replace(
tzinfo=iso8601.iso8601.FixedOffset(1, 0, 'foo')
).replace(hour=7)
)
@mock.patch('datetime.datetime', wraps=datetime.datetime)
def _test_is_newer_than(self, fn, datetime_mock):
datetime_mock.now.return_value = self.skynet_self_aware_time
expect_true = timeutils.is_newer_than(fn(self.one_minute_after),
59)
expect_true = timeutils.is_newer_than(fn(self.one_minute_after), 59)
self.assertTrue(expect_true)
expect_false = timeutils.is_newer_than(fn(self.one_minute_after),
60)
expect_false = timeutils.is_newer_than(fn(self.one_minute_after), 60)
self.assertFalse(expect_false)
expect_false = timeutils.is_newer_than(fn(self.one_minute_after),
61)
expect_false = timeutils.is_newer_than(fn(self.one_minute_after), 61)
self.assertFalse(expect_false)
def test_is_newer_than_datetime(self):
@@ -105,13 +108,16 @@ class TimeUtilsTest(test_base.BaseTestCase):
def test_is_newer_than_aware(self):
"""Tests sending is_newer_than an 'aware' datetime."""
self._test_is_newer_than(lambda x: x.replace(
tzinfo=iso8601.iso8601.UTC))
self._test_is_newer_than(
lambda x: x.replace(tzinfo=iso8601.iso8601.UTC)
)
def test_is_newer_than_aware_no_utc(self):
self._test_is_newer_than(lambda x: x.replace(
tzinfo=iso8601.iso8601.FixedOffset(1, 0, 'foo')).replace(
hour=7))
self._test_is_newer_than(
lambda x: x.replace(
tzinfo=iso8601.iso8601.FixedOffset(1, 0, 'foo')
).replace(hour=7)
)
def test_set_time_override_using_default(self):
now = timeutils.utcnow_ts()
@@ -177,10 +183,15 @@ class TimeUtilsTest(test_base.BaseTestCase):
self.assertEqual(now.utcoffset(), backagain.utcoffset())
def test_unmarshall_time_leap_second(self):
leap_dict = dict(day=30, month=6, year=2015,
hour=23, minute=59,
second=timeutils._MAX_DATETIME_SEC + 1,
microsecond=0)
leap_dict = dict(
day=30,
month=6,
year=2015,
hour=23,
minute=59,
second=timeutils._MAX_DATETIME_SEC + 1,
microsecond=0,
)
leap_time = timeutils.unmarshall_time(leap_dict)
leap_dict.update(second=timeutils._MAX_DATETIME_SEC)
@@ -190,10 +201,12 @@ class TimeUtilsTest(test_base.BaseTestCase):
def test_delta_seconds(self):
before = timeutils.utcnow()
after = before + datetime.timedelta(days=7, seconds=59,
microseconds=123456)
self.assertAlmostEqual(604859.123456,
timeutils.delta_seconds(before, after))
after = before + datetime.timedelta(
days=7, seconds=59, microseconds=123456
)
self.assertAlmostEqual(
604859.123456, timeutils.delta_seconds(before, after)
)
def test_is_soon(self):
expires = timeutils.utcnow() + datetime.timedelta(minutes=5)
@@ -208,7 +221,6 @@ class TimeUtilsTest(test_base.BaseTestCase):
class TestIso8601Time(test_base.BaseTestCase):
def _instaneous(self, timestamp, yr, mon, day, hr, minute, sec, micro):
self.assertEqual(timestamp.year, yr)
self.assertEqual(timestamp.month, mon)
@@ -309,7 +321,6 @@ class TestIso8601Time(test_base.BaseTestCase):
class TimeItTest(test_base.BaseTestCase):
@mock.patch('time.sleep')
@mock.patch('oslo_utils.timeutils.now')
def test_timed(self, mock_now, mock_sleep):
@@ -380,9 +391,9 @@ class TimeItTest(test_base.BaseTestCase):
self.assertTrue(mock_now.called)
self.assertTrue(mock_sleep.called)
self.assertTrue(fake_logger.log.called)
fake_logger.log.assert_called_with(logging.DEBUG,
"That took a long time",
mock.ANY)
fake_logger.log.assert_called_with(
logging.DEBUG, "That took a long time", mock.ANY
)
@mock.patch('time.sleep')
@mock.patch('oslo_utils.timeutils.now')
@@ -524,15 +535,15 @@ class StopWatchTest(test_base.BaseTestCase):
watch.split()
self.assertEqual(1, len(watch.splits))
self.assertEqual(watch.splits[0].elapsed,
watch.splits[0].length)
self.assertEqual(watch.splits[0].elapsed, watch.splits[0].length)
watch.split()
splits = watch.splits
self.assertEqual(2, len(splits))
self.assertNotEqual(splits[0].elapsed, splits[1].elapsed)
self.assertEqual(splits[1].length,
splits[1].elapsed - splits[0].elapsed)
self.assertEqual(
splits[1].length, splits[1].elapsed - splits[0].elapsed
)
watch.stop()
self.assertEqual(2, len(watch.splits))

View File

@@ -21,7 +21,6 @@ from oslo_utils import uuidutils
class UUIDUtilsTest(test_base.BaseTestCase):
def test_generate_uuid(self):
uuid_string = uuidutils.generate_uuid()
self.assertIsInstance(uuid_string, str)
@@ -37,18 +36,28 @@ class UUIDUtilsTest(test_base.BaseTestCase):
def test_is_uuid_like(self):
self.assertTrue(uuidutils.is_uuid_like(str(uuid.uuid4())))
self.assertTrue(uuidutils.is_uuid_like(
'{12345678-1234-5678-1234-567812345678}'))
self.assertTrue(uuidutils.is_uuid_like(
'12345678123456781234567812345678'))
self.assertTrue(uuidutils.is_uuid_like(
'urn:uuid:12345678-1234-5678-1234-567812345678'))
self.assertTrue(uuidutils.is_uuid_like(
'urn:bbbaaaaa-aaaa-aaaa-aabb-bbbbbbbbbbbb'))
self.assertTrue(uuidutils.is_uuid_like(
'uuid:bbbaaaaa-aaaa-aaaa-aabb-bbbbbbbbbbbb'))
self.assertTrue(uuidutils.is_uuid_like(
'{}---bbb---aaa--aaa--aaa-----aaa---aaa--bbb-bbb---bbb-bbb-bb-{}'))
self.assertTrue(
uuidutils.is_uuid_like('{12345678-1234-5678-1234-567812345678}')
)
self.assertTrue(
uuidutils.is_uuid_like('12345678123456781234567812345678')
)
self.assertTrue(
uuidutils.is_uuid_like(
'urn:uuid:12345678-1234-5678-1234-567812345678'
)
)
self.assertTrue(
uuidutils.is_uuid_like('urn:bbbaaaaa-aaaa-aaaa-aabb-bbbbbbbbbbbb')
)
self.assertTrue(
uuidutils.is_uuid_like('uuid:bbbaaaaa-aaaa-aaaa-aabb-bbbbbbbbbbbb')
)
self.assertTrue(
uuidutils.is_uuid_like(
'{}---bbb---aaa--aaa--aaa-----aaa---aaa--bbb-bbb---bbb-bbb-bb-{}'
)
)
def test_is_uuid_like_insensitive(self):
self.assertTrue(uuidutils.is_uuid_like(str(uuid.uuid4()).upper()))

View File

@@ -61,38 +61,49 @@ class IsCompatibleTestCase(test_base.BaseTestCase):
rule is not enforced, so a current version of 2.0 is deemed to satisfy
a requirement of 1.0.
"""
self.assertFalse(versionutils.is_compatible('2.0', '1.0',
same_major=False))
self.assertTrue(versionutils.is_compatible('1.0', '1.0',
same_major=False))
self.assertTrue(versionutils.is_compatible('1.0', '2.0',
same_major=False))
self.assertFalse(
versionutils.is_compatible('2.0', '1.0', same_major=False)
)
self.assertTrue(
versionutils.is_compatible('1.0', '1.0', same_major=False)
)
self.assertTrue(
versionutils.is_compatible('1.0', '2.0', same_major=False)
)
def test_convert_version_to_int(self):
self.assertEqual(6002000, versionutils.convert_version_to_int('6.2.0'))
self.assertEqual(6004003,
versionutils.convert_version_to_int((6, 4, 3)))
self.assertEqual(5, versionutils.convert_version_to_int((5, )))
self.assertRaises(ValueError,
versionutils.convert_version_to_int, '5a.6b')
self.assertEqual(
6004003, versionutils.convert_version_to_int((6, 4, 3))
)
self.assertEqual(5, versionutils.convert_version_to_int((5,)))
self.assertRaises(
ValueError, versionutils.convert_version_to_int, '5a.6b'
)
def test_convert_version_to_string(self):
self.assertEqual('6.7.0', versionutils.convert_version_to_str(6007000))
self.assertEqual('4', versionutils.convert_version_to_str(4))
def test_convert_version_to_tuple(self):
self.assertEqual((6, 7, 0),
versionutils.convert_version_to_tuple('6.7.0'))
self.assertEqual((6, 7, 0),
versionutils.convert_version_to_tuple('6.7.0a1'))
self.assertEqual((6, 7, 0),
versionutils.convert_version_to_tuple('6.7.0alpha1'))
self.assertEqual((6, 7, 0),
versionutils.convert_version_to_tuple('6.7.0b1'))
self.assertEqual((6, 7, 0),
versionutils.convert_version_to_tuple('6.7.0beta1'))
self.assertEqual((6, 7, 0),
versionutils.convert_version_to_tuple('6.7.0rc1'))
self.assertEqual(
(6, 7, 0), versionutils.convert_version_to_tuple('6.7.0')
)
self.assertEqual(
(6, 7, 0), versionutils.convert_version_to_tuple('6.7.0a1')
)
self.assertEqual(
(6, 7, 0), versionutils.convert_version_to_tuple('6.7.0alpha1')
)
self.assertEqual(
(6, 7, 0), versionutils.convert_version_to_tuple('6.7.0b1')
)
self.assertEqual(
(6, 7, 0), versionutils.convert_version_to_tuple('6.7.0beta1')
)
self.assertEqual(
(6, 7, 0), versionutils.convert_version_to_tuple('6.7.0rc1')
)
class VersionPredicateTest(test_base.BaseTestCase):
@@ -137,7 +148,15 @@ class VersionPredicateTest(test_base.BaseTestCase):
self.assertFalse(pred.satisfied_by('2.0.0'))
def test_version_predicate_valid_invalid(self):
for invalid_str in ['3.0.0', 'foo', '<> 3.0.0', '>=1.0.0;<2.0.0',
'>abc', '>=1.0.0,', '>=1.0.0,2.0.0']:
for invalid_str in [
'3.0.0',
'foo',
'<> 3.0.0',
'>=1.0.0;<2.0.0',
'>abc',
'>=1.0.0,',
'>=1.0.0,2.0.0',
]:
self.assertRaises(
ValueError, versionutils.VersionPredicate, invalid_str)
ValueError, versionutils.VersionPredicate, invalid_str
)

View File

@@ -22,25 +22,23 @@ from oslo_utils import encodeutils
class EncodeUtilsTest(test_base.BaseTestCase):
def test_safe_decode(self):
safe_decode = encodeutils.safe_decode
self.assertRaises(TypeError, safe_decode, True)
self.assertEqual('ni\xf1o',
safe_decode(b"ni\xc3\xb1o",
incoming="utf-8"))
self.assertEqual(
'ni\xf1o', safe_decode(b"ni\xc3\xb1o", incoming="utf-8")
)
self.assertEqual("strange",
safe_decode(b'\x80strange',
errors='ignore'))
self.assertEqual(
"strange", safe_decode(b'\x80strange', errors='ignore')
)
self.assertEqual('\xc0', safe_decode(b'\xc0',
incoming='iso-8859-1'))
self.assertEqual('\xc0', safe_decode(b'\xc0', incoming='iso-8859-1'))
# Forcing incoming to ascii so it falls back to utf-8
self.assertEqual('ni\xf1o',
safe_decode(b'ni\xc3\xb1o',
incoming='ascii'))
self.assertEqual(
'ni\xf1o', safe_decode(b'ni\xc3\xb1o', incoming='ascii')
)
self.assertEqual('foo', safe_decode(b'foo'))
@@ -60,20 +58,25 @@ class EncodeUtilsTest(test_base.BaseTestCase):
self.assertRaises(TypeError, encodeutils.safe_encode, {})
def test_safe_encode_tuple_instead_of_text(self):
self.assertRaises(TypeError, encodeutils.safe_encode, ('foo', 'bar', ))
self.assertRaises(
TypeError,
encodeutils.safe_encode,
(
'foo',
'bar',
),
)
def test_safe_encode_force_incoming_utf8_to_ascii(self):
# Forcing incoming to ascii so it falls back to utf-8
self.assertEqual(
b'ni\xc3\xb1o',
encodeutils.safe_encode(b'ni\xc3\xb1o',
incoming='ascii'),
encodeutils.safe_encode(b'ni\xc3\xb1o', incoming='ascii'),
)
def test_safe_encode_same_encoding_different_cases(self):
with mock.patch.object(encodeutils, 'safe_decode', mock.Mock()):
utf8 = encodeutils.safe_encode(
'foo\xf1bar', encoding='utf-8')
utf8 = encodeutils.safe_encode('foo\xf1bar', encoding='utf-8')
self.assertEqual(
encodeutils.safe_encode(utf8, 'UTF-8', 'utf-8'),
encodeutils.safe_encode(utf8, 'utf-8', 'UTF-8'),
@@ -87,20 +90,24 @@ class EncodeUtilsTest(test_base.BaseTestCase):
def test_safe_encode_different_encodings(self):
text = 'foo\xc3\xb1bar'
result = encodeutils.safe_encode(
text=text, incoming='utf-8', encoding='iso-8859-1')
text=text, incoming='utf-8', encoding='iso-8859-1'
)
self.assertNotEqual(text, result)
self.assertNotEqual(b"foo\xf1bar", result)
def test_to_utf8(self):
self.assertEqual(encodeutils.to_utf8(b'a\xe9\xff'), # bytes
b'a\xe9\xff')
self.assertEqual(encodeutils.to_utf8('a\xe9\xff\u20ac'), # Unicode
b'a\xc3\xa9\xc3\xbf\xe2\x82\xac')
self.assertRaises(TypeError, encodeutils.to_utf8, 123) # invalid
self.assertEqual(
encodeutils.to_utf8(b'a\xe9\xff'), # bytes
b'a\xe9\xff',
)
self.assertEqual(
encodeutils.to_utf8('a\xe9\xff\u20ac'), # Unicode
b'a\xc3\xa9\xc3\xbf\xe2\x82\xac',
)
self.assertRaises(TypeError, encodeutils.to_utf8, 123) # invalid
# oslo.i18n Message objects should also be accepted for convenience.
# It works because Message is a subclass of str. Use the
# lazy translation to get a Message instance of oslo_i18n.
msg = oslo_i18n_fixture.Translation().lazy("test")
self.assertEqual(encodeutils.to_utf8(msg),
b'test')
self.assertEqual(encodeutils.to_utf8(msg), b'test')

View File

@@ -146,9 +146,9 @@ def set_time_override(override_time=None):
:param override_time: datetime instance or list thereof. If not
given, defaults to the current UTC time.
"""
utcnow.override_time = (
override_time or
datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None))
utcnow.override_time = override_time or datetime.datetime.now(
datetime.timezone.utc
).replace(tzinfo=None)
def advance_time_delta(timedelta):
@@ -191,9 +191,15 @@ def marshall_now(now=None):
"""
if not now:
now = utcnow()
d = dict(day=now.day, month=now.month, year=now.year, hour=now.hour,
minute=now.minute, second=now.second,
microsecond=now.microsecond)
d = dict(
day=now.day,
month=now.month,
year=now.year,
hour=now.hour,
minute=now.minute,
second=now.second,
microsecond=now.microsecond,
)
if now.tzinfo:
# Need to handle either iso8601 or python UTC format
tzname = now.tzinfo.tzname(None)
@@ -215,13 +221,15 @@ def unmarshall_time(tyme):
# so the best thing we can do for now is dropping them
# http://bugs.python.org/issue23574
second = min(tyme['second'], _MAX_DATETIME_SEC)
dt = datetime.datetime(day=tyme['day'],
month=tyme['month'],
year=tyme['year'],
hour=tyme['hour'],
minute=tyme['minute'],
second=second,
microsecond=tyme['microsecond'])
dt = datetime.datetime(
day=tyme['day'],
month=tyme['month'],
year=tyme['year'],
hour=tyme['hour'],
minute=tyme['minute'],
second=second,
microsecond=tyme['microsecond'],
)
tzname = tyme.get('tzname')
if tzname:
# Need to handle either iso8601 or python UTC format
@@ -250,7 +258,7 @@ def is_soon(dt, window):
:return: True if expiration is within the given duration
"""
soon = (utcnow() + datetime.timedelta(seconds=window))
soon = utcnow() + datetime.timedelta(seconds=window)
return normalize_time(dt) <= soon
@@ -280,14 +288,17 @@ class Split:
def __repr__(self):
r = reflection.get_class_name(self, fully_qualified=False)
r += "(elapsed={}, length={})".format(self._elapsed, self._length)
r += f"(elapsed={self._elapsed}, length={self._length})"
return r
def time_it(logger, log_level=logging.DEBUG,
message="It took %(seconds).02f seconds to"
" run function '%(func_name)s'",
enabled=True, min_duration=0.01):
def time_it(
logger,
log_level=logging.DEBUG,
message="It took %(seconds).02f seconds to run function '%(func_name)s'",
enabled=True,
min_duration=0.01,
):
"""Decorator that will log how long its decorated function takes to run.
This does **not** output a log if the decorated function fails
@@ -321,9 +332,14 @@ def time_it(logger, log_level=logging.DEBUG,
result = func(*args, **kwargs)
time_taken = w.elapsed()
if min_duration is None or time_taken >= min_duration:
logger.log(log_level, message,
{'seconds': time_taken,
'func_name': reflection.get_callable_name(func)})
logger.log(
log_level,
message,
{
'seconds': time_taken,
'func_name': reflection.get_callable_name(func),
},
)
return result
return wrapper
@@ -343,13 +359,15 @@ class StopWatch:
.. versionadded:: 1.4
"""
_STARTED = 'STARTED'
_STOPPED = 'STOPPED'
def __init__(self, duration=None):
if duration is not None and duration < 0:
raise ValueError("Duration must be greater or equal to"
" zero and not %s" % duration)
raise ValueError(
f"Duration must be greater or equal to zero and not {duration}"
)
self._duration = duration
self._started_at = None
self._stopped_at = None
@@ -385,9 +403,11 @@ class StopWatch:
self._splits = self._splits + (Split(elapsed, length),)
return self._splits[-1]
else:
raise RuntimeError("Can not create a split time of a stopwatch"
" if it has not been started or if it has been"
" stopped")
raise RuntimeError(
"Can not create a split time of a stopwatch"
" if it has not been started or if it has been"
" stopped"
)
def restart(self):
"""Restarts the watch from a started/stopped state."""
@@ -404,8 +424,10 @@ class StopWatch:
def elapsed(self, maximum=None):
"""Returns how many seconds have elapsed."""
if self._state not in (self._STARTED, self._STOPPED):
raise RuntimeError("Can not get the elapsed time of a stopwatch"
" if it has not been started/stopped")
raise RuntimeError(
"Can not get the elapsed time of a stopwatch"
" if it has not been started/stopped"
)
if self._state == self._STOPPED:
elapsed = self._delta_seconds(self._started_at, self._stopped_at)
else:
@@ -435,20 +457,26 @@ class StopWatch:
:type return_none: boolean
"""
if self._state != self._STARTED:
raise RuntimeError("Can not get the leftover time of a stopwatch"
" that has not been started")
raise RuntimeError(
"Can not get the leftover time of a stopwatch"
" that has not been started"
)
if self._duration is None:
if not return_none:
raise RuntimeError("Can not get the leftover time of a watch"
" that has no duration")
raise RuntimeError(
"Can not get the leftover time of a watch"
" that has no duration"
)
return None
return max(0.0, self._duration - self.elapsed())
def expired(self):
"""Returns if the watch has expired (ie, duration provided elapsed)."""
if self._state not in (self._STARTED, self._STOPPED):
raise RuntimeError("Can not check if a stopwatch has expired"
" if it has not been started/stopped")
raise RuntimeError(
"Can not check if a stopwatch has expired"
" if it has not been started/stopped"
)
if self._duration is None:
return False
return self.elapsed() > self._duration
@@ -467,16 +495,18 @@ class StopWatch:
self._state = self._STARTED
return self
else:
raise RuntimeError("Can not resume a stopwatch that has not been"
" stopped")
raise RuntimeError(
"Can not resume a stopwatch that has not been stopped"
)
def stop(self):
"""Stops the watch."""
if self._state == self._STOPPED:
return self
if self._state != self._STARTED:
raise RuntimeError("Can not stop a stopwatch that has not been"
" started")
raise RuntimeError(
"Can not stop a stopwatch that has not been started"
)
self._stopped_at = now()
self._state = self._STOPPED
return self

View File

@@ -20,43 +20,43 @@ Unit constants
# Binary unit constants.
Ki = 1024
"Binary kilo unit"
Mi = 1024 ** 2
Mi = 1024**2
"Binary mega unit"
Gi = 1024 ** 3
Gi = 1024**3
"Binary giga unit"
Ti = 1024 ** 4
Ti = 1024**4
"Binary tera unit"
Pi = 1024 ** 5
Pi = 1024**5
"Binary peta unit"
Ei = 1024 ** 6
Ei = 1024**6
"Binary exa unit"
Zi = 1024 ** 7
Zi = 1024**7
"Binary zetta unit"
Yi = 1024 ** 8
Yi = 1024**8
"Binary yotta unit"
Ri = 1024 ** 9
Ri = 1024**9
"Binary ronna unit"
Qi = 1024 ** 10
Qi = 1024**10
"Binary quetta unit"
# Decimal unit constants.
k = 1000
"Decimal kilo unit"
M = 1000 ** 2
M = 1000**2
"Decimal mega unit"
G = 1000 ** 3
G = 1000**3
"Decimal giga unit"
T = 1000 ** 4
T = 1000**4
"Decimal tera unit"
P = 1000 ** 5
P = 1000**5
"Decimal peta unit"
E = 1000 ** 6
E = 1000**6
"Decimal exa unit"
Z = 1000 ** 7
Z = 1000**7
"Decimal zetta unit"
Y = 1000 ** 8
Y = 1000**8
"Decimal yotta unit"
R = 1000 ** 9
R = 1000**9
"Decimal ronna unit"
Q = 1000 ** 10
Q = 1000**10
"Decimal quetta unit"

View File

@@ -35,11 +35,13 @@ def generate_uuid(dashed=True):
def _format_uuid_string(string):
return (string.replace('urn:', '')
.replace('uuid:', '')
.strip('{}')
.replace('-', '')
.lower())
return (
string.replace('urn:', '')
.replace('uuid:', '')
.strip('{}')
.replace('-', '')
.lower()
)
def is_uuid_like(val):

View File

@@ -100,20 +100,26 @@ class VersionPredicate:
.. versionadded:: 7.4
"""
_PREDICATE_MATCH = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s]+)\s*$")
_COMP_MAP = {
"<": operator.lt, "<=": operator.le, "==": operator.eq,
">": operator.gt, ">=": operator.ge, "!=": operator.ne
"<": operator.lt,
"<=": operator.le,
"==": operator.eq,
">": operator.gt,
">=": operator.ge,
"!=": operator.ne,
}
def __init__(self, predicate_str):
self.pred = [self._parse_predicate(pred) for pred
in predicate_str.split(',')]
self.pred = [
self._parse_predicate(pred) for pred in predicate_str.split(',')
]
def _parse_predicate(self, pred):
res = self._PREDICATE_MATCH.match(pred)
if not res:
raise ValueError("bad package restriction syntax: %s" % pred)
raise ValueError(f"bad package restriction syntax: {pred}")
cond, ver_str = res.groups()
return (cond, packaging.version.Version(ver_str))

View File

@@ -193,9 +193,13 @@ htmlhelp_basename = 'oslo.utilsReleaseNotesDoc'
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'oslo.utilsReleaseNotes.tex',
'oslo.utils Release Notes Documentation',
'oslo.utils Developers', 'manual'),
(
'index',
'oslo.utilsReleaseNotes.tex',
'oslo.utils Release Notes Documentation',
'oslo.utils Developers',
'manual',
),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -224,9 +228,13 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'oslo.utilsReleaseNotes',
'oslo.utils Release Notes Documentation',
['oslo.utils Developers'], 1)
(
'index',
'oslo.utilsReleaseNotes',
'oslo.utils Release Notes Documentation',
['oslo.utils Developers'],
1,
)
]
# If true, show URL addresses after external links.
@@ -238,11 +246,15 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'oslo.utilsReleaseNotes',
'oslo.utils Release Notes Documentation',
'oslo.utils Developers', 'oslo.utilsReleaseNotes',
'One line description of project.',
'Miscellaneous'),
(
'index',
'oslo.utilsReleaseNotes',
'oslo.utils Release Notes Documentation',
'oslo.utils Developers',
'oslo.utilsReleaseNotes',
'One line description of project.',
'Miscellaneous',
),
]
# Documents to append as an appendix to all manuals.

View File

@@ -15,6 +15,4 @@
import setuptools
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
setuptools.setup(setup_requires=['pbr>=2.0.0'], pbr=True)

View File

@@ -13,8 +13,7 @@
# License for the specific language governing permissions and limitations
# under the License.
"""Performance tests for mask_password.
"""
"""Performance tests for mask_password."""
import timeit
@@ -33,14 +32,15 @@ with open(infile) as f:
print('payload has %d bytes' % len(input_str))
for pattern in strutils._SANITIZE_PATTERNS_2['admin_pass']:
print('\ntesting %s' % pattern.pattern)
print(f'\ntesting {pattern.pattern}')
t = timeit.Timer(
r"re.sub(pattern, r'\g<1>***\g<2>', payload)",
"""
f"""
import re
payload = '''{}'''
pattern = re.compile(r'''{}''')
""".format(input_str, pattern.pattern))
payload = '''{input_str}'''
pattern = re.compile(r'''{pattern.pattern}''')
""",
)
print(t.timeit(1))
t = timeit.Timer(

11
tox.ini
View File

@@ -45,10 +45,13 @@ commands =
coverage report
[flake8]
# E731 skipped as assign a lambda expression
# W504 line break after binary operator
ignore = E123,E731,H405,W504
show-source = True
# We only enable the hacking (H) checks
select = H
# H301 Ruff will put commas after imports that can't fit on one line
# H404 Docstrings don't always start with a newline
# H405 Multiline docstrings are okay
ignore = H301,H404,H405
show-source = true
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,__init__.py
[testenv:releasenotes]