Merge "Add ruff"

This commit is contained in:
Zuul
2025-10-27 17:32:00 +00:00
committed by Gerrit Code Review
32 changed files with 949 additions and 656 deletions

View File

@@ -13,7 +13,6 @@
"""Test input for Babel"""
from oslo.i18n import _
from oslo.i18n import _LE
from oslo_log import log as logging

View File

@@ -28,15 +28,17 @@ except KeyError:
PYPI_LOCATION = 'http://pypi.org/project'
KEEP_KEYS = frozenset([
'author',
'author_email',
'maintainer',
'maintainer_email',
'license',
'summary',
'home_page',
])
KEEP_KEYS = frozenset(
[
'author',
'author_email',
'maintainer',
'maintainer_email',
'license',
'summary',
'home_page',
]
)
def iter_names(req):
@@ -51,7 +53,7 @@ def release_data(req):
# Try to find it with various names...
attempted = []
for name in iter_names(req):
url = PYPI_LOCATION + "/%s/json" % (urlparse.quote(name))
url = PYPI_LOCATION + f"/{urlparse.quote(name)}/json"
if url in attempted:
continue
with contextlib.closing(urlreq.urlopen(url)) as uh:
@@ -59,17 +61,20 @@ def release_data(req):
attempted.append(url)
continue
return json.loads(uh.read())
attempted = [" * %s" % u for u in attempted]
raise IOError("Could not find '%s' on pypi\nAttempted urls:\n%s"
% (req.key, "\n".join(attempted)))
attempted = [f" * {u}" for u in attempted]
raise OSError(
"Could not find '{}' on pypi\nAttempted urls:\n{}".format(
req.key, "\n".join(attempted)
)
)
def main():
if len(sys.argv) == 1:
print("%s requirement-file ..." % (sys.argv[0]), file=sys.stderr)
print(f"{sys.argv[0]} requirement-file ...", file=sys.stderr)
sys.exit(1)
for filename in sys.argv[1:]:
print("Analyzing file: %s" % (filename))
print(f"Analyzing file: {filename}")
details = {}
with open(filename, "rb") as fh:
for line in fh.read().splitlines():
@@ -77,15 +82,15 @@ def main():
if line.startswith("#") or not line:
continue
req = packaging.requirement.Requirement(line)
print(" - processing: %s" % (req))
print(f" - processing: {req}")
try:
raw_req_data = release_data(req)
except IOError:
except OSError:
traceback.print_exc()
details[req.key] = None
else:
req_info = {}
for (k, v) in raw_req_data.get('info', {}).items():
for k, v in raw_req_data.get('info', {}).items():
if k not in KEEP_KEYS:
continue
req_info[k] = v
@@ -94,9 +99,12 @@ def main():
'info': req_info,
}
filename, _ext = os.path.splitext(filename)
with open("%s.json" % (filename), "wb") as fh:
fh.write(json.dumps(details, sort_keys=True, indent=4,
separators=(",", ": ")))
with open(f"{filename}.json", "wb") as fh:
fh.write(
json.dumps(
details, sort_keys=True, indent=4, separators=(",", ": ")
)
)
if __name__ == '__main__':

View File

@@ -24,12 +24,14 @@ from openstack_requirements import requirement
MIN_PY_VERSION = '3.5'
PY3_GLOBAL_SPECIFIER_RE = re.compile(
r'python_version(==|>=|>)[\'"]3\.\d+[\'"]')
r'python_version(==|>=|>)[\'"]3\.\d+[\'"]'
)
PY3_LOCAL_SPECIFIER_RE = re.compile(
r'python_version(==|>=|>|<=|<)[\'"]3\.\d+[\'"]')
r'python_version(==|>=|>|<=|<)[\'"]3\.\d+[\'"]'
)
class RequirementsList(object):
class RequirementsList:
def __init__(self, name, project):
self.name = name
self.reqs_by_file = {}
@@ -38,8 +40,7 @@ class RequirementsList(object):
@property
def reqs(self):
return {k: v for d in self.reqs_by_file.values()
for k, v in d.items()}
return {k: v for d in self.reqs_by_file.values() for k, v in d.items()}
def extract_reqs(self, content, strict):
reqs = collections.defaultdict(set)
@@ -51,10 +52,13 @@ class RequirementsList(object):
list_reqs = [r for (r, line) in entries]
# Strip the comments out before checking if there are duplicates
list_reqs_stripped = [r._replace(comment='') for r in list_reqs]
if strict and len(list_reqs_stripped) != len(set(
list_reqs_stripped)):
print("ERROR: Requirements file has duplicate entries "
"for package %s : %r." % (name, list_reqs))
if strict and len(list_reqs_stripped) != len(
set(list_reqs_stripped)
):
print(
"ERROR: Requirements file has duplicate entries "
f"for package {name} : {list_reqs!r}."
)
self.failed = True
reqs[name].update(list_reqs)
return reqs
@@ -67,17 +71,18 @@ class RequirementsList(object):
- each has a list of Requirements objects
- duplicates are not permitted within that list
"""
print("Checking %(name)s" % {'name': self.name})
print(f"Checking {self.name}")
# First, parse.
for fname, content in self.project.get('requirements', {}).items():
print("Processing %(fname)s" % {'fname': fname})
print(f"Processing {fname}")
if strict and not content.endswith('\n'):
print("Requirements file %s does not "
"end with a newline." % fname)
print(
f"Requirements file {fname} does not end with a newline."
)
self.reqs_by_file[fname] = self.extract_reqs(content, strict)
for name, content in project.extras(self.project).items():
print("Processing .[%(extra)s]" % {'extra': name})
print(f"Processing .[{name}]")
self.reqs_by_file[name] = self.extract_reqs(content, strict)
@@ -97,7 +102,6 @@ def _is_requirement_in_global_reqs(
):
req_exclusions = _get_exclusions(local_req)
for global_req in global_reqs:
matching = True
for aname in ['package', 'location', 'markers']:
local_req_val = getattr(local_req, aname)
@@ -106,20 +110,15 @@ def _is_requirement_in_global_reqs(
# if a python 3 version is not spefied in only one of
# global requirements or local requirements, allow it since
# python 3-only is okay
if (
allow_3_only and
matching and
aname == 'markers'
):
if (
not local_req_val and
PY3_GLOBAL_SPECIFIER_RE.match(global_req_val)
if allow_3_only and matching and aname == 'markers':
if not local_req_val and PY3_GLOBAL_SPECIFIER_RE.match(
global_req_val
):
continue
if (
not global_req_val and
local_req_val and
PY3_LOCAL_SPECIFIER_RE.match(local_req_val)
not global_req_val
and local_req_val
and PY3_LOCAL_SPECIFIER_RE.match(local_req_val)
):
continue
@@ -128,9 +127,9 @@ def _is_requirement_in_global_reqs(
# requires a feature that is only available in a newer Python
# library, while other packages are happy without this feature
if (
matching and
aname == 'markers' and
local_req.package in backports
matching
and aname == 'markers'
and local_req.package in backports
):
if re.match(
r'python_version(==|<=|<)[\'"]3\.\d+[\'"]',
@@ -142,9 +141,13 @@ def _is_requirement_in_global_reqs(
)
continue
print(f'WARNING: possible mismatch found for package "{local_req.package}"') # noqa: E501
print(
f'WARNING: possible mismatch found for package "{local_req.package}"'
) # noqa: E501
print(f' Attribute "{aname}" does not match')
print(f' "{local_req_val}" does not match "{global_req_val}"') # noqa: E501
print(
f' "{local_req_val}" does not match "{global_req_val}"'
) # noqa: E501
print(f' {local_req}')
print(f' {global_req}')
matching = False
@@ -160,23 +163,21 @@ def _is_requirement_in_global_reqs(
else:
difference = req_exclusions - global_exclusions
print(
"ERROR: Requirement for package {} "
f"ERROR: Requirement for package {local_req.package} "
"excludes a version not excluded in the "
"global list.\n"
" Local settings : {}\n"
" Global settings: {}\n"
" Unexpected : {}".format(
local_req.package, req_exclusions, global_exclusions,
difference)
f" Local settings : {req_exclusions}\n"
f" Global settings: {global_exclusions}\n"
f" Unexpected : {difference}"
)
return False
print(
"ERROR: "
"Could not find a global requirements entry to match package {}. "
f"Could not find a global requirements entry to match package {local_req.package}. "
"If the package is already included in the global list, "
"the name or platform markers there may not match the local "
"settings.".format(local_req.package)
"settings."
)
return False
@@ -204,9 +205,11 @@ def _get_python3_reqs(reqs):
results.append(req)
else:
req_markers = markers.Marker(req.markers)
if req_markers.evaluate({
'python_version': MIN_PY_VERSION,
}):
if req_markers.evaluate(
{
'python_version': MIN_PY_VERSION,
}
):
results.append(req)
return results
@@ -228,7 +231,7 @@ def _validate_one(
return False
if name not in global_reqs:
print("ERROR: Requirement '%s' not in openstack/requirements" % reqs)
print(f"ERROR: Requirement '{reqs}' not in openstack/requirements")
return True
counts = {}
@@ -240,15 +243,19 @@ def _validate_one(
counts[''] = counts.get('', 0) + 1
if not _is_requirement_in_global_reqs(
req, global_reqs[name], backports, allow_3_only,
req,
global_reqs[name],
backports,
allow_3_only,
):
return True
# check for minimum being defined
min = [s for s in req.specifiers.split(',') if '>' in s]
if not min:
print("ERROR: Requirement for package '%s' has no lower bound" %
name)
print(
f"ERROR: Requirement for package '{name}' has no lower bound"
)
return True
for extra, count in counts.items():
@@ -256,21 +263,27 @@ def _validate_one(
# just need to make sure we have at least the number of entries for
# supported Python 3 versions.
if count != len(global_reqs[name]):
if (allow_3_only and
count >= len(_get_python3_reqs(global_reqs[name]))):
print("WARNING (probably OK for Ussuri and later): "
"Package '%s%s' is only tracking python 3 "
"requirements" % (
name,
('[%s]' % extra) if extra else ''))
if allow_3_only and count >= len(
_get_python3_reqs(global_reqs[name])
):
print(
"WARNING (probably OK for Ussuri and later): "
"Package '{}{}' is only tracking python 3 "
"requirements".format(
name, (f'[{extra}]') if extra else ''
)
)
continue
print("ERROR: Package '%s%s' requirement does not match "
"number of lines (%d) in "
"openstack/requirements" % (
name,
('[%s]' % extra) if extra else '',
len(global_reqs[name])))
print(
"ERROR: Package '{}{}' requirement does not match "
"number of lines ({}) in "
"openstack/requirements".format(
name,
(f'[{extra}]') if extra else '',
len(global_reqs[name]),
)
)
return True
return False
@@ -287,7 +300,7 @@ def validate(
# iterate through the changing entries and see if they match the global
# equivalents we want enforced
for fname, freqs in head_reqs.reqs_by_file.items():
print("Validating %(fname)s" % {'fname': fname})
print(f"Validating {fname}")
for name, reqs in freqs.items():
failed = (
_validate_one(

View File

@@ -28,24 +28,33 @@ from openstack_requirements.utils import read_requirements_file
def main(args=None):
parser = argparse.ArgumentParser()
parser.add_argument(
'project',
default='',
help='path to the project source root folder.')
'project', default='', help='path to the project source root folder.'
)
parser.add_argument(
'-u', '--upper-constraints',
'-u',
'--upper-constraints',
default='upper-constraints.txt',
help='path to the upper-constraints.txt file')
help='path to the upper-constraints.txt file',
)
parser.add_argument(
'-g', '--global-requirements',
'-g',
'--global-requirements',
default='global-requirements.txt',
help='Path to the global-requirements.txt file')
help='Path to the global-requirements.txt file',
)
parser.add_argument(
'-b', '-d', '--denylist',
'-b',
'-d',
'--denylist',
default='denylist.txt',
help='Path to the denylist.txt file')
help='Path to the denylist.txt file',
)
parser.add_argument(
'-G', '--gr-check', action='store_true',
help='Do a specifier check of global-requirements')
'-G',
'--gr-check',
action='store_true',
help='Do a specifier check of global-requirements',
)
args = parser.parse_args(args)
upper_constraints = read_requirements_file(args.upper_constraints)
@@ -55,20 +64,23 @@ def main(args=None):
error_count = 0
for require_file, data in project_data.get('requirements', {}).items():
print(u'\nComparing %s with global-requirements and upper-constraints'
% require_file)
print(
f'\nComparing {require_file} with global-requirements and upper-constraints'
)
requirements = requirement.parse(data)
for name, spec_list in requirements.items():
if not name or name in denylist:
continue
if name not in global_requirements:
print(u'%s from %s not found in global-requirements' % (
name, require_file))
print(
f'{name} from {require_file} not found in global-requirements'
)
error_count += 1
continue
if name not in upper_constraints:
print(u'%s from %s not found in upper-constraints' % (
name, require_file))
print(
f'{name} from {require_file} not found in upper-constraints'
)
error_count += 1
continue
elif spec_list:
@@ -83,9 +95,9 @@ def main(args=None):
# then something is wrong.
if Version(uc_spec.version) not in specs:
print(
u'%s must be <= %s from upper-constraints and '
'include the upper-constraints version' %
(name, uc_spec.version))
f'{name} must be <= {uc_spec.version} from upper-constraints and '
'include the upper-constraints version'
)
error_count += 1
continue
if args.gr_check:
@@ -97,9 +109,9 @@ def main(args=None):
continue
if spec.version not in spec_gr:
print(
u'Specifier %s from %s is failing check '
'from global-requirements specifiers %s' %
(spec.version, name, str(spec_gr)))
f'Specifier {spec.version} from {name} is failing check '
f'from global-requirements specifiers {str(spec_gr)}'
)
error_count += 1
continue

View File

@@ -26,7 +26,8 @@ def edit(reqs, name, replacement):
reqs.pop(key, None)
else:
reqs[key] = [
(requirement.Requirement('', '', '', '', replacement), '')]
(requirement.Requirement('', '', '', '', replacement), '')
]
result = []
for entries in reqs.values():
for entry, _ in entries:
@@ -47,8 +48,8 @@ def _validate_options(options, args):
raise Exception("Not enough arguments given")
if not os.path.exists(args[0]):
raise Exception(
"Constraints file %(con)s not found."
% dict(con=args[0]))
"Constraints file {con} not found.".format(**dict(con=args[0]))
)
def main(argv=None, stdout=None):
@@ -59,17 +60,18 @@ def main(argv=None, stdout=None):
"replacement". If "name" is not present, it is added to the end of
the file. If "replacement" is missing or empty, remove "name" from
the file.
"""))
"""),
)
options, args = parser.parse_args(argv)
if stdout is None:
stdout = sys.stdout
_validate_options(options, args)
args = args + [""]
content = open(args[0], 'rt').read()
content = open(args[0]).read()
reqs = requirement.parse(content, permit_urls=True)
out_reqs = edit(reqs, args[1], args[2])
out = requirement.to_content(out_reqs)
with open(args[0] + '.tmp', 'wt') as f:
with open(args[0] + '.tmp', 'w') as f:
f.write(out)
if os.path.exists(args[0]):
os.remove(args[0])

View File

@@ -34,7 +34,7 @@ SECURITY_WARNING = [
"# testing, and can contain known vulnerabilities. Consumers are\n",
"# *STRONGLY* encouraged to rely on curated distributions of OpenStack\n",
"# or manage security patching of dependencies themselves.\n",
]
]
def _parse_freeze(text):
@@ -47,7 +47,7 @@ def _parse_freeze(text):
for line in text.splitlines():
line = line.strip()
if line.startswith('-'):
raise Exception("Irregular line: %s" % line)
raise Exception(f"Irregular line: {line}")
if line.startswith('#'):
continue
if not line:
@@ -82,27 +82,34 @@ def _freeze(requirements, python):
output = []
try:
version_out = subprocess.check_output(
[python, "--version"], stderr=subprocess.STDOUT)
[python, "--version"], stderr=subprocess.STDOUT
)
output.append(version_out)
version_all = version_out.decode('utf-8').split()[1]
version = '.'.join(version_all.split('.')[:2])
with fixtures.TempDir() as temp:
output.append(subprocess.check_output(
[python, '-m', 'venv', temp.path]))
output.append(
subprocess.check_output([python, '-m', 'venv', temp.path])
)
pip_bin = os.path.join(temp.path, 'bin', 'pip')
output.append(subprocess.check_output(
[pip_bin, 'install', '-U', 'pip', 'setuptools', 'wheel']))
output.append(subprocess.check_output(
[pip_bin, 'install', '-r', requirements]))
freeze = subprocess.check_output(
[pip_bin, 'freeze'])
output.append(
subprocess.check_output(
[pip_bin, 'install', '-U', 'pip', 'setuptools', 'wheel']
)
)
output.append(
subprocess.check_output(
[pip_bin, 'install', '-r', requirements]
)
)
freeze = subprocess.check_output([pip_bin, 'freeze'])
output.append(freeze)
return (version, _parse_freeze(freeze.decode('utf-8')))
except Exception as exc:
if isinstance(exc, subprocess.CalledProcessError):
output.append(exc.output)
raise Exception(
"Failed to generate freeze: %s %s" % (
"Failed to generate freeze: {} {}".format(
b'\n'.join(output).decode('utf-8'),
exc,
)
@@ -132,17 +139,19 @@ def _combine_freezes(freezes, denylist=None):
:return: A list of '\n' terminated lines for a requirements file.
"""
packages = {} # {package : {version : [py_version]}}
excludes = frozenset((requirement.canonical_name(s)
for s in denylist) if denylist else ())
excludes = frozenset(
(requirement.canonical_name(s) for s in denylist) if denylist else ()
)
reference_versions = []
for py_version, freeze in freezes:
if py_version in reference_versions:
raise Exception("Duplicate python %s" % py_version)
raise Exception(f"Duplicate python {py_version}")
reference_versions.append(py_version)
for package, version in freeze:
packages.setdefault(
package, {}).setdefault(version, []).append(py_version)
packages.setdefault(package, {}).setdefault(version, []).append(
py_version
)
for package, versions in sorted(packages.items()):
if package.lower() in excludes:
@@ -151,7 +160,9 @@ def _combine_freezes(freezes, denylist=None):
if len(versions) > 1:
# markers for packages with multiple versions - we use python
# version ranges for these
for idx, (version, py_versions) in enumerate(sorted(versions.items())): # noqa: E501
for idx, (version, py_versions) in enumerate(
sorted(versions.items())
): # noqa: E501
if idx == 0: # lower-bound
marker = f"python_version<='{py_versions[-1]}'"
elif idx + 1 != len(versions): # intermediate version(s)
@@ -163,7 +174,9 @@ def _combine_freezes(freezes, denylist=None):
elif list(versions.values())[0] != reference_versions:
# markers for packages with a single version - these are usually
# version specific so we use strict python versions for these
for idx, (version, py_versions) in enumerate(sorted(versions.items())): # noqa: E501
for idx, (version, py_versions) in enumerate(
sorted(versions.items())
): # noqa: E501
for py_version in sorted(py_versions):
marker = f"python_version=='{py_version}'"
yield f'{package}==={version};{marker}\n'
@@ -179,13 +192,16 @@ def _clone_versions(freezes, options):
if version in options.version_map:
for dst_version in sorted(options.version_map[version]):
if dst_version not in versions:
print("Duplicating %s freeze to %s" %
(version, dst_version), file=sys.stderr)
print(
f"Duplicating {version} freeze to {dst_version}",
file=sys.stderr,
)
freezes.append((dst_version, copy.copy(freeze)))
# -- untested UI glue from here down.
def _validate_options(options):
"""Check that options are valid.
@@ -196,23 +212,30 @@ def _validate_options(options):
for python in options.pythons:
if not shutil.which(python):
raise Exception(
"Python %(python)s not found." % dict(python=python))
"Python {python} not found.".format(**dict(python=python))
)
if not options.requirements:
raise Exception("No requirements file specified - see -r.")
if not os.path.exists(options.requirements):
raise Exception(
"Requirements file %(req)s not found."
% dict(req=options.requirements))
"Requirements file {req} not found.".format(
**dict(req=options.requirements)
)
)
if options.denylist and not os.path.exists(options.denylist):
raise Exception(
"Denylist file %(path)s not found."
% dict(path=options.denylist))
"Denylist file {path} not found.".format(
**dict(path=options.denylist)
)
)
version_map = {}
for map_entry in options.version_map:
if ':' not in map_entry:
raise Exception(
"Invalid version-map entry %(map_entry)s"
% dict(map_entry=map_entry))
"Invalid version-map entry {map_entry}".format(
**dict(map_entry=map_entry)
)
)
src, dst = map_entry.split(':')
version_map.setdefault(src, set())
version_map[src].add(dst)
@@ -223,7 +246,7 @@ def _parse_denylist(path):
"""Return the strings from path if it is not None."""
if path is None:
return []
with open(path, 'rt') as f:
with open(path) as f:
return [line.strip() for line in f]
@@ -243,30 +266,43 @@ def _make_sort_key(line):
def main(argv=None, stdout=None):
parser = optparse.OptionParser()
parser.add_option(
"-p", dest="pythons", action="append",
"-p",
dest="pythons",
action="append",
help="Specify Python versions to use when generating constraints."
"e.g. -p /usr/bin/python3")
"e.g. -p /usr/bin/python3",
)
parser.add_option(
"-r", dest="requirements", help="Requirements file to process.")
"-r", dest="requirements", help="Requirements file to process."
)
parser.add_option(
"-b", "-d", dest="denylist",
help="Filename of a list of package names to exclude.")
"-b",
"-d",
dest="denylist",
help="Filename of a list of package names to exclude.",
)
parser.add_option(
"--version-map", dest='version_map', default=[], action='append',
help=('Add a : separated list of versions to clone. To \'clone\' '
'a freeze generated by python3.4 to python3.5 specify 3.4:3.5. '
'This is intended as as a way to transition between python '
'versions when it\'s not possible to have all versions '
'installed'))
"--version-map",
dest='version_map',
default=[],
action='append',
help=(
'Add a : separated list of versions to clone. To \'clone\' '
'a freeze generated by python3.4 to python3.5 specify 3.4:3.5. '
'This is intended as as a way to transition between python '
'versions when it\'s not possible to have all versions '
'installed'
),
)
options, args = parser.parse_args(argv)
if stdout is None:
stdout = sys.stdout
_validate_options(options)
freezes = [
_freeze(options.requirements, python) for python in options.pythons]
_freeze(options.requirements, python) for python in options.pythons
]
_clone_versions(freezes, options)
denylist = _parse_denylist(options.denylist)
frozen = [
*sorted(_combine_freezes(freezes, denylist), key=_make_sort_key)]
frozen = [*sorted(_combine_freezes(freezes, denylist), key=_make_sort_key)]
stdout.writelines(SECURITY_WARNING + frozen)
stdout.flush()

View File

@@ -19,7 +19,7 @@ from openstack_requirements import requirement
def write_requirements_file(filename, reqs):
with open(filename + 'tmp', 'wt') as f:
with open(filename + 'tmp', 'w') as f:
f.write(reqs)
if os.path.exists(filename):
os.remove(filename)
@@ -28,11 +28,16 @@ def write_requirements_file(filename, reqs):
def main():
parser = argparse.ArgumentParser(
description="Normalize requirements files")
description="Normalize requirements files"
)
parser.add_argument('requirements', help='requirements file input')
parser.add_argument('-s', '--save', action='store_true', default=False,
help=('save normalized requirements '
'file instead of displaying it'))
parser.add_argument(
'-s',
'--save',
action='store_true',
default=False,
help=('save normalized requirements file instead of displaying it'),
)
args = parser.parse_args()
with open(args.requirements) as f:
requirements = [line.strip() for line in f.readlines()]

View File

@@ -10,9 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
"""Apply validation rules to the various requirements lists.
"""
"""Apply validation rules to the various requirements lists."""
import argparse
import os
@@ -44,44 +42,47 @@ def main():
error_count = 0
# Check the format of the constraints file.
print('\nChecking %s' % args.upper_constraints)
print(f'\nChecking {args.upper_constraints}')
constraints_txt = read_requirements_file(args.upper_constraints)
for msg in constraints.check_format(constraints_txt):
print(msg)
error_count += 1
# Check that the constraints and requirements are compatible.
print('\nChecking %s' % args.global_requirements)
print(f'\nChecking {args.global_requirements}')
global_reqs = read_requirements_file(args.global_requirements)
for msg in constraints.check_compatible(global_reqs, constraints_txt):
print(msg)
error_count += 1
# Check requirements to satisfy policy.
print('\nChecking requirements on %s' % args.global_requirements)
print(f'\nChecking requirements on {args.global_requirements}')
for msg in requirement.check_reqs_bounds_policy(global_reqs):
print(msg)
error_count += 1
# Check that global requirements are uniformly formatted
print('\nValidating uniform formatting on %s' % args.global_requirements)
with open(args.global_requirements, 'rt') as f:
print(f'\nValidating uniform formatting on {args.global_requirements}')
with open(args.global_requirements) as f:
for line in f:
if line == '\n':
continue
req = requirement.parse_line(line)
normed_req = req.to_line(comment_prefix=' ', sort_specifiers=True)
if line.rstrip() != normed_req.rstrip():
print("-%s\n+%s" % (line.rstrip(), normed_req.rstrip()))
print(f"-{line.rstrip()}\n+{normed_req.rstrip()}")
error_count += 1
# Check that all of the items in the global-requirements list
# appear in exactly one of the constraints file or the denylist.
print('\nChecking %s' % args.denylist)
print(f'\nChecking {args.denylist}')
denylist = read_requirements_file(args.denylist)
for msg in constraints.check_denylist_coverage(
global_reqs, constraints_txt, denylist,
os.path.basename(args.upper_constraints)):
global_reqs,
constraints_txt,
denylist,
os.path.basename(args.upper_constraints),
):
print(msg)
error_count += 1

View File

@@ -10,24 +10,24 @@
# License for the specific language governing permissions and limitations
# under the License.
"""Apply validation rules to the projects.txt file
"""
"""Apply validation rules to the projects.txt file"""
import argparse
from openstack_requirements import project_config
_BLACKLIST = set([
# NOTE(dhellmann): It's not clear why these don't get updates,
# except that trying to do so may break the test jobs using them
# because of the nature of the projects.
'openstack/hacking',
'openstack/pbr',
# We can't enforce the check rules against this repo.
'openstack/requirements',
])
_BLACKLIST = set(
[
# NOTE(dhellmann): It's not clear why these don't get updates,
# except that trying to do so may break the test jobs using them
# because of the nature of the projects.
'openstack/hacking',
'openstack/pbr',
# We can't enforce the check rules against this repo.
'openstack/requirements',
]
)
def main():
@@ -43,8 +43,8 @@ def main():
error_count = 0
print('\nChecking %s' % args.projects_list)
with open(args.projects_list, 'r') as f:
print(f'\nChecking {args.projects_list}')
with open(args.projects_list) as f:
for repo in f:
repo = repo.strip()
if repo.startswith('#'):
@@ -52,7 +52,8 @@ def main():
if repo in _BLACKLIST:
continue
pe = project_config.require_check_requirements_for_repo(
zuul_projects, repo)
zuul_projects, repo
)
for e in pe:
print(e)
error_count += 1

View File

@@ -19,41 +19,46 @@ from openstack_requirements import requirement
# should not be denylisted. We don't know yet what versions they
# should have, so just ignore them for a little while until we have
# time to figure that out.
UNCONSTRAINABLE = set([
'argparse',
'pip',
'setuptools',
'wmi',
'pywin32',
'pymi',
'wheel',
'', # blank lines
])
UNCONSTRAINABLE = set(
[
'argparse',
'pip',
'setuptools',
'wmi',
'pywin32',
'pymi',
'wheel',
'', # blank lines
]
)
def check_denylist_coverage(global_reqs, constraints, denylist,
constraints_list_name):
def check_denylist_coverage(
global_reqs, constraints, denylist, constraints_list_name
):
"""Report any items that are not properly constrained.
Check that all of the items in the global-requirements list
appear either in the constraints file or the denylist.
"""
to_be_constrained = (
set(global_reqs.keys()) - set(denylist.keys())
- UNCONSTRAINABLE
set(global_reqs.keys()) - set(denylist.keys()) - UNCONSTRAINABLE
)
constrained = set(constraints.keys()) - set([''])
unconstrained = to_be_constrained - constrained
for u in sorted(unconstrained):
yield ('%r appears in global-requirements.txt '
'but not %s or denylist.txt' % (u, constraints_list_name))
yield (
f'{u!r} appears in global-requirements.txt '
f'but not {constraints_list_name} or denylist.txt'
)
# Verify that the denylist packages are not also listed in
# the constraints file.
dupes = constrained.intersection(set(denylist.keys()))
for d in dupes:
yield ('%r appears in both denylist.txt and %s'
% (d, constraints_list_name))
yield (
f'{d!r} appears in both denylist.txt and {constraints_list_name}'
)
def check_format(parsed_constraints):
@@ -61,8 +66,9 @@ def check_format(parsed_constraints):
for name, spec_list in parsed_constraints.items():
for req, original_line in spec_list:
if not req.specifiers.startswith('==='):
yield ('Invalid constraint for %s does not have 3 "=": %s' %
(name, original_line))
yield (
f'Invalid constraint for {name} does not have 3 "=": {original_line}'
)
def check_compatible(global_reqs, constraints):
@@ -88,6 +94,7 @@ def check_compatible(global_reqs, constraints):
:param constraints: The same from given constraints.txt.
:return: A list of the error messages for constraints that failed.
"""
def satisfied(reqs, name, version, failures):
if name not in reqs:
return True
@@ -98,9 +105,11 @@ def check_compatible(global_reqs, constraints):
if spec.contains(version, prereleases=True):
return True
tested.append(constraint.specifiers)
failures.append('Constraint %s for %s does not match requirement %s' %
(version, name, tested))
failures.append(
f'Constraint {version} for {name} does not match requirement {tested}'
)
return False
failures = []
for pkg_constraints in constraints.values():
for constraint, _ in pkg_constraints:

View File

@@ -34,14 +34,15 @@ def extras(project):
# IO from here to the end of the file.
def _safe_read(project, filename, output=None):
if output is None:
output = project
try:
path = os.path.join(project['root'], filename)
with io.open(path, 'rt', encoding="utf-8") as f:
with open(path, encoding="utf-8") as f:
output[filename] = f.read()
except IOError as e:
except OSError as e:
if e.errno != errno.ENOENT:
raise
@@ -62,13 +63,15 @@ def read(root):
requirements = {}
result['requirements'] = requirements
target_files = [
'requirements.txt', 'tools/pip-requires',
'test-requirements.txt', 'tools/test-requires',
'requirements.txt',
'tools/pip-requires',
'test-requirements.txt',
'tools/test-requires',
'doc/requirements.txt',
]
for py_version in (2, 3):
target_files.append('requirements-py%s.txt' % py_version)
target_files.append('test-requirements-py%s.txt' % py_version)
target_files.append(f'requirements-py{py_version}.txt')
target_files.append(f'test-requirements-py{py_version}.txt')
for target_file in target_files:
_safe_read(result, target_file, output=requirements)
return result

View File

@@ -10,8 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
"""Work with the project-config repository.
"""
"""Work with the project-config repository."""
import requests
import yaml
@@ -32,10 +31,7 @@ def get_zuul_projects_data(url=ZUUL_PROJECTS_URL):
raw = yaml.safe_load(r.text)
# Add a mapping from repo name to repo settings, since that is how
# we access this most often.
projects = {
p['project']['name']: p['project']
for p in raw
}
projects = {p['project']['name']: p['project'] for p in raw}
return projects
@@ -49,8 +45,7 @@ def require_check_requirements_for_repo(zuul_projects, repo):
if repo not in zuul_projects:
errors.append(
('did not find %s in %s' % (repo, ZUUL_PROJECTS_FILENAME),
True)
(f'did not find {repo} in {ZUUL_PROJECTS_FILENAME}', True)
)
else:
p = zuul_projects[repo]
@@ -59,7 +54,6 @@ def require_check_requirements_for_repo(zuul_projects, repo):
# jobs, because we want projects to use the templates.
if 'check-requirements' not in templates:
errors.append(
'%s no check-requirements job specified for %s'
% (ZUUL_PROJECTS_FILENAME, repo)
f'{ZUUL_PROJECTS_FILENAME} no check-requirements job specified for {repo}'
)
return errors

View File

@@ -23,42 +23,62 @@ import re
def key_specifier(a):
weight = {'>=': 0, '>': 0,
'===': 1, '==': 1, '~=': 1, '!=': 1,
'<': 2, '<=': 2}
weight = {
'>=': 0,
'>': 0,
'===': 1,
'==': 1,
'~=': 1,
'!=': 1,
'<': 2,
'<=': 2,
}
a = a._spec
return (weight[a[0]], packaging.version.parse(a[1]))
class Requirement(collections.namedtuple('Requirement',
['package', 'location', 'specifiers',
'markers', 'comment', 'extras'])):
def __new__(cls, package, location, specifiers, markers, comment,
extras=None):
return super(Requirement, cls).__new__(
cls, package, location, specifiers, markers, comment,
frozenset(extras or ()))
class Requirement(
collections.namedtuple(
'Requirement',
['package', 'location', 'specifiers', 'markers', 'comment', 'extras'],
)
):
def __new__(
cls, package, location, specifiers, markers, comment, extras=None
):
return super().__new__(
cls,
package,
location,
specifiers,
markers,
comment,
frozenset(extras or ()),
)
def to_line(self, marker_sep=';', line_prefix='', comment_prefix=' ',
sort_specifiers=False):
def to_line(
self,
marker_sep=';',
line_prefix='',
comment_prefix=' ',
sort_specifiers=False,
):
comment_p = comment_prefix if self.package else ''
comment = (comment_p + self.comment if self.comment else '')
comment = comment_p + self.comment if self.comment else ''
marker = marker_sep + self.markers if self.markers else ''
package = line_prefix + self.package if self.package else ''
location = self.location + '#egg=' if self.location else ''
extras = '[%s]' % ",".join(sorted(self.extras)) if self.extras else ''
extras = (
'[{}]'.format(",".join(sorted(self.extras))) if self.extras else ''
)
specifiers = self.specifiers
if sort_specifiers:
_specifiers = packaging.specifiers.SpecifierSet(specifiers)
_specifiers = ['%s' % s for s in sorted(_specifiers,
key=key_specifier)]
_specifiers = [
f'{s}' for s in sorted(_specifiers, key=key_specifier)
]
specifiers = ','.join(_specifiers)
return '%s%s%s%s%s%s\n' % (location,
package,
extras,
specifiers,
marker,
comment)
return f'{location}{package}{extras}{specifiers}{marker}{comment}\n'
Requirements = collections.namedtuple('Requirements', ['reqs'])
@@ -66,7 +86,8 @@ Requirements = collections.namedtuple('Requirements', ['reqs'])
url_re = re.compile(
r'^(?P<url>\s*(?:-e\s)?\s*(?:(?:[a-z]+\+)?(?:[a-z]+))://[^#]*)'
r'#egg=(?P<name>[-\.\w]+)')
r'#egg=(?P<name>[-\.\w]+)'
)
def canonical_name(req_name):
@@ -126,7 +147,7 @@ def parse_line(req_line, permit_urls=False):
marker_pos = max(semi_pos, colon_pos)
if marker_pos < 0:
marker_pos = hash_pos
markers = req_line[marker_pos + 1:hash_pos].strip()
markers = req_line[marker_pos + 1 : hash_pos].strip()
if hash_pos != end:
comment = req_line[hash_pos:]
else:
@@ -154,7 +175,7 @@ def to_content(reqs, marker_sep=';', line_prefix=''):
lines = []
for req in reqs.reqs:
lines.append(req.to_line(marker_sep, line_prefix))
return u''.join(lines)
return ''.join(lines)
def to_dict(req_sequence):
@@ -169,12 +190,15 @@ def to_dict(req_sequence):
def _pass_through(req_line, permit_urls=False):
"""Identify unparsable lines."""
if permit_urls:
return (req_line.startswith('http://tarballs.openstack.org/') or
req_line.startswith('-f'))
return req_line.startswith(
'http://tarballs.openstack.org/'
) or req_line.startswith('-f')
else:
return (req_line.startswith('http://tarballs.openstack.org/') or
req_line.startswith('-e') or
req_line.startswith('-f'))
return (
req_line.startswith('http://tarballs.openstack.org/')
or req_line.startswith('-e')
or req_line.startswith('-f')
)
def to_reqs(content, permit_urls=False):
@@ -209,5 +233,6 @@ def check_reqs_bounds_policy(global_reqs):
if spec.operator == '>=':
lower_bound.add(spec)
if len(lower_bound):
yield ('Requirement %s should not include a >= specifier' %
req.package)
yield (
f'Requirement {req.package} should not include a >= specifier'
)

View File

@@ -29,15 +29,16 @@ class Project(fixtures.Fixture):
"""A single project we can update."""
def __init__(
self, req_path, setup_path, setup_cfg_path, test_req_path=None):
super(Project, self).__init__()
self, req_path, setup_path, setup_cfg_path, test_req_path=None
):
super().__init__()
self._req_path = req_path
self._setup_path = setup_path
self._setup_cfg_path = setup_cfg_path
self._test_req_path = test_req_path
def setUp(self):
super(Project, self).setUp()
super().setUp()
self.root = self.useFixture(fixtures.TempDir()).path
self.req_file = os.path.join(self.root, 'requirements.txt')
self.setup_file = os.path.join(self.root, 'setup.py')
@@ -54,34 +55,39 @@ project_fixture = Project(
"openstack_requirements/tests/files/project.txt",
"openstack_requirements/tests/files/setup.py",
"openstack_requirements/tests/files/setup.cfg",
"openstack_requirements/tests/files/test-project.txt")
"openstack_requirements/tests/files/test-project.txt",
)
bad_project_fixture = Project(
"openstack_requirements/tests/files/project-with-bad-requirement.txt",
"openstack_requirements/tests/files/setup.py",
"openstack_requirements/tests/files/setup.cfg")
"openstack_requirements/tests/files/setup.cfg",
)
oslo_fixture = Project(
"openstack_requirements/tests/files/project-with-oslo-tar.txt",
"openstack_requirements/tests/files/old-setup.py",
"openstack_requirements/tests/files/setup.cfg")
"openstack_requirements/tests/files/setup.cfg",
)
pbr_fixture = Project(
"openstack_requirements/tests/files/project.txt",
"openstack_requirements/tests/files/setup.py",
"openstack_requirements/tests/files/pbr_setup.cfg",
"openstack_requirements/tests/files/test-project.txt")
"openstack_requirements/tests/files/test-project.txt",
)
class GlobalRequirements(fixtures.Fixture):
def setUp(self):
super(GlobalRequirements, self).setUp()
super().setUp()
self.root = self.useFixture(fixtures.TempDir()).path
self.req_file = os.path.join(self.root, "global-requirements.txt")
shutil.copy(
"openstack_requirements/tests/files/gr-base.txt", self.req_file)
"openstack_requirements/tests/files/gr-base.txt", self.req_file
)
self.denylist_file = os.path.join(self.root, "denylist.txt")
shutil.copy(
"openstack_requirements/tests/files/denylist.txt",
self.denylist_file)
self.denylist_file,
)
# Static data for unit testing.
@@ -91,12 +97,14 @@ def make_project(fixture):
global_reqs = requirement.parse(
open("openstack_requirements/tests/files/gr-base.txt", "rt").read())
open("openstack_requirements/tests/files/gr-base.txt").read()
)
upper_constraints = requirement.parse(
open("openstack_requirements/tests/files/upper-constraints.txt",
"rt").read())
open("openstack_requirements/tests/files/upper-constraints.txt").read()
)
denylist = requirement.parse(
open("openstack_requirements/tests/files/denylist.txt", "rt").read())
open("openstack_requirements/tests/files/denylist.txt").read()
)
pbr_project = make_project(pbr_fixture)
project_project = make_project(project_fixture)
bad_project = make_project(bad_project_fixture)

View File

@@ -21,7 +21,7 @@ from swift import __canonical_version__ as version
name = 'swift'
with open('requirements.txt', 'r') as f:
with open('requirements.txt') as f:
requires = [x.strip() for x in f if x.strip()]

View File

@@ -16,6 +16,4 @@
import setuptools
setuptools.setup(
setup_requires=['d2to1', 'pbr>=0.5,<0.6'],
d2to1=True)
setuptools.setup(setup_requires=['d2to1', 'pbr>=0.5,<0.6'], d2to1=True)

View File

@@ -20,7 +20,6 @@ import testtools
class TestIsReqInGlobalReqs(testtools.TestCase):
def setUp(self):
super().setUp()
@@ -29,11 +28,13 @@ class TestIsReqInGlobalReqs(testtools.TestCase):
self.backports = list()
self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.stdout))
self.global_reqs = check.get_global_reqs(textwrap.dedent("""
self.global_reqs = check.get_global_reqs(
textwrap.dedent("""
name>=1.2,!=1.4
withmarker>=1.5;python_version=='3.5'
withmarker>=1.2,!=1.4;python_version=='2.7'
"""))
""")
)
def test_match(self):
"""Test a basic package."""
@@ -48,9 +49,11 @@ class TestIsReqInGlobalReqs(testtools.TestCase):
def test_match_with_markers(self):
"""Test a package specified with python 3 markers."""
req = requirement.parse(textwrap.dedent("""
req = requirement.parse(
textwrap.dedent("""
withmarker>=1.5;python_version=='3.5'
"""))['withmarker'][0][0]
""")
)['withmarker'][0][0]
self.assertTrue(
check._is_requirement_in_global_reqs(
req,
@@ -61,15 +64,17 @@ class TestIsReqInGlobalReqs(testtools.TestCase):
def test_match_with_local_markers(self):
"""Test a package specified with python 3 markers."""
req = requirement.parse(textwrap.dedent("""
req = requirement.parse(
textwrap.dedent("""
name;python_version=='3.5'
"""))['name'][0][0]
""")
)['name'][0][0]
self.assertTrue(
check._is_requirement_in_global_reqs(
req,
self.global_reqs['name'],
self.backports,
allow_3_only=True
allow_3_only=True,
)
)
@@ -79,15 +84,17 @@ class TestIsReqInGlobalReqs(testtools.TestCase):
Python 3 packages are a thing. On those, it's totally unnecessary to
specify e.g. a "python_version>'3" marker for packages.
"""
req = requirement.parse(textwrap.dedent("""
req = requirement.parse(
textwrap.dedent("""
withmarker>=1.5
"""))['withmarker'][0][0]
""")
)['withmarker'][0][0]
self.assertTrue(
check._is_requirement_in_global_reqs(
req,
self.global_reqs['withmarker'],
self.backports,
allow_3_only=True
allow_3_only=True,
)
)
@@ -182,7 +189,6 @@ class TestIsReqInGlobalReqs(testtools.TestCase):
class TestGetExclusions(testtools.TestCase):
def test_none(self):
req = list(check.get_global_reqs('name>=1.2')['name'])[0]
self.assertEqual(
@@ -206,9 +212,8 @@ class TestGetExclusions(testtools.TestCase):
class TestValidateOne(testtools.TestCase):
def setUp(self):
super(TestValidateOne, self).setUp()
super().setUp()
self._stdout_fixture = fixtures.StringStream('stdout')
self.stdout = self.useFixture(self._stdout_fixture).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.stdout))
@@ -217,10 +222,7 @@ class TestValidateOne(testtools.TestCase):
def test_unchanged(self):
# If the line matches the value in the branch list everything
# is OK.
reqs = [
r
for r, line in requirement.parse('name>=1.2,!=1.4')['name']
]
reqs = [r for r, line in requirement.parse('name>=1.2,!=1.4')['name']]
global_reqs = check.get_global_reqs('name>=1.2,!=1.4')
self.assertFalse(
check._validate_one(
@@ -234,10 +236,7 @@ class TestValidateOne(testtools.TestCase):
def test_denylisted(self):
# If the package is denylisted, everything is OK.
reqs = [
r
for r, line in requirement.parse('name>=1.2,!=1.4')['name']
]
reqs = [r for r, line in requirement.parse('name>=1.2,!=1.4')['name']]
global_reqs = check.get_global_reqs('name>=1.2,!=1.4')
self.assertFalse(
check._validate_one(
@@ -252,10 +251,7 @@ class TestValidateOne(testtools.TestCase):
def test_denylisted_mismatch(self):
# If the package is denylisted, it doesn't matter if the
# version matches.
reqs = [
r
for r, line in requirement.parse('name>=1.5')['name']
]
reqs = [r for r, line in requirement.parse('name>=1.5')['name']]
global_reqs = check.get_global_reqs('name>=1.2,!=1.4')
self.assertFalse(
check._validate_one(
@@ -269,10 +265,7 @@ class TestValidateOne(testtools.TestCase):
def test_not_in_global_list(self):
# If the package is not in the global list, that is an error.
reqs = [
r
for r, line in requirement.parse('name>=1.2,!=1.4')['name']
]
reqs = [r for r, line in requirement.parse('name>=1.2,!=1.4')['name']]
global_reqs = check.get_global_reqs('')
self.assertTrue(
check._validate_one(
@@ -286,10 +279,7 @@ class TestValidateOne(testtools.TestCase):
def test_new_item_matches_global_list(self):
# If the new item matches the global list exactly that is OK.
reqs = [
r
for r, line in requirement.parse('name>=1.2,!=1.4')['name']
]
reqs = [r for r, line in requirement.parse('name>=1.2,!=1.4')['name']]
global_reqs = check.get_global_reqs('name>=1.2,!=1.4')
self.assertFalse(
check._validate_one(
@@ -304,10 +294,7 @@ class TestValidateOne(testtools.TestCase):
def test_new_item_lower_min(self):
# If the new item has a lower minimum value than the global
# list, that is OK.
reqs = [
r
for r, line in requirement.parse('name>=1.1,!=1.4')['name']
]
reqs = [r for r, line in requirement.parse('name>=1.1,!=1.4')['name']]
global_reqs = check.get_global_reqs('name>=1.2,!=1.4')
self.assertFalse(
check._validate_one(
@@ -323,8 +310,7 @@ class TestValidateOne(testtools.TestCase):
# If the new item includes an exclusion that is not present in
# the global list that is not OK.
reqs = [
r
for r, line in requirement.parse('name>=1.2,!=1.4,!=1.5')['name']
r for r, line in requirement.parse('name>=1.2,!=1.4,!=1.5')['name']
]
global_reqs = check.get_global_reqs('name>=1.2,!=1.4')
self.assertTrue(
@@ -340,10 +326,7 @@ class TestValidateOne(testtools.TestCase):
def test_new_item_missing_exclusion(self):
# If the new item does not include an exclusion that is
# present in the global list that is OK.
reqs = [
r
for r, line in requirement.parse('name>=1.2')['name']
]
reqs = [r for r, line in requirement.parse('name>=1.2')['name']]
global_reqs = check.get_global_reqs('name>=1.2,!=1.4')
self.assertFalse(
check._validate_one(
@@ -363,14 +346,13 @@ class TestValidateOne(testtools.TestCase):
name>=1.5;python_version=='3.5'
name>=1.2,!=1.4;python_version=='2.6'
""")
reqs = [
r
for r, line in requirement.parse(r_content)['name']
]
global_reqs = check.get_global_reqs(textwrap.dedent("""
reqs = [r for r, line in requirement.parse(r_content)['name']]
global_reqs = check.get_global_reqs(
textwrap.dedent("""
name>=1.5;python_version=='3.5'
name>=1.2,!=1.4;python_version=='2.6'
"""))
""")
)
self.assertFalse(
check._validate_one(
'name',
@@ -388,14 +370,13 @@ class TestValidateOne(testtools.TestCase):
r_content = textwrap.dedent("""
name>=1.2,!=1.4;python_version=='2.6'
""")
reqs = [
r
for r, line in requirement.parse(r_content)['name']
]
global_reqs = check.get_global_reqs(textwrap.dedent("""
reqs = [r for r, line in requirement.parse(r_content)['name']]
global_reqs = check.get_global_reqs(
textwrap.dedent("""
name>=1.5;python_version=='3.5'
name>=1.2,!=1.4;python_version=='2.6'
"""))
""")
)
self.assertTrue(
check._validate_one(
'name',
@@ -414,14 +395,13 @@ class TestValidateOne(testtools.TestCase):
name>=1.5;python_version=='3.6'
name>=1.2,!=1.4;python_version=='2.6'
""")
reqs = [
r
for r, line in requirement.parse(r_content)['name']
]
global_reqs = check.get_global_reqs(textwrap.dedent("""
reqs = [r for r, line in requirement.parse(r_content)['name']]
global_reqs = check.get_global_reqs(
textwrap.dedent("""
name>=1.5;python_version=='3.5'
name>=1.2,!=1.4;python_version=='2.6'
"""))
""")
)
self.assertTrue(
check._validate_one(
'name',
@@ -440,15 +420,14 @@ class TestValidateOne(testtools.TestCase):
name>=1.5;python_version=='3.5'
other-name
""")
reqs = [
r
for r, line in requirement.parse(r_content)['name']
]
global_reqs = check.get_global_reqs(textwrap.dedent("""
reqs = [r for r, line in requirement.parse(r_content)['name']]
global_reqs = check.get_global_reqs(
textwrap.dedent("""
name>=1.5;python_version=='3.5'
name>=1.2,!=1.4;python_version=='2.6'
other-name
"""))
""")
)
self.assertFalse(
check._validate_one(
'name',
@@ -468,15 +447,14 @@ class TestValidateOne(testtools.TestCase):
name>=1.5
other-name
""")
reqs = [
r
for r, line in requirement.parse(r_content)['name']
]
global_reqs = check.get_global_reqs(textwrap.dedent("""
reqs = [r for r, line in requirement.parse(r_content)['name']]
global_reqs = check.get_global_reqs(
textwrap.dedent("""
name>=1.5;python_version>='3.5'
name>=1.2,!=1.4;python_version=='2.6'
other-name
"""))
""")
)
self.assertFalse(
check._validate_one(
'name',
@@ -496,14 +474,13 @@ class TestValidateOne(testtools.TestCase):
name>=1.5;python_version=='3.5'
name>=1.2,!=1.4;python_version=='2.6'
""")
reqs = [
r
for r, line in requirement.parse(r_content)['name']
]
global_reqs = check.get_global_reqs(textwrap.dedent("""
reqs = [r for r, line in requirement.parse(r_content)['name']]
global_reqs = check.get_global_reqs(
textwrap.dedent("""
name>=1.5;python_version=='3.5'
name>=1.2,!=1.4;python_version=='2.6'
"""))
""")
)
self.assertFalse(
check._validate_one(
'name',
@@ -521,14 +498,13 @@ class TestValidateOne(testtools.TestCase):
r_content = textwrap.dedent("""
name>=1.5;python_version=='3.5'
""")
reqs = [
r
for r, line in requirement.parse(r_content)['name']
]
global_reqs = check.get_global_reqs(textwrap.dedent("""
reqs = [r for r, line in requirement.parse(r_content)['name']]
global_reqs = check.get_global_reqs(
textwrap.dedent("""
name>=1.5;python_version=='3.5'
name>=1.2,!=1.4;python_version=='2.6'
"""))
""")
)
self.assertFalse(
check._validate_one(
'name',
@@ -542,19 +518,22 @@ class TestValidateOne(testtools.TestCase):
class TestBackportPythonMarkers(testtools.TestCase):
def setUp(self):
super(TestBackportPythonMarkers, self).setUp()
super().setUp()
self._stdout_fixture = fixtures.StringStream('stdout')
self.stdout = self.useFixture(self._stdout_fixture).stream
self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.stdout))
self.req = requirement.parse(textwrap.dedent("""
self.req = requirement.parse(
textwrap.dedent("""
name>=1.5;python_version=='3.11'
"""))['name'][0][0]
self.global_reqs = check.get_global_reqs(textwrap.dedent("""
""")
)['name'][0][0]
self.global_reqs = check.get_global_reqs(
textwrap.dedent("""
name>=1.5;python_version=='3.10'
"""))
""")
)
def test_notmatching_no_backport(self):
backports = requirement.parse("")

View File

@@ -32,31 +32,35 @@ def mock_read_requirements_file(filename):
elif os.path.basename(filename) == 'denylist.txt':
return common.denylist
else:
raise IOError('No such file or directory: %s' % filename)
raise OSError(f'No such file or directory: {filename}')
class CheckExistsTest(testtools.TestCase):
def setUp(self):
super(CheckExistsTest, self).setUp()
super().setUp()
@mock.patch(
'openstack_requirements.cmds.check_exists.read_requirements_file',
mock_read_requirements_file)
@mock.patch('openstack_requirements.project.read',
return_value=common.project_project)
mock_read_requirements_file,
)
@mock.patch(
'openstack_requirements.project.read',
return_value=common.project_project,
)
def test_good_project(self, mock_project_read):
ret = check_exists.main([common.project_fixture.root])
self.assertEqual(ret, 0)
@mock.patch(
'openstack_requirements.cmds.check_exists.read_requirements_file',
mock_read_requirements_file)
mock_read_requirements_file,
)
def test_project_missing_from_uc(self):
self.useFixture(common.project_fixture)
orig_mocked_read_req = check_exists.read_requirements_file
read_req_path = ('openstack_requirements.cmds.check_exists.'
'read_requirements_file')
read_req_path = (
'openstack_requirements.cmds.check_exists.read_requirements_file'
)
def remove_req_read_reqs_file(filename):
if filename == 'upper-constraints.txt':
@@ -66,51 +70,64 @@ class CheckExistsTest(testtools.TestCase):
return orig_mocked_read_req(filename)
expected_out = ('six from requirements.txt not found in'
' upper-constraints')
expected_out = (
'six from requirements.txt not found in upper-constraints'
)
# Start capturing some output
mock_stdout = io.StringIO()
with mock.patch('openstack_requirements.project.read',
return_value=common.project_project), \
mock.patch('sys.stdout', mock_stdout), \
mock.patch(read_req_path, remove_req_read_reqs_file):
with (
mock.patch(
'openstack_requirements.project.read',
return_value=common.project_project,
),
mock.patch('sys.stdout', mock_stdout),
mock.patch(read_req_path, remove_req_read_reqs_file),
):
ret = check_exists.main([common.project_fixture.root])
self.assertEqual(ret, 1)
self.assertIn(expected_out, mock_stdout.getvalue())
@mock.patch(
'openstack_requirements.cmds.check_exists.read_requirements_file',
mock_read_requirements_file)
mock_read_requirements_file,
)
def test_project_missing_from_gr(self):
self.useFixture(common.project_fixture)
# Add some random package that wont exist in G-R
with open(common.project_fixture.req_file, 'a') as req_file:
req_file.write(u'SomeRandomModule #Some random module\n')
req_file.write('SomeRandomModule #Some random module\n')
req_file.flush()
expected_out = ('somerandommodule from requirements.txt not found in'
' global-requirements')
expected_out = (
'somerandommodule from requirements.txt not found in'
' global-requirements'
)
# Start capturing some output
mock_stdout = io.StringIO()
proj_read = project.read(common.project_fixture.root)
with mock.patch('openstack_requirements.project.read',
return_value=proj_read), \
mock.patch('sys.stdout', mock_stdout):
with (
mock.patch(
'openstack_requirements.project.read', return_value=proj_read
),
mock.patch('sys.stdout', mock_stdout),
):
ret = check_exists.main([common.project_fixture.root])
self.assertEqual(ret, 1)
self.assertIn(expected_out, mock_stdout.getvalue())
@mock.patch(
'openstack_requirements.cmds.check_exists.read_requirements_file',
mock_read_requirements_file)
mock_read_requirements_file,
)
def test_project_multiple_missing_from_uc_and_gr(self):
self.useFixture(common.project_fixture)
orig_mocked_read_req = check_exists.read_requirements_file
read_req_path = ('openstack_requirements.cmds.check_exists.'
'read_requirements_file')
read_req_path = (
'openstack_requirements.cmds.check_exists.read_requirements_file'
)
def remove_req_read_reqs_file(filename):
if filename == 'upper-constraints.txt':
@@ -124,11 +141,13 @@ class CheckExistsTest(testtools.TestCase):
# lets change the six requirement not include the u-c version
proj_read = project.read(common.project_fixture.root)
proj_read['requirements']['requirements.txt'] = \
proj_read['requirements']['requirements.txt'] = (
proj_read['requirements']['requirements.txt'][:-1] + new_reqs
proj_read['requirements']['test-requirements.txt'] = \
proj_read['requirements']['test-requirements.txt'] + \
'anotherrandommodule\n'
)
proj_read['requirements']['test-requirements.txt'] = (
proj_read['requirements']['test-requirements.txt']
+ 'anotherrandommodule\n'
)
expected_outs = [
'lxml from requirements.txt not found in upper-constraints',
@@ -137,14 +156,18 @@ class CheckExistsTest(testtools.TestCase):
'anotherrandommodule from test-requirements.txt not found in '
'global-requirements',
'six must be <= 1.10.0 from upper-constraints and include the '
'upper-constraints version']
'upper-constraints version',
]
# Start capturing some output
mock_stdout = io.StringIO()
with mock.patch('openstack_requirements.project.read',
return_value=proj_read), \
mock.patch('sys.stdout', mock_stdout), \
mock.patch(read_req_path, remove_req_read_reqs_file):
with (
mock.patch(
'openstack_requirements.project.read', return_value=proj_read
),
mock.patch('sys.stdout', mock_stdout),
mock.patch(read_req_path, remove_req_read_reqs_file),
):
ret = check_exists.main([common.project_fixture.root])
self.assertEqual(ret, 1)
for expected in expected_outs:
@@ -152,45 +175,59 @@ class CheckExistsTest(testtools.TestCase):
@mock.patch(
'openstack_requirements.cmds.check_exists.read_requirements_file',
mock_read_requirements_file)
mock_read_requirements_file,
)
def test_project_req_bigger_then_uc(self):
self.useFixture(common.project_fixture)
# lets change the six requirement not include the u-c version
proj_read = project.read(common.project_fixture.root)
proj_read['requirements']['requirements.txt'] = \
proj_read['requirements']['requirements.txt'] = (
proj_read['requirements']['requirements.txt'][:-1] + '>1.10.0\n'
expected_out = ('six must be <= 1.10.0 from upper-constraints and '
'include the upper-constraints version')
)
expected_out = (
'six must be <= 1.10.0 from upper-constraints and '
'include the upper-constraints version'
)
# Start capturing some output
mock_stdout = io.StringIO()
with mock.patch('openstack_requirements.project.read',
return_value=proj_read), \
mock.patch('sys.stdout', mock_stdout):
with (
mock.patch(
'openstack_requirements.project.read', return_value=proj_read
),
mock.patch('sys.stdout', mock_stdout),
):
ret = check_exists.main([common.project_fixture.root])
self.assertEqual(ret, 1)
self.assertIn(expected_out, mock_stdout.getvalue())
@mock.patch(
'openstack_requirements.cmds.check_exists.read_requirements_file',
mock_read_requirements_file)
mock_read_requirements_file,
)
def test_project_req_not_include_uc_version(self):
self.useFixture(common.project_fixture)
# lets change the six requirement not include the u-c version
proj_read = project.read(common.project_fixture.root)
proj_read['requirements']['requirements.txt'] = \
proj_read['requirements']['requirements.txt'][:-1] + \
'<1.10.0,>1.10.0\n'
expected_out = ('six must be <= 1.10.0 from upper-constraints and '
'include the upper-constraints version')
proj_read['requirements']['requirements.txt'] = (
proj_read['requirements']['requirements.txt'][:-1]
+ '<1.10.0,>1.10.0\n'
)
expected_out = (
'six must be <= 1.10.0 from upper-constraints and '
'include the upper-constraints version'
)
# Start capturing some output
mock_stdout = io.StringIO()
with mock.patch('openstack_requirements.project.read',
return_value=proj_read), \
mock.patch('sys.stdout', mock_stdout):
with (
mock.patch(
'openstack_requirements.project.read', return_value=proj_read
),
mock.patch('sys.stdout', mock_stdout),
):
ret = check_exists.main([common.project_fixture.root])
self.assertEqual(ret, 1)
self.assertIn(expected_out, mock_stdout.getvalue())

View File

@@ -17,21 +17,18 @@ from openstack_requirements import requirement
class TestCheckCompatible(testtools.TestCase):
def test_non_requirement(self):
global_reqs = {}
good_constraints = requirement.parse("foo===1.2.5\n")
self.assertEqual(
[],
constraints.check_compatible(global_reqs, good_constraints)
[], constraints.check_compatible(global_reqs, good_constraints)
)
def test_compatible(self):
global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n")
good_constraints = requirement.parse("foo===1.2.5\n")
self.assertEqual(
[],
constraints.check_compatible(global_reqs, good_constraints)
[], constraints.check_compatible(global_reqs, good_constraints)
)
def test_constraint_below_range(self):
@@ -48,32 +45,29 @@ class TestCheckCompatible(testtools.TestCase):
class TestCheckFormat(testtools.TestCase):
def test_ok(self):
good_constraints = requirement.parse("foo===1.2.5\n")
self.assertEqual(
[],
list(constraints.check_format(good_constraints))
)
self.assertEqual([], list(constraints.check_format(good_constraints)))
def test_two_equals(self):
bad_constraints = requirement.parse("foo==1.2.5\n")
self.assertEqual(
1,
len(list(constraints.check_format(bad_constraints)))
1, len(list(constraints.check_format(bad_constraints)))
)
class TestDenylistCoverage(testtools.TestCase):
def test_constrained(self):
global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n")
good_constraints = requirement.parse("foo===1.2.5\nbar==2.1")
denylist = requirement.parse('flake8\nhacking')
self.assertEqual(
[],
list(constraints.check_denylist_coverage(
global_reqs, good_constraints, denylist, 'test'))
list(
constraints.check_denylist_coverage(
global_reqs, good_constraints, denylist, 'test'
)
),
)
def test_denylisted(self):
@@ -82,16 +76,22 @@ class TestDenylistCoverage(testtools.TestCase):
denylist = requirement.parse('flake8\nhacking\nbar')
self.assertEqual(
[],
list(constraints.check_denylist_coverage(
global_reqs, good_constraints, denylist, 'test'))
list(
constraints.check_denylist_coverage(
global_reqs, good_constraints, denylist, 'test'
)
),
)
def test_both(self):
global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n")
good_constraints = requirement.parse("foo===1.2.5\nbar>2.0")
denylist = requirement.parse('flake8\nhacking\nbar')
results = list(constraints.check_denylist_coverage(
global_reqs, good_constraints, denylist, 'test'))
results = list(
constraints.check_denylist_coverage(
global_reqs, good_constraints, denylist, 'test'
)
)
self.assertEqual(1, len(results))
self.assertIn("'bar' appears in both", results[0])
@@ -99,7 +99,10 @@ class TestDenylistCoverage(testtools.TestCase):
global_reqs = requirement.parse("foo>=1.2\nbar>2.0\n")
good_constraints = requirement.parse("foo===1.2.5\n")
denylist = requirement.parse('flake8\nhacking')
results = list(constraints.check_denylist_coverage(
global_reqs, good_constraints, denylist, 'test'))
results = list(
constraints.check_denylist_coverage(
global_reqs, good_constraints, denylist, 'test'
)
)
self.assertEqual(1, len(results))
self.assertIn("'bar' appears in global-requirements.txt", results[0])

View File

@@ -26,46 +26,55 @@ load_tests = testscenarios.load_tests_apply_scenarios
class SmokeTest(testtools.TestCase):
def test_make_url(self):
stdout = io.StringIO()
tmpdir = self.useFixture(fixtures.TempDir()).path
constraints_path = os.path.join(tmpdir, 'name.txt')
with open(constraints_path, 'wt') as f:
with open(constraints_path, 'w') as f:
f.write('bar===1\nfoo===1.0.2\nquux==3\n')
rv = edit.main(
[constraints_path, 'foo', '--', '-e /path/to/foo'], stdout)
[constraints_path, 'foo', '--', '-e /path/to/foo'], stdout
)
self.assertEqual(0, rv)
content = open(constraints_path, 'rt').read()
content = open(constraints_path).read()
self.assertEqual('-e /path/to/foo\nbar===1\nquux==3\n', content)
def test_edit_paths(self):
stdout = io.StringIO()
tmpdir = self.useFixture(fixtures.TempDir()).path
constraints_path = os.path.join(tmpdir, 'name.txt')
with open(constraints_path, 'wt') as f:
f.write(textwrap.dedent("""\
with open(constraints_path, 'w') as f:
f.write(
textwrap.dedent("""\
file:///path/to/foo#egg=foo
-e file:///path/to/bar#egg=bar
"""))
""")
)
rv = edit.main(
[constraints_path, 'foo', '--', '-e file:///path/to/foo#egg=foo'],
stdout)
stdout,
)
self.assertEqual(0, rv)
content = open(constraints_path, 'rt').read()
self.assertEqual(textwrap.dedent("""\
content = open(constraints_path).read()
self.assertEqual(
textwrap.dedent("""\
-e file:///path/to/foo#egg=foo
-e file:///path/to/bar#egg=bar
"""), content)
"""),
content,
)
class TestEdit(testtools.TestCase):
def test_add(self):
reqs = {}
res = edit.edit(reqs, 'foo', 'foo==1.2')
self.assertEqual(requirement.Requirements(
[requirement.Requirement('', '', '', '', 'foo==1.2')]), res)
self.assertEqual(
requirement.Requirements(
[requirement.Requirement('', '', '', '', 'foo==1.2')]
),
res,
)
def test_delete(self):
reqs = requirement.parse('foo==1.2\n')
@@ -75,18 +84,30 @@ class TestEdit(testtools.TestCase):
def test_replace(self):
reqs = requirement.parse('foo==1.2\n')
res = edit.edit(reqs, 'foo', 'foo==1.3')
self.assertEqual(requirement.Requirements(
[requirement.Requirement('', '', '', '', 'foo==1.3')]), res)
self.assertEqual(
requirement.Requirements(
[requirement.Requirement('', '', '', '', 'foo==1.3')]
),
res,
)
def test_replace_many(self):
reqs = requirement.parse('foo==1.2;p\nfoo==1.3;q')
res = edit.edit(reqs, 'foo', 'foo==1.3')
self.assertEqual(requirement.Requirements(
[requirement.Requirement('', '', '', '', 'foo==1.3')]), res)
self.assertEqual(
requirement.Requirements(
[requirement.Requirement('', '', '', '', 'foo==1.3')]
),
res,
)
def test_replace_non_canonical(self):
new_req = '-e file:///path#egg=foo_baz'
reqs = requirement.parse("foo-baz===1.0.2\n")
res = edit.edit(reqs, 'foo_baz', new_req)
self.assertEqual(res, requirement.Requirements(
[requirement.Requirement('', '', '', '', new_req)]))
self.assertEqual(
res,
requirement.Requirements(
[requirement.Requirement('', '', '', '', new_req)]
),
)

View File

@@ -21,18 +21,23 @@ from openstack_requirements.cmds import generate
class TestFreeze(testtools.TestCase):
def test_freeze_smoke(self):
# Use an arbitrary python, but make sure it has the venv standard lib.
versions = ['/usr/bin/python3.%(v)s' % dict(v=v) for v in range(5, 10)]
versions = [
'/usr/bin/python3.{v}'.format(**dict(v=v)) for v in range(5, 10)
]
found = [v for v in versions if os.path.exists(v)]
found_with_venv = []
for py in found:
output = str(subprocess.check_output(
[py,
'-c',
'import pkgutil; [print(x) for x in pkgutil.iter_modules()]']
))
output = str(
subprocess.check_output(
[
py,
'-c',
'import pkgutil; [print(x) for x in pkgutil.iter_modules()]',
]
)
)
# Needs both venv and ensurepip
if 'venv' in output and 'ensurepip' in output:
found_with_venv.append(py)
@@ -44,7 +49,7 @@ class TestFreeze(testtools.TestCase):
# break.
pyversion = found_with_venv[-1]
req = self.useFixture(fixtures.TempDir()).path + '/r.txt'
with open(req, 'wt') as output:
with open(req, 'w') as output:
output.write('fixtures==2.0.0')
frozen = generate._freeze(req, pyversion)
expected_version = pyversion[-3:]
@@ -56,12 +61,12 @@ class TestFreeze(testtools.TestCase):
class TestParse(testtools.TestCase):
def test_parse(self):
text = "linecache2==1.0.0\nargparse==1.2\n\n# fred\n"
parsed = generate._parse_freeze(text)
self.assertEqual(
[('linecache2', '1.0.0'), ('argparse', '1.2')], parsed)
[('linecache2', '1.0.0'), ('argparse', '1.2')], parsed
)
def test_editable_banned(self):
text = "-e git:..."
@@ -69,29 +74,33 @@ class TestParse(testtools.TestCase):
class TestCombine(testtools.TestCase):
def test_same_items(self):
fixtures = [('fixtures', '1.2.0')]
freeze_27 = ('2.7', fixtures)
freeze_34 = ('3.4', fixtures)
self.assertEqual(
['fixtures===1.2.0\n'],
list(generate._combine_freezes([freeze_27, freeze_34])))
list(generate._combine_freezes([freeze_27, freeze_34])),
)
def test_distinct_items(self):
freeze_27 = ('2.7', [('fixtures', '1.2.0')])
freeze_34 = ('3.4', [('fixtures', '1.2.0'), ('enum', '1.5.0')])
self.assertEqual(
["enum===1.5.0;python_version=='3.4'\n", 'fixtures===1.2.0\n'],
list(generate._combine_freezes([freeze_27, freeze_34])))
list(generate._combine_freezes([freeze_27, freeze_34])),
)
def test_different_versions(self):
freeze_27 = ('2.7', [('fixtures', '1.2.0')])
freeze_34 = ('3.4', [('fixtures', '1.5.0')])
self.assertEqual(
["fixtures===1.2.0;python_version<='2.7'\n",
"fixtures===1.5.0;python_version>='3.4'\n"],
list(generate._combine_freezes([freeze_27, freeze_34])))
[
"fixtures===1.2.0;python_version<='2.7'\n",
"fixtures===1.5.0;python_version>='3.4'\n",
],
list(generate._combine_freezes([freeze_27, freeze_34])),
)
def test_duplicate_pythons(self):
with testtools.ExpectedException(Exception):
@@ -103,31 +112,37 @@ class TestCombine(testtools.TestCase):
freeze_34 = ('3.4', [('fixtures', '1.2.0'), ('enum', '1.5.0')])
self.assertEqual(
["enum===1.5.0;python_version=='3.4'\n"],
list(generate._combine_freezes(
[freeze_27, freeze_34], denylist=denylist)))
list(
generate._combine_freezes(
[freeze_27, freeze_34], denylist=denylist
)
),
)
def test_denylist_with_safe_name(self):
denylist = ['flake8_docstrings']
freeze_27 = ('2.7', [('flake8-docstrings', '0.2.1.post1'),
('enum', '1.5.0')])
freeze_27 = (
'2.7',
[('flake8-docstrings', '0.2.1.post1'), ('enum', '1.5.0')],
)
self.assertEqual(
['enum===1.5.0\n'],
list(generate._combine_freezes(
[freeze_27], denylist=denylist)))
list(generate._combine_freezes([freeze_27], denylist=denylist)),
)
class Namespace(object):
class Namespace:
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
class TestClone(testtools.TestCase):
def test_py34_clone_py35(self):
# Simulate an environment where we have python 3.4 data and need to
# clone that to python 3.5
options = Namespace(version_map={'3.4': set(['3.5']),
'3.5': set(['3.4'])})
options = Namespace(
version_map={'3.4': set(['3.5']), '3.5': set(['3.4'])}
)
freeze_27 = ('2.7', [('dnspython', '1.15.0')])
freeze_34 = ('3.4', [('dnspython3', '1.12.0')])
freeze_35 = ('3.5', [('dnspython3', '1.12.0')])
@@ -142,8 +157,9 @@ class TestClone(testtools.TestCase):
def test_py34_noclone_py35(self):
# Simulate an environment where we have python 3.4 and python 3.5 data
# so there is no need to clone.
options = Namespace(version_map={'3.4': set(['3.5']),
'3.5': set(['3.4'])})
options = Namespace(
version_map={'3.4': set(['3.5']), '3.5': set(['3.4'])}
)
freeze_27 = ('2.7', [('dnspython', '1.15.0')])
freeze_34 = ('3.4', [('dnspython3', '1.12.0')])
freeze_35 = ('3.5', [('other-pkg', '1.0.0')])
@@ -158,8 +174,9 @@ class TestClone(testtools.TestCase):
def test_py35_clone_py34(self):
# Simulate an environment where we have python 3.5 data and need to
# clone that to python 3.4
options = Namespace(version_map={'3.4': set(['3.5']),
'3.5': set(['3.4'])})
options = Namespace(
version_map={'3.4': set(['3.5']), '3.5': set(['3.4'])}
)
freeze_27 = ('2.7', [('dnspython', '1.15.0')])
freeze_34 = ('3.4', [('dnspython3', '1.12.0')])
freeze_35 = ('3.5', [('dnspython3', '1.12.0')])

View File

@@ -25,18 +25,18 @@ load_tests = testscenarios.load_tests_apply_scenarios
class TestReadProject(testtools.TestCase):
def test_pbr(self):
root = self.useFixture(common.pbr_fixture).root
proj = project.read(root)
self.expectThat(proj['root'], matchers.Equals(root))
setup_py = open(root + '/setup.py', 'rt').read()
setup_py = open(root + '/setup.py').read()
self.expectThat(proj['setup.py'], matchers.Equals(setup_py))
setup_cfg = open(root + '/setup.cfg', 'rt').read()
setup_cfg = open(root + '/setup.cfg').read()
self.expectThat(proj['setup.cfg'], matchers.Equals(setup_cfg))
self.expectThat(
proj['requirements'],
matchers.KeysEqual('requirements.txt', 'test-requirements.txt'))
matchers.KeysEqual('requirements.txt', 'test-requirements.txt'),
)
def test_no_setup_py(self):
root = self.useFixture(fixtures.TempDir()).path
@@ -47,24 +47,22 @@ class TestReadProject(testtools.TestCase):
class TestProjectExtras(testtools.TestCase):
def test_smoke(self):
proj = {'setup.cfg': textwrap.dedent(u"""
proj = {
'setup.cfg': textwrap.dedent("""
[extras]
1 =
foo
2 =
foo # fred
bar
""")}
expected = {
'1': '\nfoo',
'2': '\nfoo # fred\nbar'
""")
}
expected = {'1': '\nfoo', '2': '\nfoo # fred\nbar'}
self.assertEqual(expected, project.extras(proj))
def test_none(self):
proj = {'setup.cfg': u"[metadata]\n"}
proj = {'setup.cfg': "[metadata]\n"}
self.assertEqual({}, project.extras(proj))
def test_no_setup_cfg(self):

View File

@@ -22,114 +22,213 @@ load_tests = testscenarios.load_tests_apply_scenarios
class TestParseRequirement(testtools.TestCase):
dist_scenarios = [
('package', dict(
line='swift',
req=requirement.Requirement('swift', '', '', '', ''))),
('specifier', dict(
line='alembic>=0.4.1',
req=requirement.Requirement('alembic', '', '>=0.4.1', '', ''))),
('specifiers', dict(
line='alembic>=0.4.1,!=1.1.8',
req=requirement.Requirement('alembic', '', '!=1.1.8,>=0.4.1', '',
''))),
('comment-only', dict(
line='# foo',
req=requirement.Requirement('', '', '', '', '# foo'))),
('comment', dict(
line='Pint>=0.5 # BSD',
req=requirement.Requirement('Pint', '', '>=0.5', '', '# BSD'))),
('comment-with-semicolon', dict(
line='Pint>=0.5 # BSD;fred',
req=requirement.Requirement('Pint', '', '>=0.5', '', '# BSD;fred'))),
('case', dict(
line='Babel>=1.3',
req=requirement.Requirement('Babel', '', '>=1.3', '', ''))),
('markers', dict(
line="pywin32;sys_platform=='win32'",
req=requirement.Requirement('pywin32', '', '',
"sys_platform=='win32'", ''))),
('markers-with-comment', dict(
line="Sphinx<=1.2; python_version=='2.7'# Sadface",
req=requirement.Requirement('Sphinx', '', '<=1.2',
"python_version=='2.7'", '# Sadface')))]
(
'package',
dict(
line='swift',
req=requirement.Requirement('swift', '', '', '', ''),
),
),
(
'specifier',
dict(
line='alembic>=0.4.1',
req=requirement.Requirement('alembic', '', '>=0.4.1', '', ''),
),
),
(
'specifiers',
dict(
line='alembic>=0.4.1,!=1.1.8',
req=requirement.Requirement(
'alembic', '', '!=1.1.8,>=0.4.1', '', ''
),
),
),
(
'comment-only',
dict(
line='# foo',
req=requirement.Requirement('', '', '', '', '# foo'),
),
),
(
'comment',
dict(
line='Pint>=0.5 # BSD',
req=requirement.Requirement('Pint', '', '>=0.5', '', '# BSD'),
),
),
(
'comment-with-semicolon',
dict(
line='Pint>=0.5 # BSD;fred',
req=requirement.Requirement(
'Pint', '', '>=0.5', '', '# BSD;fred'
),
),
),
(
'case',
dict(
line='Babel>=1.3',
req=requirement.Requirement('Babel', '', '>=1.3', '', ''),
),
),
(
'markers',
dict(
line="pywin32;sys_platform=='win32'",
req=requirement.Requirement(
'pywin32', '', '', "sys_platform=='win32'", ''
),
),
),
(
'markers-with-comment',
dict(
line="Sphinx<=1.2; python_version=='2.7'# Sadface",
req=requirement.Requirement(
'Sphinx', '', '<=1.2', "python_version=='2.7'", '# Sadface'
),
),
),
]
url_scenarios = [
('url', dict(
line='file:///path/to/thing#egg=thing',
req=requirement.Requirement('thing', 'file:///path/to/thing', '', '',
''),
permit_urls=True)),
('oslo-url', dict(
line='file:///path/to/oslo.thing#egg=oslo.thing',
req=requirement.Requirement('oslo.thing',
'file:///path/to/oslo.thing', '', '', ''),
permit_urls=True)),
('url-comment', dict(
line='file:///path/to/thing#egg=thing # http://altpath#egg=boo',
req=requirement.Requirement('thing', 'file:///path/to/thing', '', '',
'# http://altpath#egg=boo'),
permit_urls=True)),
('editable', dict(
line='-e file:///path/to/bar#egg=bar',
req=requirement.Requirement('bar', '-e file:///path/to/bar', '', '',
''),
permit_urls=True)),
('editable_vcs_git', dict(
line='-e git+http://github.com/path/to/oslo.bar#egg=oslo.bar',
req=requirement.Requirement('oslo.bar',
'-e git+http://github.com'
'/path/to/oslo.bar', '', '', ''),
permit_urls=True)),
('editable_vcs_git_ssh', dict(
line='-e git+ssh://github.com/path/to/oslo.bar#egg=oslo.bar',
req=requirement.Requirement('oslo.bar',
'-e git+ssh://github.com'
'/path/to/oslo.bar', '', '', ''),
permit_urls=True)),
(
'url',
dict(
line='file:///path/to/thing#egg=thing',
req=requirement.Requirement(
'thing', 'file:///path/to/thing', '', '', ''
),
permit_urls=True,
),
),
(
'oslo-url',
dict(
line='file:///path/to/oslo.thing#egg=oslo.thing',
req=requirement.Requirement(
'oslo.thing', 'file:///path/to/oslo.thing', '', '', ''
),
permit_urls=True,
),
),
(
'url-comment',
dict(
line='file:///path/to/thing#egg=thing # http://altpath#egg=boo',
req=requirement.Requirement(
'thing',
'file:///path/to/thing',
'',
'',
'# http://altpath#egg=boo',
),
permit_urls=True,
),
),
(
'editable',
dict(
line='-e file:///path/to/bar#egg=bar',
req=requirement.Requirement(
'bar', '-e file:///path/to/bar', '', '', ''
),
permit_urls=True,
),
),
(
'editable_vcs_git',
dict(
line='-e git+http://github.com/path/to/oslo.bar#egg=oslo.bar',
req=requirement.Requirement(
'oslo.bar',
'-e git+http://github.com/path/to/oslo.bar',
'',
'',
'',
),
permit_urls=True,
),
),
(
'editable_vcs_git_ssh',
dict(
line='-e git+ssh://github.com/path/to/oslo.bar#egg=oslo.bar',
req=requirement.Requirement(
'oslo.bar',
'-e git+ssh://github.com/path/to/oslo.bar',
'',
'',
'',
),
permit_urls=True,
),
),
]
scenarios = dist_scenarios + url_scenarios
def test_parse(self):
parsed = requirement.parse_line(
self.line, permit_urls=getattr(self, 'permit_urls', False))
self.line, permit_urls=getattr(self, 'permit_urls', False)
)
self.assertEqual(self.req, parsed)
class TestParseRequirementFailures(testtools.TestCase):
scenarios = [
('url', dict(line='http://tarballs.openstack.org/oslo.config/'
'oslo.config-1.2.0a3.tar.gz#egg=oslo.config')),
(
'url',
dict(
line='http://tarballs.openstack.org/oslo.config/'
'oslo.config-1.2.0a3.tar.gz#egg=oslo.config'
),
),
('-e', dict(line='-e git+https://foo.com#egg=foo')),
('-f', dict(line='-f http://tarballs.openstack.org/'))]
('-f', dict(line='-f http://tarballs.openstack.org/')),
]
def test_does_not_parse(self):
self.assertRaises(ValueError, requirement.parse_line, self.line)
class TestToContent(testtools.TestCase):
def test_smoke(self):
reqs = requirement.to_content(requirement.Requirements(
[requirement.Requirement(
'foo', '', '<=1', "python_version=='2.7'", '# BSD')]),
marker_sep='!')
self.assertEqual(
"foo<=1!python_version=='2.7' # BSD\n",
reqs)
reqs = requirement.to_content(
requirement.Requirements(
[
requirement.Requirement(
'foo', '', '<=1', "python_version=='2.7'", '# BSD'
)
]
),
marker_sep='!',
)
self.assertEqual("foo<=1!python_version=='2.7' # BSD\n", reqs)
def test_location(self):
reqs = requirement.to_content(requirement.Requirements(
[requirement.Requirement(
'foo', 'file://foo', '', "python_version=='2.7'", '# BSD')]))
reqs = requirement.to_content(
requirement.Requirements(
[
requirement.Requirement(
'foo',
'file://foo',
'',
"python_version=='2.7'",
'# BSD',
)
]
)
)
self.assertEqual(
"file://foo#egg=foo;python_version=='2.7' # BSD\n",
reqs)
"file://foo#egg=foo;python_version=='2.7' # BSD\n", reqs
)
class TestToReqs(testtools.TestCase):
def test_editable(self):
line = '-e file:///foo#egg=foo'
reqs = list(requirement.to_reqs(line, permit_urls=True))
@@ -144,7 +243,8 @@ class TestToReqs(testtools.TestCase):
def test_not_urls(self):
self.assertRaises(
ValueError, list, requirement.to_reqs('file:///foo#egg=foo'))
ValueError, list, requirement.to_reqs('file:///foo#egg=foo')
)
def test_multiline(self):
content = textwrap.dedent("""\
@@ -170,30 +270,31 @@ class TestToReqs(testtools.TestCase):
set(reqs.keys()),
)
self.assertEqual(reqs['oslo-config'][0][0].extras, frozenset(()))
self.assertEqual(reqs['oslo-concurrency'][0][0].extras,
frozenset(('fixtures',)))
self.assertEqual(reqs['oslo-db'][0][0].extras,
frozenset(('fixtures', 'mysql')))
self.assertCountEqual(reqs,
['oslo-config', 'oslo-concurrency', 'oslo-db'])
self.assertEqual(
reqs['oslo-concurrency'][0][0].extras, frozenset(('fixtures',))
)
self.assertEqual(
reqs['oslo-db'][0][0].extras, frozenset(('fixtures', 'mysql'))
)
self.assertCountEqual(
reqs, ['oslo-config', 'oslo-concurrency', 'oslo-db']
)
class TestCanonicalName(testtools.TestCase):
def test_underscores(self):
self.assertEqual('foo-bar', requirement.canonical_name('Foo_bar'))
class TestToDict(testtools.TestCase):
def test_canonicalises(self):
req = requirement.Requirement('Foo_bar', '', '', '', '')
self.assertEqual(
{'foo-bar': [(req, '')]}, requirement.to_dict([(req, '')]))
{'foo-bar': [(req, '')]}, requirement.to_dict([(req, '')])
)
class TestReqPolicy(testtools.TestCase):
def test_requirements_policy_pass(self):
content = textwrap.dedent("""\
cffi!=1.1.2
@@ -209,7 +310,10 @@ class TestReqPolicy(testtools.TestCase):
other>=1,>=2,!=1.1.0
""")
reqs = requirement.parse(content)
self.assertEqual([
'Requirement cffi should not include a >= specifier',
'Requirement other should not include a >= specifier'],
sorted([x for x in requirement.check_reqs_bounds_policy(reqs)]))
self.assertEqual(
[
'Requirement cffi should not include a >= specifier',
'Requirement other should not include a >= specifier',
],
sorted([x for x in requirement.check_reqs_bounds_policy(reqs)]),
)

View File

@@ -2,6 +2,6 @@ from openstack_requirements import requirement
def read_requirements_file(filename):
with open(filename, 'rt') as f:
with open(filename) as f:
body = f.read()
return requirement.parse(body)

View File

@@ -37,13 +37,13 @@ def run_command(cmd):
print(cmd)
cmd_list = shlex.split(str(cmd))
kwargs = {}
if sys.version_info >= (3, ):
kwargs = {
'encoding': 'utf-8',
'errors': 'surrogateescape',
}
p = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, **kwargs)
kwargs = {
'encoding': 'utf-8',
'errors': 'surrogateescape',
}
p = subprocess.Popen(
cmd_list, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs
)
(out, err) = p.communicate()
if p.returncode != 0:
raise SystemError(err)
@@ -51,7 +51,8 @@ def run_command(cmd):
_DEFAULT_REQS_DIR = os.path.expanduser(
'~/src/opendev.org/openstack/requirements')
'~/src/opendev.org/openstack/requirements'
)
def grab_args():
@@ -59,14 +60,19 @@ def grab_args():
parser = argparse.ArgumentParser(
description="Check if project requirements have changed"
)
parser.add_argument('--local', action='store_true',
help='check local changes (not yet in git)')
parser.add_argument(
'--local',
action='store_true',
help='check local changes (not yet in git)',
)
parser.add_argument('src_dir', help='directory to process')
parser.add_argument('branch', nargs='?', default='master',
help='target branch for diffs')
parser.add_argument(
'branch', nargs='?', default='master', help='target branch for diffs'
)
parser.add_argument('--zc', help='what zuul cloner to call')
parser.add_argument('--reqs', help='use a specified requirements tree',
default=None)
parser.add_argument(
'--reqs', help='use a specified requirements tree', default=None
)
return parser.parse_args()
@@ -91,31 +97,29 @@ def main():
if args.local:
print('selecting default requirements directory for local mode')
reqdir = os.path.dirname(
os.path.dirname(
os.path.dirname(
os.path.abspath(sys.argv[0]))))
os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0])))
)
else:
print('selecting default requirements directory for normal mode')
reqdir = _DEFAULT_REQS_DIR
print('Branch: {}'.format(branch))
print('Source: {}'.format(args.src_dir))
print('Requirements: {}'.format(reqdir))
print(f'Branch: {branch}')
print(f'Source: {args.src_dir}')
print(f'Requirements: {reqdir}')
os.chdir(args.src_dir)
sha, _ = run_command('git log -n 1 --format=%H')
print('Patch under test: {}'.format(sha))
print(f'Patch under test: {sha}')
# build a list of requirements from the global list in the
# openstack/requirements project so we can match them to the changes
with tempdir():
with open(reqdir + '/global-requirements.txt', 'rt') as f:
with open(reqdir + '/global-requirements.txt') as f:
global_reqs = check.get_global_reqs(f.read())
denylist = requirement.parse(
open(reqdir + '/denylist.txt', 'rt').read())
denylist = requirement.parse(open(reqdir + '/denylist.txt').read())
backports_file = reqdir + '/backports.txt'
if os.path.exists(backports_file):
backports = requirement.parse(open(backports_file, 'rt').read())
backports = requirement.parse(open(backports_file).read())
else:
backports = {}
cwd = os.getcwd()

View File

@@ -43,3 +43,17 @@ check-constraints = "openstack_requirements.cmds.check_exists:main"
packages = [
"openstack_requirements"
]
[tool.ruff]
line-length = 79
[tool.ruff.format]
quote-style = "preserve"
docstring-code-format = true
[tool.ruff.lint]
select = ["E4", "E7", "E9", "F", "U"]
# [tool.ruff.lint.per-file-ignores]
# "openstack/tests/*" = ["S"]
# "examples/*" = ["S"]

View File

@@ -16,6 +16,4 @@
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
setuptools.setup(setup_requires=['pbr>=2.0.0'], pbr=True)

View File

@@ -78,13 +78,13 @@ def pin(line, new_cap):
end = parts[1]
# cap to new max version
if end:
new_end = "<=%s #%s" % (new_cap, end)
new_end = f"<={new_cap} #{end}"
else:
new_end = "<=%s" % new_cap
new_end = f"<={new_cap}"
if use_comma is True:
return "%s,%s" % (parts[0].strip(), new_end)
return f"{parts[0].strip()},{new_end}"
else:
return "%s%s" % (parts[0].strip(), new_end)
return f"{parts[0].strip()}{new_end}"
def split(line):
@@ -119,12 +119,13 @@ def freeze(lines):
def main():
parser = argparse.ArgumentParser(
description="Take the output of "
"'pip freeze' and use the installed versions to "
"caps requirements.")
"'pip freeze' and use the installed versions to "
"caps requirements."
)
parser.add_argument('requirements', help='requirements file input')
parser.add_argument(
'freeze',
help='output of pip freeze, taken from a full tempest job')
'freeze', help='output of pip freeze, taken from a full tempest job'
)
args = parser.parse_args()
with open(args.requirements) as f:
requirements = [line.strip() for line in f.readlines()]

View File

@@ -19,7 +19,7 @@ def main():
try:
importlib.import_module(module)
except ImportError as err:
print('Imports for %s failed:\n\t%s' % (script, err))
print(f'Imports for {script} failed:\n\t{err}')
errors += 1
return 1 if errors else 0

View File

@@ -56,9 +56,7 @@ def sort() -> None:
deps.append((line, comment or None))
comment = ''
section_deps[section] = sorted(
deps, key=lambda x: x[0].lower()
)
section_deps[section] = sorted(deps, key=lambda x: x[0].lower())
with open(GLOBAL_REQS, 'w') as fh:
for i, section in enumerate(section_deps):

View File

@@ -41,7 +41,7 @@ import urllib.request as urlreq
import packaging.requirements
class Release(object):
class Release:
name = ""
version = ""
filename = ""
@@ -54,7 +54,7 @@ class Release(object):
self.released = released
def __repr__(self):
return "<Released %s %s %s>" % (self.name, self.version, self.released)
return f"<Released {self.name} {self.version} {self.released}>"
def _parse_pypi_released(datestr):
@@ -79,7 +79,6 @@ def get_requirements():
def get_releases_for_package(name, since):
"""Get the release history from pypi
Use the json API to get the release history from pypi. The
@@ -94,7 +93,7 @@ def get_releases_for_package(name, since):
our purposes.
"""
f = urlreq.urlopen("http://pypi.org/project/%s/json" % name)
f = urlreq.urlopen(f"http://pypi.org/project/{name}/json")
jsondata = f.read()
data = json.loads(jsondata)
releases = []
@@ -106,12 +105,7 @@ def get_releases_for_package(name, since):
if when < since:
continue
releases.append(
Release(
name,
relname,
rel['filename'],
when))
releases.append(Release(name, relname, rel['filename'], when))
break
return releases
@@ -121,9 +115,9 @@ def get_releases_since(reqs, since):
for req in reqs:
all_releases.extend(get_releases_for_package(req, since))
# return these in a sorted order from newest to oldest
sorted_releases = sorted(all_releases,
key=lambda x: x.released,
reverse=True)
sorted_releases = sorted(
all_releases, key=lambda x: x.released, reverse=True
)
return sorted_releases
@@ -131,17 +125,23 @@ def parse_args():
parser = argparse.ArgumentParser(
description=(
'List recent releases of items in global requirements '
'to look for possible breakage'))
parser.add_argument('-s', '--since', type=int,
default=14,
help='look back ``since`` days (default 14)')
'to look for possible breakage'
)
)
parser.add_argument(
'-s',
'--since',
type=int,
default=14,
help='look back ``since`` days (default 14)',
)
return parser.parse_args()
def main():
opts = parse_args()
since = datetime.datetime.today() - datetime.timedelta(days=opts.since)
print("Looking for requirements releases since %s" % since)
print(f"Looking for requirements releases since {since}")
reqs = get_requirements()
# additional sensitive requirements
reqs.append('tox')

View File

@@ -71,11 +71,14 @@ commands = validate-projects {toxinidir}/projects.txt
description = Perform linting
skip_install = true
deps =
ruff~=0.13.0 # MIT
hacking~=7.0 # Apache-2.0
bashate~=2.1 # Apache-2.0
allowlist_externals =
bash
commands =
ruff check --fix --unsafe-fixes
ruff format
flake8
bash -c "find {toxinidir}/tools \
-type f \
@@ -120,4 +123,6 @@ deps = Babel
commands = {toxinidir}/tools/babel-test.sh
[flake8]
# We only enable the hacking (H) checks
select = H
exclude = .venv,.git,.tox,dist,doc,*egg,build