Apply ruff

'ruff check --fix' and 'ruff format'

Change-Id: Iaf8771c00ade50cac196a0c8e898370cf5e3c052
Signed-off-by: Stephen Finucane <stephenfin@redhat.com>
Stephen Finucane, 2025-06-27 11:25:04 +01:00
parent 06252b1fe7
commit a47c39e281
6 changed files with 49 additions and 33 deletions


@@ -26,7 +26,7 @@ import requests
 HTTP_LOCATION = 'https://service-types.openstack.org/service-types.json'
 SPECS_BASE = 'http://specs.openstack.org/openstack'
 OUTDIR = 'doc/build/html'
-HEADER = '''<div class="section" id="service-types-authority">
+HEADER = """<div class="section" id="service-types-authority">
 <h1>OpenStack Service Types Authority Data</h1>
 <p>For more information on the files, see:
 <a href='{specs_base}/service-types-authority'>
@@ -43,7 +43,7 @@ HEADER = '''<div class="section" id="service-types-authority">
 </a></p>
 </div>
 <h2>Latest file is <a href="./{latest_file}">{latest_file}</a></h2>
-'''
+"""


 def is_data_equal(old, new):
@@ -53,8 +53,9 @@ def is_data_equal(old, new):
     # normal equality on the service dicts and reverse and forward mappings.
     if old.keys() != new.keys():
         return False
-    if (sorted(old['services'], key=operator.itemgetter('service_type')) !=
-            sorted(new['services'], key=operator.itemgetter('service_type'))):
+    if sorted(
+        old['services'], key=operator.itemgetter('service_type')
+    ) != sorted(new['services'], key=operator.itemgetter('service_type')):
         return False
     if old['reverse'] != new['reverse']:
         return False
@@ -64,15 +65,17 @@ def is_data_equal(old, new):
 def should_publish_data():
-    current_contents = json.load(open('service-types.json', 'r'))
+    current_contents = json.load(open('service-types.json'))
     try:
         response = requests.get(HTTP_LOCATION)
         response.raise_for_status()
         existing_contents = response.json()
     except (requests.HTTPError, requests.ConnectionError) as e:
-        print("Failed to fetch current service-types.json. Assuming data"
-              " needs to be published. Error: {error}".format(error=str(e)))
+        print(
+            'Failed to fetch current service-types.json. Assuming data'
+            f' needs to be published. Error: {str(e)}'
+        )
         return (True, current_contents['version'])
     # If our contents are not the same as published, we need to publish
@@ -92,9 +95,7 @@ def main():
     if not os.path.exists(OUTDIR):
         os.makedirs(OUTDIR)
     elif not os.path.isdir(OUTDIR):
-        print(
-            "{outdir} exists but is not a directory. Aborting!".format(
-                outdir=OUTDIR))
+        print(f'{OUTDIR} exists but is not a directory. Aborting!')
         return 1

     # It's fine to always copy the json schema
@@ -107,16 +108,19 @@ def main():
     if should_publish:
         to_copy += glob.glob('service-types.json*')
     else:
-        print("Data in existing file matches {version} data."
-              " Not publishing".format(version=latest_version))
+        print(
+            f'Data in existing file matches {latest_version} data.'
+            ' Not publishing'
+        )

     for filename in to_copy:
         shutil.copyfile(filename, os.path.join(OUTDIR, filename))

-    latest_file = 'service-types.json.{version}'.format(version=latest_version)
-    with open('{outdir}/HEADER.html'.format(outdir=OUTDIR), 'w') as header:
-        header.write(HEADER.format(latest_file=latest_file,
-                                    specs_base=SPECS_BASE))
+    latest_file = f'service-types.json.{latest_version}'
+    with open(f'{OUTDIR}/HEADER.html', 'w') as header:
+        header.write(
+            HEADER.format(latest_file=latest_file, specs_base=SPECS_BASE)
+        )
     return 0
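A note on the rewritten print() calls above: the new style relies on implicit concatenation of adjacent string literals, where only one of the literals is an f-string. Python joins the pieces at compile time, so the resulting message is identical to what the old .format() calls produced. A minimal sketch of the same pattern, with an invented error value for illustration:

    # Adjacent literals, one plain and one f-string, form a single message.
    error = ConnectionError('connection refused')
    message = (
        'Failed to fetch current service-types.json. Assuming data'
        f' needs to be published. Error: {str(error)}'
    )
    # Prints one sentence ending in "Error: connection refused".
    print(message)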

ruff.toml (new file)

@@ -0,0 +1,8 @@
+line-length = 79
+target-version = "py310"
+
+[lint]
+select = ["E4", "E7", "E9", "F", "U"]
+
+[format]
+quote-style = "single"
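For context: this configuration keeps the project's 79-column limit, targets Python 3.10, enables the pycodestyle E4/E7/E9 groups and the pyflakes F checks plus the "U" selection (pyupgrade-style rules, which appear to drive the f-string and open()-mode rewrites in this commit), and tells the formatter to prefer single quotes. A rough, invented sketch of the kind of rewrite these settings produce; it is not code from this repository:

    import json

    # Before: double quotes and a .format() call, as in the older code.
    raw = '{"version": "2025-06-27"}'
    contents = json.loads(raw)
    print("Loaded version {version}".format(version=contents['version']))

    # After 'ruff check --fix' (the format call becomes an f-string) and
    # 'ruff format' (quotes are normalised where no escaping is needed).
    print(f"Loaded version {contents['version']}")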


@@ -15,7 +15,4 @@
 import setuptools

-setuptools.setup(
-    setup_requires=['pbr>=2.0.0'],
-    py_modules=[],
-    pbr=True)
+setuptools.setup(setup_requires=['pbr>=2.0.0'], py_modules=[], pbr=True)


@@ -20,8 +20,10 @@ commands =
 [testenv:pep8]
 deps =
+    ruff
     hacking
 commands =
+    ruff check
     flake8

 [testenv:docs]


@@ -38,11 +38,12 @@ def create_local_registry():
     changes tricky. Instead of fetching from the already published spec,
     use the local one so that changes can be self-gating.
     """

     def local_retrieve(uri: str):
         if uri.startswith('https://specs.openstack.org'):
             # The URI arrives with fragment removed. We assume no querystring.
             filename = uri.split('/')[-1]
-            with open(filename, 'r') as f:
+            with open(filename) as f:
                 return referencing.Resource.from_contents(json.load(f))
         # We shouldn't have any external URIs. Scream bloody murder if someone
         # tries.
@@ -79,7 +80,7 @@ def main():
     )
     args = parser.parse_args()

-    mapping = yaml.safe_load(open('service-types.yaml', 'r'))
+    mapping = yaml.safe_load(open('service-types.yaml'))

     # we are using a TZ-naive timestamp for legacy reasons, but we should
     # probably revisit this as TZ-naive timestamps are a menace
@@ -88,8 +89,11 @@ def main():
     now = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
     mapping['version'] = now.isoformat()
-    mapping['sha'] = subprocess.check_output(
-        ['git', 'rev-parse', 'HEAD']).decode('utf-8').strip()
+    mapping['sha'] = (
+        subprocess.check_output(['git', 'rev-parse', 'HEAD'])
+        .decode('utf-8')
+        .strip()
+    )
     mapping['forward'] = {}
     mapping['reverse'] = {}
     mapping['primary_service_by_project'] = {}
@@ -111,7 +115,8 @@ def main():
         if not service.get('secondary', False):
             mapping['primary_service_by_project'][name] = service
         project_types = mapping['service_types_by_project'].get(
-            name, [])
+            name, []
+        )
         if service_type not in project_types:
             project_types.append(service_type)
         mapping['service_types_by_project'][name] = project_types
@@ -119,7 +124,7 @@ def main():
         if not service.get('api_reference'):
             service['api_reference'] = API_REF_FMT.format(service=service_type)

-    schema = json.load(open('published-schema.json', 'r'))
+    schema = json.load(open('published-schema.json'))
     registry = create_local_registry()

     valid = validate.validate_all(schema, mapping, registry=registry)
@@ -129,7 +134,8 @@ def main():
     output.replace(' \n', '\n')
     unversioned_filename = 'service-types.json'
     versioned_filename = 'service-types.json.{version}'.format(
-        version=mapping['version'])
+        version=mapping['version']
+    )
     for filename in (unversioned_filename, versioned_filename):
         open(filename, 'w').write(output)


@@ -36,22 +36,21 @@ def validate_unique_tokens(data):
     projects = []
     for service in data['services']:
         service_types.append(service['service_type'])
-        if "aliases" in service:
+        if 'aliases' in service:
             for alias in service['aliases']:
                 if alias in aliases:
-                    yield "Alias '{alias}' appears twice".format(alias=alias)
+                    yield f"Alias '{alias}' appears twice"
                 aliases.append(alias)
         if service.get('secondary', False):
             continue
         if service['project'] in projects:
             yield "'{service}' is duplicate service from '{project}'".format(
-                service=service['service_type'],
-                project=service['project'])
+                service=service['service_type'], project=service['project']
+            )
         projects.append(service['project'])
     for alias in aliases:
         if alias in service_types:
-            yield "Alias '{alias}' conflicts with a service_type".format(
-                alias=alias)
+            yield f"Alias '{alias}' conflicts with a service_type"


 def validate_all(schema, data, registry):