feat: Support mypy check

1. Support mypy checking, runnable via `tox -e mypy` or `tox -e pep8`
2. Fix the errors reported by the mypy check

Change-Id: I41b0013d271f3c7d3a28e1ea6dd0b083893d8983
This commit is contained in:
Boxiang Zhu 2022-07-28 16:09:31 +08:00
parent 1591f34a25
commit 88ef320dc6
30 changed files with 211 additions and 198 deletions

View File

@ -72,6 +72,7 @@ docs/_build/
tmp/
test_results.html
nginx.conf
**/mypy-report/
# MAC OS
.DS_Store

1
.gitignore vendored
View File

@ -75,6 +75,7 @@ tmp/
test_results.html
skyline-console-*
nginx.conf
mypy-report/
# MAC OS
.DS_Store

View File

@ -1,39 +1,47 @@
# https://mypy.readthedocs.io/en/stable/config_file.html
[mypy]
incremental = false
cache_dir = /dev/null
show_error_codes = true
# Import discovery
ignore_missing_imports = true
follow_imports = normal
# Platform configuration
# Disallow dynamic typing
# Untyped definitions and calls
check_untyped_defs = true
# None and Optional handling
no_implicit_optional = true
strict_optional = true
# Configuring warnings
show_error_context = true
show_column_numbers = true
warn_unused_ignores = true
; check_untyped_defs = true
; disallow_incomplete_defs = true
; disallow_untyped_calls = true
; disallow_untyped_decorators = true
; disallow_untyped_defs = true
; ignore_missing_imports = true
; incremental = false
; no_implicit_optional = true
; pretty = true
; raise_exceptions = true
; strict_equality = true
; warn_incomplete_stub = true
; warn_redundant_casts = true
; warn_return_any = true
; warn_unreachable = true
; warn_unused_configs = true
; warn_unused_ignores = true
; allow_redefinition = true
; implicit_reexport = true
# Suppressing errors
; # NOTE: Maybe need remove
; disallow_subclassing_any = true
; disallow_any_decorated = true
; disallow_any_explicit = false
; disallow_any_expr = false
; disallow_any_generics = true
; disallow_any_unimported = false
# Miscellaneous strictness flags
[pydantic-mypy]
init_forbid_extra = true
init_typed = true
warn_required_dynamic_aliases = true
warn_untyped_fields = true
# Configuring error messages
show_error_codes = true
pretty = true
color_output = true
error_summary = true
show_absolute_path = false
# Incremental mode
incremental = true
cache_dir = .mypy_cache
sqlite_cache = false
cache_fine_grained = false
skip_version_check = false
skip_cache_mtime_checks = false
# Advanced options
# Report generation
html_report = mypy-report
# Miscellaneous

View File

@ -29,7 +29,7 @@ from skyline_apiserver.types import constants
class TokenCookie(APIKeyCookie):
async def __call__(self, request: Request) -> Optional[str]:
async def __call__(self, request: Request) -> str:
api_key = request.cookies.get(self.model.name)
if not api_key:
raise HTTPException(
@ -39,7 +39,7 @@ class TokenCookie(APIKeyCookie):
return api_key
async def getJWTPayload(request: Request) -> (str):
async def getJWTPayload(request: Request) -> Optional[str]:
token = request.cookies.get(CONF.default.session_name)
return token

View File

@ -47,7 +47,7 @@ async def list_keystone_endpoints() -> List[schemas.KeystoneEndpoints]:
tasks = [asyncio.create_task(get_endpoints(region)) for region in regions]
endpoints = await asyncio.gather(*tasks)
result = [
{"region_name": region, "url": endpoint.get("keystone")}
schemas.KeystoneEndpoints(**{"region_name": region, "url": endpoint.get("keystone")})
for region, endpoint in zip(regions, endpoints)
]
return result

View File

@ -18,10 +18,13 @@ import asyncio
import math
from asyncio import gather
from functools import reduce
from typing import List
from typing import Any, Dict, List
from cinderclient.v3.volumes import Volume as CinderVolume
from dateutil import parser
from fastapi import APIRouter, Depends, Header, Query, status
from glanceclient.v2.schemas import SchemaBasedModel as GlanceModel
from novaclient.v2.servers import Server as NovaServer
from skyline_apiserver import schemas
from skyline_apiserver.api import deps
@ -67,10 +70,10 @@ async def list_servers(
regex=constants.INBOUND_HEADER_REGEX,
),
limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT),
marker: str = None,
sort_dirs: schemas.SortDir = None,
marker: str = Query(None),
sort_dirs: schemas.SortDir = Query(None),
sort_keys: List[schemas.ServerSortKey] = Query(None),
all_projects: bool = None,
all_projects: bool = Query(None),
project_id: str = Query(
None,
description="Only works when the all_projects filter is also specified.",
@ -79,10 +82,10 @@ async def list_servers(
None,
description="Only works when the all_projects filter is also specified.",
),
name: str = None,
status: schemas.ServerStatus = None,
name: str = Query(None),
status: schemas.ServerStatus = Query(None),
host: str = Query(None, description="It will be ignored for non-admin user."),
flavor_id: str = None,
flavor_id: str = Query(None),
uuid: str = Query(None, description="UUID of server."),
) -> schemas.ServersResponse:
"""Extension List Servers.
@ -143,14 +146,14 @@ async def list_servers(
search_opts={"name": project_name},
)
if not filter_projects:
return {"servers": []}
return schemas.ServersResponse(**{"servers": []})
else:
# Projects will not have the same name or same id in the same domain
filter_project = filter_projects[0]
# When we both supply the project_id and project_name filter, if the project's id does
# not equal the project_id, just return [].
if project_id and filter_project.id != project_id:
return {"servers": []}
return schemas.ServersResponse(**{"servers": []})
project_id = filter_project.id
search_opts = {
@ -169,8 +172,8 @@ async def list_servers(
search_opts=search_opts,
marker=marker,
limit=limit,
sort_keys=sort_keys,
sort_dirs=[sort_dirs] if sort_dirs else None,
sort_keys=[sort_key.value for sort_key in sort_keys] if sort_keys else None,
sort_dirs=[sort_dirs.value] if sort_dirs else None,
)
result = []
@ -228,8 +231,12 @@ async def list_servers(
projects = task_result[0] if task_result[0] else []
proj_mappings = {project.id: project.name for project in projects}
total_image_tasks = math.ceil(len(image_ids) / STEP)
images = reduce(lambda x, y: list(x) + list(y), task_result[1 : 1 + total_image_tasks], [])
volumes = reduce(lambda x, y: x + y, task_result[1 + total_image_tasks :], [])
images: List[GlanceModel] = reduce(
lambda x, y: list(x) + list(y), task_result[1 : 1 + total_image_tasks], []
)
volumes: List[CinderVolume] = reduce(
lambda x, y: x + y, task_result[1 + total_image_tasks :], []
)
image_mappings = {
image.id: {"name": image.name, "image_os_distro": getattr(image, "os_distro", None)}
for image in list(images)
@ -267,7 +274,7 @@ async def list_servers(
else:
values = {"image": None, "image_name": None, "image_os_distro": None}
server.update(values)
return {"servers": result}
return schemas.ServersResponse(**{"servers": result})
@router.get(
@ -298,10 +305,10 @@ async def list_recycle_servers(
regex=constants.INBOUND_HEADER_REGEX,
),
limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT),
marker: str = None,
sort_dirs: schemas.SortDir = None,
marker: str = Query(None),
sort_dirs: schemas.SortDir = Query(None),
sort_keys: List[schemas.RecycleServerSortKey] = Query(None),
all_projects: bool = None,
all_projects: bool = Query(None),
project_id: str = Query(
None,
description="Only works when the all_projects filter is also specified.",
@ -310,7 +317,7 @@ async def list_recycle_servers(
None,
description="Only works when the all_projects filter is also specified.",
),
name: str = None,
name: str = Query(None),
uuid: str = Query(None, description="UUID of recycle server."),
) -> schemas.RecycleServersResponse:
"""Extension List Recycle Servers.
@ -366,14 +373,14 @@ async def list_recycle_servers(
search_opts={"name": project_name},
)
if not filter_projects:
return {"recycle_servers": []}
return schemas.RecycleServersResponse(**{"recycle_servers": []})
else:
# Projects will not have the same name or same id in the same domain
filter_project = filter_projects[0]
# When we both supply the project_id and project_name filter, if the project's id does
# not equal the project_id, just return [].
if project_id and filter_project.id != project_id:
return {"recycle_servers": []}
return schemas.RecycleServersResponse(**{"recycle_servers": []})
project_id = filter_project.id
search_opts = {
@ -393,8 +400,8 @@ async def list_recycle_servers(
search_opts=search_opts,
marker=marker,
limit=limit,
sort_keys=sort_keys,
sort_dirs=[sort_dirs] if sort_dirs else None,
sort_keys=[sort_key.value for sort_key in sort_keys] if sort_keys else None,
sort_dirs=[sort_dirs.value] if sort_dirs else None,
)
result = []
@ -452,8 +459,12 @@ async def list_recycle_servers(
projects = task_result[0] if task_result[0] else []
proj_mappings = {project.id: project.name for project in projects}
total_image_tasks = math.ceil(len(image_ids) / STEP)
images = reduce(lambda x, y: list(x) + list(y), task_result[1 : 1 + total_image_tasks], [])
volumes = reduce(lambda x, y: x + y, task_result[1 + total_image_tasks :], [])
images: List[GlanceModel] = reduce(
lambda x, y: list(x) + list(y), task_result[1 : 1 + total_image_tasks], []
)
volumes: List[CinderVolume] = reduce(
lambda x, y: x + y, task_result[1 + total_image_tasks :], []
)
image_mappings = {
image.id: {"name": image.name, "image_os_distro": getattr(image, "os_distro", None)}
for image in list(images)
@ -496,7 +507,7 @@ async def list_recycle_servers(
else:
values = {"image": None, "image_name": None, "image_os_distro": None}
recycle_server.update(values)
return {"recycle_servers": result}
return schemas.RecycleServersResponse(**{"recycle_servers": result})
@router.get(
@ -520,15 +531,15 @@ async def list_volumes(
regex=constants.INBOUND_HEADER_REGEX,
),
limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT),
marker: str = None,
sort_dirs: schemas.SortDir = None,
marker: str = Query(None),
sort_dirs: schemas.SortDir = Query(None),
sort_keys: List[schemas.VolumeSortKey] = Query(None),
all_projects: bool = None,
project_id: str = None,
name: str = None,
multiattach: bool = None,
status: schemas.VolumeStatus = None,
bootable: bool = None,
all_projects: bool = Query(None),
project_id: str = Query(None),
name: str = Query(None),
multiattach: bool = Query(None),
status: schemas.VolumeStatus = Query(None),
bootable: bool = Query(None),
uuid: List[str] = Query(None, description="UUID of volume."),
) -> schemas.VolumesResponse:
"""Extension List Volumes.
@ -683,7 +694,7 @@ async def list_volumes(
task_result = await gather(*tasks)
projects = [] if not task_result[0] else task_result[0]
servers = reduce(lambda x, y: x + y, task_result[1:], [])
servers: List[NovaServer] = reduce(lambda x, y: x + y, task_result[1:], [])
proj_mappings = {project.id: project.name for project in projects}
ser_mappings = {server.id: server.name for server in servers}
@ -692,7 +703,7 @@ async def list_volumes(
volume["project_name"] = proj_mappings.get(volume["project_id"])
for attachment in volume["attachments"]:
attachment["server_name"] = ser_mappings.get(attachment["server_id"])
return {"count": count, "volumes": result}
return schemas.VolumesResponse(**{"count": count, "volumes": result})
@router.get(
@ -716,14 +727,14 @@ async def list_volume_snapshots(
regex=constants.INBOUND_HEADER_REGEX,
),
limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT),
marker: str = None,
sort_dirs: schemas.SortDir = None,
marker: str = Query(None),
sort_dirs: schemas.SortDir = Query(None),
sort_keys: List[schemas.VolumeSnapshotSortKey] = Query(None),
all_projects: bool = None,
project_id: str = None,
name: str = None,
status: schemas.VolumeSnapshotStatus = None,
volume_id: str = None,
all_projects: bool = Query(None),
project_id: str = Query(None),
name: str = Query(None),
status: schemas.VolumeSnapshotStatus = Query(None),
volume_id: str = Query(None),
) -> schemas.VolumeSnapshotsResponse:
"""Extension List Volume Snapshots.
@ -844,8 +855,12 @@ async def list_volume_snapshots(
projects = task_result[0] if task_result[0] else []
total_volume_tasks = math.ceil(len(volume_ids) / STEP)
volumes = reduce(lambda x, y: x + y, task_result[1 : 1 + total_volume_tasks], [])
volumes_from_snapshot = reduce(lambda x, y: x + y, task_result[1 + total_volume_tasks :], [])
volumes: List[CinderVolume] = reduce(
lambda x, y: x + y, task_result[1 : 1 + total_volume_tasks], []
)
volumes_from_snapshot: List[CinderVolume] = reduce(
lambda x, y: x + y, task_result[1 + total_volume_tasks :], []
)
proj_mappings = {project.id: project.name for project in projects}
vol_mappings = {}
@ -854,7 +869,7 @@ async def list_volume_snapshots(
"name": volume.name,
"host": getattr(volume, "os-vol-host-attr:host", None),
}
child_volumes = {}
child_volumes: Dict[str, Any] = {}
for volume in volumes_from_snapshot:
child_volumes.setdefault(volume.snapshot_id, [])
child_volumes[volume.snapshot_id].append(volume.name)
@ -866,7 +881,7 @@ async def list_volume_snapshots(
snapshot["volume_name"] = vol_mapping["name"]
snapshot["host"] = vol_mapping["host"] if all_projects else None
snapshot["child_volumes"] = child_volumes.get(snapshot["id"], [])
return {"count": count, "volume_snapshots": result}
return schemas.VolumeSnapshotsResponse(**{"count": count, "volume_snapshots": result})
@router.get(
@ -889,16 +904,16 @@ async def list_ports(
regex=constants.INBOUND_HEADER_REGEX,
),
limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT),
marker: str = None,
sort_dirs: schemas.SortDir = None,
marker: str = Query(None),
sort_dirs: schemas.SortDir = Query(None),
sort_keys: List[schemas.PortSortKey] = Query(None),
all_projects: bool = None,
project_id: str = None,
name: str = None,
status: schemas.PortStatus = None,
network_name: str = None,
network_id: str = None,
device_id: str = None,
all_projects: bool = Query(None),
project_id: str = Query(None),
name: str = Query(None),
status: schemas.PortStatus = Query(None),
network_name: str = Query(None),
network_id: str = Query(None),
device_id: str = Query(None),
device_owner: List[schemas.PortDeviceOwner] = Query(None),
uuid: List[str] = Query(None, description="UUID of port."),
) -> schemas.PortsResponse:
@ -941,7 +956,7 @@ async def list_ports(
"""
current_session = await generate_session(profile=profile)
kwargs = {}
kwargs: Dict[str, Any] = {}
if limit is not None:
kwargs["limit"] = limit
if marker is not None:
@ -966,13 +981,13 @@ async def list_ports(
**{"name": network_name},
)
if not networks["networks"]:
return {"ports": []}
return schemas.PortsResponse(**{"ports": []})
network_ids = [network["id"] for network in networks["networks"]]
kwargs["network_id"] = network_ids
if network_id is not None:
network_ids = kwargs.get("network_id", [])
if network_ids and network_id not in network_ids:
return {"ports": []}
return schemas.PortsResponse(**{"ports": []})
elif not network_ids:
network_ids.append(network_id)
kwargs["network_id"] = network_ids
@ -1003,7 +1018,7 @@ async def list_ports(
server_ids.append(port["device_id"])
network_ids.append(port["network_id"])
network_params = {}
network_params: Dict[str, Any] = {}
tasks = [
neutron.list_networks(
profile=profile,
@ -1046,7 +1061,9 @@ async def list_ports(
task_result = await gather(*tasks)
total_network_tasks = math.ceil(len(network_ids) / STEP)
servers = reduce(lambda x, y: x + y, task_result[1 + total_network_tasks :], [])
servers: List[NovaServer] = reduce(
lambda x, y: x + y, task_result[1 + total_network_tasks :], []
)
ser_mappings = {server.id: server.name for server in servers}
_networks = [net.get("networks", []) for net in task_result[1 : 1 + total_network_tasks]]
shared_nets = task_result[0].get("networks", [])
@ -1055,7 +1072,7 @@ async def list_ports(
for port in result:
port["server_name"] = ser_mappings.get(port["device_id"])
port["network_name"] = network_mappings.get(port["network_id"])
return {"ports": result}
return schemas.PortsResponse(**{"ports": result})
@router.get(
@ -1078,8 +1095,8 @@ async def compute_services(
alias=constants.INBOUND_HEADER,
regex=constants.INBOUND_HEADER_REGEX,
),
binary: str = None,
host: str = None,
binary: str = Query(None),
host: str = Query(None),
) -> schemas.ComputeServicesResponse:
"""Extension List Compute Services.
@ -1112,4 +1129,4 @@ async def compute_services(
**kwargs,
)
services = [Service(service).to_dict() for service in services]
return {"services": services}
return schemas.ComputeServicesResponse(**{"services": services})

View File

@ -15,6 +15,7 @@
from __future__ import annotations
import copy
from typing import Dict, List
class APIResourceWrapper(object):
@ -24,7 +25,7 @@ class APIResourceWrapper(object):
api object as the only argument to the constructor
"""
_attrs = []
_attrs: List[str] = []
_apiresource = None # Make sure _apiresource is there even in __init__.
def __init__(self, apiresource):
@ -67,7 +68,7 @@ class APIDictWrapper(object):
consistent with api resource objects from novaclient.
"""
_apidict = {} # Make sure _apidict is there even in __init__.
_apidict: Dict[str, str] = {} # Make sure _apidict is there even in __init__.
def __init__(self, apidict):
self._apidict = apidict

View File

@ -100,7 +100,7 @@ async def list_policies(
{"rule": rule, "allowed": ENFORCER.authorize(rule, target, user_context)}
for rule in ENFORCER.rules
]
return {"policies": result}
return schemas.Policies(**{"policies": result})
@router.post(
@ -150,4 +150,4 @@ async def check_policies(
detail=str(e),
)
return {"policies": result}
return schemas.Policies(**{"policies": result})

View File

@ -1,6 +1,6 @@
from __future__ import annotations
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi import APIRouter, Depends, HTTPException, Query, status
from httpx import codes
from skyline_apiserver import schemas
@ -93,9 +93,9 @@ def get_prometheus_query_range_response(
response_model_exclude_none=True,
)
async def prometheus_query(
query: str = None,
time: str = None,
timeout: str = None,
query: str = Query(None),
time: str = Query(None),
timeout: str = Query(None),
profile: schemas.Profile = Depends(deps.get_profile_update_jwt),
) -> schemas.PrometheusQueryResponse:
kwargs = {}
@ -138,11 +138,11 @@ async def prometheus_query(
response_model_exclude_none=True,
)
async def prometheus_query_range(
query: str = None,
start: str = None,
end: str = None,
step: str = None,
timeout: str = None,
query: str = Query(None),
start: str = Query(None),
end: str = Query(None),
step: str = Query(None),
timeout: str = Query(None),
profile: schemas.Profile = Depends(deps.get_profile_update_jwt),
) -> schemas.PrometheusQueryRangeResponse:
kwargs = {}

View File

@ -115,8 +115,7 @@ async def list_settings(
for item in db_settings:
if item.key in CONF.setting.base_settings:
settings[item.key].value = item.value
settings = list(settings.values())
return schemas.Settings(settings=settings)
return schemas.Settings(settings=list(settings.values()))
@router.delete(

View File

@ -14,7 +14,7 @@
from __future__ import annotations
from typing import Any, Dict
from typing import Any, Dict, Optional
from fastapi import HTTPException, status
from keystoneauth1.exceptions.http import Unauthorized
@ -29,10 +29,10 @@ async def list_volumes(
profile: schemas.Profile,
session: Session,
global_request_id: str,
limit: int = None,
marker: str = None,
search_opts: Dict[str, Any] = None,
sort: str = None,
limit: Optional[int] = None,
marker: Optional[str] = None,
search_opts: Optional[Dict[str, Any]] = None,
sort: Optional[str] = None,
) -> Any:
try:
cc = await utils.cinder_client(
@ -63,10 +63,10 @@ async def list_volume_snapshots(
profile: schemas.Profile,
session: Session,
global_request_id: str,
limit: int = None,
marker: str = None,
search_opts: Dict[str, Any] = None,
sort: str = None,
limit: Optional[int] = None,
marker: Optional[str] = None,
search_opts: Optional[Dict[str, Any]] = None,
sort: Optional[str] = None,
) -> Any:
try:
cc = await utils.cinder_client(

View File

@ -14,7 +14,7 @@
from __future__ import annotations
from typing import Any, Dict
from typing import Any, Dict, Optional
from fastapi import HTTPException, status
from keystoneauth1.exceptions.http import Unauthorized
@ -29,7 +29,7 @@ async def list_images(
profile: schemas.Profile,
session: Session,
global_request_id: str,
filters: Dict[str, Any] = None,
filters: Optional[Dict[str, Any]] = None,
) -> Any:
try:
kwargs = {}

View File

@ -14,7 +14,7 @@
from __future__ import annotations
from typing import Any, Dict
from typing import Any, Dict, Optional
from fastapi import HTTPException, status
from keystoneauth1.exceptions.http import Unauthorized
@ -30,7 +30,7 @@ async def list_projects(
session: Session,
global_request_id: str,
all_projects: bool,
search_opts: Dict[str, Any] = None,
search_opts: Optional[Dict[str, Any]] = None,
) -> Any:
try:
search_opts = search_opts if search_opts else {}

View File

@ -14,7 +14,7 @@
from __future__ import annotations
from typing import Any, Dict
from typing import Any, Dict, List, Optional
from fastapi import HTTPException, status
from keystoneauth1.exceptions.http import Unauthorized
@ -30,11 +30,11 @@ async def list_servers(
profile: schemas.Profile,
session: Session,
global_request_id: str,
search_opts: Dict[str, Any] = None,
marker: str = None,
limit: int = None,
sort_keys: str = None,
sort_dirs: str = None,
search_opts: Optional[Dict[str, Any]] = None,
marker: Optional[str] = None,
limit: Optional[int] = None,
sort_keys: Optional[List[str]] = None,
sort_dirs: Optional[List[str]] = None,
) -> Any:
try:
nc = await utils.nova_client(

View File

@ -14,7 +14,7 @@
from __future__ import annotations
from typing import Any
from typing import Any, Optional
from cinderclient.client import Client as CinderClient
from glanceclient.client import Client as GlanceClient
@ -99,7 +99,7 @@ async def get_endpoint(region: str, service: str, session: Session) -> Any:
async def keystone_client(
session: Session,
region: str,
global_request_id: str = None,
global_request_id: Optional[str] = None,
version: str = constants.KEYSTONE_API_VERSION,
) -> HTTPClient:
endpoint = await get_endpoint(region, "keystone", session=session)
@ -116,7 +116,7 @@ async def keystone_client(
async def glance_client(
session: Session,
region: str,
global_request_id: str = None,
global_request_id: Optional[str] = None,
version: str = constants.GLANCE_API_VERSION,
) -> HTTPClient:
endpoint = await get_endpoint(region, "glance", session=session)
@ -132,7 +132,7 @@ async def glance_client(
async def nova_client(
session: Session,
region: str,
global_request_id: str = None,
global_request_id: Optional[str] = None,
version: str = constants.NOVA_API_VERSION,
) -> HTTPClient:
endpoint = await get_endpoint(region, "nova", session=session)
@ -148,7 +148,7 @@ async def nova_client(
async def cinder_client(
session: Session,
region: str,
global_request_id: str,
global_request_id: Optional[str] = None,
version: str = constants.CINDER_API_VERSION,
) -> HTTPClient:
endpoint = await get_endpoint(region, "cinderv3", session=session)
@ -164,7 +164,7 @@ async def cinder_client(
async def neutron_client(
session: Session,
region: str,
global_request_id: str = None,
global_request_id: Optional[str] = None,
version: str = constants.NEUTRON_API_VERSION,
) -> HTTPClient:
endpoint = await get_endpoint(region, "neutron", session=session)

View File

@ -99,11 +99,11 @@ def get_proxy_endpoints() -> Dict[str, ProxyEndpoint]:
# 2. $(project_id)s or %(project_id)s
# 3. AUTH_$(tenant_id)s or AUTH_%(tenant_id)s
# 4. AUTH_$(project_id)s or AUTH_%(project_id)s
path = "" if str(raw_path.parents[1]) == "/" else raw_path.parents[1]
path = "" if str(raw_path.parents[1]) == "/" else str(raw_path.parents[1])
elif raw_path.match("v[0-9]") or raw_path.match("v[0-9][.][0-9]"):
path = "" if str(raw_path.parents[0]) == "/" else raw_path.parents[0]
path = "" if str(raw_path.parents[0]) == "/" else str(raw_path.parents[0])
else:
path = raw_path
path = str(raw_path)
proxy.url = raw_url._replace(path=f"{str(path)}/").geturl()
endpoints[f"{region}-{service_type}"] = proxy

View File

@ -21,7 +21,7 @@ from pathlib import Path
from typing import Callable, Dict, Iterable, List, Union
import click
from oslo_policy.policy import DocumentedRuleDefault, RuleDefault # type: ignore
from oslo_policy.policy import DocumentedRuleDefault, RuleDefault
from skyline_apiserver.log import LOG, setup as log_setup
from skyline_apiserver.policy.manager import get_service_rules

View File

@ -16,6 +16,7 @@ from __future__ import annotations
import time
import uuid
from typing import Optional
from fastapi import HTTPException, status
from jose import jwt
@ -48,8 +49,8 @@ async def generate_profile_by_token(token: schemas.Payload) -> schemas.Profile:
async def generate_profile(
keystone_token: str,
region: str,
exp: int = None,
uuid_value: str = None,
exp: Optional[int] = None,
uuid_value: Optional[str] = None,
) -> schemas.Profile:
try:
kc = await utils.keystone_client(session=get_system_session(), region=region)

View File

@ -26,7 +26,7 @@ from .base import DB, inject_db
from .models import RevokedToken, Settings
def check_db_connected(fn: Fn) -> Fn:
def check_db_connected(fn: Fn) -> Any:
@wraps(fn)
async def wrapper(*args: Any, **kwargs: Any) -> Any:
await inject_db()

View File

@ -21,7 +21,7 @@ from databases import Database, DatabaseURL, core
from skyline_apiserver.config import CONF
DATABASE = None
DB = ContextVar("skyline_db", default=None)
DB: ContextVar = ContextVar("skyline_db")
class ParallelDatabase(Database):

View File

@ -14,7 +14,7 @@
from __future__ import annotations
from oslo_policy import _parser # type: ignore
from oslo_policy import _parser
from .base import Enforcer, UserContext
from .manager import get_service_rules

View File

@ -15,7 +15,7 @@
from __future__ import annotations
from collections.abc import MutableMapping
from typing import Any, Dict, Iterable, Iterator
from typing import Any, Dict, Iterator, List, Union
import attr
from immutables import Map
@ -24,7 +24,7 @@ from oslo_policy._checks import _check
from skyline_apiserver.config import CONF
from .manager.base import APIRule
from .manager.base import APIRule, Rule
class UserContext(MutableMapping):
@ -32,7 +32,7 @@ class UserContext(MutableMapping):
self,
access: AccessInfoV3,
):
self._data = {}
self._data: Dict[str, Any] = {}
self.access = access
self._data.setdefault("auth_token", getattr(access, "auth_token", None))
self._data.setdefault("user_id", getattr(access, "user_id", None))
@ -96,7 +96,7 @@ class UserContext(MutableMapping):
class Enforcer:
rules: Map = attr.ib(factory=Map, repr=True, init=False)
def register_rules(self, rules: Iterable[APIRule]) -> None:
def register_rules(self, rules: List[Union[Rule, APIRule]]) -> None:
rule_map = {}
for rule in rules:
if rule.name in rule_map:

View File

@ -16,8 +16,8 @@ from __future__ import annotations
from typing import List
from oslo_policy import _parser # type: ignore
from oslo_policy.policy import DocumentedRuleDefault, RuleDefault # type: ignore
from oslo_policy import _parser
from oslo_policy.policy import DocumentedRuleDefault, RuleDefault
from skyline_apiserver import schemas

View File

@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING, Iterator, Optional
from typing import TYPE_CHECKING, AsyncGenerator
import pytest
from _pytest.mark import ParameterSet
@ -28,7 +28,7 @@ if TYPE_CHECKING:
@pytest.fixture(scope="function")
async def client() -> Iterator[AsyncClient]:
async def client() -> AsyncGenerator:
async with LifespanManager(app):
async with AsyncClient(app=app, base_url="http://test") as ac:
yield ac
@ -36,7 +36,7 @@ async def client() -> Iterator[AsyncClient]:
CONF.cleanup()
def pytest_generate_tests(metafunc: Optional["Metafunc"]) -> None:
def pytest_generate_tests(metafunc: "Metafunc") -> None:
for marker in metafunc.definition.iter_markers(name="ddt"):
test_data: TestData
for test_data in marker.args:

View File

@ -16,7 +16,7 @@ from __future__ import annotations
import sys
from dataclasses import asdict, dataclass, field
from typing import Any, Dict, List
from typing import Any, Dict, List, Union
from mimesis import Generic
from pydantic import StrictBool, StrictInt, StrictStr
@ -56,10 +56,10 @@ class FakeOptData:
@dataclass
class FakeOperation:
method: str = field(
method: Union[str, Any] = field(
default_factory=lambda: FAKER.choice(["GET", "POST", "PUT", "PATCH", "DELETE"]),
)
path: str = field(
path: Union[str, Any] = field(
default_factory=lambda: FAKER.choice(["/resources", "/resources/{resource_id}"]),
)
@ -71,7 +71,7 @@ class FakeDocumentedRuleData:
check_str: str = field(
default_factory=lambda: f'role:{FAKER.choice(["admin", "member", "reader"])}',
)
scope_types: List[str] = field(
scope_types: Union[List[str], Any] = field(
default_factory=lambda: FAKER.choice(
["system", "domain", "project"],
length=FAKER.numbers.integer_number(1, 3),
@ -92,7 +92,7 @@ class FakeRuleData:
check_str: str = field(
default_factory=lambda: f'role:{FAKER.choice(["admin", "member", "reader"])}',
)
scope_types: List[str] = field(
scope_types: Union[List[str], Any] = field(
default_factory=lambda: FAKER.choice(
["system", "domain", "project"],
length=FAKER.numbers.integer_number(1, 3),

View File

@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Set
ALGORITHM = "HS256"
KEYSTONE_API_VERSION = "3.13"
@ -36,8 +38,8 @@ EXTENSION_API_LIMIT_GT = 0
ID_UUID_RANGE_STEP = 100
SETTINGS_HIDDEN_SET = set()
SETTINGS_RESTART_SET = set()
SETTINGS_HIDDEN_SET: Set = set()
SETTINGS_RESTART_SET: Set = set()
DEFAULT_TIMEOUT = 30

View File

@ -13,7 +13,7 @@
# limitations under the License.
import types
from typing import Any, Dict
from typing import Any, Dict, Optional
import httpx
from fastapi import HTTPException, status
@ -21,7 +21,7 @@ from httpx import Response, codes
async def _http_request(
method: types.FunctionType = httpx.AsyncClient.get,
method: types.FunctionType = httpx.AsyncClient.get, # type: ignore
**kwargs,
) -> Response:
async with httpx.AsyncClient(verify=False) as client:
@ -37,7 +37,7 @@ async def _http_request(
async def assert_http_request(
method: types.FunctionType,
expectedStatus: str = codes.OK,
expectedStatus: codes = codes.OK,
**kwargs,
) -> Response:
response = await _http_request(method, **kwargs)
@ -51,12 +51,12 @@ async def assert_http_request(
async def get_assert_200(
url: str,
cookies: Dict[str, Any] = None,
headers: Dict[str, Any] = None,
params: Dict[str, Any] = None,
cookies: Optional[Dict[str, Any]] = None,
headers: Optional[Dict[str, Any]] = None,
params: Optional[Dict[str, Any]] = None,
) -> Response:
return await assert_http_request(
method=httpx.AsyncClient.get,
method=httpx.AsyncClient.get, # type: ignore
expectedStatus=codes.OK,
url=url,
cookies=cookies,
@ -65,9 +65,9 @@ async def get_assert_200(
)
async def delete_assert_200(url, cookies: Dict[str, Any] = None) -> Response:
async def delete_assert_200(url, cookies: Optional[Dict[str, Any]] = None) -> Response:
return await assert_http_request(
method=httpx.AsyncClient.delete,
method=httpx.AsyncClient.delete, # type: ignore
expectedStatus=codes.OK,
url=url,
cookies=cookies,
@ -76,7 +76,7 @@ async def delete_assert_200(url, cookies: Dict[str, Any] = None) -> Response:
async def post_assert_201(url: str, json: Dict[str, Any], cookies: Dict[str, Any]) -> Response:
return await assert_http_request(
method=httpx.AsyncClient.post,
method=httpx.AsyncClient.post, # type: ignore
expectedStatus=codes.CREATED,
url=url,
json=json,
@ -86,7 +86,7 @@ async def post_assert_201(url: str, json: Dict[str, Any], cookies: Dict[str, Any
async def put_assert_200(url: str, json: Dict[str, Any], cookies: Dict[str, Any]) -> Response:
return await assert_http_request(
method=httpx.AsyncClient.put,
method=httpx.AsyncClient.put, # type: ignore
expectedStatus=codes.OK,
url=url,
json=json,

View File

@ -19,3 +19,5 @@ asgi-lifespan<=1.0.1 # MIT
types-PyYAML<=5.4.10 # Apache-2.0
oslo.log<=5.0.0 # Apache-2.0
neutron-lib>=2.15.0 # Apache-2.0
lxml>=4.4.1 # BSD
types-python-dateutil>=2.8.2 # Apache-2.0

View File

@ -1,19 +0,0 @@
#!/bin/sh
#
# A wrapper around mypy that allows us to specify what files to run 'mypy' type
# checks on. Intended to be invoked via tox:
#
#   tox -e mypy
#
# Eventually this should go away once we have either converted everything or
# converted enough and ignored [1] the rest.
#
# [1] http://mypy.readthedocs.io/en/latest/config_file.html#per-module-flags

if [ $# -eq 0 ]; then
    # If no arguments are provided, check the standard converted package.
    python -m mypy skyline_apiserver
else
    # Otherwise check exactly what the user asked for. "$@" must be quoted
    # so arguments containing whitespace are forwarded intact (SC2068);
    # the original unquoted $@ would word-split such paths.
    python -m mypy "$@"
fi

View File

@ -51,7 +51,7 @@ description =
envdir = {toxworkdir}/shared
extras =
commands =
bash tools/mypywrap.sh {posargs}
mypy skyline_apiserver
[testenv:pep8]
description =
@ -61,7 +61,7 @@ deps =
{[testenv]deps}
extras =
commands =
; {[testenv:mypy]commands}
{[testenv:mypy]commands}
isort --check-only --diff skyline_apiserver
black --check --diff --color skyline_apiserver --line-length 98
flake8 {posargs} .