Update typos version

Update the version of typos that we use to fix typos in the openapi specs.
Since there are quite a lot of findings (and the fixes cause other failures),
also update pre-commit and apply the resulting changes.

Change-Id: I795215942e1587bf914d923edd09070fe729e9d5
Signed-off-by: Artem Goncharov <artem.goncharov@gmail.com>
This commit is contained in:
Artem Goncharov
2025-10-09 14:27:02 +02:00
parent a40d9c183b
commit 298e6753ed
52 changed files with 151 additions and 147 deletions

View File

@@ -4,13 +4,13 @@ default_language_version:
python: python3
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
rev: v6.0.0
hooks:
- id: trailing-whitespace
- id: mixed-line-ending
args: ['--fix', 'lf']
exclude: '.*\.(svg)$'
- id: check-byte-order-marker
- id: fix-byte-order-marker
- id: check-executables-have-shebangs
- id: check-merge-conflict
- id: debug-statements
@@ -18,11 +18,15 @@ repos:
files: .*\.(yaml|yml)$
exclude: '^zuul.d/.*$'
- repo: https://github.com/PyCQA/doc8
rev: v1.1.2
rev: v2.0.0
hooks:
- id: doc8
- repo: https://github.com/crate-ci/typos
rev: v1.38.1
hooks:
- id: typos
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.12
rev: v0.14.0
hooks:
- id: ruff
args: ['--fix', '--unsafe-fixes']
@@ -34,7 +38,7 @@ repos:
# args: ['-l', '79']
# exclude: '^codegenerator/templates/.*$'
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.16.0
rev: v1.18.2
hooks:
- id: mypy
language: python

View File

@@ -86,7 +86,7 @@ class BaseGenerator:
mod_path,
mod_list: set[str],
url: str,
resouce_name: str,
resource_name: str,
service_name: str,
):
pass

View File

@@ -317,7 +317,7 @@ def find_response_schema(
Some operations are having variety of possible responses (depending on
microversion, action, etc). Try to locate suitable response for the client.
The function iterates over all defined responses and for 2** appies the
The function iterates over all defined responses and for 2** applies the
following logic:
- if action_name is present AND oneOf is present AND action_name is in one
@@ -331,7 +331,7 @@ def find_response_schema(
:param dict responses: Dictionary with responses as defined in OpenAPI spec
:param str response_key: Response key to be searching in responses (when
aciton_name is not given) :param str action_name: Action name to be
action_name is not given) :param str action_name: Action name to be
searching response for
"""
for code, rspec in responses.items():

View File

@@ -12,7 +12,8 @@
#
import logging
import re
from typing import Type, Any, Generator, Tuple
from typing import Any
from collections.abc import Generator
from pydantic import BaseModel
@@ -356,7 +357,7 @@ class StructField(BaseModel):
class Struct(BaseCompoundType):
base_type: str = "struct"
fields: dict[str, StructField] = {}
field_type_class_: Type[StructField] | StructField = StructField
field_type_class_: type[StructField] | StructField = StructField
additional_fields_type: (
BasePrimitiveType | BaseCombinedType | BaseCompoundType | None
) = None
@@ -488,7 +489,7 @@ class StructFieldResponse(StructField):
class StructResponse(Struct):
field_type_class_: Type[StructField] = StructFieldResponse
field_type_class_: type[StructField] = StructFieldResponse
@property
def imports(self):
@@ -721,12 +722,12 @@ class TypeManager:
model.Reference,
BasePrimitiveType | BaseCombinedType | BaseCompoundType,
] = {}
parameters: dict[str, Type[RequestParameter] | RequestParameter] = {}
parameters: dict[str, type[RequestParameter] | RequestParameter] = {}
#: Base mapping of the primitive data-types
base_primitive_type_mapping: dict[
Type[model.PrimitiveType],
Type[BasePrimitiveType] | Type[BaseCombinedType],
type[model.PrimitiveType],
type[BasePrimitiveType] | type[BaseCombinedType],
] = {
model.PrimitiveString: String,
model.ConstraintString: String,
@@ -740,17 +741,17 @@ class TypeManager:
#: Extension for primitives data-type mapping
primitive_type_mapping: dict[
Type[model.PrimitiveType],
Type[BasePrimitiveType] | Type[BaseCombinedType],
type[model.PrimitiveType],
type[BasePrimitiveType] | type[BaseCombinedType],
]
#: Extensions of the data-type mapping
data_type_mapping: dict[
Type[model.ADT], Type[BaseCombinedType] | Type[BaseCompoundType]
type[model.ADT], type[BaseCombinedType] | type[BaseCompoundType]
]
#: Base data-type mapping
base_data_type_mapping: dict[
Type[model.ADT], Type[BaseCombinedType] | Type[BaseCompoundType]
type[model.ADT], type[BaseCombinedType] | type[BaseCompoundType]
] = {
model.Dictionary: Dictionary,
model.Enum: Enum,
@@ -760,12 +761,12 @@ class TypeManager:
model.Set: BTreeSet,
}
#: RequestParameter Type class
request_parameter_class: Type[RequestParameter] = RequestParameter
request_parameter_class: type[RequestParameter] = RequestParameter
#: Option Type class
option_type_class: Type[Option] | Option = Option
option_type_class: type[Option] | Option = Option
#: StringEnum Type class
string_enum_class: Type[StringEnum] | StringEnum = StringEnum
string_enum_class: type[StringEnum] | StringEnum = StringEnum
#: List of the models to be ignored
ignored_models: list[model.Reference] = []
@@ -1137,9 +1138,9 @@ class TypeManager:
# in the output oneOf of same type (but maybe different formats)
# makes no sense
# Example is server addresses which are ipv4 or ipv6
bck = kinds[0].copy()
back = kinds[0].copy()
kinds.clear()
kinds.append(bck)
kinds.append(back)
elif (
self.string_enum_class in kinds_classes
and option_klass in kinds_classes
@@ -1265,7 +1266,7 @@ class TypeManager:
name,
new_name,
] and isinstance(other_model, Struct):
# rename first occurence to the same scheme
# rename first occurrence to the same scheme
props = other_model.fields.keys()
new_other_name = name + "".join(
x.title() for x in props
@@ -1292,7 +1293,7 @@ class TypeManager:
and name in unique_models
and model_.reference
and unique_models[name].hash_ == model_.reference.hash_
# image.metadef.namespace have weird occurences of itself
# image.metadef.namespace have weird occurrences of itself
and model_.reference != unique_models[name]
):
# We already have literally same model. Do nothing expecting
@@ -1306,7 +1307,7 @@ class TypeManager:
def get_subtypes(self):
"""Get all subtypes excluding TLA"""
# Need to prevent literaly same objects to be emitted multiple times
# Need to prevent literally same objects to be emitted multiple times
# what may happen in case of deep nesting
emitted: set[str] = set()
for k, v in self.refs.items():
@@ -1419,7 +1420,7 @@ class TypeManager:
def get_parameters(
self, location: str
) -> Generator[Tuple[str, Type[RequestParameter]], None, None]:
) -> Generator[tuple[str, type[RequestParameter]], None, None]:
"""Get parameters by location"""
for k, v in self.parameters.items():
if v.location == location:

View File

@@ -10,7 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
#
from typing import Any, Dict, List, Optional
from typing import Any
from pydantic import BaseModel, ConfigDict, Field
@@ -21,26 +21,24 @@ class TypeSchema(BaseModel):
# TODO(gtema): enums are re-shuffled on every serialization
model_config = ConfigDict(extra="allow", populate_by_name=True)
type: Optional[str | List[str]] = None
format: Optional[str] = None
description: Optional[str] = None
type: str | list[str] | None = None
format: str | None = None
description: str | None = None
summary: str | None = None
default: Optional[Any] = None
items: Optional[Dict[str, Any]] = None
default: Any | None = None
items: dict[str, Any] | None = None
# circular reference cause issues on deserializing
properties: Optional[Dict[str, Any]] = None
nullable: Optional[bool] = None
additionalProperties: Optional[bool | Any] = None
properties: dict[str, Any] | None = None
nullable: bool | None = None
additionalProperties: bool | Any | None = None
ref: Optional[str] = Field(alias="$ref", default=None)
oneOf: Optional[List[Any]] = Field(default=None)
anyOf: Optional[List[Any]] = Field(default=None)
openstack: Optional[Dict[str, Any]] = Field(
alias="x-openstack", default=None
)
required: Optional[List[str]] = None
pattern: Optional[str] = None
maxLength: Optional[int] = None
ref: str | None = Field(alias="$ref", default=None)
oneOf: list[Any] | None = Field(default=None)
anyOf: list[Any] | None = Field(default=None)
openstack: dict[str, Any] | None = Field(alias="x-openstack", default=None)
required: list[str] | None = None
pattern: str | None = None
maxLength: int | None = None
@classmethod
def openapi_type_from_sdk(cls, type_name, fallback_type):
@@ -92,7 +90,7 @@ class ParameterSchema(BaseModel):
explode: bool | None = None
uniqueItems: bool | None = None
ref: str = Field(alias="$ref", default=None)
openstack: Dict[str, Any] = Field(alias="x-openstack", default=None)
openstack: dict[str, Any] = Field(alias="x-openstack", default=None)
def get_sdk_name(self):
return self.sdk_name or self.name
@@ -101,31 +99,29 @@ class ParameterSchema(BaseModel):
class OperationSchema(BaseModel):
model_config = ConfigDict(extra="allow", populate_by_name=True)
parameters: List[ParameterSchema] = []
parameters: list[ParameterSchema] = []
description: str | None = None
operationId: str | None = None
requestBody: dict = {}
responses: Dict[str, dict] = {}
tags: List[str] = []
responses: dict[str, dict] = {}
tags: list[str] = []
deprecated: bool | None = None
openstack: dict = Field(alias="x-openstack", default={})
security: List | None = None
security: list | None = None
class HeaderSchema(BaseModel):
model_config = ConfigDict(extra="allow", populate_by_name=True)
description: Optional[str] = None
openstack: Optional[Dict[str, Any]] = Field(
alias="x-openstack", default=None
)
schema: Optional[TypeSchema] = Field(default=None)
description: str | None = None
openstack: dict[str, Any] | None = Field(alias="x-openstack", default=None)
schema: TypeSchema | None = Field(default=None)
class PathSchema(BaseModel):
model_config = ConfigDict(extra="allow", populate_by_name=True)
parameters: List[ParameterSchema] = []
parameters: list[ParameterSchema] = []
get: OperationSchema = OperationSchema()
post: OperationSchema = OperationSchema()
delete: OperationSchema = OperationSchema()
@@ -137,19 +133,19 @@ class PathSchema(BaseModel):
class ComponentsSchema(BaseModel):
model_config = ConfigDict(extra="allow", populate_by_name=True)
schemas: Dict[str, TypeSchema] = {}
parameters: Dict[str, ParameterSchema] = {}
headers: Dict[str, HeaderSchema] = {}
schemas: dict[str, TypeSchema] = {}
parameters: dict[str, ParameterSchema] = {}
headers: dict[str, HeaderSchema] = {}
class SpecSchema(BaseModel):
class Config:
pupulate_by_name = True
populate_by_name = True
extra = "allow"
openapi: str
info: dict
paths: Dict[str, PathSchema] = {}
paths: dict[str, PathSchema] = {}
components: ComponentsSchema = ComponentsSchema()
tags: List[Dict] = []
security: List[Dict] = []
tags: list[dict] = []
security: list[dict] = []

View File

@@ -52,7 +52,7 @@ OPERATION_ID_BLACKLIST: set[str] = {
"os-hosts/id:put",
}
SERVICE_METADATA_MAP: dict[str, ty.Type[MetadataBase]] = {
SERVICE_METADATA_MAP: dict[str, type[MetadataBase]] = {
"baremetal": BaremetalMetadata,
"block-storage": BlockStorageMetadata,
"volume": BlockStorageMetadata,
@@ -123,7 +123,7 @@ class MetadataGenerator(BaseGenerator):
) -> Metadata:
metadata = Metadata(resources={})
api_ver = "v" + schema.info["version"].split(".")[0]
service_metadata: ty.Type[MetadataBase] = SERVICE_METADATA_MAP[
service_metadata: type[MetadataBase] = SERVICE_METADATA_MAP[
service_type
]

View File

@@ -20,7 +20,7 @@ class BaremetalMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None

View File

@@ -22,7 +22,7 @@ class MetadataBase(ABC):
@abstractmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]: ...
) -> tuple[str | None, bool]: ...
@staticmethod
@abstractmethod

View File

@@ -21,7 +21,7 @@ class BlockStorageMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None
path_elements: list[str] = path.split("/")

View File

@@ -21,7 +21,7 @@ class ComputeMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None

View File

@@ -20,7 +20,7 @@ class ContainerInfrastructureManagementMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None

View File

@@ -22,7 +22,7 @@ class DnsMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None

View File

@@ -22,7 +22,7 @@ class IdentityMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None

View File

@@ -22,7 +22,7 @@ class ImageMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None

View File

@@ -20,7 +20,7 @@ class KeyManagerMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None

View File

@@ -22,7 +22,7 @@ class LoadBalancerMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None
path_elements: list[str] = path.split("/")

View File

@@ -22,7 +22,7 @@ class NetworkMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None

View File

@@ -21,7 +21,7 @@ class ObjectStorageMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None

View File

@@ -21,7 +21,7 @@ class PlacementMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None

View File

@@ -20,7 +20,7 @@ class SharedFileSystemMetadata(MetadataBase):
@staticmethod
def get_operation_key(
operation, path: str, method: str, resource_name: str
) -> ty.Tuple[str | None, bool]:
) -> tuple[str | None, bool]:
skip: bool = False
operation_key: str | None = None

View File

@@ -19,8 +19,6 @@ import json
import logging
import string
from typing import Any
from typing import Type
import typing as ty
from pydantic import BaseModel
from pydantic import ConfigDict
@@ -37,7 +35,7 @@ def dicthash_(data: dict[str, Any]) -> str:
class Reference(BaseModel):
"""Reference of the complex type to the occurence instance"""
"""Reference of the complex type to the occurrence instance"""
model_config = ConfigDict(arbitrary_types_allowed=True)
@@ -327,10 +325,10 @@ class JsonSchemaParser:
Do basic parsing of the jsonschema that has `"type": "object"` in the
root. In real life there might be `oneOf`, `anyOf`, `not`,
`dependentRequired`, `dependendSchemas`, `if-then-else` underneath. For
`dependentRequired`, `dependentSchemas`, `if-then-else` underneath. For
now oneOf are supported by building an Enum ouf of this object
when none of `properties`, `additional_properties`,
`pattern_properties` are present. `anyOf` elemenst are merged into a
`pattern_properties` are present. `anyOf` elements are merged into a
single schema that is then parsed.
The more complex validation rules (`"properties": ..., "oneOf":

View File

@@ -373,7 +373,7 @@ class BarbicanGenerator(OpenStackServerSourceBase):
action="delete",
conditions={"method": ["DELETE"]},
)
# Contanier consumers
# Container consumers
mapper.connect(
None,
"/v1/containers/{container_id}/consumers",

View File

@@ -19,7 +19,8 @@ import inspect
import jsonref
import logging
from pathlib import Path
from typing import Any, Callable, Literal
from typing import Any, Literal
from collections.abc import Callable
import re
from codegenerator import common

View File

@@ -212,7 +212,7 @@ class CinderV3Generator(OpenStackServerSourceBase):
operation_spec.deprecated = True
def _post_process(self, openapi_spec):
"""Repair urls and url parametes"""
"""Repair urls and url parameters"""
for path in [
"/v3/{project_id}/os-quota-sets/{id}",
"/v3/{project_id}/os-quota-sets/{id}/defaults",

View File

@@ -65,7 +65,7 @@ ATTACHMENT_DETAIL_SCHEMA: dict[str, Any] = {
"format": "date-time",
"description": "The time when attachment is attached.",
},
"connecttion_info": {
"connection_info": {
"type": "object",
"description": "The connection info used for server to connect the volume.",
},

View File

@@ -48,7 +48,7 @@ If an error occurs, you can find more information about the error in the fail_re
"cgsnapshots": "Lists all, lists all with details, shows details for, creates, and deletes consistency group snapshots.",
"clusters": """Administrator only. Lists all Cinder clusters, show cluster detail, enable or disable a cluster.
Each cinder service runs on a host computer (possibly multiple services on the same host; it depends how you decide to deploy cinder). In order to support High Availibility scenarios, services can be grouped into clusters where the same type of service (for example, cinder-volume) can run on different hosts so that if one host goes down the service is still available on a different host. Since there's no point having these services sitting around doing nothing while waiting for some other host to go down (which is also known as Active/Passive mode), grouping services into clusters also allows cinder to support Active/Active mode in which all services in a cluster are doing work all the time.
Each cinder service runs on a host computer (possibly multiple services on the same host; it depends how you decide to deploy cinder). In order to support High Availability scenarios, services can be grouped into clusters where the same type of service (for example, cinder-volume) can run on different hosts so that if one host goes down the service is still available on a different host. Since there's no point having these services sitting around doing nothing while waiting for some other host to go down (which is also known as Active/Passive mode), grouping services into clusters also allows cinder to support Active/Active mode in which all services in a cluster are doing work all the time.
**Note**: Currently the only service that can be grouped into clusters is cinder-volume.

View File

@@ -79,7 +79,7 @@ GROUP_SNAPSHOT_SCHEMA: dict[str, Any] = {
GROUP_SNAPSHOT_DETAIL_SCHEMA: dict[str, Any] = {
"type": "object",
"description": "A group snapshot bject.",
"description": "A group snapshot object.",
"properties": {
"status": {
"type": "string",

View File

@@ -298,14 +298,14 @@ VOLUME_PARAMETERS: dict[str, Any] = {
"in": "query",
"name": "created_at",
"schema": {"type": "string", "format": "date-time"},
"description": "Filters reuslts by a time that resources are created at with time comparison operators: gt/gte/eq/neq/lt/lte.",
"description": "Filters results by a time that resources are created at with time comparison operators: gt/gte/eq/neq/lt/lte.",
"x-openstack": {"min-ver": "3.60"},
},
"updated_at": {
"in": "query",
"name": "updated_at",
"schema": {"type": "string", "format": "date-time"},
"description": "Filters reuslts by a time that resources are updated at with time comparison operators: gt/gte/eq/neq/lt/lte.",
"description": "Filters results by a time that resources are updated at with time comparison operators: gt/gte/eq/neq/lt/lte.",
"x-openstack": {"min-ver": "3.60"},
},
"consumes_quota": {

View File

@@ -90,7 +90,7 @@ IDENTITY_PROVIDER_CREATE_SCHEMA: dict[str, Any] = {
"properties": {
"identity_provider": {
"type": "object",
"projecties": {
"properties": {
**_identity_provider_properties,
"domain_id": {
"type": ["string", "null"],

View File

@@ -345,10 +345,10 @@ class NeutronGenerator(OpenStackServerSourceBase):
work_dir.mkdir(parents=True, exist_ok=True)
# NOTE(gtema): call me paranoic or stupid, but I just gave up fighting
# agains oslo_config and oslo_policy with their global state. It is
# against oslo_config and oslo_policy with their global state. It is
# just too painful and takes too much precious time. On multiple
# invocation with different config there are plenty of things remaining
# in the old state. In order to workaroung this just process in
# in the old state. In order to workaround this just process in
# different processes.
with Manager() as manager:
# Since we may process same route multiple times we need to have a
@@ -362,7 +362,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
p.start()
p.join()
if p.exitcode != 0:
raise RuntimeError("Error generating Neutron OpenAPI schma")
raise RuntimeError("Error generating Neutron OpenAPI schema")
# VPNaaS
p = Process(
@@ -372,7 +372,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
p.start()
p.join()
if p.exitcode != 0:
raise RuntimeError("Error generating Neutron OpenAPI schma")
raise RuntimeError("Error generating Neutron OpenAPI schema")
(impl_path, openapi_spec) = self._read_spec(work_dir)
@@ -941,7 +941,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
# And now basic CRUD operations, those take whichever info is available in Controller._attr_info
elif operation in ["index", "show", "create", "update", "delete"]:
# Only CRUD operation are having request/response information avaiable
# Only CRUD operation are having request/response information available
send_props = {}
return_props = {}
# Consume request name to required fields mapping

View File

@@ -174,7 +174,7 @@ class NovaGenerator(OpenStackServerSourceBase):
schema: None = None
ref: str | None
mime_type: str | None = "application/json"
# NOTE(gtema): This must go away once scemas are merged directly to
# NOTE(gtema): This must go away once schemas are merged directly to
# Nova
# /servers
if name == "ServersCreateResponse":

View File

@@ -2225,7 +2225,7 @@ VOLUME_ATTACHMENT_SCHEMA: dict[str, Any] = {
"x-openstack": {"min-ver": "2.89"},
},
},
"reuired": ["id", "serverId", "volumeId", "device"],
"required": ["id", "serverId", "volumeId", "device"],
}
VOLUME_ATTACHMENT_CONTAINER_SCHEMA: dict[str, Any] = {
"type": "object",

View File

@@ -739,8 +739,8 @@ class OctaviaGenerator(OpenStackServerSourceBase):
"/lbaas/providers": "providers",
"/lbaas/flavorprofiles": "flavor-profiles",
"/lbaas/flavors": "flavors",
"/lbaas/availabilityzoneprofiles": "avaiability-zone-profiles",
"/lbaas/availabilityzones": "avaiability-zones",
"/lbaas/availabilityzoneprofiles": "availability-zone-profiles",
"/lbaas/availabilityzones": "availability-zones",
"/lbaas/amphorae": "amphorae",
"/octavia/amphorae": "amphorae",
}

View File

@@ -28,7 +28,7 @@ def merge_api_ref_doc(
allow_strip_version=True,
doc_url_prefix="",
):
"""Merge infomation from rendered API-REF html into the spec
"""Merge information from rendered API-REF html into the spec
:param openapi_spec: OpenAPI spec
:param api_ref_src: path to the rendered API-REF
@@ -170,7 +170,7 @@ def merge_api_ref_doc(
else:
processed_operations.add(op_spec.operationId)
# Find the button in the operaion container to get ID of the
# Find the button in the operation container to get ID of the
# details section
details_button = op.find("button")
details_section_id = details_button.get(
@@ -492,7 +492,7 @@ def _get_schema_candidates(
continue
if candidate_action_name == action_name:
# We know which action we are searching for (most likely we process reponse
# We know which action we are searching for (most likely we process response
schema_specs.append(res)
elif not action_name and section_description:
@@ -514,11 +514,11 @@ def _get_schema_candidates(
# This is an action we are hopefully interested in
# Now we can have single schema or multiple (i.e. microversions)
if isinstance(res, TypeSchema):
itms = res.oneOf
items = res.oneOf
elif isinstance(res, dict):
itms = res.get("oneOf")
if itms:
for itm in itms:
items = res.get("oneOf")
if items:
for itm in items:
schema_specs.append(
get_schema(openapi_spec, itm)
)

View File

@@ -14,7 +14,6 @@ import logging
from pathlib import Path
import subprocess
import re
from typing import Type
from codegenerator.base import BaseGenerator
from codegenerator import common
@@ -174,7 +173,7 @@ class StructInputField(common_rust.StructField):
class StructInput(common_rust.Struct):
field_type_class_: Type[common_rust.StructField] = StructInputField
field_type_class_: type[common_rust.StructField] = StructInputField
clap_macros: set[str] = set()
original_data_type: BaseCompoundType | BaseCompoundType | None = None
is_group: bool = False
@@ -200,7 +199,7 @@ class EnumGroupStructInputField(StructInputField):
class EnumGroupStruct(common_rust.Struct):
"""Container for complex Enum containing Array"""
field_type_class_: Type[common_rust.StructField] = (
field_type_class_: type[common_rust.StructField] = (
EnumGroupStructInputField
)
base_type: str = "struct"
@@ -306,7 +305,7 @@ class RequestParameter(common_rust.RequestParameter):
class RequestTypeManager(common_rust.TypeManager):
primitive_type_mapping: dict[
Type[model.PrimitiveType], Type[BasePrimitiveType]
type[model.PrimitiveType], type[BasePrimitiveType]
] = {
model.PrimitiveString: String,
model.ConstraintString: String,
@@ -314,7 +313,7 @@ class RequestTypeManager(common_rust.TypeManager):
}
data_type_mapping: dict[
Type[model.ADT], Type[BaseCombinedType] | Type[BaseCompoundType]
type[model.ADT], type[BaseCombinedType] | type[BaseCompoundType]
]
data_type_mapping = {
@@ -325,7 +324,7 @@ class RequestTypeManager(common_rust.TypeManager):
model.Set: ArrayInput,
}
request_parameter_class: Type[common_rust.RequestParameter] = (
request_parameter_class: type[common_rust.RequestParameter] = (
RequestParameter
)
string_enum_class = StringEnum
@@ -374,7 +373,7 @@ class RequestTypeManager(common_rust.TypeManager):
# and keep only rest
# This usecase is here at least to handle server.networks
# which are during creation `none`|`auto`|`JSON`
# On the SDK side where this method is not overriden there
# On the SDK side where this method is not overridden there
# would be a naming conflict resulting in `set_models` call
# adding type name as a suffix.
# sdk_enum_name = result.name + result.__class__.__name__
@@ -458,7 +457,7 @@ class RequestTypeManager(common_rust.TypeManager):
):
# An array of structs with more then 1 field
# Array of Structs can not be handled by the CLI (input).
# Therefore handle underlaying structure as Json saving
# Therefore handle underlying structure as Json saving
# reference to the original "expected" stuff to make final
# input conversion possible
original_data_type = self.convert_model(item_type)
@@ -587,7 +586,7 @@ class RequestTypeManager(common_rust.TypeManager):
)
if intersect:
# Na well, it is such a rare case that it does not make
# much sense to start renaming fields. Instead conver
# much sense to start renaming fields. Instead convert
# substruct to be a JsonValue
simplified_data_type = JsonValue()
simplified_data_type.original_data_type = field_data_type
@@ -1090,7 +1089,7 @@ class RustCliGenerator(BaseGenerator):
additional_imports.update(type_manager.get_imports())
# additional_imports.update(response_type_manager.get_imports())
# Deserialize is already in template since it is uncoditionally required
# Deserialize is already in template since it is unconditionally required
additional_imports.discard("serde::Deserialize")
additional_imports.discard("serde::Serialize")

View File

@@ -14,7 +14,7 @@ import logging
from pathlib import Path
import re
import subprocess
from typing import Type, Any
from typing import Any
from codegenerator.base import BaseGenerator
from codegenerator import common
@@ -112,7 +112,7 @@ class StructField(common_rust.StructField):
class Struct(common_rust.Struct):
# field_type_class_ = StructField
field_type_class_: Type[StructField] | StructField = StructField
field_type_class_: type[StructField] | StructField = StructField
@property
def builder_macros(self):
@@ -253,7 +253,7 @@ class TypeManager(common_rust.TypeManager):
"""
primitive_type_mapping: dict[Type[model.PrimitiveType], Type[Any]] = {
primitive_type_mapping: dict[type[model.PrimitiveType], type[Any]] = {
model.PrimitiveString: String,
model.ConstraintString: String,
}
@@ -265,7 +265,7 @@ class TypeManager(common_rust.TypeManager):
model.CommaSeparatedList: CommaSeparatedList,
}
request_parameter_class: Type[common_rust.RequestParameter] = (
request_parameter_class: type[common_rust.RequestParameter] = (
RequestParameter
)

View File

@@ -14,7 +14,7 @@ import logging
from pathlib import Path
import re
import subprocess
from typing import Type, Any
from typing import Any
from codegenerator.base import BaseGenerator
from codegenerator import common
@@ -155,7 +155,7 @@ class StructField(rust_sdk.StructField):
class Struct(rust_sdk.Struct):
field_type_class_: Type[StructField] | StructField = StructField
field_type_class_: type[StructField] | StructField = StructField
original_data_type: BaseCompoundType | BaseCompoundType | None = None
is_required: bool = False
@@ -239,7 +239,7 @@ class TypeManager(common_rust.TypeManager):
"""
primitive_type_mapping: dict[Type[model.PrimitiveType], Type[Any]] = {
primitive_type_mapping: dict[type[model.PrimitiveType], type[Any]] = {
model.PrimitiveString: String,
model.ConstraintString: String,
}
@@ -250,7 +250,7 @@ class TypeManager(common_rust.TypeManager):
model.CommaSeparatedList: ArrayInput,
}
request_parameter_class: Type[common_rust.RequestParameter] = (
request_parameter_class: type[common_rust.RequestParameter] = (
common_rust.RequestParameter
)
@@ -535,7 +535,7 @@ class RustTuiGenerator(BaseGenerator):
"crate::cloud_worker::ConfirmableRequest"
)
# Deserialize is already in template since it is uncoditionally required
# Deserialize is already in template since it is unconditionally required
additional_imports.discard("serde::Deserialize")
additional_imports.discard("serde::Serialize")

View File

@@ -183,7 +183,7 @@ class ResponseTypeManager(common_rust.TypeManager):
def get_subtypes(self):
"""Get all subtypes excluding TLA"""
emited_data: set[str] = set()
emitted_data: set[str] = set()
for k, v in self.refs.items():
if (
k
@@ -200,8 +200,8 @@ class ResponseTypeManager(common_rust.TypeManager):
and k.name != self.root_name
):
key = v.base_type + v.type_hint
if key not in emited_data:
emited_data.add(key)
if key not in emitted_data:
emitted_data.add(key)
yield v
def get_imports(self):
@@ -371,7 +371,7 @@ class RustTypesGenerator(BaseGenerator):
)
)
except Exception:
# In rare cases we can not conter
# In rare cases we can not convert
# value_type since it depends on different
# types. We are here in the output
# simplification, so just downcast it to

View File

@@ -65,7 +65,7 @@ class Create{{ class_name }}(command.ShowOne):
{%- endif %} {#- if attr.required_on_create #}
help=_(
{%- if v['docs'] and v['docs']|length > 0 %}
{#- wrap long line with identation #}
{#- wrap long line with indentation #}
"{{ v.get('docs', '') | wordwrap(59) | replace('\n', ' \"\n \"') }}"
{%- elif v['doc']|length == 0 %}
""

View File

@@ -46,7 +46,7 @@ class List{{ class_name }}(command.Lister):
{%- endif -%} {# if not k.startswith #}
help=_(
{%- if attr is defined and attr['docs'] and attr['docs']|length > 0 %}
{#- wrap long line with identation #}
{#- wrap long line with indentation #}
"{{ attr.get('docs', '') | wordwrap(59) | replace('\n', ' \"\n \"') }}"
{%- elif attr is defined and attr['doc']|length == 0 %}
""

View File

@@ -62,7 +62,7 @@ class Set{{ class_name }}(command.ShowOne):
{%- endif %} {#- if attr.type == dict #}
help=_(
{%- if v['docs'] and v['docs']|length > 0 %}
{#- wrap long line with identation #}
{#- wrap long line with indentation #}
"{{ v.get('docs', '') | wordwrap(59) | replace('\n', ' \"\n \"') }}"
{%- elif v['doc']|length == 0 %}
""

View File

@@ -1190,7 +1190,7 @@ class TestModel(TestCase):
"in": "query",
"name": "limit",
"schema": {
"type": ["strng", "integer"],
"type": ["string", "integer"],
"format": "^[0-9]*$",
"minimum": 0,
},

View File

@@ -51,7 +51,7 @@ Highlevel description (for contributor)
Base generator
:class:`~codegenerator.openapi.base.OpenStackServerSourceGenerator` is
supporting WSGI + Routes based application out of box. For such applications
it tries to get the main router from wich all exposed routes are being
it tries to get the main router from which all exposed routes are being
analysed. During routes processing generator is searching for supported
decorators and frameworks in order to extract as most information about the
operation as possible:
@@ -105,7 +105,7 @@ Cinder is very similar to Nova so everything mentioned above is applicable
here as well.
for Cinder at the moment all operations are duplicated under
`v3/${project_id}/...` and `v3/...`. For the sake of standartization
`v3/${project_id}/...` and `v3/...`. For the sake of standardization
project_id urls are excluded from the produces spec file.
@@ -138,7 +138,7 @@ Neutron
This is where things are getting more challenging.
Neutron requires having DB provisioned and an in-memory DB seems not to be
possible due to technics for the DB communication. In addition to that config
possible due to techniques for the DB communication. In addition to that config
file enabling desired extensions is expected. All these activities are covered
in :class:`~codegenerator.openapi.neutron.NeutronGenerator:setup_neutron`.
According to the current information it is not possible to have all possible

View File

@@ -16,7 +16,7 @@ following aspects:
Currently the generated code is hosted under
`https://github.com/gtema/openstack`. It covers all
services for which OpenAPI specs exist with version
dicovery and partial microversion negotiation.
discovery and partial microversion negotiation.
TODO

View File

@@ -8,6 +8,6 @@ generate SDK for Rust automatically.
Currently the generated code is hosted under
`https://github.com/gtema/openstack`. It covers all
services for which OpenAPI specs exist with version
dicovery and partial microversion negotiation.
discovery and partial microversion negotiation.
TODO

View File

@@ -22,7 +22,7 @@
- name: Download crate-ci/typos
ansible.builtin.get_url:
url: "https://github.com/crate-ci/typos/releases/download/v1.25.0/typos-v1.25.0-x86_64-unknown-linux-musl.tar.gz"
url: "https://github.com/crate-ci/typos/releases/download/v1.38.1/typos-v1.38.1-x86_64-unknown-linux-musl.tar.gz"
dest: "/tmp/typos.tar.gz"
- name: Create bin folder

View File

@@ -21,7 +21,7 @@
- rust_patch
download_artifact_pipeline: gate
- name: "Check git patch presense"
- name: "Check git patch presence"
ansible.builtin.stat:
path: "{{ patch_file }}"
register: "git_patch_stat"

View File

@@ -75,7 +75,7 @@ build-backend = "pbr.build"
[tool.ruff]
line-length = 79
target-version = "py38"
target-version = "py312"
[tool.setuptools]
packages = [

View File

@@ -54,7 +54,7 @@ openstackdocs_auto_name = False
# Set aliases for extlinks
# * lpbug - generic Launchpad bug :lpbug:`123456`
# * oscbp - OSC blueprints :oscbp:`Blue Print <bp-name>`
# * oscdoc - OSC Docs :oscdoc:`Comamnd List <command-list>`
# * oscdoc - OSC Docs :oscdoc:`Command List <command-list>`
extlinks = {}
# Add any paths that contain templates here, relative to this directory.

View File

@@ -7,7 +7,7 @@
- ansible.builtin.set_fact:
metadata: "{{ metadata_content.content | b64decode | from_yaml }}"
- name: "Check presense of openapi file (first one)"
- name: "Check presence of openapi file (first one)"
ansible.builtin.stat:
path: "{{ codegenerator_base_dir }}/{{ metadata.resources | dict2items | map(attribute='value') | map(attribute='spec_file') | first }}"
register: "openapi_stat"

View File

@@ -76,7 +76,7 @@ commands =
application-import-names = codegenerator
# The following are ignored on purpose. It's not super worth it to fix them.
# However, if you feel strongly about it, patches will be accepted to fix them
# if they fix ALL of the occurances of one and only one of them.
# if they fix ALL of the occurrences of one and only one of them.
# E203 Black will put spaces after colons in list comprehensions
# E501 Black takes care of line length for us
# H238 New Style Classes are the default in Python3

5
typos.toml Normal file
View File

@@ -0,0 +1,5 @@
[default.extend-words]
wrk = "wrk"
[type.py.extend-words]
ser = "ser"