feat: Support mypy check

1. Support mypy checking, runnable via `tox -e mypy` or `tox -e pep8`
2. Fix the errors reported by the mypy check

Change-Id: I41b0013d271f3c7d3a28e1ea6dd0b083893d8983
This commit is contained in:
Boxiang Zhu 2022-07-28 16:09:31 +08:00
parent 1591f34a25
commit 88ef320dc6
30 changed files with 211 additions and 198 deletions

View File

@ -72,6 +72,7 @@ docs/_build/
tmp/ tmp/
test_results.html test_results.html
nginx.conf nginx.conf
**/mypy-report/
# MAC OS # MAC OS
.DS_Store .DS_Store

1
.gitignore vendored
View File

@ -75,6 +75,7 @@ tmp/
test_results.html test_results.html
skyline-console-* skyline-console-*
nginx.conf nginx.conf
mypy-report/
# MAC OS # MAC OS
.DS_Store .DS_Store

View File

@ -1,39 +1,47 @@
# https://mypy.readthedocs.io/en/stable/config_file.html
[mypy] [mypy]
incremental = false # Import discovery
cache_dir = /dev/null ignore_missing_imports = true
show_error_codes = true follow_imports = normal
# Platform configuration
# Disallow dynamic typing
# Untyped definitions and calls
check_untyped_defs = true
# None and Optional handling
no_implicit_optional = true
strict_optional = true
# Configuring warnings
show_error_context = true show_error_context = true
show_column_numbers = true
warn_unused_ignores = true
; check_untyped_defs = true # Suppressing errors
; disallow_incomplete_defs = true
; disallow_untyped_calls = true
; disallow_untyped_decorators = true
; disallow_untyped_defs = true
; ignore_missing_imports = true
; incremental = false
; no_implicit_optional = true
; pretty = true
; raise_exceptions = true
; strict_equality = true
; warn_incomplete_stub = true
; warn_redundant_casts = true
; warn_return_any = true
; warn_unreachable = true
; warn_unused_configs = true
; warn_unused_ignores = true
; allow_redefinition = true
; implicit_reexport = true
; # NOTE: Maybe need remove # Miscellaneous strictness flags
; disallow_subclassing_any = true
; disallow_any_decorated = true
; disallow_any_explicit = false
; disallow_any_expr = false
; disallow_any_generics = true
; disallow_any_unimported = false
[pydantic-mypy] # Configuring error messages
init_forbid_extra = true show_error_codes = true
init_typed = true pretty = true
warn_required_dynamic_aliases = true color_output = true
warn_untyped_fields = true error_summary = true
show_absolute_path = false
# Incremental mode
incremental = true
cache_dir = .mypy_cache
sqlite_cache = false
cache_fine_grained = false
skip_version_check = false
skip_cache_mtime_checks = false
# Advanced options
# Report generation
html_report = mypy-report
# Miscellaneous

View File

@ -29,7 +29,7 @@ from skyline_apiserver.types import constants
class TokenCookie(APIKeyCookie): class TokenCookie(APIKeyCookie):
async def __call__(self, request: Request) -> Optional[str]: async def __call__(self, request: Request) -> str:
api_key = request.cookies.get(self.model.name) api_key = request.cookies.get(self.model.name)
if not api_key: if not api_key:
raise HTTPException( raise HTTPException(
@ -39,7 +39,7 @@ class TokenCookie(APIKeyCookie):
return api_key return api_key
async def getJWTPayload(request: Request) -> (str): async def getJWTPayload(request: Request) -> Optional[str]:
token = request.cookies.get(CONF.default.session_name) token = request.cookies.get(CONF.default.session_name)
return token return token

View File

@ -47,7 +47,7 @@ async def list_keystone_endpoints() -> List[schemas.KeystoneEndpoints]:
tasks = [asyncio.create_task(get_endpoints(region)) for region in regions] tasks = [asyncio.create_task(get_endpoints(region)) for region in regions]
endpoints = await asyncio.gather(*tasks) endpoints = await asyncio.gather(*tasks)
result = [ result = [
{"region_name": region, "url": endpoint.get("keystone")} schemas.KeystoneEndpoints(**{"region_name": region, "url": endpoint.get("keystone")})
for region, endpoint in zip(regions, endpoints) for region, endpoint in zip(regions, endpoints)
] ]
return result return result

View File

@ -18,10 +18,13 @@ import asyncio
import math import math
from asyncio import gather from asyncio import gather
from functools import reduce from functools import reduce
from typing import List from typing import Any, Dict, List
from cinderclient.v3.volumes import Volume as CinderVolume
from dateutil import parser from dateutil import parser
from fastapi import APIRouter, Depends, Header, Query, status from fastapi import APIRouter, Depends, Header, Query, status
from glanceclient.v2.schemas import SchemaBasedModel as GlanceModel
from novaclient.v2.servers import Server as NovaServer
from skyline_apiserver import schemas from skyline_apiserver import schemas
from skyline_apiserver.api import deps from skyline_apiserver.api import deps
@ -67,10 +70,10 @@ async def list_servers(
regex=constants.INBOUND_HEADER_REGEX, regex=constants.INBOUND_HEADER_REGEX,
), ),
limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT), limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT),
marker: str = None, marker: str = Query(None),
sort_dirs: schemas.SortDir = None, sort_dirs: schemas.SortDir = Query(None),
sort_keys: List[schemas.ServerSortKey] = Query(None), sort_keys: List[schemas.ServerSortKey] = Query(None),
all_projects: bool = None, all_projects: bool = Query(None),
project_id: str = Query( project_id: str = Query(
None, None,
description="Only works when the all_projects filter is also specified.", description="Only works when the all_projects filter is also specified.",
@ -79,10 +82,10 @@ async def list_servers(
None, None,
description="Only works when the all_projects filter is also specified.", description="Only works when the all_projects filter is also specified.",
), ),
name: str = None, name: str = Query(None),
status: schemas.ServerStatus = None, status: schemas.ServerStatus = Query(None),
host: str = Query(None, description="It will be ignored for non-admin user."), host: str = Query(None, description="It will be ignored for non-admin user."),
flavor_id: str = None, flavor_id: str = Query(None),
uuid: str = Query(None, description="UUID of server."), uuid: str = Query(None, description="UUID of server."),
) -> schemas.ServersResponse: ) -> schemas.ServersResponse:
"""Extension List Servers. """Extension List Servers.
@ -143,14 +146,14 @@ async def list_servers(
search_opts={"name": project_name}, search_opts={"name": project_name},
) )
if not filter_projects: if not filter_projects:
return {"servers": []} return schemas.ServersResponse(**{"servers": []})
else: else:
# Projects will not have the same name or same id in the same domain # Projects will not have the same name or same id in the same domain
filter_project = filter_projects[0] filter_project = filter_projects[0]
# When we both supply the project_id and project_name filter, if the project's id does # When we both supply the project_id and project_name filter, if the project's id does
# not equal the project_id, just return []. # not equal the project_id, just return [].
if project_id and filter_project.id != project_id: if project_id and filter_project.id != project_id:
return {"servers": []} return schemas.ServersResponse(**{"servers": []})
project_id = filter_project.id project_id = filter_project.id
search_opts = { search_opts = {
@ -169,8 +172,8 @@ async def list_servers(
search_opts=search_opts, search_opts=search_opts,
marker=marker, marker=marker,
limit=limit, limit=limit,
sort_keys=sort_keys, sort_keys=[sort_key.value for sort_key in sort_keys] if sort_keys else None,
sort_dirs=[sort_dirs] if sort_dirs else None, sort_dirs=[sort_dirs.value] if sort_dirs else None,
) )
result = [] result = []
@ -228,8 +231,12 @@ async def list_servers(
projects = task_result[0] if task_result[0] else [] projects = task_result[0] if task_result[0] else []
proj_mappings = {project.id: project.name for project in projects} proj_mappings = {project.id: project.name for project in projects}
total_image_tasks = math.ceil(len(image_ids) / STEP) total_image_tasks = math.ceil(len(image_ids) / STEP)
images = reduce(lambda x, y: list(x) + list(y), task_result[1 : 1 + total_image_tasks], []) images: List[GlanceModel] = reduce(
volumes = reduce(lambda x, y: x + y, task_result[1 + total_image_tasks :], []) lambda x, y: list(x) + list(y), task_result[1 : 1 + total_image_tasks], []
)
volumes: List[CinderVolume] = reduce(
lambda x, y: x + y, task_result[1 + total_image_tasks :], []
)
image_mappings = { image_mappings = {
image.id: {"name": image.name, "image_os_distro": getattr(image, "os_distro", None)} image.id: {"name": image.name, "image_os_distro": getattr(image, "os_distro", None)}
for image in list(images) for image in list(images)
@ -267,7 +274,7 @@ async def list_servers(
else: else:
values = {"image": None, "image_name": None, "image_os_distro": None} values = {"image": None, "image_name": None, "image_os_distro": None}
server.update(values) server.update(values)
return {"servers": result} return schemas.ServersResponse(**{"servers": result})
@router.get( @router.get(
@ -298,10 +305,10 @@ async def list_recycle_servers(
regex=constants.INBOUND_HEADER_REGEX, regex=constants.INBOUND_HEADER_REGEX,
), ),
limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT), limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT),
marker: str = None, marker: str = Query(None),
sort_dirs: schemas.SortDir = None, sort_dirs: schemas.SortDir = Query(None),
sort_keys: List[schemas.RecycleServerSortKey] = Query(None), sort_keys: List[schemas.RecycleServerSortKey] = Query(None),
all_projects: bool = None, all_projects: bool = Query(None),
project_id: str = Query( project_id: str = Query(
None, None,
description="Only works when the all_projects filter is also specified.", description="Only works when the all_projects filter is also specified.",
@ -310,7 +317,7 @@ async def list_recycle_servers(
None, None,
description="Only works when the all_projects filter is also specified.", description="Only works when the all_projects filter is also specified.",
), ),
name: str = None, name: str = Query(None),
uuid: str = Query(None, description="UUID of recycle server."), uuid: str = Query(None, description="UUID of recycle server."),
) -> schemas.RecycleServersResponse: ) -> schemas.RecycleServersResponse:
"""Extension List Recycle Servers. """Extension List Recycle Servers.
@ -366,14 +373,14 @@ async def list_recycle_servers(
search_opts={"name": project_name}, search_opts={"name": project_name},
) )
if not filter_projects: if not filter_projects:
return {"recycle_servers": []} return schemas.RecycleServersResponse(**{"recycle_servers": []})
else: else:
# Projects will not have the same name or same id in the same domain # Projects will not have the same name or same id in the same domain
filter_project = filter_projects[0] filter_project = filter_projects[0]
# When we both supply the project_id and project_name filter, if the project's id does # When we both supply the project_id and project_name filter, if the project's id does
# not equal the project_id, just return []. # not equal the project_id, just return [].
if project_id and filter_project.id != project_id: if project_id and filter_project.id != project_id:
return {"recycle_servers": []} return schemas.RecycleServersResponse(**{"recycle_servers": []})
project_id = filter_project.id project_id = filter_project.id
search_opts = { search_opts = {
@ -393,8 +400,8 @@ async def list_recycle_servers(
search_opts=search_opts, search_opts=search_opts,
marker=marker, marker=marker,
limit=limit, limit=limit,
sort_keys=sort_keys, sort_keys=[sort_key.value for sort_key in sort_keys] if sort_keys else None,
sort_dirs=[sort_dirs] if sort_dirs else None, sort_dirs=[sort_dirs.value] if sort_dirs else None,
) )
result = [] result = []
@ -452,8 +459,12 @@ async def list_recycle_servers(
projects = task_result[0] if task_result[0] else [] projects = task_result[0] if task_result[0] else []
proj_mappings = {project.id: project.name for project in projects} proj_mappings = {project.id: project.name for project in projects}
total_image_tasks = math.ceil(len(image_ids) / STEP) total_image_tasks = math.ceil(len(image_ids) / STEP)
images = reduce(lambda x, y: list(x) + list(y), task_result[1 : 1 + total_image_tasks], []) images: List[GlanceModel] = reduce(
volumes = reduce(lambda x, y: x + y, task_result[1 + total_image_tasks :], []) lambda x, y: list(x) + list(y), task_result[1 : 1 + total_image_tasks], []
)
volumes: List[CinderVolume] = reduce(
lambda x, y: x + y, task_result[1 + total_image_tasks :], []
)
image_mappings = { image_mappings = {
image.id: {"name": image.name, "image_os_distro": getattr(image, "os_distro", None)} image.id: {"name": image.name, "image_os_distro": getattr(image, "os_distro", None)}
for image in list(images) for image in list(images)
@ -496,7 +507,7 @@ async def list_recycle_servers(
else: else:
values = {"image": None, "image_name": None, "image_os_distro": None} values = {"image": None, "image_name": None, "image_os_distro": None}
recycle_server.update(values) recycle_server.update(values)
return {"recycle_servers": result} return schemas.RecycleServersResponse(**{"recycle_servers": result})
@router.get( @router.get(
@ -520,15 +531,15 @@ async def list_volumes(
regex=constants.INBOUND_HEADER_REGEX, regex=constants.INBOUND_HEADER_REGEX,
), ),
limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT), limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT),
marker: str = None, marker: str = Query(None),
sort_dirs: schemas.SortDir = None, sort_dirs: schemas.SortDir = Query(None),
sort_keys: List[schemas.VolumeSortKey] = Query(None), sort_keys: List[schemas.VolumeSortKey] = Query(None),
all_projects: bool = None, all_projects: bool = Query(None),
project_id: str = None, project_id: str = Query(None),
name: str = None, name: str = Query(None),
multiattach: bool = None, multiattach: bool = Query(None),
status: schemas.VolumeStatus = None, status: schemas.VolumeStatus = Query(None),
bootable: bool = None, bootable: bool = Query(None),
uuid: List[str] = Query(None, description="UUID of volume."), uuid: List[str] = Query(None, description="UUID of volume."),
) -> schemas.VolumesResponse: ) -> schemas.VolumesResponse:
"""Extension List Volumes. """Extension List Volumes.
@ -683,7 +694,7 @@ async def list_volumes(
task_result = await gather(*tasks) task_result = await gather(*tasks)
projects = [] if not task_result[0] else task_result[0] projects = [] if not task_result[0] else task_result[0]
servers = reduce(lambda x, y: x + y, task_result[1:], []) servers: List[NovaServer] = reduce(lambda x, y: x + y, task_result[1:], [])
proj_mappings = {project.id: project.name for project in projects} proj_mappings = {project.id: project.name for project in projects}
ser_mappings = {server.id: server.name for server in servers} ser_mappings = {server.id: server.name for server in servers}
@ -692,7 +703,7 @@ async def list_volumes(
volume["project_name"] = proj_mappings.get(volume["project_id"]) volume["project_name"] = proj_mappings.get(volume["project_id"])
for attachment in volume["attachments"]: for attachment in volume["attachments"]:
attachment["server_name"] = ser_mappings.get(attachment["server_id"]) attachment["server_name"] = ser_mappings.get(attachment["server_id"])
return {"count": count, "volumes": result} return schemas.VolumesResponse(**{"count": count, "volumes": result})
@router.get( @router.get(
@ -716,14 +727,14 @@ async def list_volume_snapshots(
regex=constants.INBOUND_HEADER_REGEX, regex=constants.INBOUND_HEADER_REGEX,
), ),
limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT), limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT),
marker: str = None, marker: str = Query(None),
sort_dirs: schemas.SortDir = None, sort_dirs: schemas.SortDir = Query(None),
sort_keys: List[schemas.VolumeSnapshotSortKey] = Query(None), sort_keys: List[schemas.VolumeSnapshotSortKey] = Query(None),
all_projects: bool = None, all_projects: bool = Query(None),
project_id: str = None, project_id: str = Query(None),
name: str = None, name: str = Query(None),
status: schemas.VolumeSnapshotStatus = None, status: schemas.VolumeSnapshotStatus = Query(None),
volume_id: str = None, volume_id: str = Query(None),
) -> schemas.VolumeSnapshotsResponse: ) -> schemas.VolumeSnapshotsResponse:
"""Extension List Volume Snapshots. """Extension List Volume Snapshots.
@ -844,8 +855,12 @@ async def list_volume_snapshots(
projects = task_result[0] if task_result[0] else [] projects = task_result[0] if task_result[0] else []
total_volume_tasks = math.ceil(len(volume_ids) / STEP) total_volume_tasks = math.ceil(len(volume_ids) / STEP)
volumes = reduce(lambda x, y: x + y, task_result[1 : 1 + total_volume_tasks], []) volumes: List[CinderVolume] = reduce(
volumes_from_snapshot = reduce(lambda x, y: x + y, task_result[1 + total_volume_tasks :], []) lambda x, y: x + y, task_result[1 : 1 + total_volume_tasks], []
)
volumes_from_snapshot: List[CinderVolume] = reduce(
lambda x, y: x + y, task_result[1 + total_volume_tasks :], []
)
proj_mappings = {project.id: project.name for project in projects} proj_mappings = {project.id: project.name for project in projects}
vol_mappings = {} vol_mappings = {}
@ -854,7 +869,7 @@ async def list_volume_snapshots(
"name": volume.name, "name": volume.name,
"host": getattr(volume, "os-vol-host-attr:host", None), "host": getattr(volume, "os-vol-host-attr:host", None),
} }
child_volumes = {} child_volumes: Dict[str, Any] = {}
for volume in volumes_from_snapshot: for volume in volumes_from_snapshot:
child_volumes.setdefault(volume.snapshot_id, []) child_volumes.setdefault(volume.snapshot_id, [])
child_volumes[volume.snapshot_id].append(volume.name) child_volumes[volume.snapshot_id].append(volume.name)
@ -866,7 +881,7 @@ async def list_volume_snapshots(
snapshot["volume_name"] = vol_mapping["name"] snapshot["volume_name"] = vol_mapping["name"]
snapshot["host"] = vol_mapping["host"] if all_projects else None snapshot["host"] = vol_mapping["host"] if all_projects else None
snapshot["child_volumes"] = child_volumes.get(snapshot["id"], []) snapshot["child_volumes"] = child_volumes.get(snapshot["id"], [])
return {"count": count, "volume_snapshots": result} return schemas.VolumeSnapshotsResponse(**{"count": count, "volume_snapshots": result})
@router.get( @router.get(
@ -889,16 +904,16 @@ async def list_ports(
regex=constants.INBOUND_HEADER_REGEX, regex=constants.INBOUND_HEADER_REGEX,
), ),
limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT), limit: int = Query(None, gt=constants.EXTENSION_API_LIMIT_GT),
marker: str = None, marker: str = Query(None),
sort_dirs: schemas.SortDir = None, sort_dirs: schemas.SortDir = Query(None),
sort_keys: List[schemas.PortSortKey] = Query(None), sort_keys: List[schemas.PortSortKey] = Query(None),
all_projects: bool = None, all_projects: bool = Query(None),
project_id: str = None, project_id: str = Query(None),
name: str = None, name: str = Query(None),
status: schemas.PortStatus = None, status: schemas.PortStatus = Query(None),
network_name: str = None, network_name: str = Query(None),
network_id: str = None, network_id: str = Query(None),
device_id: str = None, device_id: str = Query(None),
device_owner: List[schemas.PortDeviceOwner] = Query(None), device_owner: List[schemas.PortDeviceOwner] = Query(None),
uuid: List[str] = Query(None, description="UUID of port."), uuid: List[str] = Query(None, description="UUID of port."),
) -> schemas.PortsResponse: ) -> schemas.PortsResponse:
@ -941,7 +956,7 @@ async def list_ports(
""" """
current_session = await generate_session(profile=profile) current_session = await generate_session(profile=profile)
kwargs = {} kwargs: Dict[str, Any] = {}
if limit is not None: if limit is not None:
kwargs["limit"] = limit kwargs["limit"] = limit
if marker is not None: if marker is not None:
@ -966,13 +981,13 @@ async def list_ports(
**{"name": network_name}, **{"name": network_name},
) )
if not networks["networks"]: if not networks["networks"]:
return {"ports": []} return schemas.PortsResponse(**{"ports": []})
network_ids = [network["id"] for network in networks["networks"]] network_ids = [network["id"] for network in networks["networks"]]
kwargs["network_id"] = network_ids kwargs["network_id"] = network_ids
if network_id is not None: if network_id is not None:
network_ids = kwargs.get("network_id", []) network_ids = kwargs.get("network_id", [])
if network_ids and network_id not in network_ids: if network_ids and network_id not in network_ids:
return {"ports": []} return schemas.PortsResponse(**{"ports": []})
elif not network_ids: elif not network_ids:
network_ids.append(network_id) network_ids.append(network_id)
kwargs["network_id"] = network_ids kwargs["network_id"] = network_ids
@ -1003,7 +1018,7 @@ async def list_ports(
server_ids.append(port["device_id"]) server_ids.append(port["device_id"])
network_ids.append(port["network_id"]) network_ids.append(port["network_id"])
network_params = {} network_params: Dict[str, Any] = {}
tasks = [ tasks = [
neutron.list_networks( neutron.list_networks(
profile=profile, profile=profile,
@ -1046,7 +1061,9 @@ async def list_ports(
task_result = await gather(*tasks) task_result = await gather(*tasks)
total_network_tasks = math.ceil(len(network_ids) / STEP) total_network_tasks = math.ceil(len(network_ids) / STEP)
servers = reduce(lambda x, y: x + y, task_result[1 + total_network_tasks :], []) servers: List[NovaServer] = reduce(
lambda x, y: x + y, task_result[1 + total_network_tasks :], []
)
ser_mappings = {server.id: server.name for server in servers} ser_mappings = {server.id: server.name for server in servers}
_networks = [net.get("networks", []) for net in task_result[1 : 1 + total_network_tasks]] _networks = [net.get("networks", []) for net in task_result[1 : 1 + total_network_tasks]]
shared_nets = task_result[0].get("networks", []) shared_nets = task_result[0].get("networks", [])
@ -1055,7 +1072,7 @@ async def list_ports(
for port in result: for port in result:
port["server_name"] = ser_mappings.get(port["device_id"]) port["server_name"] = ser_mappings.get(port["device_id"])
port["network_name"] = network_mappings.get(port["network_id"]) port["network_name"] = network_mappings.get(port["network_id"])
return {"ports": result} return schemas.PortsResponse(**{"ports": result})
@router.get( @router.get(
@ -1078,8 +1095,8 @@ async def compute_services(
alias=constants.INBOUND_HEADER, alias=constants.INBOUND_HEADER,
regex=constants.INBOUND_HEADER_REGEX, regex=constants.INBOUND_HEADER_REGEX,
), ),
binary: str = None, binary: str = Query(None),
host: str = None, host: str = Query(None),
) -> schemas.ComputeServicesResponse: ) -> schemas.ComputeServicesResponse:
"""Extension List Compute Services. """Extension List Compute Services.
@ -1112,4 +1129,4 @@ async def compute_services(
**kwargs, **kwargs,
) )
services = [Service(service).to_dict() for service in services] services = [Service(service).to_dict() for service in services]
return {"services": services} return schemas.ComputeServicesResponse(**{"services": services})

View File

@ -15,6 +15,7 @@
from __future__ import annotations from __future__ import annotations
import copy import copy
from typing import Dict, List
class APIResourceWrapper(object): class APIResourceWrapper(object):
@ -24,7 +25,7 @@ class APIResourceWrapper(object):
api object as the only argument to the constructor api object as the only argument to the constructor
""" """
_attrs = [] _attrs: List[str] = []
_apiresource = None # Make sure _apiresource is there even in __init__. _apiresource = None # Make sure _apiresource is there even in __init__.
def __init__(self, apiresource): def __init__(self, apiresource):
@ -67,7 +68,7 @@ class APIDictWrapper(object):
consistent with api resource objects from novaclient. consistent with api resource objects from novaclient.
""" """
_apidict = {} # Make sure _apidict is there even in __init__. _apidict: Dict[str, str] = {} # Make sure _apidict is there even in __init__.
def __init__(self, apidict): def __init__(self, apidict):
self._apidict = apidict self._apidict = apidict

View File

@ -100,7 +100,7 @@ async def list_policies(
{"rule": rule, "allowed": ENFORCER.authorize(rule, target, user_context)} {"rule": rule, "allowed": ENFORCER.authorize(rule, target, user_context)}
for rule in ENFORCER.rules for rule in ENFORCER.rules
] ]
return {"policies": result} return schemas.Policies(**{"policies": result})
@router.post( @router.post(
@ -150,4 +150,4 @@ async def check_policies(
detail=str(e), detail=str(e),
) )
return {"policies": result} return schemas.Policies(**{"policies": result})

View File

@ -1,6 +1,6 @@
from __future__ import annotations from __future__ import annotations
from fastapi import APIRouter, Depends, HTTPException, status from fastapi import APIRouter, Depends, HTTPException, Query, status
from httpx import codes from httpx import codes
from skyline_apiserver import schemas from skyline_apiserver import schemas
@ -93,9 +93,9 @@ def get_prometheus_query_range_response(
response_model_exclude_none=True, response_model_exclude_none=True,
) )
async def prometheus_query( async def prometheus_query(
query: str = None, query: str = Query(None),
time: str = None, time: str = Query(None),
timeout: str = None, timeout: str = Query(None),
profile: schemas.Profile = Depends(deps.get_profile_update_jwt), profile: schemas.Profile = Depends(deps.get_profile_update_jwt),
) -> schemas.PrometheusQueryResponse: ) -> schemas.PrometheusQueryResponse:
kwargs = {} kwargs = {}
@ -138,11 +138,11 @@ async def prometheus_query(
response_model_exclude_none=True, response_model_exclude_none=True,
) )
async def prometheus_query_range( async def prometheus_query_range(
query: str = None, query: str = Query(None),
start: str = None, start: str = Query(None),
end: str = None, end: str = Query(None),
step: str = None, step: str = Query(None),
timeout: str = None, timeout: str = Query(None),
profile: schemas.Profile = Depends(deps.get_profile_update_jwt), profile: schemas.Profile = Depends(deps.get_profile_update_jwt),
) -> schemas.PrometheusQueryRangeResponse: ) -> schemas.PrometheusQueryRangeResponse:
kwargs = {} kwargs = {}

View File

@ -115,8 +115,7 @@ async def list_settings(
for item in db_settings: for item in db_settings:
if item.key in CONF.setting.base_settings: if item.key in CONF.setting.base_settings:
settings[item.key].value = item.value settings[item.key].value = item.value
settings = list(settings.values()) return schemas.Settings(settings=list(settings.values()))
return schemas.Settings(settings=settings)
@router.delete( @router.delete(

View File

@ -14,7 +14,7 @@
from __future__ import annotations from __future__ import annotations
from typing import Any, Dict from typing import Any, Dict, Optional
from fastapi import HTTPException, status from fastapi import HTTPException, status
from keystoneauth1.exceptions.http import Unauthorized from keystoneauth1.exceptions.http import Unauthorized
@ -29,10 +29,10 @@ async def list_volumes(
profile: schemas.Profile, profile: schemas.Profile,
session: Session, session: Session,
global_request_id: str, global_request_id: str,
limit: int = None, limit: Optional[int] = None,
marker: str = None, marker: Optional[str] = None,
search_opts: Dict[str, Any] = None, search_opts: Optional[Dict[str, Any]] = None,
sort: str = None, sort: Optional[str] = None,
) -> Any: ) -> Any:
try: try:
cc = await utils.cinder_client( cc = await utils.cinder_client(
@ -63,10 +63,10 @@ async def list_volume_snapshots(
profile: schemas.Profile, profile: schemas.Profile,
session: Session, session: Session,
global_request_id: str, global_request_id: str,
limit: int = None, limit: Optional[int] = None,
marker: str = None, marker: Optional[str] = None,
search_opts: Dict[str, Any] = None, search_opts: Optional[Dict[str, Any]] = None,
sort: str = None, sort: Optional[str] = None,
) -> Any: ) -> Any:
try: try:
cc = await utils.cinder_client( cc = await utils.cinder_client(

View File

@ -14,7 +14,7 @@
from __future__ import annotations from __future__ import annotations
from typing import Any, Dict from typing import Any, Dict, Optional
from fastapi import HTTPException, status from fastapi import HTTPException, status
from keystoneauth1.exceptions.http import Unauthorized from keystoneauth1.exceptions.http import Unauthorized
@ -29,7 +29,7 @@ async def list_images(
profile: schemas.Profile, profile: schemas.Profile,
session: Session, session: Session,
global_request_id: str, global_request_id: str,
filters: Dict[str, Any] = None, filters: Optional[Dict[str, Any]] = None,
) -> Any: ) -> Any:
try: try:
kwargs = {} kwargs = {}

View File

@ -14,7 +14,7 @@
from __future__ import annotations from __future__ import annotations
from typing import Any, Dict from typing import Any, Dict, Optional
from fastapi import HTTPException, status from fastapi import HTTPException, status
from keystoneauth1.exceptions.http import Unauthorized from keystoneauth1.exceptions.http import Unauthorized
@ -30,7 +30,7 @@ async def list_projects(
session: Session, session: Session,
global_request_id: str, global_request_id: str,
all_projects: bool, all_projects: bool,
search_opts: Dict[str, Any] = None, search_opts: Optional[Dict[str, Any]] = None,
) -> Any: ) -> Any:
try: try:
search_opts = search_opts if search_opts else {} search_opts = search_opts if search_opts else {}

View File

@ -14,7 +14,7 @@
from __future__ import annotations from __future__ import annotations
from typing import Any, Dict from typing import Any, Dict, List, Optional
from fastapi import HTTPException, status from fastapi import HTTPException, status
from keystoneauth1.exceptions.http import Unauthorized from keystoneauth1.exceptions.http import Unauthorized
@ -30,11 +30,11 @@ async def list_servers(
profile: schemas.Profile, profile: schemas.Profile,
session: Session, session: Session,
global_request_id: str, global_request_id: str,
search_opts: Dict[str, Any] = None, search_opts: Optional[Dict[str, Any]] = None,
marker: str = None, marker: Optional[str] = None,
limit: int = None, limit: Optional[int] = None,
sort_keys: str = None, sort_keys: Optional[List[str]] = None,
sort_dirs: str = None, sort_dirs: Optional[List[str]] = None,
) -> Any: ) -> Any:
try: try:
nc = await utils.nova_client( nc = await utils.nova_client(

View File

@ -14,7 +14,7 @@
from __future__ import annotations from __future__ import annotations
from typing import Any from typing import Any, Optional
from cinderclient.client import Client as CinderClient from cinderclient.client import Client as CinderClient
from glanceclient.client import Client as GlanceClient from glanceclient.client import Client as GlanceClient
@ -99,7 +99,7 @@ async def get_endpoint(region: str, service: str, session: Session) -> Any:
async def keystone_client( async def keystone_client(
session: Session, session: Session,
region: str, region: str,
global_request_id: str = None, global_request_id: Optional[str] = None,
version: str = constants.KEYSTONE_API_VERSION, version: str = constants.KEYSTONE_API_VERSION,
) -> HTTPClient: ) -> HTTPClient:
endpoint = await get_endpoint(region, "keystone", session=session) endpoint = await get_endpoint(region, "keystone", session=session)
@ -116,7 +116,7 @@ async def keystone_client(
async def glance_client( async def glance_client(
session: Session, session: Session,
region: str, region: str,
global_request_id: str = None, global_request_id: Optional[str] = None,
version: str = constants.GLANCE_API_VERSION, version: str = constants.GLANCE_API_VERSION,
) -> HTTPClient: ) -> HTTPClient:
endpoint = await get_endpoint(region, "glance", session=session) endpoint = await get_endpoint(region, "glance", session=session)
@ -132,7 +132,7 @@ async def glance_client(
async def nova_client( async def nova_client(
session: Session, session: Session,
region: str, region: str,
global_request_id: str = None, global_request_id: Optional[str] = None,
version: str = constants.NOVA_API_VERSION, version: str = constants.NOVA_API_VERSION,
) -> HTTPClient: ) -> HTTPClient:
endpoint = await get_endpoint(region, "nova", session=session) endpoint = await get_endpoint(region, "nova", session=session)
@ -148,7 +148,7 @@ async def nova_client(
async def cinder_client( async def cinder_client(
session: Session, session: Session,
region: str, region: str,
global_request_id: str, global_request_id: Optional[str] = None,
version: str = constants.CINDER_API_VERSION, version: str = constants.CINDER_API_VERSION,
) -> HTTPClient: ) -> HTTPClient:
endpoint = await get_endpoint(region, "cinderv3", session=session) endpoint = await get_endpoint(region, "cinderv3", session=session)
@ -164,7 +164,7 @@ async def cinder_client(
async def neutron_client( async def neutron_client(
session: Session, session: Session,
region: str, region: str,
global_request_id: str = None, global_request_id: Optional[str] = None,
version: str = constants.NEUTRON_API_VERSION, version: str = constants.NEUTRON_API_VERSION,
) -> HTTPClient: ) -> HTTPClient:
endpoint = await get_endpoint(region, "neutron", session=session) endpoint = await get_endpoint(region, "neutron", session=session)

View File

@ -99,11 +99,11 @@ def get_proxy_endpoints() -> Dict[str, ProxyEndpoint]:
# 2. $(project_id)s or %(project_id)s # 2. $(project_id)s or %(project_id)s
# 3. AUTH_$(tenant_id)s or AUTH_%(tenant_id)s # 3. AUTH_$(tenant_id)s or AUTH_%(tenant_id)s
# 4. AUTH_$(project_id)s or AUTH_%(project_id)s # 4. AUTH_$(project_id)s or AUTH_%(project_id)s
path = "" if str(raw_path.parents[1]) == "/" else raw_path.parents[1] path = "" if str(raw_path.parents[1]) == "/" else str(raw_path.parents[1])
elif raw_path.match("v[0-9]") or raw_path.match("v[0-9][.][0-9]"): elif raw_path.match("v[0-9]") or raw_path.match("v[0-9][.][0-9]"):
path = "" if str(raw_path.parents[0]) == "/" else raw_path.parents[0] path = "" if str(raw_path.parents[0]) == "/" else str(raw_path.parents[0])
else: else:
path = raw_path path = str(raw_path)
proxy.url = raw_url._replace(path=f"{str(path)}/").geturl() proxy.url = raw_url._replace(path=f"{str(path)}/").geturl()
endpoints[f"{region}-{service_type}"] = proxy endpoints[f"{region}-{service_type}"] = proxy

View File

@ -21,7 +21,7 @@ from pathlib import Path
from typing import Callable, Dict, Iterable, List, Union from typing import Callable, Dict, Iterable, List, Union
import click import click
from oslo_policy.policy import DocumentedRuleDefault, RuleDefault # type: ignore from oslo_policy.policy import DocumentedRuleDefault, RuleDefault
from skyline_apiserver.log import LOG, setup as log_setup from skyline_apiserver.log import LOG, setup as log_setup
from skyline_apiserver.policy.manager import get_service_rules from skyline_apiserver.policy.manager import get_service_rules

View File

@ -16,6 +16,7 @@ from __future__ import annotations
import time import time
import uuid import uuid
from typing import Optional
from fastapi import HTTPException, status from fastapi import HTTPException, status
from jose import jwt from jose import jwt
@ -48,8 +49,8 @@ async def generate_profile_by_token(token: schemas.Payload) -> schemas.Profile:
async def generate_profile( async def generate_profile(
keystone_token: str, keystone_token: str,
region: str, region: str,
exp: int = None, exp: Optional[int] = None,
uuid_value: str = None, uuid_value: Optional[str] = None,
) -> schemas.Profile: ) -> schemas.Profile:
try: try:
kc = await utils.keystone_client(session=get_system_session(), region=region) kc = await utils.keystone_client(session=get_system_session(), region=region)

View File

@ -26,7 +26,7 @@ from .base import DB, inject_db
from .models import RevokedToken, Settings from .models import RevokedToken, Settings
def check_db_connected(fn: Fn) -> Fn: def check_db_connected(fn: Fn) -> Any:
@wraps(fn) @wraps(fn)
async def wrapper(*args: Any, **kwargs: Any) -> Any: async def wrapper(*args: Any, **kwargs: Any) -> Any:
await inject_db() await inject_db()

View File

@ -21,7 +21,7 @@ from databases import Database, DatabaseURL, core
from skyline_apiserver.config import CONF from skyline_apiserver.config import CONF
DATABASE = None DATABASE = None
DB = ContextVar("skyline_db", default=None) DB: ContextVar = ContextVar("skyline_db")
class ParallelDatabase(Database): class ParallelDatabase(Database):

View File

@ -14,7 +14,7 @@
from __future__ import annotations from __future__ import annotations
from oslo_policy import _parser # type: ignore from oslo_policy import _parser
from .base import Enforcer, UserContext from .base import Enforcer, UserContext
from .manager import get_service_rules from .manager import get_service_rules

View File

@ -15,7 +15,7 @@
from __future__ import annotations from __future__ import annotations
from collections.abc import MutableMapping from collections.abc import MutableMapping
from typing import Any, Dict, Iterable, Iterator from typing import Any, Dict, Iterator, List, Union
import attr import attr
from immutables import Map from immutables import Map
@ -24,7 +24,7 @@ from oslo_policy._checks import _check
from skyline_apiserver.config import CONF from skyline_apiserver.config import CONF
from .manager.base import APIRule from .manager.base import APIRule, Rule
class UserContext(MutableMapping): class UserContext(MutableMapping):
@ -32,7 +32,7 @@ class UserContext(MutableMapping):
self, self,
access: AccessInfoV3, access: AccessInfoV3,
): ):
self._data = {} self._data: Dict[str, Any] = {}
self.access = access self.access = access
self._data.setdefault("auth_token", getattr(access, "auth_token", None)) self._data.setdefault("auth_token", getattr(access, "auth_token", None))
self._data.setdefault("user_id", getattr(access, "user_id", None)) self._data.setdefault("user_id", getattr(access, "user_id", None))
@ -96,7 +96,7 @@ class UserContext(MutableMapping):
class Enforcer: class Enforcer:
rules: Map = attr.ib(factory=Map, repr=True, init=False) rules: Map = attr.ib(factory=Map, repr=True, init=False)
def register_rules(self, rules: Iterable[APIRule]) -> None: def register_rules(self, rules: List[Union[Rule, APIRule]]) -> None:
rule_map = {} rule_map = {}
for rule in rules: for rule in rules:
if rule.name in rule_map: if rule.name in rule_map:

View File

@ -16,8 +16,8 @@ from __future__ import annotations
from typing import List from typing import List
from oslo_policy import _parser # type: ignore from oslo_policy import _parser
from oslo_policy.policy import DocumentedRuleDefault, RuleDefault # type: ignore from oslo_policy.policy import DocumentedRuleDefault, RuleDefault
from skyline_apiserver import schemas from skyline_apiserver import schemas

View File

@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from typing import TYPE_CHECKING, Iterator, Optional from typing import TYPE_CHECKING, AsyncGenerator
import pytest import pytest
from _pytest.mark import ParameterSet from _pytest.mark import ParameterSet
@ -28,7 +28,7 @@ if TYPE_CHECKING:
@pytest.fixture(scope="function") @pytest.fixture(scope="function")
async def client() -> Iterator[AsyncClient]: async def client() -> AsyncGenerator:
async with LifespanManager(app): async with LifespanManager(app):
async with AsyncClient(app=app, base_url="http://test") as ac: async with AsyncClient(app=app, base_url="http://test") as ac:
yield ac yield ac
@ -36,7 +36,7 @@ async def client() -> Iterator[AsyncClient]:
CONF.cleanup() CONF.cleanup()
def pytest_generate_tests(metafunc: Optional["Metafunc"]) -> None: def pytest_generate_tests(metafunc: "Metafunc") -> None:
for marker in metafunc.definition.iter_markers(name="ddt"): for marker in metafunc.definition.iter_markers(name="ddt"):
test_data: TestData test_data: TestData
for test_data in marker.args: for test_data in marker.args:

View File

@ -16,7 +16,7 @@ from __future__ import annotations
import sys import sys
from dataclasses import asdict, dataclass, field from dataclasses import asdict, dataclass, field
from typing import Any, Dict, List from typing import Any, Dict, List, Union
from mimesis import Generic from mimesis import Generic
from pydantic import StrictBool, StrictInt, StrictStr from pydantic import StrictBool, StrictInt, StrictStr
@ -56,10 +56,10 @@ class FakeOptData:
@dataclass @dataclass
class FakeOperation: class FakeOperation:
method: str = field( method: Union[str, Any] = field(
default_factory=lambda: FAKER.choice(["GET", "POST", "PUT", "PATCH", "DELETE"]), default_factory=lambda: FAKER.choice(["GET", "POST", "PUT", "PATCH", "DELETE"]),
) )
path: str = field( path: Union[str, Any] = field(
default_factory=lambda: FAKER.choice(["/resources", "/resources/{resource_id}"]), default_factory=lambda: FAKER.choice(["/resources", "/resources/{resource_id}"]),
) )
@ -71,7 +71,7 @@ class FakeDocumentedRuleData:
check_str: str = field( check_str: str = field(
default_factory=lambda: f'role:{FAKER.choice(["admin", "member", "reader"])}', default_factory=lambda: f'role:{FAKER.choice(["admin", "member", "reader"])}',
) )
scope_types: List[str] = field( scope_types: Union[List[str], Any] = field(
default_factory=lambda: FAKER.choice( default_factory=lambda: FAKER.choice(
["system", "domain", "project"], ["system", "domain", "project"],
length=FAKER.numbers.integer_number(1, 3), length=FAKER.numbers.integer_number(1, 3),
@ -92,7 +92,7 @@ class FakeRuleData:
check_str: str = field( check_str: str = field(
default_factory=lambda: f'role:{FAKER.choice(["admin", "member", "reader"])}', default_factory=lambda: f'role:{FAKER.choice(["admin", "member", "reader"])}',
) )
scope_types: List[str] = field( scope_types: Union[List[str], Any] = field(
default_factory=lambda: FAKER.choice( default_factory=lambda: FAKER.choice(
["system", "domain", "project"], ["system", "domain", "project"],
length=FAKER.numbers.integer_number(1, 3), length=FAKER.numbers.integer_number(1, 3),

View File

@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from typing import Set
ALGORITHM = "HS256" ALGORITHM = "HS256"
KEYSTONE_API_VERSION = "3.13" KEYSTONE_API_VERSION = "3.13"
@ -36,8 +38,8 @@ EXTENSION_API_LIMIT_GT = 0
ID_UUID_RANGE_STEP = 100 ID_UUID_RANGE_STEP = 100
SETTINGS_HIDDEN_SET = set() SETTINGS_HIDDEN_SET: Set = set()
SETTINGS_RESTART_SET = set() SETTINGS_RESTART_SET: Set = set()
DEFAULT_TIMEOUT = 30 DEFAULT_TIMEOUT = 30

View File

@ -13,7 +13,7 @@
# limitations under the License. # limitations under the License.
import types import types
from typing import Any, Dict from typing import Any, Dict, Optional
import httpx import httpx
from fastapi import HTTPException, status from fastapi import HTTPException, status
@ -21,7 +21,7 @@ from httpx import Response, codes
async def _http_request( async def _http_request(
method: types.FunctionType = httpx.AsyncClient.get, method: types.FunctionType = httpx.AsyncClient.get, # type: ignore
**kwargs, **kwargs,
) -> Response: ) -> Response:
async with httpx.AsyncClient(verify=False) as client: async with httpx.AsyncClient(verify=False) as client:
@ -37,7 +37,7 @@ async def _http_request(
async def assert_http_request( async def assert_http_request(
method: types.FunctionType, method: types.FunctionType,
expectedStatus: str = codes.OK, expectedStatus: codes = codes.OK,
**kwargs, **kwargs,
) -> Response: ) -> Response:
response = await _http_request(method, **kwargs) response = await _http_request(method, **kwargs)
@ -51,12 +51,12 @@ async def assert_http_request(
async def get_assert_200( async def get_assert_200(
url: str, url: str,
cookies: Dict[str, Any] = None, cookies: Optional[Dict[str, Any]] = None,
headers: Dict[str, Any] = None, headers: Optional[Dict[str, Any]] = None,
params: Dict[str, Any] = None, params: Optional[Dict[str, Any]] = None,
) -> Response: ) -> Response:
return await assert_http_request( return await assert_http_request(
method=httpx.AsyncClient.get, method=httpx.AsyncClient.get, # type: ignore
expectedStatus=codes.OK, expectedStatus=codes.OK,
url=url, url=url,
cookies=cookies, cookies=cookies,
@ -65,9 +65,9 @@ async def get_assert_200(
) )
async def delete_assert_200(url, cookies: Dict[str, Any] = None) -> Response: async def delete_assert_200(url, cookies: Optional[Dict[str, Any]] = None) -> Response:
return await assert_http_request( return await assert_http_request(
method=httpx.AsyncClient.delete, method=httpx.AsyncClient.delete, # type: ignore
expectedStatus=codes.OK, expectedStatus=codes.OK,
url=url, url=url,
cookies=cookies, cookies=cookies,
@ -76,7 +76,7 @@ async def delete_assert_200(url, cookies: Dict[str, Any] = None) -> Response:
async def post_assert_201(url: str, json: Dict[str, Any], cookies: Dict[str, Any]) -> Response: async def post_assert_201(url: str, json: Dict[str, Any], cookies: Dict[str, Any]) -> Response:
return await assert_http_request( return await assert_http_request(
method=httpx.AsyncClient.post, method=httpx.AsyncClient.post, # type: ignore
expectedStatus=codes.CREATED, expectedStatus=codes.CREATED,
url=url, url=url,
json=json, json=json,
@ -86,7 +86,7 @@ async def post_assert_201(url: str, json: Dict[str, Any], cookies: Dict[str, Any
async def put_assert_200(url: str, json: Dict[str, Any], cookies: Dict[str, Any]) -> Response: async def put_assert_200(url: str, json: Dict[str, Any], cookies: Dict[str, Any]) -> Response:
return await assert_http_request( return await assert_http_request(
method=httpx.AsyncClient.put, method=httpx.AsyncClient.put, # type: ignore
expectedStatus=codes.OK, expectedStatus=codes.OK,
url=url, url=url,
json=json, json=json,

View File

@ -19,3 +19,5 @@ asgi-lifespan<=1.0.1 # MIT
types-PyYAML<=5.4.10 # Apache-2.0 types-PyYAML<=5.4.10 # Apache-2.0
oslo.log<=5.0.0 # Apache-2.0 oslo.log<=5.0.0 # Apache-2.0
neutron-lib>=2.15.0 # Apache-2.0 neutron-lib>=2.15.0 # Apache-2.0
lxml>=4.4.1 # BSD
types-python-dateutil>=2.8.2 # Apache-2.0

View File

@ -1,19 +0,0 @@
#!/bin/sh
#
# A wrapper around mypy that allows us to specify what files to run 'mypy' type
# checks on. Intended to be invoked via tox:
#
# tox -e mypy
#
# Eventually this should go away once we have either converted everything or
# converted enough and ignored [1] the rest.
#
# [1] http://mypy.readthedocs.io/en/latest/config_file.html#per-module-flags
if [ $# -eq 0 ]; then
# if no arguments provided, use the standard converted lists
python -m mypy skyline_apiserver
else
# else test what the user asked us to
python -m mypy $@
fi

View File

@ -51,7 +51,7 @@ description =
envdir = {toxworkdir}/shared envdir = {toxworkdir}/shared
extras = extras =
commands = commands =
bash tools/mypywrap.sh {posargs} mypy skyline_apiserver
[testenv:pep8] [testenv:pep8]
description = description =
@ -61,7 +61,7 @@ deps =
{[testenv]deps} {[testenv]deps}
extras = extras =
commands = commands =
; {[testenv:mypy]commands} {[testenv:mypy]commands}
isort --check-only --diff skyline_apiserver isort --check-only --diff skyline_apiserver
black --check --diff --color skyline_apiserver --line-length 98 black --check --diff --color skyline_apiserver --line-length 98
flake8 {posargs} . flake8 {posargs} .