From 6a1bdd4fb4dafb1daf9909201cd734bcd7d93bcc Mon Sep 17 00:00:00 2001 From: "zhu.boxiang" Date: Fri, 13 May 2022 14:01:26 +0800 Subject: [PATCH] chore: Merge skyline-config into skyline-apiserver 1. merge skyline-config into skyline-apiserver 2. move tests into skyline-apiserver 3. we will remove skyline-config after we merge skyline-policy-manager and skyline-nginx into skyline-apiserver Change-Id: I76091eb0f19333bafd999f3e03cb8bfc2ada6640 --- .gitignore | 1 + Makefile | 2 +- libs/skyline-apiserver/Makefile | 2 +- libs/skyline-apiserver/poetry.lock | 21 +- libs/skyline-apiserver/pyproject.toml | 4 +- .../skyline_apiserver/config/__init__.py | 2 +- .../skyline_apiserver/config/base.py | 167 +++++ .../skyline_apiserver/config/default.py | 2 +- .../skyline_apiserver/config/developer.py | 2 +- .../skyline_apiserver/config/openstack.py | 2 +- .../skyline_apiserver/config/setting.py | 2 +- .../skyline_apiserver/tests/fake.py | 17 + .../tests/unit/config/__init__.py | 0 .../tests/unit/config/test_base.py | 672 ++++++++++++++++++ 14 files changed, 866 insertions(+), 30 deletions(-) create mode 100644 libs/skyline-apiserver/skyline_apiserver/config/base.py create mode 100644 libs/skyline-apiserver/skyline_apiserver/tests/unit/config/__init__.py create mode 100644 libs/skyline-apiserver/skyline_apiserver/tests/unit/config/test_base.py diff --git a/.gitignore b/.gitignore index 8c19112..7fdb0f7 100644 --- a/.gitignore +++ b/.gitignore @@ -71,6 +71,7 @@ venv.bak/ /log/ tmp/ libs/skyline-apiserver/log/ +test_results.html # MAC OS .DS_Store diff --git a/Makefile b/Makefile index ae984d6..0784040 100644 --- a/Makefile +++ b/Makefile @@ -119,7 +119,7 @@ $(TEST_LIBS): .PHONY: clean $(CLEAN_LIBS) CLEAN_LIBS := $(addsuffix .clean,$(LIB_PATHS)) clean: $(CLEAN_LIBS) - rm -rf .venv dist + rm -rf .venv dist .tox $(CLEAN_LIBS): $(MAKE) -C $(basename $@) clean diff --git a/libs/skyline-apiserver/Makefile b/libs/skyline-apiserver/Makefile index 1301b59..561b006 100644 --- a/libs/skyline-apiserver/Makefile +++ b/libs/skyline-apiserver/Makefile @@ -55,7 +55,7 @@ test: .PHONY: clean clean: - rm -rf .venv dist htmlcov .coverage log + rm -rf .venv dist htmlcov .coverage log test_results.html .PHONY: db_revision diff --git a/libs/skyline-apiserver/poetry.lock b/libs/skyline-apiserver/poetry.lock index 331e964..24d40ed 100644 --- a/libs/skyline-apiserver/poetry.lock +++ b/libs/skyline-apiserver/poetry.lock @@ -1648,24 +1648,6 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -[[package]] -name = "skyline-config" -version = "0.1.0" -description = "" -category = "main" -optional = false -python-versions = "^3.8" -develop = true - -[package.dependencies] -immutables = "0.16" -pydantic = "1.8.2" -PyYAML = "5.4.1" - -[package.source] -type = "directory" -url = "../skyline-config" - [[package]] name = "skyline-log" version = "0.1.0" @@ -1925,7 +1907,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "ded4edfc5f1c33f84ed7fb28b5e2adadc53e1e694c8d78010156ff1b9a60f971" +content-hash = "d0188fec06a156124a80edce0d10dc7bb79e95287bbe49db541882db6a01c414" [metadata.files] add-trailing-comma = [ @@ -2939,7 +2921,6 @@ six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -skyline-config = []
skyline-log = [ {file = "skyline_log-0.1.0-py3-none-any.whl", hash = "sha256:60e61784ce43061c62ea424d271fd6ad0c04ba2a9e2df5d1e1f490a9cceb8d3b"}, ] diff --git a/libs/skyline-apiserver/pyproject.toml b/libs/skyline-apiserver/pyproject.toml index db10ebb..8e80c13 100644 --- a/libs/skyline-apiserver/pyproject.toml +++ b/libs/skyline-apiserver/pyproject.toml @@ -34,7 +34,6 @@ python-octaviaclient = "1.10.1" osc-placement = "1.7.0" keystoneauth1 = "3.17.4" skyline-policy-manager = "*" -skyline-config = "*" [tool.poetry.dev-dependencies] isort = "5.9.3" @@ -52,7 +51,6 @@ click = "7.1.2" asgi-lifespan = "1.0.1" types-PyYAML = "5.4.10" skyline-policy-manager = {path = "../skyline-policy-manager", develop = true} -skyline-config = {path = "../skyline-config", develop = true} [tool.poetry.scripts] swagger-generator = 'skyline_apiserver.cmd.generate_swagger:main' @@ -60,7 +58,7 @@ config-sample-generator = 'skyline_apiserver.cmd.generate_sample_config:main' [tool.pytest.ini_options] minversion = "6.0" -addopts = "-v -s -p no:cacheprovider -n auto --cov=skyline_apiserver --cov-append --cov-report=term-missing --cov-report=html" +addopts = "-v -s -p no:cacheprovider -n auto --cov=skyline_apiserver --cov-append --cov-report=term-missing --cov-report=html --html=test_results.html --self-contained-html" testpaths = [ "skyline_apiserver/tests", ] diff --git a/libs/skyline-apiserver/skyline_apiserver/config/__init__.py b/libs/skyline-apiserver/skyline_apiserver/config/__init__.py index d57d36e..fd0159a 100644 --- a/libs/skyline-apiserver/skyline_apiserver/config/__init__.py +++ b/libs/skyline-apiserver/skyline_apiserver/config/__init__.py @@ -16,7 +16,7 @@ from __future__ import annotations import os -from skyline_config import Configuration, Group +from skyline_apiserver.config.base import Configuration, Group from . import default, developer, openstack, setting diff --git a/libs/skyline-apiserver/skyline_apiserver/config/base.py b/libs/skyline-apiserver/skyline_apiserver/config/base.py new file mode 100644 index 0000000..2b828af --- /dev/null +++ b/libs/skyline-apiserver/skyline_apiserver/config/base.py @@ -0,0 +1,167 @@ +# Copyright 2021 99cloud +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +import warnings +from dataclasses import InitVar, dataclass, field +from pathlib import Path, PurePath +from typing import Any, Dict, Iterator, NamedTuple, Sequence, Tuple, Type + +import yaml +from immutables import Map, MapItems, MapKeys, MapValues +from pydantic import BaseModel, create_model + + +class ConfigPath(NamedTuple): + config_dir_path: str + config_file_path: str + + +@dataclass(frozen=True) +class Opt: + name: str + description: str + schema: Any + default: Any = None + deprecated: bool = False + value: Any = field(init=False, default=None) + _schema_model: Type[BaseModel] = field(init=False, repr=False) + + def __post_init__(self) -> None: + object.__setattr__( + self, + "_schema_model", + create_model(f"Opt(name='{self.name}')", value=(self.schema, ...)), + ) + + def load(self, value: Any) -> None: + value = self.default if value is None else value + self._schema_model(value=value) + object.__setattr__(self, "value", value) + if self.deprecated: + warnings.warn( + f"The config opt {self.name} is deprecated, will be deleted in the" + " future version", + DeprecationWarning, + ) + + +@dataclass(repr=False, frozen=True) +class Group: + name: str + init_opts: InitVar[Sequence[Opt]] = tuple() + _opts: Map[str, Opt] = field(init=False, repr=False) + + def __post_init__(self, init_opts: Sequence[Opt]) -> None: + object.__setattr__(self, "_opts", Map({opt.name: opt for opt in init_opts})) + + def __getattr__(self, name: str) -> Any: + if name in self._opts: + return self._opts[name].value + raise AttributeError(name) + + def __contains__(self, key: Any) -> bool: + return self._opts.__contains__(key) + + def __iter__(self) -> Iterator[Any]: + return self._opts.__iter__() + + def __len__(self) -> int: + return self._opts.__len__() + + def __repr__(self) -> str: + items = ", ".join((f"{opt}=Opt(name='{opt}')" for opt in self._opts)) + return f"Group({items})" + + def keys(self) -> MapKeys[str]: + return self._opts.keys() + + def values(self) -> MapValues[Opt]: + return self._opts.values() + + def items(self) -> MapItems[str, Opt]: + return self._opts.items() + + +@dataclass(repr=False, frozen=True) +class Configuration: + init_groups: InitVar[Sequence[Group]] = tuple() + config: Dict[str, Any] = field(init=False, default_factory=dict, repr=False) + _groups: Map[str, Group] = field(init=False, repr=False) + + def __post_init__(self, init_groups: Sequence[Group]) -> None: + object.__setattr__(self, "_groups", Map({group.name: group for group in init_groups})) + + @staticmethod + def get_config_path(project: str, env: Dict[str, str]) -> Tuple[str, str]: + config_dir_path = env.get("OS_CONFIG_DIR", PurePath("/etc", project).as_posix()) + config_file_path = PurePath(config_dir_path).joinpath(f"{project}.yaml").as_posix() + return ConfigPath(config_dir_path.strip(), config_file_path.strip()) + + def setup(self, project: str, env: Dict[str, str]) -> None: + config_dir_path, config_file_path = self.get_config_path(project, env) + if not Path(config_file_path).exists(): + raise ValueError(f"Not found config file: {config_file_path}") + + with open(config_file_path) as f: + try: + object.__setattr__(self, "config", yaml.safe_load(f)) + except Exception: + raise ValueError("Load config file error") + + for group in self._groups.values(): + for opt in group._opts.values(): + value = self.config.get(group.name, {}).get(opt.name) + opt.load(value) + + def cleanup(self) -> None: + for group in self._groups.values(): + for opt in group._opts.values(): + 
object.__setattr__(opt, "value", None) + object.__setattr__(self, "_groups", Map()) + object.__setattr__(self, "config", {}) + + def __call__(self, init_groups: Sequence[Group]) -> Any: + object.__setattr__(self, "_groups", Map({group.name: group for group in init_groups})) + + def __getattr__(self, name: str) -> Group: + if name in self._groups: + return self._groups[name] + raise AttributeError(name) + + def __contains__(self, key: Any) -> bool: + return self._groups.__contains__(key) + + def __iter__(self) -> Iterator[Any]: + return self._groups.__iter__() + + def __len__(self) -> int: + return self._groups.__len__() + + def __repr__(self) -> str: + items = ", ".join((f"{group}=Group(name='{group}')" for group in self._groups)) + return f"Configuration({items})" + + def keys(self) -> MapKeys[str]: + return self._groups.keys() + + def values(self) -> MapValues[Group]: + return self._groups.values() + + def items(self) -> MapItems[str, Group]: + return self._groups.items() + + +__all__ = ("Opt", "Group", "Configuration") diff --git a/libs/skyline-apiserver/skyline_apiserver/config/default.py b/libs/skyline-apiserver/skyline_apiserver/config/default.py index 71d331c..596ccaf 100644 --- a/libs/skyline-apiserver/skyline_apiserver/config/default.py +++ b/libs/skyline-apiserver/skyline_apiserver/config/default.py @@ -17,7 +17,7 @@ from __future__ import annotations from typing import List from pydantic import StrictBool, StrictInt, StrictStr -from skyline_config import Opt +from skyline_apiserver.config.base import Opt debug = Opt( name="debug", diff --git a/libs/skyline-apiserver/skyline_apiserver/config/developer.py b/libs/skyline-apiserver/skyline_apiserver/config/developer.py index 2a0a461..1c4041f 100644 --- a/libs/skyline-apiserver/skyline_apiserver/config/developer.py +++ b/libs/skyline-apiserver/skyline_apiserver/config/developer.py @@ -15,7 +15,7 @@ from __future__ import annotations from pydantic import StrictBool -from skyline_config import Opt +from skyline_apiserver.config.base import Opt show_raw_sql = Opt( name="show_raw_sql", diff --git a/libs/skyline-apiserver/skyline_apiserver/config/openstack.py b/libs/skyline-apiserver/skyline_apiserver/config/openstack.py index 539fd09..a210f73 100644 --- a/libs/skyline-apiserver/skyline_apiserver/config/openstack.py +++ b/libs/skyline-apiserver/skyline_apiserver/config/openstack.py @@ -17,8 +17,8 @@ from __future__ import annotations from typing import Dict, List from pydantic import HttpUrl, StrictInt, StrictStr +from skyline_apiserver.config.base import Opt from skyline_apiserver.types import InterfaceType -from skyline_config import Opt keystone_url = Opt( name="keystone_url", diff --git a/libs/skyline-apiserver/skyline_apiserver/config/setting.py b/libs/skyline-apiserver/skyline_apiserver/config/setting.py index caa15f8..4fa135b 100644 --- a/libs/skyline-apiserver/skyline_apiserver/config/setting.py +++ b/libs/skyline-apiserver/skyline_apiserver/config/setting.py @@ -17,7 +17,7 @@ from __future__ import annotations from typing import Any, Dict, List from pydantic.types import StrictStr -from skyline_config import Opt +from skyline_apiserver.config.base import Opt base_settings = Opt( name="base_settings", diff --git a/libs/skyline-apiserver/skyline_apiserver/tests/fake.py b/libs/skyline-apiserver/skyline_apiserver/tests/fake.py index e9a03fe..d850cca 100644 --- a/libs/skyline-apiserver/skyline_apiserver/tests/fake.py +++ b/libs/skyline-apiserver/skyline_apiserver/tests/fake.py @@ -14,6 +14,23 @@ from __future__ import annotations 
+from dataclasses import dataclass, field +from typing import Any, Dict, List + from mimesis import Generic +from pydantic import StrictBool, StrictInt, StrictStr FAKER = Generic() + + +@dataclass +class FakeOptData: + name: str = field(default_factory=lambda: "_".join(FAKER.text.words())) + description: str = field(default_factory=lambda: str(FAKER.text.text())) + schema: Any = field( + default_factory=lambda: FAKER.random.choice( + [StrictBool, StrictInt, StrictStr, List, Dict], + ), + ) + default: Any = None + deprecated: bool = False diff --git a/libs/skyline-apiserver/skyline_apiserver/tests/unit/config/__init__.py b/libs/skyline-apiserver/skyline_apiserver/tests/unit/config/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/libs/skyline-apiserver/skyline_apiserver/tests/unit/config/test_base.py b/libs/skyline-apiserver/skyline_apiserver/tests/unit/config/test_base.py new file mode 100644 index 0000000..5b7fd3f --- /dev/null +++ b/libs/skyline-apiserver/skyline_apiserver/tests/unit/config/test_base.py @@ -0,0 +1,672 @@ +# Copyright 2021 99cloud +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from dataclasses import asdict +from pathlib import Path +from typing import Any, Dict, List, Optional, Sequence, Tuple, Type + +import pytest +from _pytest.fixtures import SubRequest +from pydantic import StrictBool, StrictFloat, StrictInt, StrictStr +from pydantic.error_wrappers import ValidationError +from skyline_apiserver.config.base import Configuration, Group, Opt +from skyline_apiserver.tests.fake import FAKER, FakeOptData +from skyline_apiserver.tests.models import ArgumentData, TestData + + +class TestOpt: + @pytest.mark.ddt( + TestData( + arguments=("opt_data", "expected_schema_type"), + argument_data_set=[ + ArgumentData( + id="bool_opt", + values=(asdict(FakeOptData(schema=StrictBool)), "boolean"), + ), + ArgumentData( + id="int_opt", + values=(asdict(FakeOptData(schema=StrictInt)), "integer"), + ), + ArgumentData( + id="float_opt", + values=(asdict(FakeOptData(schema=StrictFloat)), "number"), + ), + ArgumentData( + id="str_opt", + values=(asdict(FakeOptData(schema=StrictStr)), "string"), + ), + ArgumentData( + id="list_opt", + values=(asdict(FakeOptData(schema=List[StrictStr])), "array"), + ), + ArgumentData( + id="dict_opt", + values=(asdict(FakeOptData(schema=Dict[StrictStr, StrictStr])), "object"), + ), + ], + ), + ) + def test_opt_init(self, opt_data: Dict[str, Any], expected_schema_type: str) -> None: + opt = Opt(**opt_data) + opt_value_schema = opt._schema_model.schema().get("properties", {}).get("value", {}) + assert opt_value_schema.get("type") == expected_schema_type + + @pytest.mark.ddt( + TestData( + arguments=("opt_data", "expected_exception"), + argument_data_set=[ + ArgumentData( + id="missing_parameters", + values=({"name": FAKER.text.word()}, TypeError), + ), + ArgumentData( + id="unknown_schema", + values=( + { + "name": FAKER.text.word(), + "description": FAKER.text.word(), + "schema": object, + }, + 
RuntimeError, + ), + ), + ], + ), + ) + def test_opt_init_error( + self, + opt_data: Dict[str, Any], + expected_exception: Type[Exception], + ) -> None: + with pytest.raises(expected_exception): + Opt(**opt_data) + + @pytest.mark.ddt( + TestData( + arguments=("opt_data",), + argument_data_set=[ + ArgumentData( + id="when_has_default", + values=( + asdict( + FakeOptData(schema=Optional[StrictStr], default=FAKER.text.word()), + ), + ), + ), + ArgumentData( + id="when_no_default", + values=(asdict(FakeOptData(schema=Optional[StrictStr])),), + ), + ], + ), + TestData( + arguments=("opt_value",), + argument_data_set=[ + ArgumentData(id="load_value", values=(FAKER.text.word(),)), + ArgumentData(id="load_none", values=(None,)), + ], + ), + ) + def test_opt_load(self, opt_data: Dict[str, Any], opt_value: Optional[str]) -> None: + opt = Opt(**opt_data) + opt.load(opt_value) + if opt_value is not None: + expected_result = opt_value + else: + expected_result = opt.default + assert opt.value == expected_result + + @pytest.mark.ddt( + TestData( + arguments=("opt_data",), + argument_data_set=[ + ArgumentData( + id="deprecated_warning", + values=(asdict(FakeOptData(schema=Optional[StrictStr], deprecated=True)),), + ), + ], + ), + ) + def test_opt_deprecated(self, opt_data: Dict[str, Any]) -> None: + opt = Opt(**opt_data) + expected_warn = DeprecationWarning + with pytest.warns(expected_warn): + opt.load(None) + + @pytest.mark.ddt( + TestData( + arguments=("opt_data", "opt_value"), + argument_data_set=[ + ArgumentData( + id="validation_error", + values=( + asdict(FakeOptData(schema=StrictStr)), + FAKER.numbers.integer_number(), + ), + ), + ], + ), + ) + def test_opt_schema_validation(self, opt_data: Dict[str, Any], opt_value: int) -> None: + opt = Opt(**opt_data) + expected_exception = ValidationError + with pytest.raises(expected_exception): + opt.load(opt_value) + + +class TestGroup: + @pytest.fixture + def group_opts(self, request: SubRequest) -> Sequence[Opt]: + count: int = request.param + opts = [] + for _ in range(count): + opt_data = asdict( + FakeOptData(schema=StrictStr, default=FAKER.text.word()), + ) + opt = Opt(**opt_data) + opt.load(None) + opts.append(opt) + return opts + + @pytest.mark.ddt( + TestData( + arguments=("group_name", "group_opts"), + indirect=("group_opts",), + argument_data_set=[ + ArgumentData(id="empty_group", values=(FAKER.text.word(), 0)), + ArgumentData( + id="normal_group", + values=(FAKER.text.word(), FAKER.numbers.integer_number(1, 10)), + ), + ], + ), + ) + def test_group_init(self, group_name: str, group_opts: Sequence[Opt]) -> None: + group = Group(group_name, group_opts) + for opt in group_opts: + assert opt.value == getattr(group, opt.name, None) + + @pytest.mark.ddt( + TestData( + arguments=("group_name", "group_opts"), + indirect=("group_opts",), + argument_data_set=[ + ArgumentData(id="access_non-existent_opt", values=(FAKER.text.word(), 1)), + ], + ), + ) + def test_group_access_error(self, group_name: str, group_opts: Sequence[Opt]) -> None: + group = Group(group_name, group_opts) + expected_exception = AttributeError + with pytest.raises(expected_exception): + getattr(group, f"{FAKER.text.word()}-test") + + @pytest.mark.ddt( + TestData( + arguments=("group_name", "group_opts"), + indirect=("group_opts",), + argument_data_set=[ + ArgumentData( + id="normal_group", + values=(FAKER.text.word(), FAKER.numbers.integer_number(1, 10)), + ), + ], + ), + ) + def test_group_like_collection(self, group_name: str, group_opts: Sequence[Opt]) -> None: + group = 
Group(group_name, group_opts) + for opt in group_opts: + assert opt.name in group + assert len(group) == len(group_opts) + opt_names = {opt.name for opt in group_opts} + for item in group: + assert item in opt_names + + @pytest.mark.ddt( + TestData( + arguments=("group_name", "group_opts"), + indirect=("group_opts",), + argument_data_set=[ + ArgumentData( + id="normal_group", + values=(FAKER.text.word(), FAKER.numbers.integer_number(1, 10)), + ), + ], + ), + ) + def test_group_repr(self, group_name: str, group_opts: Sequence[Opt]) -> None: + group = Group(group_name, group_opts) + opt_template = "{}=Opt(name='{}')" + for opt in group_opts: + opt_str = opt_template.format(opt.name, opt.name) + assert opt_str in repr(group) + + @pytest.mark.ddt( + TestData( + arguments=("group_name", "group_opts"), + indirect=("group_opts",), + argument_data_set=[ + ArgumentData( + id="normal_group", + values=(FAKER.text.word(), FAKER.numbers.integer_number(1, 10)), + ), + ], + ), + ) + def test_group_keys(self, group_name: str, group_opts: Sequence[Opt]) -> None: + group = Group(group_name, group_opts) + opt_names = {opt.name for opt in group_opts} + for item in group.keys(): + assert item in opt_names + + @pytest.mark.ddt( + TestData( + arguments=("group_name", "group_opts"), + indirect=("group_opts",), + argument_data_set=[ + ArgumentData( + id="normal_group", + values=(FAKER.text.word(), FAKER.numbers.integer_number(1, 10)), + ), + ], + ), + ) + def test_group_values(self, group_name: str, group_opts: Sequence[Opt]) -> None: + group = Group(group_name, group_opts) + opts = {opt for opt in group_opts} + opt_ids = {id(opt) for opt in group_opts} + for item in group.values(): + assert item in opts + assert id(item) in opt_ids + + @pytest.mark.ddt( + TestData( + arguments=("group_name", "group_opts"), + indirect=("group_opts",), + argument_data_set=[ + ArgumentData( + id="normal_group", + values=(FAKER.text.word(), FAKER.numbers.integer_number(1, 10)), + ), + ], + ), + ) + def test_group_items(self, group_name: str, group_opts: Sequence[Opt]) -> None: + group = Group(group_name, group_opts) + opt_names = {opt.name for opt in group_opts} + opts = {opt for opt in group_opts} + opt_ids = {id(opt) for opt in group_opts} + for name, item in group.items(): + assert name in opt_names + assert item in opts + assert id(item) in opt_ids + + +class TestConfiguration: + @pytest.fixture + def config_groups(self, request: SubRequest) -> Sequence[Group]: + count: int = request.param + groups = [] + for _ in range(count): + opts = [] + for __ in range(FAKER.numbers.integer_number(1, 10)): + opt_data = asdict( + FakeOptData(schema=StrictStr, default=FAKER.text.word()), + ) + opt = Opt(**opt_data) + opt.load(None) + opts.append(opt) + group = Group(FAKER.text.word(), opts) + groups.append(group) + return groups + + @pytest.fixture + def config_setup_params( + self, + request: SubRequest, + tmp_path: Path, + ) -> Tuple[str, Dict[str, str]]: + project: str = request.param.get("project", "") + env: Dict[str, str] = request.param.get("env", "") + env["OS_CONFIG_DIR"] = tmp_path.as_posix() + tmp_path.joinpath(f"{project}.yaml").write_text("{}") + return (project, env) + + @pytest.mark.ddt( + TestData( + arguments=("config_groups",), + indirect=("config_groups",), + argument_data_set=[ + ArgumentData(id="empty_config", values=(0,)), + ArgumentData( + id="normal_config", + values=(FAKER.numbers.integer_number(1, 10),), + ), + ], + ), + ) + def test_configuration_init(self, config_groups: Sequence[Group]) -> None: + config = 
Configuration(config_groups) + for group in config_groups: + assert group is getattr(config, group.name, None) + assert id(group) == id(getattr(config, group.name, None)) + + @pytest.mark.ddt( + TestData( + arguments=("config_groups",), + indirect=("config_groups",), + argument_data_set=[ + ArgumentData( + id="access_non-existent_group", + values=(1,), + ), + ], + ), + ) + def test_configuration_access_error(self, config_groups: Sequence[Group]) -> None: + config = Configuration(config_groups) + expected_exception = AttributeError + with pytest.raises(expected_exception): + getattr(config, f"{FAKER.text.word()}-test") + + @pytest.mark.ddt( + TestData( + arguments=( + "project", + "env", + "expected_config_path", + ), + argument_data_set=[ + ArgumentData( + id="set_env_config_dir", + values=( + "fake_project_name", + {"OS_CONFIG_DIR": "env_config_dir"}, + ("env_config_dir", "env_config_dir/fake_project_name.yaml"), + ), + ), + ArgumentData( + id="no_set_env", + values=( + "fake_project_name", + {}, + ( + "/etc/fake_project_name", + "/etc/fake_project_name/fake_project_name.yaml", + ), + ), + ), + ], + ), + ) + def test_configuration_get_config_path( + self, + project: str, + env: Dict[str, str], + expected_config_path: Tuple[str, str], + ) -> None: + assert Configuration.get_config_path(project, env) == expected_config_path + + @pytest.mark.ddt( + TestData( + arguments=("config_setup_params",), + indirect=("config_setup_params",), + argument_data_set=[ + ArgumentData( + id="set_env_config_dir", + values=( + { + "project": "fake_project_name", + "env": {"OS_CONFIG_DIR": ""}, + }, + ), + ), + ], + ), + ) + def test_configuration_setup(self, config_setup_params: Tuple[str, Dict[str, str]]) -> None: + groups = [] + for _ in range(FAKER.numbers.integer_number(1, 10)): + opts = [] + for __ in range(FAKER.numbers.integer_number(1, 10)): + opt_data = asdict( + FakeOptData(schema=StrictStr, default=FAKER.text.word()), + ) + opts.append(Opt(**opt_data)) + groups.append(Group(FAKER.text.word(), opts)) + config = Configuration(groups) + project = config_setup_params[0] + env = config_setup_params[1] + config.setup(project, env) + for group in config: + for opt in getattr(config, group): + opt_value = getattr(getattr(config, group, None), opt) + assert isinstance(opt_value, str) + + @pytest.mark.ddt( + TestData( + arguments=("config_setup_params",), + indirect=("config_setup_params",), + argument_data_set=[ + ArgumentData( + id="not_found_config_file", + values=( + { + "project": "fake_project_name", + "env": {"OS_CONFIG_DIR": ""}, + }, + ), + ), + ], + ), + ) + def test_configuration_setup_non_existent_error( + self, + config_setup_params: Tuple[str, Dict[str, str]], + ) -> None: + groups = [] + for _ in range(FAKER.numbers.integer_number(1, 10)): + opts = [] + for __ in range(FAKER.numbers.integer_number(1, 10)): + opt_data = asdict( + FakeOptData(schema=StrictStr, default=FAKER.text.word()), + ) + opts.append(Opt(**opt_data)) + groups.append(Group(FAKER.text.word(), opts)) + config = Configuration(groups) + project = config_setup_params[0] + env = config_setup_params[1] + config_dir_path, config_file_path = config.get_config_path(project, env) + Path(config_file_path).unlink(missing_ok=True) + expected_exception = ValueError + with pytest.raises(expected_exception, match="Not found config file"): + config.setup(project, env) + + @pytest.mark.ddt( + TestData( + arguments=("config_setup_params",), + indirect=("config_setup_params",), + argument_data_set=[ + ArgumentData( + id="file_is_not_yaml", + 
values=( + { + "project": "fake_project_name", + "env": {"OS_CONFIG_DIR": ""}, + }, + ), + ), + ], + ), + ) + def test_configuration_setup_yaml_format_error( + self, + config_setup_params: Tuple[str, Dict[str, str]], + ) -> None: + groups = [] + for _ in range(FAKER.numbers.integer_number(1, 10)): + opts = [] + for __ in range(FAKER.numbers.integer_number(1, 10)): + opt_data = asdict( + FakeOptData(schema=StrictStr, default=FAKER.text.word()), + ) + opts.append(Opt(**opt_data)) + groups.append(Group(FAKER.text.word(), opts)) + config = Configuration(groups) + project = config_setup_params[0] + env = config_setup_params[1] + config_dir_path, config_file_path = config.get_config_path(project, env) + Path(config_file_path).write_text("{") + expected_exception = ValueError + with pytest.raises(expected_exception, match="Load config file error"): + config.setup(project, env) + + @pytest.mark.ddt( + TestData( + arguments=("config_groups",), + indirect=("config_groups",), + argument_data_set=[ + ArgumentData( + id="normal_config", + values=(FAKER.numbers.integer_number(1, 10),), + ), + ], + ), + ) + def test_configuration_cleanup(self, config_groups: Sequence[Group]) -> None: + config = Configuration(config_groups) + assert len(config) == len(config_groups) + config.cleanup() + assert len(config) == 0 + + @pytest.mark.ddt( + TestData( + arguments=("config_groups",), + indirect=("config_groups",), + argument_data_set=[ + ArgumentData( + id="normal_config", + values=(FAKER.numbers.integer_number(1, 10),), + ), + ], + ), + ) + def test_configuration_call(self, config_groups: Sequence[Group]) -> None: + config = Configuration() + config(config_groups) + for group in config_groups: + assert group is getattr(config, group.name, None) + assert id(group) == id(getattr(config, group.name, None)) + + @pytest.mark.ddt( + TestData( + arguments=("config_groups",), + indirect=("config_groups",), + argument_data_set=[ + ArgumentData( + id="normal_config", + values=(FAKER.numbers.integer_number(1, 10),), + ), + ], + ), + ) + def test_configuration_like_collection(self, config_groups: Sequence[Group]) -> None: + config = Configuration(config_groups) + for group in config_groups: + assert group.name in config + assert len(config) == len(config_groups) + group_names = {group.name for group in config_groups} + for item in config: + assert item in group_names + + @pytest.mark.ddt( + TestData( + arguments=("config_groups",), + indirect=("config_groups",), + argument_data_set=[ + ArgumentData( + id="normal_config", + values=(FAKER.numbers.integer_number(1, 10),), + ), + ], + ), + ) + def test_configuration_repr(self, config_groups: Sequence[Group]) -> None: + config = Configuration(config_groups) + group_template = "{}=Group(name='{}')" + for group in config_groups: + group_str = group_template.format(group.name, group.name) + assert group_str in repr(config) + + @pytest.mark.ddt( + TestData( + arguments=("config_groups",), + indirect=("config_groups",), + argument_data_set=[ + ArgumentData( + id="normal_config", + values=(FAKER.numbers.integer_number(1, 10),), + ), + ], + ), + ) + def test_configuration_keys(self, config_groups: Sequence[Group]) -> None: + config = Configuration(config_groups) + group_names = {group.name for group in config_groups} + for item in config.keys(): + assert item in group_names + + @pytest.mark.ddt( + TestData( + arguments=("config_groups",), + indirect=("config_groups",), + argument_data_set=[ + ArgumentData( + id="normal_config", + values=(FAKER.numbers.integer_number(1, 10),), + ), + ], 
+ ), + ) + def test_configuration_values(self, config_groups: Sequence[Group]) -> None: + config = Configuration(config_groups) + groups = {group for group in config_groups} + group_ids = {id(group) for group in config_groups} + for item in config.values(): + assert item in groups + assert id(item) in group_ids + + @pytest.mark.ddt( + TestData( + arguments=("config_groups",), + indirect=("config_groups",), + argument_data_set=[ + ArgumentData( + id="normal_config", + values=(FAKER.numbers.integer_number(1, 10),), + ), + ], + ), + ) + def test_configuration_items(self, config_groups: Sequence[Group]) -> None: + config = Configuration(config_groups) + group_names = {group.name for group in config_groups} + groups = {group for group in config_groups} + group_ids = {id(group) for group in config_groups} + for name, item in config.items(): + assert name in group_names + assert item in groups + assert id(item) in group_ids
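For reviewers: a minimal usage sketch of the Opt/Group/Configuration API that this change moves into skyline_apiserver/config/base.py. The option names, project name, and YAML content below are illustrative only; the real groups and options are defined in config/default.py, developer.py, openstack.py and setting.py.

import os
import tempfile

from pydantic import StrictBool, StrictStr

from skyline_apiserver.config.base import Configuration, Group, Opt

# Declare options; `schema` is a pydantic type that Opt.load() validates against.
debug = Opt(name="debug", description="Enable debug mode", schema=StrictBool, default=False)
log_dir = Opt(name="log_dir", description="Log directory", schema=StrictStr, default="/var/log/demo")

CONF = Configuration((Group("default", (debug, log_dir)),))

# setup() reads ${OS_CONFIG_DIR}/<project>.yaml, falling back to /etc/<project>/<project>.yaml.
config_dir = tempfile.mkdtemp()
with open(os.path.join(config_dir, "demo.yaml"), "w") as f:
    f.write("default:\n  debug: true\n")

CONF.setup("demo", {"OS_CONFIG_DIR": config_dir})

print(CONF.default.debug)    # True, taken from the YAML file
print(CONF.default.log_dir)  # "/var/log/demo", falls back to the Opt default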
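The config-path resolution used by setup() can also be exercised on its own: get_config_path() honours the OS_CONFIG_DIR environment value and otherwise falls back to /etc/<project>/<project>.yaml (the project name here is illustrative).

from skyline_apiserver.config.base import Configuration

print(Configuration.get_config_path("skyline", {"OS_CONFIG_DIR": "/opt/conf"}))
# ConfigPath(config_dir_path='/opt/conf', config_file_path='/opt/conf/skyline.yaml')
print(Configuration.get_config_path("skyline", {}))
# ConfigPath(config_dir_path='/etc/skyline', config_file_path='/etc/skyline/skyline.yaml')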
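Finally, a short sketch of the Opt.load() behaviour exercised by the new unit tests: a missing value falls back to the default, values are validated against the pydantic schema, and deprecated options emit a DeprecationWarning. The option below is made up for illustration.

import warnings

from pydantic import StrictInt
from pydantic.error_wrappers import ValidationError

from skyline_apiserver.config.base import Opt

workers = Opt(name="workers", description="Worker count", schema=StrictInt, default=1, deprecated=True)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    workers.load(None)  # no value supplied -> default is used, DeprecationWarning emitted
assert workers.value == 1
assert issubclass(caught[-1].category, DeprecationWarning)

try:
    workers.load("not-an-int")  # wrong type -> rejected by the StrictInt schema
except ValidationError as exc:
    print(exc.errors()[0]["msg"])  # e.g. "value is not a valid integer"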