From a69424baee1944f30b733c40d66fc5fdde4b2b60 Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Mon, 4 Nov 2024 21:46:23 +0300 Subject: [PATCH 01/46] ADCM-6064 Prepare base entities --- adcm_aio_client/__init__.py | 11 ++ adcm_aio_client/client.py | 32 +++++ adcm_aio_client/core/__init__.py | 11 ++ adcm_aio_client/core/accessors.py | 43 +++++++ adcm_aio_client/core/filters.py | 13 ++ adcm_aio_client/core/objects.py | 39 ++++++ adcm_aio_client/core/requesters.py | 41 +++++++ adcm_aio_client/core/types.py | 23 ++++ poetry.lock | 188 +++++++++++++++++++++++++++++ pyproject.toml | 62 ++++++++++ 10 files changed, 463 insertions(+) create mode 100644 adcm_aio_client/__init__.py create mode 100644 adcm_aio_client/client.py create mode 100644 adcm_aio_client/core/__init__.py create mode 100644 adcm_aio_client/core/accessors.py create mode 100644 adcm_aio_client/core/filters.py create mode 100644 adcm_aio_client/core/objects.py create mode 100644 adcm_aio_client/core/requesters.py create mode 100644 adcm_aio_client/core/types.py create mode 100644 poetry.lock create mode 100644 pyproject.toml diff --git a/adcm_aio_client/__init__.py b/adcm_aio_client/__init__.py new file mode 100644 index 00000000..4d9a9249 --- /dev/null +++ b/adcm_aio_client/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/adcm_aio_client/client.py b/adcm_aio_client/client.py new file mode 100644 index 00000000..3a79f5ff --- /dev/null +++ b/adcm_aio_client/client.py @@ -0,0 +1,32 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Self + +from adcm_aio_client.core.requesters import Requester +from adcm_aio_client.core.types import AuthCredentials, AuthToken, Cert, Verify + + +class ADCMClient: + def __init__(self: Self, requester: Requester) -> None: + pass + + +async def build_client( + url: str | list[str], + credentials: AuthCredentials | AuthToken, + *, + verify: Verify | None = None, + cert: Cert | None = None, + timeout: int | None = None, + retries: int | None = None, +) -> ADCMClient: ... diff --git a/adcm_aio_client/core/__init__.py b/adcm_aio_client/core/__init__.py new file mode 100644 index 00000000..4d9a9249 --- /dev/null +++ b/adcm_aio_client/core/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/adcm_aio_client/core/accessors.py b/adcm_aio_client/core/accessors.py new file mode 100644 index 00000000..6178c263 --- /dev/null +++ b/adcm_aio_client/core/accessors.py @@ -0,0 +1,43 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC, abstractmethod +from typing import AsyncGenerator, List, Self + +from adcm_aio_client.core.requesters import Requester + + +class Accessor[T](ABC): + @abstractmethod + async def list(self: Self) -> AsyncGenerator[List[T]]: ... + + @abstractmethod + async def get(self: Self) -> AsyncGenerator[T]: ... + + @abstractmethod + async def get_or_none(self: Self) -> AsyncGenerator[T | None]: ... + + @abstractmethod + async def all(self: Self) -> AsyncGenerator[List[T]]: ... + + @abstractmethod + async def iter(self: Self) -> AsyncGenerator[T]: ... + + @abstractmethod + async def filter(self: Self, predicate: T) -> AsyncGenerator[List[T]]: ... + + +class PaginatedAccessor(Accessor): + def __init__(self: Self, path: str, requester: Requester) -> None: ... + + +class NonPaginatedAccessor(Accessor): ... diff --git a/adcm_aio_client/core/filters.py b/adcm_aio_client/core/filters.py new file mode 100644 index 00000000..27ed00f5 --- /dev/null +++ b/adcm_aio_client/core/filters.py @@ -0,0 +1,13 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# TODO: Prepare API for filters diff --git a/adcm_aio_client/core/objects.py b/adcm_aio_client/core/objects.py new file mode 100644 index 00000000..b963c677 --- /dev/null +++ b/adcm_aio_client/core/objects.py @@ -0,0 +1,39 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Self + +from adcm_aio_client.core.accessors import Accessor + + +class BaseObject: + id: int + name: str + + +class Cluster(BaseObject): + description: str + services: "ServiceNode" + + def delete(self: Self) -> None: ... + + def rename(self: Self, name: str) -> Self: ... + + +class ClusterNode(Accessor[Cluster]): + def create(self: Self) -> Cluster: ... + + +class Service(BaseObject): ... + + +class ServiceNode(Accessor[Service]): ... diff --git a/adcm_aio_client/core/requesters.py b/adcm_aio_client/core/requesters.py new file mode 100644 index 00000000..da03ca3b --- /dev/null +++ b/adcm_aio_client/core/requesters.py @@ -0,0 +1,41 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import AsyncGenerator, Self + +from typing_extensions import Protocol + + +class RequesterResponse(Protocol): + def as_list(self: Self) -> list: ... + + def as_dict(self: Self) -> dict: ... + + +class Requester(Protocol): + async def get(self: Self, path: str, query_params: dict) -> AsyncGenerator[RequesterResponse]: ... + + async def post(self: Self, path: str, data: dict) -> AsyncGenerator[RequesterResponse]: ... + + async def patch(self: Self, path: str, data: dict) -> AsyncGenerator[RequesterResponse]: ... + + async def delete(self: Self, path: str) -> AsyncGenerator[RequesterResponse]: ... + + +class Session: ... + + +class DefaultRequester(Requester): + def __init__(self: Self) -> None: ... + + @property + async def session(self: Self) -> AsyncGenerator[Session]: ... diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py new file mode 100644 index 00000000..5d9e3275 --- /dev/null +++ b/adcm_aio_client/core/types.py @@ -0,0 +1,23 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import NamedTuple, Optional, TypeAlias + + +class AuthCredentials(NamedTuple): + username: str + password: str + + +AuthToken: TypeAlias = str +Cert: TypeAlias = str | tuple[str, Optional[str], Optional[str]] | None +Verify: TypeAlias = str | bool diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..ed513295 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,188 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
+ +[[package]] +name = "anyio" +version = "4.6.2.post1" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +files = [ + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "certifi" +version = "2024.8.30" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.6" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + +[[package]] +name = "pyright" +version = "1.1.387" +description = "Command line wrapper for pyright" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyright-1.1.387-py3-none-any.whl", hash = "sha256:6a1f495a261a72e12ad17e20d1ae3df4511223c773b19407cfa006229b1b08a5"}, + {file = "pyright-1.1.387.tar.gz", hash = "sha256:577de60224f7fe36505d5b181231e3a395d427b7873be0bbcaa962a29ea93a60"}, +] + +[package.dependencies] +nodeenv = ">=1.6.0" +typing-extensions = ">=4.1" + +[package.extras] +all = ["nodejs-wheel-binaries", "twine (>=3.4.1)"] +dev = ["twine (>=3.4.1)"] +nodejs = ["nodejs-wheel-binaries"] + +[[package]] +name = "ruff" +version = "0.7.2" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.7.2-py3-none-linux_armv6l.whl", hash = "sha256:b73f873b5f52092e63ed540adefc3c36f1f803790ecf2590e1df8bf0a9f72cb8"}, + {file = "ruff-0.7.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5b813ef26db1015953daf476202585512afd6a6862a02cde63f3bafb53d0b2d4"}, + {file = "ruff-0.7.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:853277dbd9675810c6826dad7a428d52a11760744508340e66bf46f8be9701d9"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21aae53ab1490a52bf4e3bf520c10ce120987b047c494cacf4edad0ba0888da2"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc7e0fc6e0cb3168443eeadb6445285abaae75142ee22b2b72c27d790ab60ba"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd77877a4e43b3a98e5ef4715ba3862105e299af0c48942cc6d51ba3d97dc859"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e00163fb897d35523c70d71a46fbaa43bf7bf9af0f4534c53ea5b96b2e03397b"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3c54b538633482dc342e9b634d91168fe8cc56b30a4b4f99287f4e339103e88"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b792468e9804a204be221b14257566669d1db5c00d6bb335996e5cd7004ba80"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba53ed84ac19ae4bfb4ea4bf0172550a2285fa27fbb13e3746f04c80f7fa088"}, + {file = "ruff-0.7.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b19fafe261bf741bca2764c14cbb4ee1819b67adb63ebc2db6401dcd652e3748"}, + {file = "ruff-0.7.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:28bd8220f4d8f79d590db9e2f6a0674f75ddbc3847277dd44ac1f8d30684b828"}, + {file = "ruff-0.7.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9fd67094e77efbea932e62b5d2483006154794040abb3a5072e659096415ae1e"}, + {file = "ruff-0.7.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:576305393998b7bd6c46018f8104ea3a9cb3fa7908c21d8580e3274a3b04b691"}, + {file = "ruff-0.7.2-py3-none-win32.whl", hash = "sha256:fa993cfc9f0ff11187e82de874dfc3611df80852540331bc85c75809c93253a8"}, + {file = "ruff-0.7.2-py3-none-win_amd64.whl", hash = "sha256:dd8800cbe0254e06b8fec585e97554047fb82c894973f7ff18558eee33d1cb88"}, + {file = "ruff-0.7.2-py3-none-win_arm64.whl", hash = "sha256:bb8368cd45bba3f57bb29cbb8d64b4a33f8415d0149d2655c5c8539452ce7760"}, + {file = "ruff-0.7.2.tar.gz", hash = "sha256:2b14e77293380e475b4e3a7a368e14549288ed2931fce259a6f99978669e844f"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = 
"sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[metadata] +lock-version = "2.0" +python-versions = "^3.12" +content-hash = "790831eb4e8498cbc0e1dc3af6a6ff8cc11752bbc1da56d7f255b47eafc24e5a" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..a80ff8f1 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,62 @@ +[tool.poetry] +name = "adcm-aio-client" +version = "0.1.0" +description = "ADCM Client" +authors = ["Aleksandr Alferov "] +license = "Apache License Version 2.0" +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.12" +httpx = "^0.27.2" + + +[tool.poetry.group.dev.dependencies] +ruff = "^0.7.1" +pyright = "^1.1.387" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.ruff] +line-length = 120 + +[tool.ruff.lint.isort] +force-sort-within-sections = true +length-sort-straight = true +order-by-type = true + +[tool.ruff.lint] +select = [ + # Pyflakes + "F", + # Pycodestyle + "E", "W", + # isort + "I", + # "COM" - The following rules may cause conflicts when used with the formatter: `COM812`. + # To avoid unexpected behavior, we recommend disabling these rules + "N", "UP", "YTT", "ANN", + "S", "BLE", "FBT", "B", "COM", "A", "C4", + "DTZ", "ICN", "PIE", "Q", "RET", + "SIM", "ARG", "PTH", "PLE", "TRY" +] + +ignore = [ + "COM812", +] + +[tool.pyright] +include = [ + "adcm_aio_client", +] + +typeCheckingMode = "standard" +reportUnnecessaryTypeIgnoreComment = true + +reportMissingImports = "error" +reportMissingTypeStubs = false + +pythonVersion = "3.12" +pythonPlatform = "Linux" From 2699ace80422d6131265267cc0754bbb5d0a6734 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 12 Nov 2024 14:28:23 +0500 Subject: [PATCH 02/46] ADCM-6073 Introduce basic workflows (#3) --- .github/CODEOWNERS | 2 + .../workflows/on_push_to_pull_request.yaml | 23 ++++++ .github/workflows/step_lint.yaml | 23 ++++++ .github/workflows/step_test_from_dir.yaml | 30 ++++++++ .gitignore | 2 +- poetry.lock | 72 ++++++++++++++++++- pyproject.toml | 8 +++ tests/integration/__init__.py | 0 tests/integration/test_dummy.py | 2 + tests/unit/__init__.py | 0 tests/unit/test_dummy.py | 2 + 11 files changed, 161 insertions(+), 3 deletions(-) create mode 100644 .github/CODEOWNERS create mode 100644 .github/workflows/on_push_to_pull_request.yaml create mode 100644 .github/workflows/step_lint.yaml create mode 100644 .github/workflows/step_test_from_dir.yaml create mode 100644 tests/integration/__init__.py create mode 100644 tests/integration/test_dummy.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/test_dummy.py diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..9f624206 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,2 @@ +* @a-alferov +*.py @Sealwing @DanBalalan @Starovoitov diff --git a/.github/workflows/on_push_to_pull_request.yaml b/.github/workflows/on_push_to_pull_request.yaml new file mode 100644 index 00000000..3099e751 --- /dev/null +++ b/.github/workflows/on_push_to_pull_request.yaml @@ -0,0 +1,23 @@ +name: Validate Pull Request Changes + +# most important for us is "synchronize" event for pull request, which is included by default +on: pull_request + +jobs: + lint: + name: Lint Python code + uses: ./.github/workflows/step_lint.yaml + + unit_tests: + name: Run unit tests + uses: ./.github/workflows/step_test_from_dir.yaml + with: + target: tests/unit + description: Unit + + integration_tests: + name: Run integration tests + uses: 
./.github/workflows/step_test_from_dir.yaml + with: + target: tests/integration + description: Integration diff --git a/.github/workflows/step_lint.yaml b/.github/workflows/step_lint.yaml new file mode 100644 index 00000000..358f0285 --- /dev/null +++ b/.github/workflows/step_lint.yaml @@ -0,0 +1,23 @@ +name: Run Linters + +on: + workflow_call: + +jobs: + lint-python: + name: Lint Python Code + runs-on: ubuntu-24.04 + env: + CODE_DIRS: "adcm_aio_client tests" + steps: + - name: Install poetry + run: python -m pip install poetry + - uses: actions/checkout@v4 + - name: Install dependencies + run: poetry install --with dev --no-root + - name: Run ruff lint check + run: poetry run ruff check $CODE_DIRS + - name: Run ruff format check + run: poetry run ruff format --check $CODE_DIRS + - name: Run pyright check + run: poetry run pyright $CODE_DIRS diff --git a/.github/workflows/step_test_from_dir.yaml b/.github/workflows/step_test_from_dir.yaml new file mode 100644 index 00000000..d6c78571 --- /dev/null +++ b/.github/workflows/step_test_from_dir.yaml @@ -0,0 +1,30 @@ +name: Run Tests +run-name: "Run Tests: ${{ inputs.description }}" + +on: + workflow_call: + inputs: + target: + type: string + required: true + description: "Directory with tests to aim to" + description: + type: string + required: false + default: "unspecified" + description: "Name to use in `run-name` for tests to be more specific" + +jobs: + run-pytest-in-dir: + name: Run Tests + runs-on: ubuntu-24.04 + env: + CODE_DIRS: "adcm_aio_client tests" + steps: + - name: Install poetry + run: python -m pip install poetry + - uses: actions/checkout@v4 + - name: Install dependencies + run: poetry install --with test --no-root + - name: Run tests + run: poetry run pytest ${{ inputs.target }} diff --git a/.gitignore b/.gitignore index 82f92755..7b6caf34 100644 --- a/.gitignore +++ b/.gitignore @@ -159,4 +159,4 @@ cython_debug/ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ +.idea/ diff --git a/poetry.lock b/poetry.lock index ed513295..e1db6f0a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "anyio" @@ -31,6 +31,17 @@ files = [ {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + [[package]] name = "h11" version = "0.14.0" @@ -102,6 +113,17 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + [[package]] name = "nodeenv" version = "1.9.1" @@ -113,6 +135,32 @@ files = [ {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + [[package]] name = "pyright" version = "1.1.387" @@ -133,6 +181,26 @@ all = ["nodejs-wheel-binaries", "twine (>=3.4.1)"] dev = ["twine (>=3.4.1)"] nodejs = ["nodejs-wheel-binaries"] +[[package]] +name = "pytest" +version = "8.3.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + [[package]] name = "ruff" version = "0.7.2" @@ -185,4 +253,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "790831eb4e8498cbc0e1dc3af6a6ff8cc11752bbc1da56d7f255b47eafc24e5a" +content-hash = "aa120f1f5f547be5a2823893d875ee2bc2b71e1abe1e502946a9183f0be9bbe1" diff --git a/pyproject.toml b/pyproject.toml index a80ff8f1..c7d60364 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,11 +10,19 @@ readme = "README.md" python = "^3.12" httpx = "^0.27.2" +[tool.poetry.group.dev] +optional = true [tool.poetry.group.dev.dependencies] ruff = "^0.7.1" 
pyright = "^1.1.387" +[tool.poetry.group.test] +optional = true + +[tool.poetry.group.test.dependencies] +pytest = "^8.3.3" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/test_dummy.py b/tests/integration/test_dummy.py new file mode 100644 index 00000000..95aab17f --- /dev/null +++ b/tests/integration/test_dummy.py @@ -0,0 +1,2 @@ +def test_dummy_unit() -> None: + assert 1 == 1 # noqa: S101 diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/test_dummy.py b/tests/unit/test_dummy.py new file mode 100644 index 00000000..e71b4902 --- /dev/null +++ b/tests/unit/test_dummy.py @@ -0,0 +1,2 @@ +def test_dummy_integration() -> None: + assert 1 == 1 # noqa: S101 From 91e8fc111b6eacc3574c4cd36faa189f26e9b200 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Wed, 13 Nov 2024 13:36:44 +0300 Subject: [PATCH 03/46] ADCM-6067: Implement base Requester (#1) --- adcm_aio_client/client.py | 3 +- adcm_aio_client/core/accessors.py | 2 +- adcm_aio_client/core/errors.py | 63 +++++++++++ adcm_aio_client/core/requesters.py | 172 ++++++++++++++++++++++++++--- adcm_aio_client/core/types.py | 34 +++++- 5 files changed, 256 insertions(+), 18 deletions(-) create mode 100644 adcm_aio_client/core/errors.py diff --git a/adcm_aio_client/client.py b/adcm_aio_client/client.py index 3a79f5ff..cd2a9a79 100644 --- a/adcm_aio_client/client.py +++ b/adcm_aio_client/client.py @@ -12,8 +12,7 @@ from typing import Self -from adcm_aio_client.core.requesters import Requester -from adcm_aio_client.core.types import AuthCredentials, AuthToken, Cert, Verify +from adcm_aio_client.core.types import AuthCredentials, AuthToken, Cert, Requester, Verify class ADCMClient: diff --git a/adcm_aio_client/core/accessors.py b/adcm_aio_client/core/accessors.py index 6178c263..4da81db8 100644 --- a/adcm_aio_client/core/accessors.py +++ b/adcm_aio_client/core/accessors.py @@ -13,7 +13,7 @@ from abc import ABC, abstractmethod from typing import AsyncGenerator, List, Self -from adcm_aio_client.core.requesters import Requester +from adcm_aio_client.core.types import Requester class Accessor[T](ABC): diff --git a/adcm_aio_client/core/errors.py b/adcm_aio_client/core/errors.py new file mode 100644 index 00000000..e9889230 --- /dev/null +++ b/adcm_aio_client/core/errors.py @@ -0,0 +1,63 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +class RequesterError(Exception): + pass + + +class NoCredentialsError(RequesterError): + pass + + +class WrongCredentialsError(RequesterError): + pass + + +class LoginError(RequesterError): + pass + + +class RetryRequestError(RequesterError): + pass + + +class ResponseDataConversionError(RequesterError): + pass + + +class ResponseError(RequesterError): + pass + + +class BadRequestError(ResponseError): + pass + + +class UnauthorizedError(ResponseError): + pass + + +class ForbiddenError(ResponseError): + pass + + +class NotFoundError(ResponseError): + pass + + +class ConflictError(ResponseError): + pass + + +class ServerError(ResponseError): + pass diff --git a/adcm_aio_client/core/requesters.py b/adcm_aio_client/core/requesters.py index da03ca3b..14c03fcd 100644 --- a/adcm_aio_client/core/requesters.py +++ b/adcm_aio_client/core/requesters.py @@ -10,32 +10,176 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import AsyncGenerator, Self +from asyncio import sleep +from contextlib import suppress +from dataclasses import dataclass +from functools import wraps +from json.decoder import JSONDecodeError +from typing import Any, Awaitable, Callable, Coroutine, ParamSpec, Self, TypeAlias +from urllib.parse import urljoin -from typing_extensions import Protocol +import httpx +from adcm_aio_client.core.errors import ( + BadRequestError, + ConflictError, + ForbiddenError, + LoginError, + NoCredentialsError, + NotFoundError, + ResponseDataConversionError, + ResponseError, + RetryRequestError, + ServerError, + UnauthorizedError, + WrongCredentialsError, +) +from adcm_aio_client.core.types import Credentials, Requester -class RequesterResponse(Protocol): - def as_list(self: Self) -> list: ... +Json: TypeAlias = Any +Params = ParamSpec("Params") +RequestFunc: TypeAlias = Callable[Params, Awaitable["HTTPXRequesterResponse"]] +DoRequestFunc: TypeAlias = Callable[Params, Awaitable[httpx.Response]] - def as_dict(self: Self) -> dict: ... +@dataclass(slots=True) +class HTTPXRequesterResponse: + response: httpx.Response + _json_data: Json | None = None -class Requester(Protocol): - async def get(self: Self, path: str, query_params: dict) -> AsyncGenerator[RequesterResponse]: ... + def as_list(self: Self) -> list: + if not isinstance(data := self._get_json_data(), list): + message = f"Expected a list, got {type(data)}" + raise ResponseDataConversionError(message) - async def post(self: Self, path: str, data: dict) -> AsyncGenerator[RequesterResponse]: ... + return data - async def patch(self: Self, path: str, data: dict) -> AsyncGenerator[RequesterResponse]: ... + def as_dict(self: Self) -> dict: + if not isinstance(data := self._get_json_data(), dict): + message = f"Expected a dict, got {type(data)}" + raise ResponseDataConversionError(message) - async def delete(self: Self, path: str) -> AsyncGenerator[RequesterResponse]: ... + return data + def _get_json_data(self: Self) -> Json: + if self._json_data is not None: + return self._json_data -class Session: ... 
+ try: + data = self.response.json() + except JSONDecodeError as e: + message = "Response can't be parsed to json" + raise ResponseDataConversionError(message) from e + + self._json_data = data + + return self._json_data + + +STATUS_ERRORS_MAP = { + 400: BadRequestError, + 401: UnauthorizedError, + 403: ForbiddenError, + 404: NotFoundError, + 409: ConflictError, + 500: ServerError, +} + + +def convert_exceptions(func: DoRequestFunc) -> DoRequestFunc: + @wraps(func) + async def wrapper(*arg: Params.args, **kwargs: Params.kwargs) -> httpx.Response: + response = await func(*arg, **kwargs) + if response.status_code >= 300: + raise STATUS_ERRORS_MAP.get(response.status_code, ResponseError) + + return response + + return wrapper + + +def retry_request(request_func: RequestFunc) -> RequestFunc: + @wraps(request_func) + async def wrapper(self: "DefaultRequester", *args: Params.args, **kwargs: Params.kwargs) -> HTTPXRequesterResponse: + for attempt in range(self.retries): + try: + response = await request_func(self, *args, **kwargs) + except (UnauthorizedError, httpx.NetworkError, httpx.TransportError): + if attempt >= self.retries - 1: + continue + await sleep(self.retry_interval) + with suppress(httpx.NetworkError, httpx.TransportError): + await self.login(self._ensure_credentials()) + else: + break + else: + message = f"Request failed in {self.retries} attempts" + raise RetryRequestError(message) + return response + + return wrapper class DefaultRequester(Requester): - def __init__(self: Self) -> None: ... + __slots__ = ("_credentials", "api_root", "client", "retries", "retry_interval") + + def __init__( + self: Self, + base_url: str, + root_path: str = "/api/v2/", + timeout: float = 5.0, + retries: int = 5, + retry_interval: float = 5.0, + ) -> None: + self.retries = retries + self.retry_interval = retry_interval + self.api_root = self._make_url(root_path, base=base_url) + self.client = httpx.AsyncClient(timeout=timeout) + + async def login(self: Self, credentials: Credentials) -> Self: + login_url = self._make_url("login", base=self.api_root) + + try: + response = await self._do_request(self.client.post(url=login_url, data=credentials.dict())) + except UnauthorizedError as e: + raise WrongCredentialsError from e + + if response.status_code != 200: + message = f"Authentication error: {response.status_code} for url: {login_url}" + raise LoginError(message) + + self._credentials = credentials + return self + + async def get(self: Self, *path: str | int, query_params: dict | None = None) -> HTTPXRequesterResponse: + return await self.request(*path, method=self.client.get, params=query_params or {}) + + async def post(self: Self, *path: str | int, data: dict) -> HTTPXRequesterResponse: + return await self.request(*path, method=self.client.post, data=data) + + async def patch(self: Self, *path: str | int, data: dict) -> HTTPXRequesterResponse: + return await self.request(*path, method=self.client.patch, data=data) + + async def delete(self: Self, *path: str | int) -> HTTPXRequesterResponse: + return await self.request(*path, method=self.client.delete) + + @retry_request + async def request(self: Self, *path: str | int, method: Callable, **kwargs: dict) -> HTTPXRequesterResponse: + url = self._make_url(*path, base=self.api_root) + response = await self._do_request(method(url, **kwargs)) + + return HTTPXRequesterResponse(response=response) + + @staticmethod + def _make_url(*path: str | int, base: str) -> str: + return urljoin(base, "/".join(map(str, (*path, "")))) + + @convert_exceptions + async def 
_do_request(self: Self, request_coro: Coroutine[Any, Any, httpx.Response]) -> httpx.Response: + return await request_coro + + def _ensure_credentials(self: Self) -> Credentials: + if self._credentials is None: + raise NoCredentialsError - @property - async def session(self: Self) -> AsyncGenerator[Session]: ... + return self._credentials diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index 5d9e3275..33f3af05 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -9,8 +9,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from dataclasses import asdict, dataclass +from typing import NamedTuple, Optional, Self, TypeAlias -from typing import NamedTuple, Optional, TypeAlias +from typing_extensions import Protocol class AuthCredentials(NamedTuple): @@ -21,3 +23,33 @@ class AuthCredentials(NamedTuple): AuthToken: TypeAlias = str Cert: TypeAlias = str | tuple[str, Optional[str], Optional[str]] | None Verify: TypeAlias = str | bool + + +@dataclass(slots=True, frozen=True) +class Credentials: + username: str + password: str + + def dict(self: Self) -> dict: + return asdict(self) + + def __repr__(self: Self) -> str: + return f"{self.username}'s credentials" + + +class RequesterResponse(Protocol): + def as_list(self: Self) -> list: ... + + def as_dict(self: Self) -> dict: ... + + +class Requester(Protocol): + async def login(self: Self, credentials: Credentials) -> Self: ... + + async def get(self: Self, *path: str | int, query_params: dict) -> RequesterResponse: ... + + async def post(self: Self, *path: str | int, data: dict) -> RequesterResponse: ... + + async def patch(self: Self, *path: str | int, data: dict) -> RequesterResponse: ... + + async def delete(self: Self, *path: str | int) -> RequesterResponse: ... From b7655299f6d4df18d05c07231bdd6c81503692b7 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Thu, 14 Nov 2024 08:11:36 +0300 Subject: [PATCH 04/46] ADCM-6068: Implement base Accessor (#2) Co-authored-by: astarovo --- adcm_aio_client/core/accessors.py | 103 ++++++++++++++++++++++--- adcm_aio_client/core/exceptions.py | 18 +++++ adcm_aio_client/core/objects.py | 45 ++++++++--- adcm_aio_client/tests/__init__.py | 0 adcm_aio_client/tests/mocks.py | 85 ++++++++++++++++++++ adcm_aio_client/tests/test_accessor.py | 48 ++++++++++++ poetry.lock | 64 +++++++++------ pyproject.toml | 6 ++ 8 files changed, 323 insertions(+), 46 deletions(-) create mode 100644 adcm_aio_client/core/exceptions.py create mode 100644 adcm_aio_client/tests/__init__.py create mode 100644 adcm_aio_client/tests/mocks.py create mode 100644 adcm_aio_client/tests/test_accessor.py diff --git a/adcm_aio_client/core/accessors.py b/adcm_aio_client/core/accessors.py index 4da81db8..149f4188 100644 --- a/adcm_aio_client/core/accessors.py +++ b/adcm_aio_client/core/accessors.py @@ -9,35 +9,116 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
- from abc import ABC, abstractmethod -from typing import AsyncGenerator, List, Self +from contextlib import suppress +from typing import Any, AsyncGenerator, Dict, Generator, List, Optional, Self, Tuple, Type +from adcm_aio_client.core.exceptions import ( + MultipleObjectsReturnedError, + ObjectDoesNotExistError, +) from adcm_aio_client.core.types import Requester -class Accessor[T](ABC): +class Accessor[T, F](ABC): + class_type: Type[T] + + def __init__(self: Self, path: tuple[str | int, ...], requester: Requester) -> None: + self.path = path + self.requester = requester + @abstractmethod - async def list(self: Self) -> AsyncGenerator[List[T]]: ... + async def list(self: Self) -> List[T]: ... @abstractmethod - async def get(self: Self) -> AsyncGenerator[T]: ... + async def get(self: Self) -> T: ... @abstractmethod - async def get_or_none(self: Self) -> AsyncGenerator[T | None]: ... + async def get_or_none(self: Self) -> T | None: ... @abstractmethod - async def all(self: Self) -> AsyncGenerator[List[T]]: ... + async def all(self: Self) -> List[T]: ... @abstractmethod - async def iter(self: Self) -> AsyncGenerator[T]: ... + async def iter(self: Self) -> AsyncGenerator[T, None]: ... @abstractmethod - async def filter(self: Self, predicate: T) -> AsyncGenerator[List[T]]: ... + async def filter(self: Self) -> List[T]: ... + + def _create_object(self: Self, data: Dict[str, Any]) -> T: + return self.class_type(requester=self.requester, data=data) # type: ignore + + +class PaginatedAccessor[T](Accessor): + def _gen_page_indexes( + self: Self, page: Optional[int] = 1, items: Optional[int] = 10 + ) -> Generator[Tuple[int, int], None, None]: + """ + Generates indices for pagination slicing based on specified page number and offset. + + Args: + page (int, optional): The starting page number for pagination. Defaults to 1. + items (int, optional): The number of items per page. Defaults to 10. + + Yields: + Tuple[int, int]: A tuple representing the start and end indices for slicing. 
+ """ + if page is None: + page = 1 + if items is None: + items = 10 + + current_page = page + + while True: + # Calculate the start and end indices for the current page + start_index = (current_page - 1) * items + end_index = current_page * items + yield (start_index, end_index) + current_page += 1 + + async def get(self: Self) -> T: + response = await self._list(query_params={"offset": 0, "limit": 2}) + objects = response["results"] + + if not objects: + raise ObjectDoesNotExistError("No objects found with the given filter.") + if len(objects) > 1: + raise MultipleObjectsReturnedError("More than one object found.") + return self._create_object(objects[0]) + + async def _list(self: Self, query_params: dict) -> dict: + response = await self.requester.get(*self.path, query_params=query_params) + return response.as_dict() + + async def list(self: Self) -> List[T]: + return [self._create_object(obj) for obj in await self._list(query_params={})] + + async def get_or_none(self: Self) -> T | None: + with suppress(ObjectDoesNotExistError): + obj = await self.get() + if obj: + return obj + return None + + async def all(self: Self) -> List[T]: + return await self.filter() + + async def iter(self: Self) -> AsyncGenerator[T, None]: + start, step = 0, 10 + while True: + response = await self._list(query_params={"offset": start, "limit": step}) + + if not response["results"]: + return + + for record in response["results"]: + yield self._create_object(record) + start += step -class PaginatedAccessor(Accessor): - def __init__(self: Self, path: str, requester: Requester) -> None: ... + async def filter(self: Self) -> List[T]: + return [i async for i in self.iter()] class NonPaginatedAccessor(Accessor): ... diff --git a/adcm_aio_client/core/exceptions.py b/adcm_aio_client/core/exceptions.py new file mode 100644 index 00000000..d57015f7 --- /dev/null +++ b/adcm_aio_client/core/exceptions.py @@ -0,0 +1,18 @@ +class AccessionError(Exception): + pass + + +class MissingParameterError(AccessionError): + pass + + +class MultipleObjectsReturnedError(AccessionError): + pass + + +class ObjectDoesNotExistError(AccessionError): + pass + + +class InvalidArgumentError(AccessionError): + pass diff --git a/adcm_aio_client/core/objects.py b/adcm_aio_client/core/objects.py index b963c677..2ecdd500 100644 --- a/adcm_aio_client/core/objects.py +++ b/adcm_aio_client/core/objects.py @@ -9,10 +9,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from typing import Optional, Self +import ast -from typing import Self - -from adcm_aio_client.core.accessors import Accessor +from adcm_aio_client.core.accessors import Accessor, PaginatedAccessor +from adcm_aio_client.core.requesters import Requester class BaseObject: @@ -20,20 +21,40 @@ class BaseObject: name: str -class Cluster(BaseObject): - description: str - services: "ServiceNode" +class BaseNode(Accessor[BaseObject, None]): + def __init__(self: Self, path: tuple[str | int, ...], requester: Requester) -> None: + super().__init__(path, requester) + self.class_type = ast.literal_eval(self.class_type.__name__) - def delete(self: Self) -> None: ... - def rename(self: Self, name: str) -> Self: ... +class Service(BaseObject): ... -class ClusterNode(Accessor[Cluster]): - def create(self: Self) -> Cluster: ... +class ServiceNode(BaseNode, PaginatedAccessor): + id: int + name: str + display_name: str -class Service(BaseObject): ... 
+class Cluster(BaseObject): + def __init__(self: Self, pk: int, name: str, description: str, services: Optional[ServiceNode] = None) -> None: + self.id = pk + self.name = name + self.description = description + self.services = services + + def delete(self: Self) -> None: + # Implement delete logic + pass + + def rename(self: Self, name: str) -> Self: + self.name = name + return self -class ServiceNode(Accessor[Service]): ... +class ClusterNode(BaseNode, PaginatedAccessor): + class_type = Cluster + id: int + name: str + description: str + services: Optional[ServiceNode] diff --git a/adcm_aio_client/tests/__init__.py b/adcm_aio_client/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/adcm_aio_client/tests/mocks.py b/adcm_aio_client/tests/mocks.py new file mode 100644 index 00000000..460ccfc0 --- /dev/null +++ b/adcm_aio_client/tests/mocks.py @@ -0,0 +1,85 @@ +# Define a mock response object to simulate API/database responses +from typing import Self +import json + +from httpx import Response + +from adcm_aio_client.core.requesters import DefaultRequester, HTTPXRequesterResponse + +page_content = [ + { + "id": 1, + "name": "cluster_1", + "description": "cluster_1", + "state": "created", + "multiState": [], + "status": "down", + "prototype": { + "id": 2, + "name": "cluster_one", + "displayName": "cluster_one", + "version": "1.0", + }, + "concerns": [], + "isUpgradable": False, + "mainInfo": None, + }, + { + "id": 2, + "name": "cluster_2", + "description": "cluster_2", + "state": "created", + "multiState": [], + "status": "down", + "prototype": { + "id": 2, + "name": "cluster_2", + "displayName": "cluster_2", + "version": "1.0", + }, + "concerns": [], + "isUpgradable": False, + "mainInfo": None, + }, + { + "id": 3, + "name": "cluster_3", + "description": "cluster_3", + "state": "created", + "multiState": [], + "status": "down", + "prototype": { + "id": 3, + "name": "cluster_3", + "displayName": "cluster_3", + "version": "1.0", + }, + "concerns": [], + "isUpgradable": False, + "mainInfo": None, + }, +] + + +class MockRequester(DefaultRequester): + response_retrieve = Response( + status_code=200, # Assuming a successful response + headers={"Content-Type": "application/json"}, + content=json.dumps([page_content[0]]), + ) + + response_list = Response( + status_code=200, # Assuming a successful response + headers={"Content-Type": "application/json"}, + content=json.dumps(page_content), + ) + + async def get(self: Self, *path: str | int, query_params: dict | None = None) -> HTTPXRequesterResponse: + # This function simulates retrieval of data + if not isinstance(path, int) and not query_params or "id" not in query_params: + if query_params and "limit" not in query_params and "offset" not in query_params: + return HTTPXRequesterResponse(response=MockRequester.response_list) + return HTTPXRequesterResponse(response=MockRequester.response_list) + if query_params and "id" in query_params and query_params["id"] > 3: + return HTTPXRequesterResponse(response=Response(status_code=404, content=json.dumps([]))) + return HTTPXRequesterResponse(response=MockRequester.response_retrieve) diff --git a/adcm_aio_client/tests/test_accessor.py b/adcm_aio_client/tests/test_accessor.py new file mode 100644 index 00000000..bca928d2 --- /dev/null +++ b/adcm_aio_client/tests/test_accessor.py @@ -0,0 +1,48 @@ +from contextlib import suppress +from typing import Self + +import pytest + +from adcm_aio_client.core.exceptions import ObjectDoesNotExistError +from adcm_aio_client.core.objects import 
ClusterNode +from adcm_aio_client.tests.mocks import MockRequester + + +@pytest.mark.skip(reason="This tests are temporarily disabled") +@pytest.mark.asyncio +class TestClusterNode: + async def test_get_single_object_success(self: Self) -> None: + accessor = ClusterNode("clusters", requester=MockRequester(base_url="http://127.0.0.1")) + result = await accessor.get(id=1) + assert result.__dict__ == {"id": 1, "name": "cluster_1", "description": "cluster_1"} + + with suppress(ObjectDoesNotExistError): + await accessor.get(id=4) + + async def test_get_or_none_single_object_success(self: Self) -> None: + accessor = ClusterNode("clusters", requester=MockRequester(base_url="http://127.0.0.1")) + result = await accessor.get_or_none(id=1) + assert result.__dict__ == {"id": 1, "name": "cluster_1", "description": "cluster_1"} + + result = await accessor.get_or_none(id=4) + assert result is None + + async def test_list_success(self: Self) -> None: + accessor = ClusterNode("clusters", requester=MockRequester(base_url="http://127.0.0.1")) + + result = await accessor.list() + assert len(result) == 3 + for i, _ in enumerate(result): + assert result[i].id == i + 1 + assert result[i].name == f"cluster_{i + 1}" + + async def test_all_success(self: Self) -> None: + accessor = ClusterNode( + "clusters", requester=MockRequester(base_url="http://127.0.0.1"), query_params={"offset": 0, "limit": 1} + ) + + result = await accessor.all() + assert len(result) == 3 + for i, _ in enumerate(result): + assert result[i].id == i + 1 + assert result[i].name == f"cluster_{i + 1}" diff --git a/poetry.lock b/poetry.lock index e1db6f0a..48ef0aa0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -163,13 +163,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pyright" -version = "1.1.387" +version = "1.1.388" description = "Command line wrapper for pyright" optional = false python-versions = ">=3.7" files = [ - {file = "pyright-1.1.387-py3-none-any.whl", hash = "sha256:6a1f495a261a72e12ad17e20d1ae3df4511223c773b19407cfa006229b1b08a5"}, - {file = "pyright-1.1.387.tar.gz", hash = "sha256:577de60224f7fe36505d5b181231e3a395d427b7873be0bbcaa962a29ea93a60"}, + {file = "pyright-1.1.388-py3-none-any.whl", hash = "sha256:c7068e9f2c23539c6ac35fc9efac6c6c1b9aa5a0ce97a9a8a6cf0090d7cbf84c"}, + {file = "pyright-1.1.388.tar.gz", hash = "sha256:0166d19b716b77fd2d9055de29f71d844874dbc6b9d3472ccd22df91db3dfa34"}, ] [package.dependencies] @@ -201,31 +201,49 @@ pluggy = ">=1.5,<2" [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-asyncio" +version = "0.24.0" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, + {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + [[package]] name = "ruff" -version = "0.7.2" +version = "0.7.3" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.7.2-py3-none-linux_armv6l.whl", hash = "sha256:b73f873b5f52092e63ed540adefc3c36f1f803790ecf2590e1df8bf0a9f72cb8"}, - {file = "ruff-0.7.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5b813ef26db1015953daf476202585512afd6a6862a02cde63f3bafb53d0b2d4"}, - {file = "ruff-0.7.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:853277dbd9675810c6826dad7a428d52a11760744508340e66bf46f8be9701d9"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21aae53ab1490a52bf4e3bf520c10ce120987b047c494cacf4edad0ba0888da2"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc7e0fc6e0cb3168443eeadb6445285abaae75142ee22b2b72c27d790ab60ba"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd77877a4e43b3a98e5ef4715ba3862105e299af0c48942cc6d51ba3d97dc859"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e00163fb897d35523c70d71a46fbaa43bf7bf9af0f4534c53ea5b96b2e03397b"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3c54b538633482dc342e9b634d91168fe8cc56b30a4b4f99287f4e339103e88"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b792468e9804a204be221b14257566669d1db5c00d6bb335996e5cd7004ba80"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba53ed84ac19ae4bfb4ea4bf0172550a2285fa27fbb13e3746f04c80f7fa088"}, - {file = "ruff-0.7.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b19fafe261bf741bca2764c14cbb4ee1819b67adb63ebc2db6401dcd652e3748"}, - {file = "ruff-0.7.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:28bd8220f4d8f79d590db9e2f6a0674f75ddbc3847277dd44ac1f8d30684b828"}, - {file = "ruff-0.7.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9fd67094e77efbea932e62b5d2483006154794040abb3a5072e659096415ae1e"}, - {file = "ruff-0.7.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:576305393998b7bd6c46018f8104ea3a9cb3fa7908c21d8580e3274a3b04b691"}, - {file = "ruff-0.7.2-py3-none-win32.whl", hash = "sha256:fa993cfc9f0ff11187e82de874dfc3611df80852540331bc85c75809c93253a8"}, - {file = "ruff-0.7.2-py3-none-win_amd64.whl", hash = "sha256:dd8800cbe0254e06b8fec585e97554047fb82c894973f7ff18558eee33d1cb88"}, - {file = "ruff-0.7.2-py3-none-win_arm64.whl", hash = "sha256:bb8368cd45bba3f57bb29cbb8d64b4a33f8415d0149d2655c5c8539452ce7760"}, - {file = "ruff-0.7.2.tar.gz", hash = "sha256:2b14e77293380e475b4e3a7a368e14549288ed2931fce259a6f99978669e844f"}, + {file = "ruff-0.7.3-py3-none-linux_armv6l.whl", hash = "sha256:34f2339dc22687ec7e7002792d1f50712bf84a13d5152e75712ac08be565d344"}, + {file = "ruff-0.7.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:fb397332a1879b9764a3455a0bb1087bda876c2db8aca3a3cbb67b3dbce8cda0"}, + {file = "ruff-0.7.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:37d0b619546103274e7f62643d14e1adcbccb242efda4e4bdb9544d7764782e9"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59f0c3ee4d1a6787614e7135b72e21024875266101142a09a61439cb6e38a5"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:44eb93c2499a169d49fafd07bc62ac89b1bc800b197e50ff4633aed212569299"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6d0242ce53f3a576c35ee32d907475a8d569944c0407f91d207c8af5be5dae4e"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6b6224af8b5e09772c2ecb8dc9f3f344c1aa48201c7f07e7315367f6dd90ac29"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c50f95a82b94421c964fae4c27c0242890a20fe67d203d127e84fbb8013855f5"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f3eff9961b5d2644bcf1616c606e93baa2d6b349e8aa8b035f654df252c8c67"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8963cab06d130c4df2fd52c84e9f10d297826d2e8169ae0c798b6221be1d1d2"}, + {file = "ruff-0.7.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:61b46049d6edc0e4317fb14b33bd693245281a3007288b68a3f5b74a22a0746d"}, + {file = "ruff-0.7.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:10ebce7696afe4644e8c1a23b3cf8c0f2193a310c18387c06e583ae9ef284de2"}, + {file = "ruff-0.7.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3f36d56326b3aef8eeee150b700e519880d1aab92f471eefdef656fd57492aa2"}, + {file = "ruff-0.7.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5d024301109a0007b78d57ab0ba190087b43dce852e552734ebf0b0b85e4fb16"}, + {file = "ruff-0.7.3-py3-none-win32.whl", hash = "sha256:4ba81a5f0c5478aa61674c5a2194de8b02652f17addf8dfc40c8937e6e7d79fc"}, + {file = "ruff-0.7.3-py3-none-win_amd64.whl", hash = "sha256:588a9ff2fecf01025ed065fe28809cd5a53b43505f48b69a1ac7707b1b7e4088"}, + {file = "ruff-0.7.3-py3-none-win_arm64.whl", hash = "sha256:1713e2c5545863cdbfe2cbce21f69ffaf37b813bfd1fb3b90dc9a6f1963f5a8c"}, + {file = "ruff-0.7.3.tar.gz", hash = "sha256:e1d1ba2e40b6e71a61b063354d04be669ab0d39c352461f3d789cac68b54a313"}, ] [[package]] @@ -253,4 +271,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "aa120f1f5f547be5a2823893d875ee2bc2b71e1abe1e502946a9183f0be9bbe1" +content-hash = "20136fad059dd6f087334eea57612f368edbe485fcd2874666338000b4859d1b" diff --git a/pyproject.toml b/pyproject.toml index c7d60364..2fb41bb5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ optional = true [tool.poetry.group.test.dependencies] pytest = "^8.3.3" +pytest-asyncio = "^0.24.0" [build-system] requires = ["poetry-core"] @@ -53,12 +54,17 @@ select = [ ignore = [ "COM812", + "S101", + "TRY003", ] [tool.pyright] include = [ "adcm_aio_client", ] +exclude = [ + "adcm_aio_client/tests/**" +] typeCheckingMode = "standard" reportUnnecessaryTypeIgnoreComment = true From c413f18f3b24a77d27503fcf7d7fcd9ad871fd7e Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 15 Nov 2024 14:26:31 +0500 Subject: [PATCH 05/46] ADCM-6076 Introduce base objects and very simple Cluster implementation (#5) --- adcm_aio_client/core/accessors.py | 124 --------------------- adcm_aio_client/{ => core}/client.py | 13 ++- adcm_aio_client/core/errors.py | 24 +++- adcm_aio_client/core/exceptions.py | 18 --- adcm_aio_client/core/objects.py | 60 ---------- adcm_aio_client/core/objects/__init__.py | 3 + adcm_aio_client/core/objects/_accessors.py | 114 +++++++++++++++++++ adcm_aio_client/core/objects/_base.py | 24 ++++ adcm_aio_client/core/objects/_common.py | 8 ++ adcm_aio_client/core/objects/cm.py | 112 +++++++++++++++++++ adcm_aio_client/core/requesters.py | 16 +-- adcm_aio_client/core/types.py | 44 +++++--- adcm_aio_client/tests/__init__.py | 0 adcm_aio_client/tests/mocks.py | 85 -------------- adcm_aio_client/tests/test_accessor.py | 48 -------- 
15 files changed, 331 insertions(+), 362 deletions(-) delete mode 100644 adcm_aio_client/core/accessors.py rename adcm_aio_client/{ => core}/client.py (66%) delete mode 100644 adcm_aio_client/core/exceptions.py delete mode 100644 adcm_aio_client/core/objects.py create mode 100644 adcm_aio_client/core/objects/__init__.py create mode 100644 adcm_aio_client/core/objects/_accessors.py create mode 100644 adcm_aio_client/core/objects/_base.py create mode 100644 adcm_aio_client/core/objects/_common.py create mode 100644 adcm_aio_client/core/objects/cm.py delete mode 100644 adcm_aio_client/tests/__init__.py delete mode 100644 adcm_aio_client/tests/mocks.py delete mode 100644 adcm_aio_client/tests/test_accessor.py diff --git a/adcm_aio_client/core/accessors.py b/adcm_aio_client/core/accessors.py deleted file mode 100644 index 149f4188..00000000 --- a/adcm_aio_client/core/accessors.py +++ /dev/null @@ -1,124 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from abc import ABC, abstractmethod -from contextlib import suppress -from typing import Any, AsyncGenerator, Dict, Generator, List, Optional, Self, Tuple, Type - -from adcm_aio_client.core.exceptions import ( - MultipleObjectsReturnedError, - ObjectDoesNotExistError, -) -from adcm_aio_client.core.types import Requester - - -class Accessor[T, F](ABC): - class_type: Type[T] - - def __init__(self: Self, path: tuple[str | int, ...], requester: Requester) -> None: - self.path = path - self.requester = requester - - @abstractmethod - async def list(self: Self) -> List[T]: ... - - @abstractmethod - async def get(self: Self) -> T: ... - - @abstractmethod - async def get_or_none(self: Self) -> T | None: ... - - @abstractmethod - async def all(self: Self) -> List[T]: ... - - @abstractmethod - async def iter(self: Self) -> AsyncGenerator[T, None]: ... - - @abstractmethod - async def filter(self: Self) -> List[T]: ... - - def _create_object(self: Self, data: Dict[str, Any]) -> T: - return self.class_type(requester=self.requester, data=data) # type: ignore - - -class PaginatedAccessor[T](Accessor): - def _gen_page_indexes( - self: Self, page: Optional[int] = 1, items: Optional[int] = 10 - ) -> Generator[Tuple[int, int], None, None]: - """ - Generates indices for pagination slicing based on specified page number and offset. - - Args: - page (int, optional): The starting page number for pagination. Defaults to 1. - items (int, optional): The number of items per page. Defaults to 10. - - Yields: - Tuple[int, int]: A tuple representing the start and end indices for slicing. 
- """ - if page is None: - page = 1 - if items is None: - items = 10 - - current_page = page - - while True: - # Calculate the start and end indices for the current page - start_index = (current_page - 1) * items - end_index = current_page * items - yield (start_index, end_index) - current_page += 1 - - async def get(self: Self) -> T: - response = await self._list(query_params={"offset": 0, "limit": 2}) - objects = response["results"] - - if not objects: - raise ObjectDoesNotExistError("No objects found with the given filter.") - if len(objects) > 1: - raise MultipleObjectsReturnedError("More than one object found.") - return self._create_object(objects[0]) - - async def _list(self: Self, query_params: dict) -> dict: - response = await self.requester.get(*self.path, query_params=query_params) - return response.as_dict() - - async def list(self: Self) -> List[T]: - return [self._create_object(obj) for obj in await self._list(query_params={})] - - async def get_or_none(self: Self) -> T | None: - with suppress(ObjectDoesNotExistError): - obj = await self.get() - if obj: - return obj - return None - - async def all(self: Self) -> List[T]: - return await self.filter() - - async def iter(self: Self) -> AsyncGenerator[T, None]: - start, step = 0, 10 - while True: - response = await self._list(query_params={"offset": start, "limit": step}) - - if not response["results"]: - return - - for record in response["results"]: - yield self._create_object(record) - - start += step - - async def filter(self: Self) -> List[T]: - return [i async for i in self.iter()] - - -class NonPaginatedAccessor(Accessor): ... diff --git a/adcm_aio_client/client.py b/adcm_aio_client/core/client.py similarity index 66% rename from adcm_aio_client/client.py rename to adcm_aio_client/core/client.py index cd2a9a79..1bc64065 100644 --- a/adcm_aio_client/client.py +++ b/adcm_aio_client/core/client.py @@ -10,19 +10,26 @@ # See the License for the specific language governing permissions and # limitations under the License. +from functools import cached_property from typing import Self -from adcm_aio_client.core.types import AuthCredentials, AuthToken, Cert, Requester, Verify +from adcm_aio_client.core.objects.cm import ClustersNode +from adcm_aio_client.core.requesters import Requester +from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Verify class ADCMClient: def __init__(self: Self, requester: Requester) -> None: - pass + self._requester = requester + + @cached_property + def clusters(self: Self) -> ClustersNode: + return ClustersNode(path=(), requester=self._requester) async def build_client( url: str | list[str], - credentials: AuthCredentials | AuthToken, + credentials: Credentials | AuthToken, *, verify: Verify | None = None, cert: Cert | None = None, diff --git a/adcm_aio_client/core/errors.py b/adcm_aio_client/core/errors.py index e9889230..29c36918 100644 --- a/adcm_aio_client/core/errors.py +++ b/adcm_aio_client/core/errors.py @@ -11,7 +11,14 @@ # limitations under the License. 
-class RequesterError(Exception): +class ADCMClientError(Exception): + pass + + +# Requester + + +class RequesterError(ADCMClientError): pass @@ -61,3 +68,18 @@ class ConflictError(ResponseError): class ServerError(ResponseError): pass + + +# Objects + + +class AccessorError(ADCMClientError): + pass + + +class MultipleObjectsReturnedError(AccessorError): + pass + + +class ObjectDoesNotExistError(AccessorError): + pass diff --git a/adcm_aio_client/core/exceptions.py b/adcm_aio_client/core/exceptions.py deleted file mode 100644 index d57015f7..00000000 --- a/adcm_aio_client/core/exceptions.py +++ /dev/null @@ -1,18 +0,0 @@ -class AccessionError(Exception): - pass - - -class MissingParameterError(AccessionError): - pass - - -class MultipleObjectsReturnedError(AccessionError): - pass - - -class ObjectDoesNotExistError(AccessionError): - pass - - -class InvalidArgumentError(AccessionError): - pass diff --git a/adcm_aio_client/core/objects.py b/adcm_aio_client/core/objects.py deleted file mode 100644 index 2ecdd500..00000000 --- a/adcm_aio_client/core/objects.py +++ /dev/null @@ -1,60 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from typing import Optional, Self -import ast - -from adcm_aio_client.core.accessors import Accessor, PaginatedAccessor -from adcm_aio_client.core.requesters import Requester - - -class BaseObject: - id: int - name: str - - -class BaseNode(Accessor[BaseObject, None]): - def __init__(self: Self, path: tuple[str | int, ...], requester: Requester) -> None: - super().__init__(path, requester) - self.class_type = ast.literal_eval(self.class_type.__name__) - - -class Service(BaseObject): ... - - -class ServiceNode(BaseNode, PaginatedAccessor): - id: int - name: str - display_name: str - - -class Cluster(BaseObject): - def __init__(self: Self, pk: int, name: str, description: str, services: Optional[ServiceNode] = None) -> None: - self.id = pk - self.name = name - self.description = description - self.services = services - - def delete(self: Self) -> None: - # Implement delete logic - pass - - def rename(self: Self, name: str) -> Self: - self.name = name - return self - - -class ClusterNode(BaseNode, PaginatedAccessor): - class_type = Cluster - id: int - name: str - description: str - services: Optional[ServiceNode] diff --git a/adcm_aio_client/core/objects/__init__.py b/adcm_aio_client/core/objects/__init__.py new file mode 100644 index 00000000..76d14def --- /dev/null +++ b/adcm_aio_client/core/objects/__init__.py @@ -0,0 +1,3 @@ +from adcm_aio_client.core.objects.cm import Bundle, Cluster, Service + +__all__ = ["Bundle", "Cluster", "Service"] diff --git a/adcm_aio_client/core/objects/_accessors.py b/adcm_aio_client/core/objects/_accessors.py new file mode 100644 index 00000000..f9a02646 --- /dev/null +++ b/adcm_aio_client/core/objects/_accessors.py @@ -0,0 +1,114 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC, abstractmethod +from contextlib import suppress +from typing import Any, AsyncGenerator, Self + +from adcm_aio_client.core.errors import MultipleObjectsReturnedError, ObjectDoesNotExistError +from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject +from adcm_aio_client.core.types import Endpoint, QueryParameters, Requester, RequesterResponse + + +class Accessor[ReturnObject: InteractiveObject, Filter](ABC): + class_type: type[ReturnObject] + + def __init__(self: Self, path: Endpoint, requester: Requester) -> None: + self._path = path + self._requester = requester + + @abstractmethod + async def iter(self: Self) -> AsyncGenerator[ReturnObject, None]: ... + + @abstractmethod + def _extract_results_from_response(self: Self, response: RequesterResponse) -> list[dict]: ... + + async def get(self: Self) -> ReturnObject: + response = await self._request_endpoint(query={"offset": 0, "limit": 2}) + results = self._extract_results_from_response(response=response) + + if not results: + raise ObjectDoesNotExistError("No objects found with the given filter.") + + if len(results) > 1: + raise MultipleObjectsReturnedError("More than one object found.") + + return self._create_object(results[0]) + + async def get_or_none(self: Self) -> ReturnObject | None: + with suppress(ObjectDoesNotExistError): + return await self.get() + + return None + + async def all(self: Self) -> list[ReturnObject]: + return await self.filter() + + async def filter(self: Self) -> list[ReturnObject]: + return [i async for i in self.iter()] + + async def list(self: Self) -> list[ReturnObject]: + response = await self._request_endpoint(query={}) + results = self._extract_results_from_response(response) + return [self._create_object(obj) for obj in results] + + async def _request_endpoint(self: Self, query: QueryParameters) -> RequesterResponse: + return await self._requester.get(*self._path, query=query) + + def _create_object(self: Self, data: dict[str, Any]) -> ReturnObject: + return self.class_type(requester=self._requester, data=data) + + +class PaginatedAccessor[ReturnObject: InteractiveObject, Filter](Accessor[ReturnObject, Filter]): + async def iter(self: Self) -> AsyncGenerator[ReturnObject, None]: + start, step = 0, 10 + while True: + response = await self._request_endpoint(query={"offset": start, "limit": step}) + results = self._extract_results_from_response(response=response) + + if not results: + return + + for record in results: + yield self._create_object(record) + + start += step + + def _extract_results_from_response(self: Self, response: RequesterResponse) -> list[dict]: + return response.as_dict()["results"] + + +class PaginatedChildAccessor[Parent, Child: InteractiveChildObject, Filter](PaginatedAccessor[Child, Filter]): + def __init__(self: Self, parent: Parent, path: Endpoint, requester: Requester) -> None: + super().__init__(path, requester) + self._parent = parent + + def _create_object(self: Self, data: dict[str, Any]) -> Child: + return self.class_type(parent=self._parent, requester=self._requester, data=data) + + +class 
NonPaginatedChildAccessor[Parent, Child: InteractiveChildObject, Filter](Accessor[Child, Filter]): + def __init__(self: Self, parent: Parent, path: Endpoint, requester: Requester) -> None: + super().__init__(path, requester) + self._parent = parent + + async def iter(self: Self) -> AsyncGenerator[Child, None]: + response = await self._request_endpoint(query={}) + results = self._extract_results_from_response(response=response) + for record in results: + yield self._create_object(record) + + def _extract_results_from_response(self: Self, response: RequesterResponse) -> list[dict]: + return response.as_list() + + def _create_object(self: Self, data: dict[str, Any]) -> Child: + return self.class_type(parent=self._parent, requester=self._requester, data=data) diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py new file mode 100644 index 00000000..a2a9c34d --- /dev/null +++ b/adcm_aio_client/core/objects/_base.py @@ -0,0 +1,24 @@ +from typing import Any, Self + +from adcm_aio_client.core.requesters import Requester +from adcm_aio_client.core.types import AwareOfOwnPath, WithRequester + + +class InteractiveObject(WithRequester, AwareOfOwnPath): + def __init__(self: Self, requester: Requester, data: dict[str, Any]) -> None: + self._requester = requester + self._data = data + + def _construct[Object: "InteractiveObject"](self: Self, what: type[Object], from_data: dict[str, Any]) -> Object: + return what(requester=self._requester, data=from_data) + + def _construct_child[Child: "InteractiveChildObject"]( + self: Self, what: type[Child], from_data: dict[str, Any] + ) -> Child: + return what(requester=self._requester, data=from_data, parent=self) + + +class InteractiveChildObject[Parent](InteractiveObject): + def __init__(self: Self, parent: Parent, requester: Requester, data: dict[str, Any]) -> None: + super().__init__(requester=requester, data=data) + self._parent = parent diff --git a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py new file mode 100644 index 00000000..9a3bc5b8 --- /dev/null +++ b/adcm_aio_client/core/objects/_common.py @@ -0,0 +1,8 @@ +from typing import Self + +from adcm_aio_client.core.objects._base import AwareOfOwnPath, WithRequester + + +class Deletable(WithRequester, AwareOfOwnPath): + async def delete(self: Self) -> None: + await self._requester.delete(*self.get_own_path()) diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py new file mode 100644 index 00000000..1e40b75b --- /dev/null +++ b/adcm_aio_client/core/objects/cm.py @@ -0,0 +1,112 @@ +from functools import cached_property +from typing import Literal, Self + +from adcm_aio_client.core.objects._accessors import ( + NonPaginatedChildAccessor, + PaginatedAccessor, + PaginatedChildAccessor, +) +from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject +from adcm_aio_client.core.objects._common import Deletable +from adcm_aio_client.core.types import Endpoint + + +class Bundle(Deletable, InteractiveObject): ... + + +class Cluster(Deletable, InteractiveObject): + # data-based properties + + @property + def id(self: Self) -> int: + return int(self._data["id"]) + + @property + def name(self: Self) -> str: + return str(self._data["name"]) + + @property + def description(self: Self) -> str: + return str(self._data["description"]) + + # related/dynamic data access + + # todo think how such properties will be invalidated when data is updated + # during `refresh()` / `reread()` calls. 
+ # See cache invalidation or alternatives in documentation for `cached_property` + @cached_property + async def bundle(self: Self) -> Bundle: + prototype_id = self._data["prototype"]["id"] + response = await self._requester.get("prototypes", prototype_id) + + bundle_id = response.as_dict()["bundle"]["id"] + response = await self._requester.get("bundles", bundle_id) + + return self._construct(what=Bundle, from_data=response.as_dict()) + + # object-specific methods + + async def get_status(self: Self) -> Literal["up", "down"]: + response = await self._requester.get(*self.get_own_path()) + return response.as_dict()["status"] + + async def set_ansible_forks(self: Self, value: int) -> Self: + # todo + ... + + # nodes and managers to access + + @cached_property + def services(self: Self) -> "ServicesNode": + return ServicesNode(parent=self, path=(*self.get_own_path(), "services"), requester=self._requester) + + # todo IMPLEMENT: + # Nodes: + # - hosts: "ClusterHostsNode" + # - imports (probably not an accessor node, but some cool class) + # - actions + # - upgrades + # - config-groups + # Managers: + # - config + # - mapping + + def get_own_path(self: Self) -> Endpoint: + return "clusters", self.id + + +class ClustersNode(PaginatedAccessor[Cluster, None]): + class_type = Cluster + + def get_own_path(self: Self) -> Endpoint: + return ("clusters",) + + +class Service(InteractiveChildObject[Cluster]): + @property + def id(self: Self) -> int: + return int(self._data["id"]) + + def get_own_path(self: Self) -> Endpoint: + return (*self._parent.get_own_path(), "services", self.id) + + @cached_property + def components(self: Self) -> "ComponentsNode": + return ComponentsNode(parent=self, path=(*self.get_own_path(), "components"), requester=self._requester) + + +class ServicesNode(PaginatedChildAccessor[Cluster, Service, None]): + class_type = Service + + +class Component(InteractiveChildObject[Service]): + @property + def id(self: Self) -> int: + return int(self._data["id"]) + + def get_own_path(self: Self) -> Endpoint: + return (*self._parent.get_own_path(), "components", self.id) + + +class ComponentsNode(NonPaginatedChildAccessor[Service, Component, None]): + class_type = Component diff --git a/adcm_aio_client/core/requesters.py b/adcm_aio_client/core/requesters.py index 14c03fcd..98a68c96 100644 --- a/adcm_aio_client/core/requesters.py +++ b/adcm_aio_client/core/requesters.py @@ -34,7 +34,7 @@ UnauthorizedError, WrongCredentialsError, ) -from adcm_aio_client.core.types import Credentials, Requester +from adcm_aio_client.core.types import Credentials, PathPart, QueryParameters, Requester Json: TypeAlias = Any Params = ParamSpec("Params") @@ -151,27 +151,27 @@ async def login(self: Self, credentials: Credentials) -> Self: self._credentials = credentials return self - async def get(self: Self, *path: str | int, query_params: dict | None = None) -> HTTPXRequesterResponse: - return await self.request(*path, method=self.client.get, params=query_params or {}) + async def get(self: Self, *path: PathPart, query: QueryParameters | None = None) -> HTTPXRequesterResponse: + return await self.request(*path, method=self.client.get, params=query or {}) - async def post(self: Self, *path: str | int, data: dict) -> HTTPXRequesterResponse: + async def post(self: Self, *path: PathPart, data: dict) -> HTTPXRequesterResponse: return await self.request(*path, method=self.client.post, data=data) - async def patch(self: Self, *path: str | int, data: dict) -> HTTPXRequesterResponse: + async def patch(self: Self, *path: 
PathPart, data: dict) -> HTTPXRequesterResponse: return await self.request(*path, method=self.client.patch, data=data) - async def delete(self: Self, *path: str | int) -> HTTPXRequesterResponse: + async def delete(self: Self, *path: PathPart) -> HTTPXRequesterResponse: return await self.request(*path, method=self.client.delete) @retry_request - async def request(self: Self, *path: str | int, method: Callable, **kwargs: dict) -> HTTPXRequesterResponse: + async def request(self: Self, *path: PathPart, method: Callable, **kwargs: dict) -> HTTPXRequesterResponse: url = self._make_url(*path, base=self.api_root) response = await self._do_request(method(url, **kwargs)) return HTTPXRequesterResponse(response=response) @staticmethod - def _make_url(*path: str | int, base: str) -> str: + def _make_url(*path: PathPart, base: str) -> str: return urljoin(base, "/".join(map(str, (*path, "")))) @convert_exceptions diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index 33f3af05..ee14b747 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -9,20 +9,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from dataclasses import asdict, dataclass -from typing import NamedTuple, Optional, Self, TypeAlias - -from typing_extensions import Protocol +from dataclasses import asdict, dataclass +from typing import Optional, Protocol, Self -class AuthCredentials(NamedTuple): - username: str - password: str - +# Init / Authorization -AuthToken: TypeAlias = str -Cert: TypeAlias = str | tuple[str, Optional[str], Optional[str]] | None -Verify: TypeAlias = str | bool +type AuthToken = str +type Cert = str | tuple[str, Optional[str], Optional[str]] | None +type Verify = str | bool @dataclass(slots=True, frozen=True) @@ -37,6 +32,14 @@ def __repr__(self: Self) -> str: return f"{self.username}'s credentials" +# Requests + +type PathPart = str | int +type Endpoint = tuple[PathPart, ...] + +type QueryParameters = dict + + class RequesterResponse(Protocol): def as_list(self: Self) -> list: ... @@ -46,10 +49,21 @@ def as_dict(self: Self) -> dict: ... class Requester(Protocol): async def login(self: Self, credentials: Credentials) -> Self: ... - async def get(self: Self, *path: str | int, query_params: dict) -> RequesterResponse: ... + async def get(self: Self, *path: PathPart, query: QueryParameters | None = None) -> RequesterResponse: ... + + async def post(self: Self, *path: PathPart, data: dict) -> RequesterResponse: ... + + async def patch(self: Self, *path: PathPart, data: dict) -> RequesterResponse: ... + + async def delete(self: Self, *path: PathPart) -> RequesterResponse: ... + + +# Objects + - async def post(self: Self, *path: str | int, data: dict) -> RequesterResponse: ... +class WithRequester(Protocol): + _requester: Requester - async def patch(self: Self, *path: str | int, data: dict) -> RequesterResponse: ... - async def delete(self: Self, *path: str | int) -> RequesterResponse: ... +class AwareOfOwnPath(Protocol): + def get_own_path(self: Self) -> Endpoint: ... 
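For reference, a brief usage sketch of the Requester protocol together with the PathPart/Endpoint/QueryParameters aliases defined above. It is not part of the patch: the "clusters" endpoint name and the page size are illustrative only, and the exact URL/query handling depends on the concrete requester implementation.

    from adcm_aio_client.core.types import QueryParameters, Requester

    async def fetch_clusters_page(requester: Requester, offset: int = 0, limit: int = 10) -> list[dict]:
        # path parts are joined into the URL by the requester implementation;
        # query parameters are expected to become ?offset=...&limit=... on the request
        query: QueryParameters = {"offset": offset, "limit": limit}
        response = await requester.get("clusters", query=query)
        # paginated ADCM endpoints wrap entries in a "results" key, as the accessors assume
        return response.as_dict()["results"]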
diff --git a/adcm_aio_client/tests/__init__.py b/adcm_aio_client/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/adcm_aio_client/tests/mocks.py b/adcm_aio_client/tests/mocks.py deleted file mode 100644 index 460ccfc0..00000000 --- a/adcm_aio_client/tests/mocks.py +++ /dev/null @@ -1,85 +0,0 @@ -# Define a mock response object to simulate API/database responses -from typing import Self -import json - -from httpx import Response - -from adcm_aio_client.core.requesters import DefaultRequester, HTTPXRequesterResponse - -page_content = [ - { - "id": 1, - "name": "cluster_1", - "description": "cluster_1", - "state": "created", - "multiState": [], - "status": "down", - "prototype": { - "id": 2, - "name": "cluster_one", - "displayName": "cluster_one", - "version": "1.0", - }, - "concerns": [], - "isUpgradable": False, - "mainInfo": None, - }, - { - "id": 2, - "name": "cluster_2", - "description": "cluster_2", - "state": "created", - "multiState": [], - "status": "down", - "prototype": { - "id": 2, - "name": "cluster_2", - "displayName": "cluster_2", - "version": "1.0", - }, - "concerns": [], - "isUpgradable": False, - "mainInfo": None, - }, - { - "id": 3, - "name": "cluster_3", - "description": "cluster_3", - "state": "created", - "multiState": [], - "status": "down", - "prototype": { - "id": 3, - "name": "cluster_3", - "displayName": "cluster_3", - "version": "1.0", - }, - "concerns": [], - "isUpgradable": False, - "mainInfo": None, - }, -] - - -class MockRequester(DefaultRequester): - response_retrieve = Response( - status_code=200, # Assuming a successful response - headers={"Content-Type": "application/json"}, - content=json.dumps([page_content[0]]), - ) - - response_list = Response( - status_code=200, # Assuming a successful response - headers={"Content-Type": "application/json"}, - content=json.dumps(page_content), - ) - - async def get(self: Self, *path: str | int, query_params: dict | None = None) -> HTTPXRequesterResponse: - # This function simulates retrieval of data - if not isinstance(path, int) and not query_params or "id" not in query_params: - if query_params and "limit" not in query_params and "offset" not in query_params: - return HTTPXRequesterResponse(response=MockRequester.response_list) - return HTTPXRequesterResponse(response=MockRequester.response_list) - if query_params and "id" in query_params and query_params["id"] > 3: - return HTTPXRequesterResponse(response=Response(status_code=404, content=json.dumps([]))) - return HTTPXRequesterResponse(response=MockRequester.response_retrieve) diff --git a/adcm_aio_client/tests/test_accessor.py b/adcm_aio_client/tests/test_accessor.py deleted file mode 100644 index bca928d2..00000000 --- a/adcm_aio_client/tests/test_accessor.py +++ /dev/null @@ -1,48 +0,0 @@ -from contextlib import suppress -from typing import Self - -import pytest - -from adcm_aio_client.core.exceptions import ObjectDoesNotExistError -from adcm_aio_client.core.objects import ClusterNode -from adcm_aio_client.tests.mocks import MockRequester - - -@pytest.mark.skip(reason="This tests are temporarily disabled") -@pytest.mark.asyncio -class TestClusterNode: - async def test_get_single_object_success(self: Self) -> None: - accessor = ClusterNode("clusters", requester=MockRequester(base_url="http://127.0.0.1")) - result = await accessor.get(id=1) - assert result.__dict__ == {"id": 1, "name": "cluster_1", "description": "cluster_1"} - - with suppress(ObjectDoesNotExistError): - await accessor.get(id=4) - - async def 
test_get_or_none_single_object_success(self: Self) -> None: - accessor = ClusterNode("clusters", requester=MockRequester(base_url="http://127.0.0.1")) - result = await accessor.get_or_none(id=1) - assert result.__dict__ == {"id": 1, "name": "cluster_1", "description": "cluster_1"} - - result = await accessor.get_or_none(id=4) - assert result is None - - async def test_list_success(self: Self) -> None: - accessor = ClusterNode("clusters", requester=MockRequester(base_url="http://127.0.0.1")) - - result = await accessor.list() - assert len(result) == 3 - for i, _ in enumerate(result): - assert result[i].id == i + 1 - assert result[i].name == f"cluster_{i + 1}" - - async def test_all_success(self: Self) -> None: - accessor = ClusterNode( - "clusters", requester=MockRequester(base_url="http://127.0.0.1"), query_params={"offset": 0, "limit": 1} - ) - - result = await accessor.all() - assert len(result) == 3 - for i, _ in enumerate(result): - assert result[i].id == i + 1 - assert result[i].name == f"cluster_{i + 1}" From 54cc39a411d6df8612732afa7a3b37b02924ab00 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Mon, 18 Nov 2024 14:06:33 +0500 Subject: [PATCH 06/46] ADCM-6076 Add unit tests (#7) --- .github/workflows/step_lint.yaml | 2 +- .github/workflows/step_test_from_dir.yaml | 6 +- pyproject.toml | 8 +- tests/__init__.py | 0 tests/unit/conftest.py | 8 + tests/unit/mocks/__init__.py | 0 tests/unit/mocks/requesters.py | 66 ++++++ tests/unit/test_accessors.py | 269 ++++++++++++++++++++++ tests/unit/test_dummy.py | 2 - tests/unit/test_requesters.py | 121 ++++++++++ tests/unit/utils.py | 14 ++ 11 files changed, 488 insertions(+), 8 deletions(-) create mode 100644 tests/__init__.py create mode 100644 tests/unit/conftest.py create mode 100644 tests/unit/mocks/__init__.py create mode 100644 tests/unit/mocks/requesters.py create mode 100644 tests/unit/test_accessors.py delete mode 100644 tests/unit/test_dummy.py create mode 100644 tests/unit/test_requesters.py create mode 100644 tests/unit/utils.py diff --git a/.github/workflows/step_lint.yaml b/.github/workflows/step_lint.yaml index 358f0285..44eb7d29 100644 --- a/.github/workflows/step_lint.yaml +++ b/.github/workflows/step_lint.yaml @@ -14,7 +14,7 @@ jobs: run: python -m pip install poetry - uses: actions/checkout@v4 - name: Install dependencies - run: poetry install --with dev --no-root + run: poetry install --with dev --with test --no-root - name: Run ruff lint check run: poetry run ruff check $CODE_DIRS - name: Run ruff format check diff --git a/.github/workflows/step_test_from_dir.yaml b/.github/workflows/step_test_from_dir.yaml index d6c78571..07f607d5 100644 --- a/.github/workflows/step_test_from_dir.yaml +++ b/.github/workflows/step_test_from_dir.yaml @@ -25,6 +25,8 @@ jobs: run: python -m pip install poetry - uses: actions/checkout@v4 - name: Install dependencies - run: poetry install --with test --no-root + # install "with root" so adcm_aio_client + # will be accessible without PYTHONPATH manipulations + run: poetry install --with test - name: Run tests - run: poetry run pytest ${{ inputs.target }} + run: poetry run pytest ${{ inputs.target }} -v diff --git a/pyproject.toml b/pyproject.toml index 2fb41bb5..ce8b2c44 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -60,10 +60,12 @@ ignore = [ [tool.pyright] include = [ - "adcm_aio_client", + "adcm_aio_client", "tests" ] -exclude = [ - "adcm_aio_client/tests/**" + +executionEnvironments = [ + { root = "." }, + { root = "tests", extraPaths = [ "." 
] }, ] typeCheckingMode = "standard" diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py new file mode 100644 index 00000000..6de824ce --- /dev/null +++ b/tests/unit/conftest.py @@ -0,0 +1,8 @@ +import pytest + +from tests.unit.mocks.requesters import QueueRequester + + +@pytest.fixture() +def queue_requester() -> QueueRequester: + return QueueRequester() diff --git a/tests/unit/mocks/__init__.py b/tests/unit/mocks/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/mocks/requesters.py b/tests/unit/mocks/requesters.py new file mode 100644 index 00000000..b00895dd --- /dev/null +++ b/tests/unit/mocks/requesters.py @@ -0,0 +1,66 @@ +from collections import deque +from dataclasses import dataclass, field +from typing import Self + +from adcm_aio_client.core.errors import ResponseDataConversionError +from adcm_aio_client.core.types import Credentials, PathPart, QueryParameters, Requester, RequesterResponse + +type FakeResponseData = dict | list + + +@dataclass(slots=True) +class QueueResponse(RequesterResponse): + data: FakeResponseData + + def as_list(self: Self) -> list: + if not isinstance(data := self.data, list): + message = f"Expected a list, got {type(data)}" + raise ResponseDataConversionError(message) + + return data + + def as_dict(self: Self) -> dict: + if not isinstance(data := self.data, dict): + message = f"Expected a dict, got {type(data)}" + raise ResponseDataConversionError(message) + + return data + + +@dataclass() +class QueueRequester(Requester): + queue: deque[FakeResponseData] = field(default_factory=deque) + + async def login(self: Self, credentials: Credentials) -> Self: + _ = credentials + return self + + async def get(self: Self, *path: PathPart, query: QueryParameters | None = None) -> RequesterResponse: + _ = path, query + return self._return_next_response() + + async def post(self: Self, *path: PathPart, data: dict) -> RequesterResponse: + _ = path, data + return self._return_next_response() + + async def patch(self: Self, *path: PathPart, data: dict) -> RequesterResponse: + _ = path, data + return self._return_next_response() + + async def delete(self: Self, *path: PathPart) -> RequesterResponse: + _ = path + return self._return_next_response() + + # specifics + + def queue_responses(self: Self, *responses: FakeResponseData) -> Self: + self.queue.extend(responses) + return self + + def flush(self: Self) -> Self: + self.queue.clear() + return self + + def _return_next_response(self: Self) -> RequesterResponse: + next_response = self.queue.popleft() + return QueueResponse(data=next_response) diff --git a/tests/unit/test_accessors.py b/tests/unit/test_accessors.py new file mode 100644 index 00000000..c1faa8b0 --- /dev/null +++ b/tests/unit/test_accessors.py @@ -0,0 +1,269 @@ +from typing import Any, AsyncGenerator, Callable, Self + +import pytest + +from adcm_aio_client.core.errors import MultipleObjectsReturnedError, ObjectDoesNotExistError +from adcm_aio_client.core.objects._accessors import ( + Accessor, + NonPaginatedChildAccessor, + PaginatedAccessor, + PaginatedChildAccessor, +) +from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject +from adcm_aio_client.core.types import Endpoint +from tests.unit.mocks.requesters import QueueRequester +from tests.unit.utils import n_entries_as_list + +pytestmark = [pytest.mark.asyncio] + + +class _OwnPath: + def get_own_path(self: Self) -> Endpoint: + return 
() + + +class Dummy(_OwnPath, InteractiveObject): ... + + +class DummyChild(_OwnPath, InteractiveChildObject): ... + + +class DummyPaginatedAccessor(PaginatedAccessor[Dummy, None]): + class_type = Dummy + + +class DummyChildPaginatedAccessor(PaginatedChildAccessor[Dummy, DummyChild, None]): + class_type = DummyChild + + +class DummyChildNonPaginatedAccessor(NonPaginatedChildAccessor[Dummy, DummyChild, None]): + class_type = DummyChild + + +def create_paginated_response(amount: int) -> dict: + return {"results": [{} for _ in range(amount)]} + + +def extract_paginated_response_entries(data: dict) -> list: + return data["results"] + + +def create_non_paginated_response(amount: int) -> list: + return [{} for _ in range(amount)] + + +async def test_paginated(queue_requester: QueueRequester) -> None: + requester = queue_requester + accessor = DummyPaginatedAccessor(requester=requester, path=()) + + await _test_paginated_accessor_common_methods( + accessor=accessor, + requester=requester, + create_response=create_paginated_response, + extract_entries=extract_paginated_response_entries, + check_entry=lambda entry: isinstance(entry, Dummy), + ) + + +async def test_paginated_child(queue_requester: QueueRequester) -> None: + requester = queue_requester + parent = Dummy(requester=requester, data={}) + accessor = DummyChildPaginatedAccessor(requester=requester, path=(), parent=parent) + + await _test_paginated_accessor_common_methods( + accessor=accessor, + requester=requester, + create_response=create_paginated_response, + extract_entries=extract_paginated_response_entries, + check_entry=lambda entry: isinstance(entry, DummyChild) and entry._parent is parent, + ) + + +async def test_non_paginated_child(queue_requester: QueueRequester) -> None: + requester = queue_requester + parent = Dummy(requester=requester, data={}) + accessor = DummyChildNonPaginatedAccessor(requester=requester, path=(), parent=parent) + create_response = create_non_paginated_response + check_entry = lambda entry: isinstance(entry, DummyChild) and entry._parent is parent # noqa: E731 + + response_sequence = (create_response(10), create_response(4), create_response(0)) + amount_of_entries = len(response_sequence[0]) + + # get + + requester.flush().queue_responses(create_response(1)) + result = await accessor.get() + + assert check_entry(result) + + requester.flush().queue_responses(create_response(0)) + + with pytest.raises(ObjectDoesNotExistError): + await accessor.get() + + requester.flush().queue_responses(create_response(2)) + + with pytest.raises(MultipleObjectsReturnedError): + await accessor.get() + + # get or none + + requester.flush().queue_responses(create_response(1)) + result = await accessor.get_or_none() + + assert check_entry(result) + + requester.flush().queue_responses(create_response(0)) + result = await accessor.get_or_none() + + assert result is None + + requester.flush().queue_responses(create_response(2)) + + with pytest.raises(MultipleObjectsReturnedError): + await accessor.get_or_none() + + # list + + requester.flush().queue_responses(*response_sequence) + result = await accessor.list() + + assert isinstance(result, list) + assert len(result) == 10 + assert all(map(check_entry, result)) + + assert len(requester.queue) == len(response_sequence) - 1 + + # all + + requester.flush().queue_responses(*response_sequence) + result = await accessor.all() + + assert isinstance(result, list) + assert len(result) == amount_of_entries + assert all(map(check_entry, result)) + + assert len(requester.queue) == 
len(response_sequence) - 1 + + # filter (with no args is the same as all) + + requester.flush().queue_responses(*response_sequence) + result = await accessor.filter() + + assert isinstance(result, list) + assert len(result) == amount_of_entries + assert all(map(check_entry, result)) + + assert len(requester.queue) == len(response_sequence) - 1 + + # iter + + requester.flush().queue_responses(*response_sequence) + result = accessor.iter() + + # see no requests made at first + assert len(requester.queue) == len(response_sequence) + assert isinstance(result, AsyncGenerator) + + all_entries = [entry async for entry in result] + assert len(all_entries) == amount_of_entries + assert all(map(check_entry, all_entries)) + + # see 1 "pages" read, because it's not paginated + assert len(requester.queue) == len(response_sequence) - 1 + + +async def _test_paginated_accessor_common_methods[T: dict | list]( + accessor: Accessor, + requester: QueueRequester, + create_response: Callable[[int], T], + extract_entries: Callable[[T], list], + check_entry: Callable[[Any], bool], +) -> None: + response_sequence = (create_response(10), create_response(10), create_response(4), create_response(0)) + amount_of_all_entries = sum(map(len, map(extract_entries, response_sequence))) + + # get + + requester.flush().queue_responses(create_response(1)) + result = await accessor.get() + + assert check_entry(result) + + requester.flush().queue_responses(create_response(0)) + + with pytest.raises(ObjectDoesNotExistError): + await accessor.get() + + requester.flush().queue_responses(create_response(2)) + + with pytest.raises(MultipleObjectsReturnedError): + await accessor.get() + + # get or none + + requester.flush().queue_responses(create_response(1)) + result = await accessor.get_or_none() + + assert check_entry(result) + + requester.flush().queue_responses(create_response(0)) + result = await accessor.get_or_none() + + assert result is None + + requester.flush().queue_responses(create_response(2)) + + with pytest.raises(MultipleObjectsReturnedError): + await accessor.get_or_none() + + # list + + requester.flush().queue_responses(*response_sequence) + result = await accessor.list() + + assert isinstance(result, list) + assert len(result) == 10 + assert all(map(check_entry, result)) + assert len(requester.queue) == len(response_sequence) - 1 + + # all + + requester.flush().queue_responses(*response_sequence) + result = await accessor.all() + + assert isinstance(result, list) + assert len(result) == amount_of_all_entries + assert all(map(check_entry, result)) + + # filter (with no args is the same as all) + + requester.flush().queue_responses(*response_sequence) + result = await accessor.filter() + + assert isinstance(result, list) + assert len(result) == amount_of_all_entries + assert all(map(check_entry, result)) + + # iter + + requester.flush().queue_responses(*response_sequence) + result = accessor.iter() + + # see no requests made at first + assert len(requester.queue) == len(response_sequence) + assert isinstance(result, AsyncGenerator) + + n = 11 + first_entries = await n_entries_as_list(result, n=n) + assert len(first_entries) == n + + # see 2 "pages" read + assert len(requester.queue) == len(response_sequence) - 2 + + rest_entries = [i async for i in result] + assert len(rest_entries) == amount_of_all_entries - n + assert all(map(check_entry, (*first_entries, *rest_entries))) + + # now all results are read + assert len(requester.queue) == 0 diff --git a/tests/unit/test_dummy.py b/tests/unit/test_dummy.py deleted file 
mode 100644 index e71b4902..00000000 --- a/tests/unit/test_dummy.py +++ /dev/null @@ -1,2 +0,0 @@ -def test_dummy_integration() -> None: - assert 1 == 1 # noqa: S101 diff --git a/tests/unit/test_requesters.py b/tests/unit/test_requesters.py new file mode 100644 index 00000000..28798f28 --- /dev/null +++ b/tests/unit/test_requesters.py @@ -0,0 +1,121 @@ +from dataclasses import dataclass +from functools import partial +from typing import Any, Self +import json + +import pytest + +from adcm_aio_client.core.errors import ResponseDataConversionError, ResponseError +from adcm_aio_client.core.requesters import DefaultRequester, HTTPXRequesterResponse + +pytestmark = [pytest.mark.asyncio] + + +@dataclass() +class HTTPXLikeResponse: + status_code: int = 200 + data: str = "{}" + + def json(self: Self) -> Any: # noqa: ANN401 + return json.loads(self.data) + + +def build_mock_response(response: HTTPXLikeResponse): # noqa: ANN201 + async def return_response(*a, **kw) -> HTTPXLikeResponse: # noqa: ANN002, ANN003 + _ = a, kw + return response + + return return_response + + +@pytest.fixture() +def httpx_requester() -> DefaultRequester: + return DefaultRequester(base_url="dummy", retries=1, retry_interval=0) + + +@pytest.mark.parametrize( + ("method", "status_code", "call_kwargs"), + [("get", 200, {}), ("post", 201, {"data": {}}), ("patch", 299, {"data": {}}), ("delete", 204, {})], + ids=lambda value: value if not isinstance(value, dict) else "kw", +) +async def test_successful_request( + method: str, status_code: int, call_kwargs: dict, httpx_requester: DefaultRequester, monkeypatch: pytest.MonkeyPatch +) -> None: + requester = httpx_requester + + response = HTTPXLikeResponse(status_code=status_code, data="{}") + return_response = build_mock_response(response) + monkeypatch.setattr(requester.client, "request", return_response) + + result = await getattr(requester, method)(**call_kwargs) + + assert isinstance(result, HTTPXRequesterResponse) + assert result.response is response + assert result.as_dict() == {} + + +async def test_successful_response_data_conversion( + httpx_requester: DefaultRequester, monkeypatch: pytest.MonkeyPatch +) -> None: + requester = httpx_requester + + return_response = build_mock_response(HTTPXLikeResponse(data="{}")) + monkeypatch.setattr(requester.client, "request", return_response) + + response = await requester.get() + assert response.as_dict() == {} + + return_response = build_mock_response(HTTPXLikeResponse(data="[]")) + monkeypatch.setattr(requester.client, "request", return_response) + + response = await requester.delete() + assert response.as_list() == [] + + +@pytest.mark.parametrize("status_code", [300, 301, 399, 400, 403, 499, 500, 501, 599]) +async def test_raising_client_error_for_status( + status_code: int, httpx_requester: DefaultRequester, monkeypatch: pytest.MonkeyPatch +) -> None: + requester = httpx_requester + + return_response = build_mock_response(HTTPXLikeResponse(status_code=status_code, data="")) + monkeypatch.setattr(requester.client, "request", return_response) + + for method in ( + partial(requester.get, query={}), + partial(requester.post, data={}), + partial(requester.patch, data={}), + requester.delete, + ): + with pytest.raises(ResponseError): + await method() + + +async def test_response_as_dict_error_on_wrong_type( + httpx_requester: DefaultRequester, monkeypatch: pytest.MonkeyPatch +) -> None: + requester = httpx_requester + + for incorrect_data in ("[]", "{,"): + return_response = build_mock_response(HTTPXLikeResponse(data=incorrect_data)) + 
monkeypatch.setattr(requester.client, "request", return_response) + + response = await requester.get() + + with pytest.raises(ResponseDataConversionError): + response.as_dict() + + +async def test_response_as_list_error_on_wrong_type( + httpx_requester: DefaultRequester, monkeypatch: pytest.MonkeyPatch +) -> None: + requester = httpx_requester + + for incorrect_data in ("{}", "[,"): + return_response = build_mock_response(HTTPXLikeResponse(data=incorrect_data)) + monkeypatch.setattr(requester.client, "request", return_response) + + response = await requester.get() + + with pytest.raises(ResponseDataConversionError): + response.as_list() diff --git a/tests/unit/utils.py b/tests/unit/utils.py new file mode 100644 index 00000000..e42b7579 --- /dev/null +++ b/tests/unit/utils.py @@ -0,0 +1,14 @@ +from typing import AsyncGenerator + + +async def n_entries_as_list[T](gen: AsyncGenerator[T, None], n: int) -> list[T]: + result = [] + i = 1 + + async for entry in gen: + result.append(entry) + if i == n: + break + i += 1 + + return result From b52e635b778010c1866a63756d309bf57fce8910 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Mon, 18 Nov 2024 23:05:03 +0500 Subject: [PATCH 07/46] ADCM-6114 Implement Cluster own methods & drop caches on `refresh` call (#9) --- .github/CODEOWNERS | 2 +- adcm_aio_client/core/objects/_base.py | 42 ++++++++++++++++++- adcm_aio_client/core/objects/_common.py | 30 ++++++++++++++ adcm_aio_client/core/objects/_imports.py | 1 + adcm_aio_client/core/objects/_mapping.py | 3 ++ adcm_aio_client/core/objects/cm.py | 53 ++++++++++++++++-------- tests/unit/test_objects_base.py | 39 +++++++++++++++++ 7 files changed, 150 insertions(+), 20 deletions(-) create mode 100644 adcm_aio_client/core/objects/_imports.py create mode 100644 adcm_aio_client/core/objects/_mapping.py create mode 100644 tests/unit/test_objects_base.py diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9f624206..26d4916e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,2 +1,2 @@ -* @a-alferov +*.* @a-alferov *.py @Sealwing @DanBalalan @Starovoitov diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index a2a9c34d..28066726 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -1,14 +1,42 @@ +from collections import deque +from functools import cached_property from typing import Any, Self from adcm_aio_client.core.requesters import Requester -from adcm_aio_client.core.types import AwareOfOwnPath, WithRequester +from adcm_aio_client.core.types import AwareOfOwnPath, Endpoint, WithRequester class InteractiveObject(WithRequester, AwareOfOwnPath): + _delete_on_refresh: deque[str] + + def __init_subclass__(cls: type[Self]) -> None: + super().__init_subclass__() + + # names of cached properties, so they can be deleted + cls._delete_on_refresh = deque() + for name in dir(cls): + # None is for declared, but unset values + attr = getattr(cls, name, None) + if isinstance(attr, cached_property): + cls._delete_on_refresh.append(name) + def __init__(self: Self, requester: Requester, data: dict[str, Any]) -> None: self._requester = requester self._data = data + @property + def id(self: Self) -> int: + # it's the default behavior, without id many things can't be done + return int(self._data["id"]) + + async def refresh(self: Self) -> Self: + response = await self._requester.get(*self.get_own_path()) + self._data = response.as_dict() + # todo drop caches + self._clear_cache() + + return self + def _construct[Object: 
"InteractiveObject"](self: Self, what: type[Object], from_data: dict[str, Any]) -> Object: return what(requester=self._requester, data=from_data) @@ -17,6 +45,18 @@ def _construct_child[Child: "InteractiveChildObject"]( ) -> Child: return what(requester=self._requester, data=from_data, parent=self) + def _clear_cache(self: Self) -> None: + for name in self._delete_on_refresh: + # works for cached_property + delattr(self, name) + + +class RootInteractiveObject(InteractiveObject): + PATH_PREFIX: str + + def get_own_path(self: Self) -> Endpoint: + return self.PATH_PREFIX, self.id + class InteractiveChildObject[Parent](InteractiveObject): def __init__(self: Self, parent: Parent, requester: Requester, data: dict[str, Any]) -> None: diff --git a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py index 9a3bc5b8..32b839b7 100644 --- a/adcm_aio_client/core/objects/_common.py +++ b/adcm_aio_client/core/objects/_common.py @@ -1,3 +1,4 @@ +from functools import cached_property from typing import Self from adcm_aio_client.core.objects._base import AwareOfOwnPath, WithRequester @@ -6,3 +7,32 @@ class Deletable(WithRequester, AwareOfOwnPath): async def delete(self: Self) -> None: await self._requester.delete(*self.get_own_path()) + + +# todo whole section lacking implementation (and maybe code move is required) +class WithConfig(WithRequester, AwareOfOwnPath): + @cached_property + def config(self: Self) -> ...: ... + + @cached_property + def config_history(self: Self) -> ...: ... + + +class WithActions(WithRequester, AwareOfOwnPath): + @cached_property + def actions(self: Self) -> ...: ... + + +class WithUpgrades(WithRequester, AwareOfOwnPath): + @cached_property + def upgrades(self: Self) -> ...: ... + + +class WithConfigGroups(WithRequester, AwareOfOwnPath): + @cached_property + def config_groups(self: Self) -> ...: ... + + +class WithActionHostGroups(WithRequester, AwareOfOwnPath): + @cached_property + def action_host_groups(self: Self) -> ...: ... diff --git a/adcm_aio_client/core/objects/_imports.py b/adcm_aio_client/core/objects/_imports.py new file mode 100644 index 00000000..31dda734 --- /dev/null +++ b/adcm_aio_client/core/objects/_imports.py @@ -0,0 +1 @@ +class ClusterImports: ... diff --git a/adcm_aio_client/core/objects/_mapping.py b/adcm_aio_client/core/objects/_mapping.py new file mode 100644 index 00000000..d7b99c6f --- /dev/null +++ b/adcm_aio_client/core/objects/_mapping.py @@ -0,0 +1,3 @@ +class ClusterMapping: + # todo lacking implementation + ... diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 1e40b75b..215d5811 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -7,19 +7,29 @@ PaginatedChildAccessor, ) from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject -from adcm_aio_client.core.objects._common import Deletable +from adcm_aio_client.core.objects._common import ( + Deletable, + WithActionHostGroups, + WithActions, + WithConfig, + WithConfigGroups, + WithUpgrades, +) +from adcm_aio_client.core.objects._imports import ClusterImports +from adcm_aio_client.core.objects._mapping import ClusterMapping from adcm_aio_client.core.types import Endpoint class Bundle(Deletable, InteractiveObject): ... -class Cluster(Deletable, InteractiveObject): - # data-based properties +class Host(Deletable, InteractiveObject): ... 
- @property - def id(self: Self) -> int: - return int(self._data["id"]) + +class Cluster( + Deletable, WithActions, WithUpgrades, WithConfig, WithActionHostGroups, WithConfigGroups, InteractiveObject +): + # data-based properties @property def name(self: Self) -> str: @@ -51,25 +61,28 @@ async def get_status(self: Self) -> Literal["up", "down"]: return response.as_dict()["status"] async def set_ansible_forks(self: Self, value: int) -> Self: - # todo - ... + await self._requester.post( + *self.get_own_path(), "ansible-config", data={"config": {"defaults": {"forks": value}}, "adcmMeta": {}} + ) + return self # nodes and managers to access + @cached_property + def mapping(self: Self) -> ClusterMapping: + return ClusterMapping() + @cached_property def services(self: Self) -> "ServicesNode": return ServicesNode(parent=self, path=(*self.get_own_path(), "services"), requester=self._requester) - # todo IMPLEMENT: - # Nodes: - # - hosts: "ClusterHostsNode" - # - imports (probably not an accessor node, but some cool class) - # - actions - # - upgrades - # - config-groups - # Managers: - # - config - # - mapping + @cached_property + def hosts(self: Self) -> "HostsInClusterNode": + return HostsInClusterNode(path=(*self.get_own_path(), "hosts"), requester=self._requester) + + @cached_property + def imports(self: Self) -> ClusterImports: + return ClusterImports() def get_own_path(self: Self) -> Endpoint: return "clusters", self.id @@ -82,6 +95,10 @@ def get_own_path(self: Self) -> Endpoint: return ("clusters",) +class HostsInClusterNode(PaginatedAccessor[Host, None]): + class_type = Host + + class Service(InteractiveChildObject[Cluster]): @property def id(self: Self) -> int: diff --git a/tests/unit/test_objects_base.py b/tests/unit/test_objects_base.py new file mode 100644 index 00000000..309d467a --- /dev/null +++ b/tests/unit/test_objects_base.py @@ -0,0 +1,39 @@ +from functools import cached_property +from typing import Self + +import pytest + +from adcm_aio_client.core.objects._base import InteractiveObject +from adcm_aio_client.core.types import Endpoint +from tests.unit.mocks.requesters import QueueRequester + +pytestmark = [pytest.mark.asyncio] + + +async def test_cache_cleaning(queue_requester: QueueRequester) -> None: + class ObjectA(InteractiveObject): + def get_own_path(self: Self) -> Endpoint: + return "not", "important" + + @property + def plain(self: Self) -> str: + return self._data["name"] + + @cached_property + def complex(self: Self) -> str: + return self._data["name"] + + data_1 = {"id": 4, "name": "awesome"} + data_2 = {"id": 4, "name": "best"} + + instance = ObjectA(requester=queue_requester, data=data_1) + + assert instance.plain == instance.complex + assert instance.complex == data_1["name"] + + queue_requester.queue_responses(data_2) + + await instance.refresh() + + assert instance.plain == instance.complex + assert instance.complex == data_2["name"] From e4ef859dcf06ba28b09e3f42dc16534a3e6ffc7c Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Wed, 20 Nov 2024 10:43:24 +0300 Subject: [PATCH 08/46] ADCM-6118: Implement Host object (#10) Co-authored-by: astarovo --- adcm_aio_client/core/client.py | 6 ++- adcm_aio_client/core/objects/_base.py | 14 ++++- adcm_aio_client/core/objects/cm.py | 78 ++++++++++++++++++++------- 3 files changed, 76 insertions(+), 22 deletions(-) diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index 1bc64065..437de023 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -13,7 +13,7 @@ from 
functools import cached_property from typing import Self -from adcm_aio_client.core.objects.cm import ClustersNode +from adcm_aio_client.core.objects.cm import ClustersNode, HostsNode from adcm_aio_client.core.requesters import Requester from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Verify @@ -26,6 +26,10 @@ def __init__(self: Self, requester: Requester) -> None: def clusters(self: Self) -> ClustersNode: return ClustersNode(path=(), requester=self._requester) + @cached_property + def hosts(self: Self) -> HostsNode: + return HostsNode(path=(), requester=self._requester) + async def build_client( url: str | list[str], diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index 28066726..afd419bd 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -55,7 +55,19 @@ class RootInteractiveObject(InteractiveObject): PATH_PREFIX: str def get_own_path(self: Self) -> Endpoint: - return self.PATH_PREFIX, self.id + # change here + return self._build_own_path(self.id) + + # let's add this one + @classmethod + async def with_id(cls: type[Self], requester: Requester, object_id: int) -> Self: + object_path = cls._build_own_path(object_id) + response = await requester.get(*object_path) + return cls(requester=requester, data=response.as_dict()) + + @classmethod + def _build_own_path(cls: type[Self], object_id: int) -> Endpoint: + return cls.PATH_PREFIX, object_id class InteractiveChildObject[Parent](InteractiveObject): diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 215d5811..b9edc8e4 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -1,12 +1,13 @@ +from enum import Enum from functools import cached_property -from typing import Literal, Self +from typing import Self from adcm_aio_client.core.objects._accessors import ( NonPaginatedChildAccessor, PaginatedAccessor, PaginatedChildAccessor, ) -from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject +from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject, RootInteractiveObject from adcm_aio_client.core.objects._common import ( Deletable, WithActionHostGroups, @@ -20,15 +21,18 @@ from adcm_aio_client.core.types import Endpoint -class Bundle(Deletable, InteractiveObject): ... +class ADCMEntityStatus(str, Enum): + UP = "up" + DOWN = "down" -class Host(Deletable, InteractiveObject): ... +class Bundle(Deletable, InteractiveObject): ... 
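Editor's note: `RootInteractiveObject.with_id` gives root-level entities a one-call lookup by id: it builds `PATH_PREFIX/{id}`, fetches it, and wraps the payload. A hedged usage sketch for `Cluster`, which becomes a `RootInteractiveObject` in the hunk that follows (the id and the already-authenticated requester are assumptions of the caller, not part of the patch):

```python
from adcm_aio_client.core.objects.cm import Cluster
from adcm_aio_client.core.requesters import Requester


async def load_cluster(requester: Requester, cluster_id: int) -> Cluster:
    # issues GET clusters/{cluster_id} and wraps the response payload
    return await Cluster.with_id(requester=requester, object_id=cluster_id)
```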
class Cluster( - Deletable, WithActions, WithUpgrades, WithConfig, WithActionHostGroups, WithConfigGroups, InteractiveObject + Deletable, WithActions, WithUpgrades, WithConfig, WithActionHostGroups, WithConfigGroups, RootInteractiveObject ): + PATH_PREFIX = "clusters" # data-based properties @property @@ -56,9 +60,9 @@ async def bundle(self: Self) -> Bundle: # object-specific methods - async def get_status(self: Self) -> Literal["up", "down"]: + async def get_status(self: Self) -> ADCMEntityStatus: response = await self._requester.get(*self.get_own_path()) - return response.as_dict()["status"] + return ADCMEntityStatus(response.as_dict()["status"]) async def set_ansible_forks(self: Self, value: int) -> Self: await self._requester.post( @@ -85,7 +89,7 @@ def imports(self: Self) -> ClusterImports: return ClusterImports() def get_own_path(self: Self) -> Endpoint: - return "clusters", self.id + return self.PATH_PREFIX, self.id class ClustersNode(PaginatedAccessor[Cluster, None]): @@ -95,15 +99,7 @@ def get_own_path(self: Self) -> Endpoint: return ("clusters",) -class HostsInClusterNode(PaginatedAccessor[Host, None]): - class_type = Host - - class Service(InteractiveChildObject[Cluster]): - @property - def id(self: Self) -> int: - return int(self._data["id"]) - def get_own_path(self: Self) -> Endpoint: return (*self._parent.get_own_path(), "services", self.id) @@ -117,13 +113,55 @@ class ServicesNode(PaginatedChildAccessor[Cluster, Service, None]): class Component(InteractiveChildObject[Service]): - @property - def id(self: Self) -> int: - return int(self._data["id"]) - def get_own_path(self: Self) -> Endpoint: return (*self._parent.get_own_path(), "components", self.id) class ComponentsNode(NonPaginatedChildAccessor[Service, Component, None]): class_type = Component + + +class HostProvider(Deletable, WithActions, WithUpgrades, WithConfig, RootInteractiveObject): + PATH_PREFIX = "hostproviders" + + +class HostProvidersNode(PaginatedChildAccessor): ... + + +class Host(Deletable, RootInteractiveObject): + PATH_PREFIX = "hosts" + + @property + def name(self: Self) -> str: + return str(self._data["name"]) + + @property + def description(self: Self) -> str: + return str(self._data["description"]) + + async def get_status(self: Self) -> ADCMEntityStatus: + response = await self._requester.get(*self.get_own_path()) + return ADCMEntityStatus(response.as_dict()["status"]) + + @cached_property + async def cluster(self: Self) -> Cluster | None: + if not self._data["cluster"]: + return None + return await Cluster.with_id(requester=self._requester, object_id=self._data["cluster"]["id"]) + + @cached_property + async def hostprovider(self: Self) -> HostProvider: + return await HostProvider.with_id(requester=self._requester, object_id=self._data["hostprovider"]["id"]) + + def get_own_path(self: Self) -> Endpoint: + return self.PATH_PREFIX, self.id + + +class HostsNode(PaginatedAccessor[Host, None]): + class_type = Host + + # TODO: define def __init__(self, hostprovider: Hostprovider, name: str, cluster: Cluster = None): ... 
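Editor's note: the new `Host` object combines a status call with lazy lookups of its linked cluster and provider. A hedged walk-through over the client's `hosts` node (how the `ADCMClient` instance is obtained is assumed; at this point `cluster` is still a plain `cached_property` over a coroutine, which a later patch in the series swaps for an async-aware cache):

```python
from adcm_aio_client.core.client import ADCMClient


async def report_hosts(client: ADCMClient) -> None:
    async for host in client.hosts.iter():
        status = await host.get_status()
        cluster = await host.cluster  # None when the host is not bound to a cluster
        bound_to = cluster.name if cluster is not None else "<unbound>"
        print(f"{host.name}: {status.value} (cluster: {bound_to})")
```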
+ + +class HostsInClusterNode(PaginatedAccessor[Host, None]): + class_type = Host From c350f0225f4373e6666f64c57b86c572a310c5a3 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Wed, 20 Nov 2024 13:28:04 +0300 Subject: [PATCH 09/46] ADCM-6119: Implement HostProvider object (#8) Co-authored-by: astarovo --- adcm_aio_client/core/client.py | 6 +++++- adcm_aio_client/core/objects/cm.py | 21 ++++++++++++++++++--- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index 437de023..832f951d 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -13,7 +13,7 @@ from functools import cached_property from typing import Self -from adcm_aio_client.core.objects.cm import ClustersNode, HostsNode +from adcm_aio_client.core.objects.cm import ClustersNode, HostProvidersNode, HostsNode from adcm_aio_client.core.requesters import Requester from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Verify @@ -30,6 +30,10 @@ def clusters(self: Self) -> ClustersNode: def hosts(self: Self) -> HostsNode: return HostsNode(path=(), requester=self._requester) + @cached_property + def hostproviders(self: Self) -> HostProvidersNode: + return HostProvidersNode(path=(), requester=self._requester) + async def build_client( url: str | list[str], diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index b9edc8e4..86d5a289 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -123,9 +123,26 @@ class ComponentsNode(NonPaginatedChildAccessor[Service, Component, None]): class HostProvider(Deletable, WithActions, WithUpgrades, WithConfig, RootInteractiveObject): PATH_PREFIX = "hostproviders" + # data-based properties + @property + def name(self: Self) -> str: + return str(self._data["name"]) -class HostProvidersNode(PaginatedChildAccessor): ... + @property + def description(self: Self) -> str: + return str(self._data["description"]) + + @property + def display_name(self: Self) -> str: + return str(self._data["prototype"]["displayName"]) + + def get_own_path(self: Self) -> Endpoint: + return self.PATH_PREFIX, self.id + + +class HostProvidersNode(PaginatedAccessor[HostProvider, None]): + class_type = HostProvider class Host(Deletable, RootInteractiveObject): @@ -160,8 +177,6 @@ def get_own_path(self: Self) -> Endpoint: class HostsNode(PaginatedAccessor[Host, None]): class_type = Host - # TODO: define def __init__(self, hostprovider: Hostprovider, name: str, cluster: Cluster = None): ... 
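Editor's note: with the `hostproviders` accessor wired into the client, providers are listed the same way as clusters and hosts. A hedged sketch (client construction is assumed to happen elsewhere):

```python
from adcm_aio_client.core.client import ADCMClient


async def print_hostproviders(client: ADCMClient) -> None:
    for provider in await client.hostproviders.list():
        print(provider.name, "-", provider.display_name)
```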
- class HostsInClusterNode(PaginatedAccessor[Host, None]): class_type = Host From 621ae63511cd93b26bc8c638ed7d68121770bda6 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Thu, 21 Nov 2024 10:01:35 +0300 Subject: [PATCH 10/46] ADCM-6115: Implement Service object (#12) --- adcm_aio_client/core/objects/_base.py | 6 ++- adcm_aio_client/core/objects/_common.py | 7 ++++ adcm_aio_client/core/objects/cm.py | 52 +++++++++++++++++-------- adcm_aio_client/core/types.py | 6 +++ 4 files changed, 52 insertions(+), 19 deletions(-) diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index afd419bd..6c192e6c 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -1,4 +1,5 @@ from collections import deque +from contextlib import suppress from functools import cached_property from typing import Any, Self @@ -47,8 +48,9 @@ def _construct_child[Child: "InteractiveChildObject"]( def _clear_cache(self: Self) -> None: for name in self._delete_on_refresh: - # works for cached_property - delattr(self, name) + # Works for cached_property. Suppresses errors on deleting values not yet cached (absent in self.__dict__) + with suppress(AttributeError): + delattr(self, name) class RootInteractiveObject(InteractiveObject): diff --git a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py index 32b839b7..f9a9ba37 100644 --- a/adcm_aio_client/core/objects/_common.py +++ b/adcm_aio_client/core/objects/_common.py @@ -2,6 +2,7 @@ from typing import Self from adcm_aio_client.core.objects._base import AwareOfOwnPath, WithRequester +from adcm_aio_client.core.types import ADCMEntityStatus class Deletable(WithRequester, AwareOfOwnPath): @@ -9,6 +10,12 @@ async def delete(self: Self) -> None: await self._requester.delete(*self.get_own_path()) +class WithStatus(WithRequester, AwareOfOwnPath): + async def get_status(self: Self) -> ADCMEntityStatus: + response = await self._requester.get(*self.get_own_path()) + return ADCMEntityStatus(response.as_dict()["status"]) + + # todo whole section lacking implementation (and maybe code move is required) class WithConfig(WithRequester, AwareOfOwnPath): @cached_property diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 86d5a289..9a8323fc 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -1,9 +1,7 @@ -from enum import Enum from functools import cached_property from typing import Self from adcm_aio_client.core.objects._accessors import ( - NonPaginatedChildAccessor, PaginatedAccessor, PaginatedChildAccessor, ) @@ -14,23 +12,26 @@ WithActions, WithConfig, WithConfigGroups, + WithStatus, WithUpgrades, ) from adcm_aio_client.core.objects._imports import ClusterImports from adcm_aio_client.core.objects._mapping import ClusterMapping -from adcm_aio_client.core.types import Endpoint - - -class ADCMEntityStatus(str, Enum): - UP = "up" - DOWN = "down" +from adcm_aio_client.core.types import ADCMEntityStatus, Endpoint class Bundle(Deletable, InteractiveObject): ... 
class Cluster( - Deletable, WithActions, WithUpgrades, WithConfig, WithActionHostGroups, WithConfigGroups, RootInteractiveObject + WithStatus, + Deletable, + WithActions, + WithUpgrades, + WithConfig, + WithActionHostGroups, + WithConfigGroups, + RootInteractiveObject, ): PATH_PREFIX = "clusters" # data-based properties @@ -59,11 +60,6 @@ async def bundle(self: Self) -> Bundle: return self._construct(what=Bundle, from_data=response.as_dict()) # object-specific methods - - async def get_status(self: Self) -> ADCMEntityStatus: - response = await self._requester.get(*self.get_own_path()) - return ADCMEntityStatus(response.as_dict()["status"]) - async def set_ansible_forks(self: Self, value: int) -> Self: await self._requester.post( *self.get_own_path(), "ansible-config", data={"config": {"defaults": {"forks": value}}, "adcmMeta": {}} @@ -99,9 +95,31 @@ def get_own_path(self: Self) -> Endpoint: return ("clusters",) -class Service(InteractiveChildObject[Cluster]): +class Service( + WithStatus, + Deletable, + WithActions, + WithConfig, + WithActionHostGroups, + WithConfigGroups, + InteractiveChildObject[Cluster], +): + PATH_PREFIX = "services" + + @property + def name(self: Self) -> str: + return self._data["name"] + + @property + def display_name(self: Self) -> str: + return self._data["displayName"] + + @cached_property + def cluster(self: Self) -> Cluster: + return self._parent + def get_own_path(self: Self) -> Endpoint: - return (*self._parent.get_own_path(), "services", self.id) + return *self._parent.get_own_path(), "services", self.id @cached_property def components(self: Self) -> "ComponentsNode": @@ -117,7 +135,7 @@ def get_own_path(self: Self) -> Endpoint: return (*self._parent.get_own_path(), "components", self.id) -class ComponentsNode(NonPaginatedChildAccessor[Service, Component, None]): +class ComponentsNode(PaginatedChildAccessor[Service, Component, None]): class_type = Component diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index ee14b747..487b0c53 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -11,6 +11,7 @@ # limitations under the License. from dataclasses import asdict, dataclass +from enum import Enum from typing import Optional, Protocol, Self # Init / Authorization @@ -67,3 +68,8 @@ class WithRequester(Protocol): class AwareOfOwnPath(Protocol): def get_own_path(self: Self) -> Endpoint: ... 
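Editor's note: with `WithStatus` mixed in and the child accessors typed, walking a cluster's services reads naturally. A hedged sketch (the `Cluster` instance is assumed to come from `client.clusters`; nothing here beyond the shown properties is implied):

```python
from adcm_aio_client.core.objects.cm import Cluster


async def print_services(cluster: Cluster) -> None:
    for service in await cluster.services.list():
        status = await service.get_status()
        print(f"{service.display_name}: {status.value}")
```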
+ + +class ADCMEntityStatus(str, Enum): + UP = "up" + DOWN = "down" From 55cceaf7ca675320238749c1ad85dcbe4e4a5c5b Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Fri, 22 Nov 2024 11:53:28 +0300 Subject: [PATCH 11/46] ADCM-6117: Implement Component object (#13) --- adcm_aio_client/core/objects/_base.py | 2 +- adcm_aio_client/core/objects/cm.py | 50 +++++++++++++++++++++++---- 2 files changed, 45 insertions(+), 7 deletions(-) diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index 6c192e6c..958db4bb 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -18,7 +18,7 @@ def __init_subclass__(cls: type[Self]) -> None: for name in dir(cls): # None is for declared, but unset values attr = getattr(cls, name, None) - if isinstance(attr, cached_property): + if isinstance(attr, cached_property): # TODO: asyncstdlib.functools.CachedProperty cls._delete_on_refresh.append(name) def __init__(self: Self, requester: Requester, data: dict[str, Any]) -> None: diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 9a8323fc..90e26d50 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -1,6 +1,7 @@ from functools import cached_property from typing import Self +from adcm_aio_client.core.errors import NotFoundError from adcm_aio_client.core.objects._accessors import ( PaginatedAccessor, PaginatedChildAccessor, @@ -49,7 +50,7 @@ def description(self: Self) -> str: # todo think how such properties will be invalidated when data is updated # during `refresh()` / `reread()` calls. # See cache invalidation or alternatives in documentation for `cached_property` - @cached_property + @cached_property # TODO: replace with asyncstdlib.functools.cached_property async def bundle(self: Self) -> Bundle: prototype_id = self._data["prototype"]["id"] response = await self._requester.get("prototypes", prototype_id) @@ -119,7 +120,7 @@ def cluster(self: Self) -> Cluster: return self._parent def get_own_path(self: Self) -> Endpoint: - return *self._parent.get_own_path(), "services", self.id + return *self._parent.get_own_path(), self.PATH_PREFIX, self.id @cached_property def components(self: Self) -> "ComponentsNode": @@ -130,9 +131,46 @@ class ServicesNode(PaginatedChildAccessor[Cluster, Service, None]): class_type = Service -class Component(InteractiveChildObject[Service]): +class Component( + WithStatus, WithActions, WithConfig, WithActionHostGroups, WithConfigGroups, InteractiveChildObject[Service] +): + PATH_PREFIX = "components" + + @property + def name(self: Self) -> str: + return self._data["name"] + + @property + def display_name(self: Self) -> str: + return self._data["displayName"] + + @cached_property # TODO: replace with asyncstdlib.functools.cached_property + async def constraint(self: Self) -> list[int | str]: + response = (await self._requester.get(*self.cluster.get_own_path(), "mapping", "components")).as_list() + for component in response: + if component["id"] == self.id: + return component["constraints"] + + raise NotFoundError + + @cached_property + def service(self: Self) -> Service: + return self._parent + + @cached_property + def cluster(self: Self) -> Cluster: + return self.service.cluster + + @cached_property + def hosts(self: Self) -> "HostsInClusterNode": + return HostsInClusterNode( # TODO: new ComponentHostsNode + path=(*self.cluster.get_own_path(), "hosts"), + requester=self._requester, + # filter=Filter({"componentId": self.id}), + ) + def 
get_own_path(self: Self) -> Endpoint: - return (*self._parent.get_own_path(), "components", self.id) + return *self._parent.get_own_path(), self.PATH_PREFIX, self.id class ComponentsNode(PaginatedChildAccessor[Service, Component, None]): @@ -178,13 +216,13 @@ async def get_status(self: Self) -> ADCMEntityStatus: response = await self._requester.get(*self.get_own_path()) return ADCMEntityStatus(response.as_dict()["status"]) - @cached_property + @cached_property # TODO: replace with asyncstdlib.functools.cached_property async def cluster(self: Self) -> Cluster | None: if not self._data["cluster"]: return None return await Cluster.with_id(requester=self._requester, object_id=self._data["cluster"]["id"]) - @cached_property + @cached_property # TODO: replace with asyncstdlib.functools.cached_property async def hostprovider(self: Self) -> HostProvider: return await HostProvider.with_id(requester=self._requester, object_id=self._data["hostprovider"]["id"]) From 32ab331f368bab62c5aac6f556c1f5d3d3ee060d Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Fri, 22 Nov 2024 12:28:33 +0300 Subject: [PATCH 12/46] ADCM-6122: Implement ADCM object (#11) Co-authored-by: astarovo --- adcm_aio_client/core/client.py | 6 +++++- adcm_aio_client/core/objects/cm.py | 15 +++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index 832f951d..c2dc3256 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -13,7 +13,7 @@ from functools import cached_property from typing import Self -from adcm_aio_client.core.objects.cm import ClustersNode, HostProvidersNode, HostsNode +from adcm_aio_client.core.objects.cm import ADCM, ClustersNode, HostProvidersNode, HostsNode from adcm_aio_client.core.requesters import Requester from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Verify @@ -34,6 +34,10 @@ def hosts(self: Self) -> HostsNode: def hostproviders(self: Self) -> HostProvidersNode: return HostProvidersNode(path=(), requester=self._requester) + @cached_property + def adcm(self: Self) -> ADCM: + return ADCM(requester=self._requester, data={}) + async def build_client( url: str | list[str], diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 90e26d50..f36bec17 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -21,6 +21,21 @@ from adcm_aio_client.core.types import ADCMEntityStatus, Endpoint +class ADCM(InteractiveObject, WithActions, WithConfig): + @property + def id(self: Self) -> int: + return 1 + + @cached_property + async def version(self: Self) -> str: + # TODO: override root_path for being without /api/v2 + response = await self._requester.get("versions") + return response.as_dict()["adcm"]["version"] + + def get_own_path(self: Self) -> Endpoint: + return ("adcm",) + + class Bundle(Deletable, InteractiveObject): ... 
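Editor's note: the `ADCM` node is a singleton-style object (its `id` is hard-wired to 1) whose `version` is fetched lazily from the `versions` endpoint, which per the TODO still needs a root-path override. A hedged usage sketch (client construction assumed elsewhere):

```python
from adcm_aio_client.core.client import ADCMClient


async def print_adcm_version(client: ADCMClient) -> None:
    # lazily fetched once and cached on the ADCM object
    print(await client.adcm.version)
```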
From 3ecf9cc21624ae160f4e89e4f53e784e544bc742 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Tue, 26 Nov 2024 10:19:33 +0300 Subject: [PATCH 13/46] ADCM-6133: Implement node for getting hosts (#15) --- adcm_aio_client/core/client.py | 10 ++-- adcm_aio_client/core/errors.py | 4 ++ adcm_aio_client/core/objects/_accessors.py | 20 +++++--- adcm_aio_client/core/objects/cm.py | 60 +++++++++++++++++++--- adcm_aio_client/core/requesters.py | 8 +-- adcm_aio_client/core/types.py | 4 +- tests/unit/mocks/requesters.py | 4 +- 7 files changed, 83 insertions(+), 27 deletions(-) diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index c2dc3256..3606e5a0 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -13,7 +13,7 @@ from functools import cached_property from typing import Self -from adcm_aio_client.core.objects.cm import ADCM, ClustersNode, HostProvidersNode, HostsNode +from adcm_aio_client.core.objects.cm import ADCM, ClustersNode, HostProvidersNode, HostsAccessor from adcm_aio_client.core.requesters import Requester from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Verify @@ -24,15 +24,15 @@ def __init__(self: Self, requester: Requester) -> None: @cached_property def clusters(self: Self) -> ClustersNode: - return ClustersNode(path=(), requester=self._requester) + return ClustersNode(path=("clusters",), requester=self._requester) @cached_property - def hosts(self: Self) -> HostsNode: - return HostsNode(path=(), requester=self._requester) + def hosts(self: Self) -> HostsAccessor: + return HostsAccessor(path=("hosts",), requester=self._requester) @cached_property def hostproviders(self: Self) -> HostProvidersNode: - return HostProvidersNode(path=(), requester=self._requester) + return HostProvidersNode(path=("hostproviders",), requester=self._requester) @cached_property def adcm(self: Self) -> ADCM: diff --git a/adcm_aio_client/core/errors.py b/adcm_aio_client/core/errors.py index 29c36918..775e6fb5 100644 --- a/adcm_aio_client/core/errors.py +++ b/adcm_aio_client/core/errors.py @@ -83,3 +83,7 @@ class MultipleObjectsReturnedError(AccessorError): class ObjectDoesNotExistError(AccessorError): pass + + +class OperationError(AccessorError): + pass diff --git a/adcm_aio_client/core/objects/_accessors.py b/adcm_aio_client/core/objects/_accessors.py index f9a02646..3f93cda0 100644 --- a/adcm_aio_client/core/objects/_accessors.py +++ b/adcm_aio_client/core/objects/_accessors.py @@ -18,13 +18,17 @@ from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject from adcm_aio_client.core.types import Endpoint, QueryParameters, Requester, RequesterResponse +# filter for narrowing response objects +type AccessorFilter = QueryParameters | None + class Accessor[ReturnObject: InteractiveObject, Filter](ABC): class_type: type[ReturnObject] - def __init__(self: Self, path: Endpoint, requester: Requester) -> None: + def __init__(self: Self, path: Endpoint, requester: Requester, accessor_filter: AccessorFilter = None) -> None: self._path = path self._requester = requester + self._accessor_filter = accessor_filter or {} @abstractmethod async def iter(self: Self) -> AsyncGenerator[ReturnObject, None]: ... 
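Editor's note: an `accessor_filter` is a fixed set of query parameters the accessor merges into every page request, which is how the `Component.hosts` and `HostProvider.hosts` nodes later in this patch narrow the shared hosts endpoint. A hedged construction sketch (the provider name is made up for illustration):

```python
from adcm_aio_client.core.objects.cm import HostsAccessor
from adcm_aio_client.core.types import Requester


def hosts_of_provider(requester: Requester, provider_name: str) -> HostsAccessor:
    # every GET issued by this node carries hostproviderName=<provider_name>
    return HostsAccessor(
        path=("hosts",),
        requester=requester,
        accessor_filter={"hostproviderName": provider_name},
    )
```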
@@ -62,7 +66,7 @@ async def list(self: Self) -> list[ReturnObject]: return [self._create_object(obj) for obj in results] async def _request_endpoint(self: Self, query: QueryParameters) -> RequesterResponse: - return await self._requester.get(*self._path, query=query) + return await self._requester.get(*self._path, query={**query, **self._accessor_filter}) def _create_object(self: Self, data: dict[str, Any]) -> ReturnObject: return self.class_type(requester=self._requester, data=data) @@ -88,8 +92,10 @@ def _extract_results_from_response(self: Self, response: RequesterResponse) -> l class PaginatedChildAccessor[Parent, Child: InteractiveChildObject, Filter](PaginatedAccessor[Child, Filter]): - def __init__(self: Self, parent: Parent, path: Endpoint, requester: Requester) -> None: - super().__init__(path, requester) + def __init__( + self: Self, parent: Parent, path: Endpoint, requester: Requester, accessor_filter: AccessorFilter = None + ) -> None: + super().__init__(path, requester, accessor_filter) self._parent = parent def _create_object(self: Self, data: dict[str, Any]) -> Child: @@ -97,8 +103,10 @@ def _create_object(self: Self, data: dict[str, Any]) -> Child: class NonPaginatedChildAccessor[Parent, Child: InteractiveChildObject, Filter](Accessor[Child, Filter]): - def __init__(self: Self, parent: Parent, path: Endpoint, requester: Requester) -> None: - super().__init__(path, requester) + def __init__( + self: Self, parent: Parent, path: Endpoint, requester: Requester, accessor_filter: AccessorFilter = None + ) -> None: + super().__init__(path, requester, accessor_filter) self._parent = parent async def iter(self: Self) -> AsyncGenerator[Child, None]: diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index f36bec17..456a7ea8 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -1,7 +1,8 @@ from functools import cached_property -from typing import Self +from typing import Iterable, Self +import asyncio -from adcm_aio_client.core.errors import NotFoundError +from adcm_aio_client.core.errors import NotFoundError, OperationError, ResponseError from adcm_aio_client.core.objects._accessors import ( PaginatedAccessor, PaginatedChildAccessor, @@ -20,6 +21,8 @@ from adcm_aio_client.core.objects._mapping import ClusterMapping from adcm_aio_client.core.types import ADCMEntityStatus, Endpoint +type Filter = object # TODO: implement + class ADCM(InteractiveObject, WithActions, WithConfig): @property @@ -177,11 +180,11 @@ def cluster(self: Self) -> Cluster: return self.service.cluster @cached_property - def hosts(self: Self) -> "HostsInClusterNode": - return HostsInClusterNode( # TODO: new ComponentHostsNode + def hosts(self: Self) -> "HostsAccessor": + return HostsAccessor( path=(*self.cluster.get_own_path(), "hosts"), requester=self._requester, - # filter=Filter({"componentId": self.id}), + accessor_filter={"componentId": self.id}, ) def get_own_path(self: Self) -> Endpoint: @@ -208,6 +211,12 @@ def description(self: Self) -> str: def display_name(self: Self) -> str: return str(self._data["prototype"]["displayName"]) + @cached_property + def hosts(self: Self) -> "HostsAccessor": + return HostsAccessor( + path=("hosts",), requester=self._requester, accessor_filter={"hostproviderName": self.name} + ) + def get_own_path(self: Self) -> Endpoint: return self.PATH_PREFIX, self.id @@ -244,10 +253,45 @@ async def hostprovider(self: Self) -> HostProvider: def get_own_path(self: Self) -> Endpoint: return self.PATH_PREFIX, self.id + 
def __str__(self: Self) -> str: + return f"<{self.__class__.__name__} #{self.id} {self.name}>" + -class HostsNode(PaginatedAccessor[Host, None]): +class HostsAccessor(PaginatedAccessor[Host, dict | None]): class_type = Host -class HostsInClusterNode(PaginatedAccessor[Host, None]): - class_type = Host +class HostsInClusterNode(HostsAccessor): + async def add(self: Self, host: Host | Iterable[Host] | None = None, filters: Filter | None = None) -> None: + hosts = await self._get_hosts_from_arg_or_filter(host=host, filters=filters) + + await self._requester.post(*self._path, data=[{"hostId": host.id} for host in hosts]) + + async def remove(self: Self, host: Host | Iterable[Host] | None = None, filters: Filter | None = None) -> None: + hosts = await self._get_hosts_from_arg_or_filter(host=host, filters=filters) + + results = await asyncio.gather( + *(self._requester.delete(*self._path, host_.id) for host_ in hosts), return_exceptions=True + ) + + errors = set() + for host_, result in zip(hosts, results): + if isinstance(result, ResponseError): + errors.add(str(host_)) + + if errors: + errors = ", ".join(errors) + raise OperationError(f"Some hosts can't be deleted from cluster: {errors}") + + async def _get_hosts_from_arg_or_filter( + self: Self, host: Host | Iterable[Host] | None = None, filters: Filter | None = None + ) -> Iterable[Host]: + if all((host, filters)): + raise ValueError("`host` and `filters` arguments are mutually exclusive.") + + if host: + hosts = [host] if isinstance(host, Host) else host + else: + hosts = await self.filter(filters) # type: ignore # TODO + + return hosts diff --git a/adcm_aio_client/core/requesters.py b/adcm_aio_client/core/requesters.py index 98a68c96..80247a29 100644 --- a/adcm_aio_client/core/requesters.py +++ b/adcm_aio_client/core/requesters.py @@ -154,11 +154,11 @@ async def login(self: Self, credentials: Credentials) -> Self: async def get(self: Self, *path: PathPart, query: QueryParameters | None = None) -> HTTPXRequesterResponse: return await self.request(*path, method=self.client.get, params=query or {}) - async def post(self: Self, *path: PathPart, data: dict) -> HTTPXRequesterResponse: - return await self.request(*path, method=self.client.post, data=data) + async def post(self: Self, *path: PathPart, data: dict | list) -> HTTPXRequesterResponse: + return await self.request(*path, method=self.client.post, json=data) - async def patch(self: Self, *path: PathPart, data: dict) -> HTTPXRequesterResponse: - return await self.request(*path, method=self.client.patch, data=data) + async def patch(self: Self, *path: PathPart, data: dict | list) -> HTTPXRequesterResponse: + return await self.request(*path, method=self.client.patch, json=data) async def delete(self: Self, *path: PathPart) -> HTTPXRequesterResponse: return await self.request(*path, method=self.client.delete) diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index 487b0c53..0762c9b8 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -52,9 +52,9 @@ async def login(self: Self, credentials: Credentials) -> Self: ... async def get(self: Self, *path: PathPart, query: QueryParameters | None = None) -> RequesterResponse: ... - async def post(self: Self, *path: PathPart, data: dict) -> RequesterResponse: ... + async def post(self: Self, *path: PathPart, data: dict | list) -> RequesterResponse: ... - async def patch(self: Self, *path: PathPart, data: dict) -> RequesterResponse: ... 
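Editor's note: `HostsInClusterNode.add` and `.remove` accept either explicit `Host` objects or a filter (the filter branch is still marked TODO in this patch); `remove` deletes concurrently and folds failures into a single `OperationError`. A hedged usage sketch passing explicit hosts only (the cluster and host objects are assumed to be fetched elsewhere):

```python
from adcm_aio_client.core.objects.cm import Cluster, Host


async def move_host(source: Cluster, target: Cluster, host: Host) -> None:
    # detach from one cluster, then attach to another
    await source.hosts.remove(host=host)
    await target.hosts.add(host=host)
```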
+ async def patch(self: Self, *path: PathPart, data: dict | list) -> RequesterResponse: ... async def delete(self: Self, *path: PathPart) -> RequesterResponse: ... diff --git a/tests/unit/mocks/requesters.py b/tests/unit/mocks/requesters.py index b00895dd..5adadf68 100644 --- a/tests/unit/mocks/requesters.py +++ b/tests/unit/mocks/requesters.py @@ -39,11 +39,11 @@ async def get(self: Self, *path: PathPart, query: QueryParameters | None = None) _ = path, query return self._return_next_response() - async def post(self: Self, *path: PathPart, data: dict) -> RequesterResponse: + async def post(self: Self, *path: PathPart, data: dict | list) -> RequesterResponse: _ = path, data return self._return_next_response() - async def patch(self: Self, *path: PathPart, data: dict) -> RequesterResponse: + async def patch(self: Self, *path: PathPart, data: dict | list) -> RequesterResponse: _ = path, data return self._return_next_response() From 3d99dc8673c18bb5d7b63e0603eb0622427cb741 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Wed, 27 Nov 2024 11:18:57 +0300 Subject: [PATCH 14/46] ADCM-6143: Change `cached_property` to `async_cached_property` (#18) --- adcm_aio_client/core/objects/_base.py | 4 +++- adcm_aio_client/core/objects/cm.py | 12 ++++++---- poetry.lock | 18 +++++++++++++- pyproject.toml | 1 + tests/integration/test_misc.py | 34 +++++++++++++++++++++++++++ 5 files changed, 62 insertions(+), 7 deletions(-) create mode 100644 tests/integration/test_misc.py diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index 958db4bb..c8e53d42 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -3,6 +3,8 @@ from functools import cached_property from typing import Any, Self +from asyncstdlib.functools import CachedProperty + from adcm_aio_client.core.requesters import Requester from adcm_aio_client.core.types import AwareOfOwnPath, Endpoint, WithRequester @@ -18,7 +20,7 @@ def __init_subclass__(cls: type[Self]) -> None: for name in dir(cls): # None is for declared, but unset values attr = getattr(cls, name, None) - if isinstance(attr, cached_property): # TODO: asyncstdlib.functools.CachedProperty + if isinstance(attr, (cached_property, CachedProperty)): cls._delete_on_refresh.append(name) def __init__(self: Self, requester: Requester, data: dict[str, Any]) -> None: diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 456a7ea8..6a88139a 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -2,6 +2,8 @@ from typing import Iterable, Self import asyncio +from asyncstdlib.functools import cached_property as async_cached_property + from adcm_aio_client.core.errors import NotFoundError, OperationError, ResponseError from adcm_aio_client.core.objects._accessors import ( PaginatedAccessor, @@ -29,7 +31,7 @@ class ADCM(InteractiveObject, WithActions, WithConfig): def id(self: Self) -> int: return 1 - @cached_property + @async_cached_property async def version(self: Self) -> str: # TODO: override root_path for being without /api/v2 response = await self._requester.get("versions") @@ -68,7 +70,7 @@ def description(self: Self) -> str: # todo think how such properties will be invalidated when data is updated # during `refresh()` / `reread()` calls. 
# See cache invalidation or alternatives in documentation for `cached_property` - @cached_property # TODO: replace with asyncstdlib.functools.cached_property + @async_cached_property async def bundle(self: Self) -> Bundle: prototype_id = self._data["prototype"]["id"] response = await self._requester.get("prototypes", prototype_id) @@ -162,7 +164,7 @@ def name(self: Self) -> str: def display_name(self: Self) -> str: return self._data["displayName"] - @cached_property # TODO: replace with asyncstdlib.functools.cached_property + @async_cached_property async def constraint(self: Self) -> list[int | str]: response = (await self._requester.get(*self.cluster.get_own_path(), "mapping", "components")).as_list() for component in response: @@ -240,13 +242,13 @@ async def get_status(self: Self) -> ADCMEntityStatus: response = await self._requester.get(*self.get_own_path()) return ADCMEntityStatus(response.as_dict()["status"]) - @cached_property # TODO: replace with asyncstdlib.functools.cached_property + @async_cached_property async def cluster(self: Self) -> Cluster | None: if not self._data["cluster"]: return None return await Cluster.with_id(requester=self._requester, object_id=self._data["cluster"]["id"]) - @cached_property # TODO: replace with asyncstdlib.functools.cached_property + @async_cached_property async def hostprovider(self: Self) -> HostProvider: return await HostProvider.with_id(requester=self._requester, object_id=self._data["hostprovider"]["id"]) diff --git a/poetry.lock b/poetry.lock index 48ef0aa0..a4a080f5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -20,6 +20,22 @@ doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] trio = ["trio (>=0.26.1)"] +[[package]] +name = "asyncstdlib" +version = "3.13.0" +description = "The missing async toolbox" +optional = false +python-versions = "~=3.8" +files = [ + {file = "asyncstdlib-3.13.0-py3-none-any.whl", hash = "sha256:60e097c19e815f3c419a77426cf6c3653aebcb766544d631d5ce6128d0851ae8"}, + {file = "asyncstdlib-3.13.0.tar.gz", hash = "sha256:f2a6ffb44f118233bb99bef50861d6f64c432decbdcc4c2cb93b3fff40d1b533"}, +] + +[package.extras] +doc = ["sphinx", "sphinxcontrib-trio"] +test = ["black", "coverage", "flake8", "flake8-2020", "flake8-bugbear", "mypy", "pytest", "pytest-cov"] +typetest = ["mypy", "pyright", "typing-extensions"] + [[package]] name = "certifi" version = "2024.8.30" @@ -271,4 +287,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "20136fad059dd6f087334eea57612f368edbe485fcd2874666338000b4859d1b" +content-hash = "c14d12ccbcd8910aed151decce58a67d6f4798d9a9c6b7c086be12ca2c935587" diff --git a/pyproject.toml b/pyproject.toml index ce8b2c44..6cfadd69 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,6 +9,7 @@ readme = "README.md" [tool.poetry.dependencies] python = "^3.12" httpx = "^0.27.2" +asyncstdlib = "^3.13.0" [tool.poetry.group.dev] optional = true diff --git a/tests/integration/test_misc.py b/tests/integration/test_misc.py new file mode 100644 index 00000000..13772aec --- /dev/null +++ b/tests/integration/test_misc.py @@ -0,0 +1,34 @@ +from typing import Self + +from asyncstdlib.functools import cached_property as async_cached_property +import pytest + +pytestmark = [pytest.mark.asyncio] + + +class Dummy: + def __init__(self: Self) -> None: + 
self.counter = 0 + + @async_cached_property + async def func(self: Self) -> int: + self.counter += 1 + + return self.counter + + +async def test_async_cached_property() -> None: + obj = Dummy() + assert "func" not in obj.__dict__, "`func` key should not be cached yet" + + res = await obj.func + assert res == 1 + assert "func" in obj.__dict__, "`func` key should be cached" + + res = await obj.func + assert res == 1, "Cached value must be used" + + delattr(obj, "func") + res = await obj.func + assert res == 2, "Expected to execute func() again, increasing the counter" + assert "func" in obj.__dict__ From 2ab48e4718ed1192ecf0fe40acc5e36947c6226d Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Wed, 27 Nov 2024 18:23:23 +0300 Subject: [PATCH 15/46] ADCM-6120: Prepare infrastructure for integration tests. (#16) Co-authored-by: astarovo --- adcm_aio_client/core/client.py | 22 +- poetry.lock | 329 ++++++++++++++++++++++++- pyproject.toml | 1 + tests/integration/conftest.py | 43 ++++ tests/integration/setup_environment.py | 91 +++++++ tests/integration/test_dummy.py | 17 +- 6 files changed, 491 insertions(+), 12 deletions(-) create mode 100644 tests/integration/conftest.py create mode 100644 tests/integration/setup_environment.py diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index 3606e5a0..e9a538f9 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -14,8 +14,8 @@ from typing import Self from adcm_aio_client.core.objects.cm import ADCM, ClustersNode, HostProvidersNode, HostsAccessor -from adcm_aio_client.core.requesters import Requester -from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Verify +from adcm_aio_client.core.requesters import DefaultRequester +from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Requester, Verify class ADCMClient: @@ -40,11 +40,15 @@ def adcm(self: Self) -> ADCM: async def build_client( - url: str | list[str], - credentials: Credentials | AuthToken, + url: str, + credentials: Credentials | AuthToken, # noqa: ARG001 *, - verify: Verify | None = None, - cert: Cert | None = None, - timeout: int | None = None, - retries: int | None = None, -) -> ADCMClient: ... + verify: Verify | None = None, # noqa: ARG001 + cert: Cert | None = None, # noqa: ARG001 + timeout: float = 0.5, + retries: int = 5, + retry_interval: float = 5.0, +) -> ADCMClient: + requester = DefaultRequester(base_url=url, retries=retries, retry_interval=retry_interval, timeout=timeout) + await requester.login(credentials=Credentials(username="admin", password="admin")) # noqa: S106 + return ADCMClient(requester=requester) diff --git a/poetry.lock b/poetry.lock index a4a080f5..f6b76599 100644 --- a/poetry.lock +++ b/poetry.lock @@ -47,6 +47,120 @@ files = [ {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] +[[package]] +name = "charset-normalizer" +version = "3.4.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, 
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + 
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + [[package]] name = "colorama" version = "0.4.6" @@ -58,6 +172,28 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "docker" +version = "7.1.0" +description = "A Python library for the Docker Engine API." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "docker-7.1.0-py3-none-any.whl", hash = "sha256:c96b93b7f0a746f9e77d325bcfb87422a3d8bd4f03136ae8a85b37f1898d5fc0"}, + {file = "docker-7.1.0.tar.gz", hash = "sha256:ad8c70e6e3f8926cb8a92619b832b4ea5299e2831c14284663184e200546fa6c"}, +] + +[package.dependencies] +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" + +[package.extras] +dev = ["coverage (==7.2.7)", "pytest (==7.4.2)", "pytest-cov (==4.1.0)", "pytest-timeout (==2.1.0)", "ruff (==0.1.8)"] +docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] +ssh = ["paramiko (>=2.4.3)"] +websockets = ["websocket-client (>=1.3.0)"] + [[package]] name = "h11" version = "0.14.0" @@ -235,6 +371,54 @@ pytest = ">=8.2,<9" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] +[[package]] +name = "pywin32" +version = "308" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, + {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, + {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, + {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + [[package]] name = "ruff" version = "0.7.3" @@ -273,6 +457,58 @@ files = [ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "testcontainers" +version = "4.8.2" +description = "Python library for throwaway instances of anything that can run in a Docker container" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "testcontainers-4.8.2-py3-none-any.whl", hash = "sha256:9e19af077cd96e1957c13ee466f1f32905bc6c5bc1bc98643eb18be1a989bfb0"}, + {file = "testcontainers-4.8.2.tar.gz", hash = "sha256:dd4a6a2ea09e3c3ecd39e180b6548105929d0bb78d665ce9919cb3f8c98f9853"}, +] + +[package.dependencies] +docker = "*" +typing-extensions = "*" +urllib3 = "*" +wrapt = "*" + +[package.extras] +arangodb = ["python-arango (>=7.8,<8.0)"] +aws = ["boto3", "httpx"] +azurite = ["azure-storage-blob (>=12.19,<13.0)"] +chroma = ["chromadb-client"] +clickhouse = ["clickhouse-driver"] +cosmosdb = ["azure-cosmos"] +db2 = ["ibm_db_sa", "sqlalchemy"] +generic = ["httpx", "redis"] +google = ["google-cloud-datastore (>=2)", "google-cloud-pubsub (>=2)"] +influxdb = ["influxdb", "influxdb-client"] +k3s = ["kubernetes", "pyyaml"] +keycloak = ["python-keycloak"] +localstack = ["boto3"] +mailpit = ["cryptography"] +minio = ["minio"] +mongodb = ["pymongo"] +mssql = ["pymssql", "sqlalchemy"] +mysql = ["pymysql[rsa]", "sqlalchemy"] +nats = ["nats-py"] +neo4j = ["neo4j"] +opensearch = ["opensearch-py"] +oracle = ["oracledb", "sqlalchemy"] +oracle-free = ["oracledb", "sqlalchemy"] +qdrant = ["qdrant-client"] +rabbitmq = ["pika"] +redis = ["redis"] +registry = ["bcrypt"] +scylla = ["cassandra-driver (==3.29.1)"] +selenium = ["selenium"] +sftp = ["cryptography"] +test-module-import = ["httpx"] +trino = ["trino"] +weaviate = ["weaviate-client (>=4.5.4,<5.0.0)"] + [[package]] name = "typing-extensions" version = "4.12.2" @@ -284,7 +520,98 @@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wrapt" +version = "1.17.0" +description = "Module for decorators, wrappers and monkey patching." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"}, + {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = "sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"}, + {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"}, + {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"}, + {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"}, + {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"}, + {file = "wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4"}, + {file = "wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635"}, + {file = "wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7"}, + {file = "wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a"}, + {file = "wrapt-1.17.0-cp313-cp313-win32.whl", hash = "sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045"}, + {file = "wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838"}, + {file = "wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab"}, + {file = "wrapt-1.17.0-cp313-cp313t-win32.whl", hash = "sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf"}, + {file = "wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash 
= "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a"}, + {file = "wrapt-1.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69c40d4655e078ede067a7095544bcec5a963566e17503e75a3a3e0fe2803b13"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f495b6754358979379f84534f8dd7a43ff8cff2558dcdea4a148a6e713a758f"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa7ef4e0886a6f482e00d1d5bcd37c201b383f1d314643dfb0367169f94f04c"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fc931382e56627ec4acb01e09ce66e5c03c384ca52606111cee50d931a342d"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8f8909cdb9f1b237786c09a810e24ee5e15ef17019f7cecb207ce205b9b5fcce"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad47b095f0bdc5585bced35bd088cbfe4177236c7df9984b3cc46b391cc60627"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:948a9bd0fb2c5120457b07e59c8d7210cbc8703243225dbd78f4dfc13c8d2d1f"}, + {file = "wrapt-1.17.0-cp38-cp38-win32.whl", hash = "sha256:5ae271862b2142f4bc687bdbfcc942e2473a89999a54231aa1c2c676e28f29ea"}, + {file = "wrapt-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:f335579a1b485c834849e9075191c9898e0731af45705c2ebf70e0cd5d58beed"}, + {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"}, + {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"}, + {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"}, + {file = "wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"}, + {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"}, +] + [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "c14d12ccbcd8910aed151decce58a67d6f4798d9a9c6b7c086be12ca2c935587" +content-hash = "1ef31ee0a5363bd5fe7d3cdaf63fd956a3d84471cdbaf206b3f792b9fc173bce" diff --git a/pyproject.toml b/pyproject.toml index 6cfadd69..09ac94ed 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,7 @@ optional = true [tool.poetry.group.test.dependencies] pytest 
= "^8.3.3" pytest-asyncio = "^0.24.0" +testcontainers = "^4.8.2" [build-system] requires = ["poetry-core"] diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 00000000..c9e1445a --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,43 @@ +from typing import AsyncGenerator, Generator + +from testcontainers.core.network import Network +import pytest +import pytest_asyncio + +from adcm_aio_client.core.client import ADCMClient, build_client +from adcm_aio_client.core.types import Credentials +from tests.integration.setup_environment import ( + ADCMContainer, + ADCMPostgresContainer, + adcm_image_name, + db_name, + db_password, + db_user, + postgres_image_name, +) + + +@pytest.fixture(scope="session") +def network() -> Generator[Network, None, None]: + with Network() as network: + yield network + + +@pytest.fixture(scope="function") +def postgres(network: Network) -> Generator[ADCMPostgresContainer, None, None]: + with ADCMPostgresContainer(postgres_image_name, network) as container: + container.setup_postgres(db_user, db_password, db_name) + yield container + + +@pytest.fixture(scope="function") +def adcm(postgres: ADCMPostgresContainer) -> Generator[ADCMContainer, None, None]: + with ADCMContainer(adcm_image_name, postgres.network, postgres.adcm_env_kwargs) as container: + container.setup_container() + yield container + + +@pytest_asyncio.fixture(scope="function") +async def adcm_client(adcm: ADCMContainer) -> AsyncGenerator[ADCMClient, None]: + credentials = Credentials(username="admin", password="admin") # noqa: S106 + yield await build_client(url=adcm.url, credentials=credentials, retries=3, retry_interval=15, timeout=30) diff --git a/tests/integration/setup_environment.py b/tests/integration/setup_environment.py new file mode 100644 index 00000000..c5eebdc9 --- /dev/null +++ b/tests/integration/setup_environment.py @@ -0,0 +1,91 @@ +from typing import Self +import socket + +from docker.errors import DockerException +from testcontainers.core.container import DockerContainer +from testcontainers.core.network import Network +from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs +from testcontainers.postgres import PostgresContainer + +postgres_image_name = "postgres:latest" +adcm_image_name = "hub.adsw.io/adcm/adcm:develop" +adcm_port_range = (8000, 8010) +postgres_port_range = (5432, 5442) +adcm_container_name = "test_adcm" +postgres_name = "test_pg_db" +db_user = "adcm" +db_name = "adcm" +db_password = "password" # noqa: S105 + + +def find_free_port(start: int, end: int) -> int: + """Try to find a free port in the given range.""" + for port in range(start, end): + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + if s.connect_ex(("127.0.0.1", port)) != 0: # Port is free + return port + raise DockerContainerError(f"No free ports found in the range {start} to {end}") + + +class ADCMPostgresContainer(PostgresContainer): + def __init__(self: Self, image: str, network: Network) -> None: + super().__init__(image) + self.adcm_env_kwargs = {"STATISTICS_ENABLED": 0} + self.network = network + + def setup_postgres(self: Self, username: str, password: str, adcm_db_name: str) -> None: + postgres_port = find_free_port(postgres_port_range[0], postgres_port_range[1]) + + self.adcm_env_kwargs = self.adcm_env_kwargs | { + "DB_HOST": f"{postgres_name}_{postgres_port}", + "DB_USER": db_user, + "DB_NAME": db_name, + "DB_PASS": db_password, + "DB_PORT": str(postgres_port), + } + + 
self.with_name(f"{postgres_name}_{postgres_port}")
+        self.password = password
+        self.with_network(self.network)
+        self.with_bind_ports(postgres_port, postgres_port)
+
+        self.start()
+        wait_container_is_ready(self)
+
+        self.exec(
+            f"psql --username test --dbname postgres "
+            f"-c \"CREATE USER {username} WITH ENCRYPTED PASSWORD '{db_password}';\""
+        )
+        self.exec(f"psql --username test --dbname postgres " f'-c "CREATE DATABASE {adcm_db_name} OWNER {username};"')
+
+        wait_for_logs(self, "database system is ready to accept connections")
+
+
+class ADCMContainer(DockerContainer):
+    url: str
+
+    def __init__(self: Self, image: str, network: Network, env_kwargs: dict) -> None:
+        super().__init__(image)
+        self.network = network
+        self.adcm_env_kwargs = env_kwargs
+
+    def setup_container(self: Self) -> None:
+        adcm_port = find_free_port(adcm_port_range[0], adcm_port_range[1])
+        self.with_name(f"{adcm_container_name}_{adcm_port}")
+        self.with_network(self.network)
+        self.with_bind_ports(adcm_port, adcm_port)
+
+        for key, value in self.adcm_env_kwargs.items():
+            self.with_env(key, value)
+
+        self.start()
+
+        wait_container_is_ready(self)
+        wait_for_logs(self, "Run Nginx ...")
+
+        self.url = f"http://{self.get_container_host_ip()}:{self.get_exposed_port(adcm_port)}"
+
+
+class DockerContainerError(DockerException):
+    pass
diff --git a/tests/integration/test_dummy.py b/tests/integration/test_dummy.py
index 95aab17f..264e56e2 100644
--- a/tests/integration/test_dummy.py
+++ b/tests/integration/test_dummy.py
@@ -1,2 +1,15 @@
-def test_dummy_unit() -> None:
-    assert 1 == 1  # noqa: S101
+import logging
+
+import pytest
+
+from adcm_aio_client.core.client import ADCMClient
+
+logging.basicConfig(level=logging.DEBUG)
+
+
+@pytest.mark.asyncio
+@pytest.mark.skip(reason="the docker hub is unavailable currently")
+async def test_clusters_page(adcm_client: ADCMClient) -> None:
+    clusters = await adcm_client.clusters.list()
+
+    assert len(clusters) == 0

From 9036f74c27c7bc6864f4c496d9183a0697a32f5f Mon Sep 17 00:00:00 2001
From: Araslanov Egor
Date: Fri, 29 Nov 2024 14:01:57 +0500
Subject: [PATCH 16/46] ADCM-6125 Configuration management (#17)

---
 adcm_aio_client/core/config/__init__.py       |  25 +
 adcm_aio_client/core/config/_objects.py       | 469 ++++++++++++++++++
 adcm_aio_client/core/config/_operations.py    |  56 +++
 adcm_aio_client/core/config/merge.py          |  70 +++
 adcm_aio_client/core/config/types.py          | 277 +++++++++++
 adcm_aio_client/core/errors.py                |   9 +
 adcm_aio_client/core/objects/_base.py         |  10 +-
 adcm_aio_client/core/objects/_common.py       |  24 +-
 adcm_aio_client/core/objects/cm.py            |   2 +-
 adcm_aio_client/core/types.py                 |   8 +-
 .../bundles/config_example_v1/config.yaml     |  69 +++
 tests/unit/conftest.py                        |   5 +
 tests/unit/files/responses/.description       |   2 +
 .../responses/test_config_example_config.json |  37 ++
 .../test_config_example_config_schema.json    | 324 ++++++++++++
 tests/unit/test_config.py                     | 166 +++++++
 16 files changed, 1537 insertions(+), 16 deletions(-)
 create mode 100644 adcm_aio_client/core/config/__init__.py
 create mode 100644 adcm_aio_client/core/config/_objects.py
 create mode 100644 adcm_aio_client/core/config/_operations.py
 create mode 100644 adcm_aio_client/core/config/merge.py
 create mode 100644 adcm_aio_client/core/config/types.py
 create mode 100644 tests/unit/bundles/config_example_v1/config.yaml
 create mode 100644 tests/unit/files/responses/.description
 create mode 100644 tests/unit/files/responses/test_config_example_config.json
 create mode 100644 
tests/unit/files/responses/test_config_example_config_schema.json create mode 100644 tests/unit/test_config.py diff --git a/adcm_aio_client/core/config/__init__.py b/adcm_aio_client/core/config/__init__.py new file mode 100644 index 00000000..d597b330 --- /dev/null +++ b/adcm_aio_client/core/config/__init__.py @@ -0,0 +1,25 @@ +from adcm_aio_client.core.config._objects import ( + ActionConfig, + ActivatableParameterGroup, + ActivatableParameterGroupHG, + ConfigHistoryNode, + HostGroupConfig, + ObjectConfig, + Parameter, + ParameterGroup, + ParameterGroupHG, + ParameterHG, +) + +__all__ = [ + "ConfigHistoryNode", + "ObjectConfig", + "ActionConfig", + "HostGroupConfig", + "Parameter", + "ParameterHG", + "ParameterGroup", + "ParameterGroupHG", + "ActivatableParameterGroup", + "ActivatableParameterGroupHG", +] diff --git a/adcm_aio_client/core/config/_objects.py b/adcm_aio_client/core/config/_objects.py new file mode 100644 index 00000000..b5c21d9e --- /dev/null +++ b/adcm_aio_client/core/config/_objects.py @@ -0,0 +1,469 @@ +from copy import deepcopy +from functools import partial +from typing import Any, Callable, Coroutine, Protocol, Self, overload +import json +import asyncio + +from adcm_aio_client.core.config._operations import find_config_difference +from adcm_aio_client.core.config.merge import apply_local_changes +from adcm_aio_client.core.config.types import ( + AnyParameterName, + ConfigData, + ConfigDifference, + ConfigSchema, + LevelNames, + LocalConfigs, + MergeStrategy, +) +from adcm_aio_client.core.errors import ConfigComparisonError, RequesterError +from adcm_aio_client.core.types import AwareOfOwnPath, WithRequesterProperty + + +class ConfigOwner(WithRequesterProperty, AwareOfOwnPath, Protocol): ... + + +# Config Entries Wrappers + + +class _ConfigWrapper: + __slots__ = ("_name", "_schema", "_data") + + def __init__( + self: Self, + name: LevelNames, + data: ConfigData, + schema: ConfigSchema, + ) -> None: + self._name = name + self._schema = schema + self._data = data + + +class _Group(_ConfigWrapper): + __slots__ = ("_name", "_schema", "_data", "_wrappers_cache") + + def __init__(self: Self, name: LevelNames, data: ConfigData, schema: ConfigSchema) -> None: + super().__init__(name, data, schema) + self._wrappers_cache = {} + + def _find_and_wrap_config_entry[ValueW: _ConfigWrapper, GroupW: _ConfigWrapper, AGroupW: _ConfigWrapper]( + self: Self, + item: AnyParameterName | tuple[AnyParameterName, type[ValueW | GroupW | AGroupW]], + value_class: type[ValueW], + group_class: type[GroupW], + a_group_class: type[AGroupW], + ) -> ValueW | GroupW | AGroupW: + if isinstance(item, str): + name = item + else: + name, *_ = item + + level_name = self._schema.get_level_name(group=self._name, display_name=name) + if level_name is None: + level_name = name + + cached_wrapper = self._wrappers_cache.get(level_name) + if cached_wrapper: + return cached_wrapper + + parameter_full_name = (*self._name, level_name) + + class_ = value_class + if self._schema.is_group(parameter_full_name): + class_ = a_group_class if self._schema.is_activatable_group(parameter_full_name) else group_class + + wrapper = class_(name=parameter_full_name, data=self._data, schema=self._schema) + + self._wrappers_cache[level_name] = wrapper + + return wrapper + + +class Parameter[T](_ConfigWrapper): + @property + def value(self: Self) -> T: + # todo probably want to return read-only proxies for list/dict + return self._data.get_value(parameter=self._name) + + def set(self: Self, value: Any) -> Self: # noqa: ANN401 + 
self._data.set_value(parameter=self._name, value=value) + return self + + +class _Desyncable(_ConfigWrapper): + def sync(self: Self) -> Self: + self._data.set_attribute(parameter=self._name, attribute="isSynced", value=True) + return self + + def desync(self: Self) -> Self: + self._data.set_attribute(parameter=self._name, attribute="isSynced", value=False) + return self + + +class ParameterHG[T](_Desyncable, Parameter[T]): + def set(self: Self, value: Any) -> Self: # noqa: ANN401 + super().set(value) + self.desync() + return self + + +class ParameterGroup(_Group): + @overload + def __getitem__[ExpectedType: "ConfigEntry"]( + self: Self, item: tuple[AnyParameterName, type[ExpectedType]] + ) -> ExpectedType: ... + + @overload + def __getitem__(self: Self, item: AnyParameterName) -> "ConfigEntry": ... + + def __getitem__[ExpectedType: "ConfigEntry"]( + self: Self, item: AnyParameterName | tuple[AnyParameterName, type[ExpectedType]] + ) -> "ConfigEntry": + """ + Get config entry by given display name (or "technical" name). + + Item is either a string (name) or tuple with name on first position + and type info at second. + + NOTE: types aren't checked, they are just helpers for users' type checking setups. + """ + return self._find_and_wrap_config_entry( + item=item, value_class=Parameter, group_class=ParameterGroup, a_group_class=ActivatableParameterGroup + ) + + +class ParameterGroupHG(_Group): + @overload + def __getitem__[ExpectedType: "ConfigEntryHG"]( + self: Self, item: tuple[AnyParameterName, type[ExpectedType]] + ) -> ExpectedType: ... + + @overload + def __getitem__(self: Self, item: AnyParameterName) -> "ConfigEntryHG": ... + + def __getitem__[ExpectedType: "ConfigEntryHG"]( + self: Self, item: AnyParameterName | tuple[AnyParameterName, type[ExpectedType]] + ) -> "ConfigEntryHG": + """ + Get config entry by given display name (or "technical" name). + + Item is either a string (name) or tuple with name on first position + and type info at second. + + NOTE: types aren't checked, they are just helpers for users' type checking setups. + """ + return self._find_and_wrap_config_entry( + item=item, + value_class=ParameterHG, + group_class=ParameterGroupHG, + a_group_class=ActivatableParameterGroupHG, + ) + + +class _Activatable(_Group): + def activate(self: Self) -> Self: + self._data.set_attribute(parameter=self._name, attribute="isActive", value=True) + return self + + def deactivate(self: Self) -> Self: + self._data.set_attribute(parameter=self._name, attribute="isActive", value=False) + return self + + +class ActivatableParameterGroup(_Activatable, ParameterGroup): ... + + +class ActivatableParameterGroupHG(_Desyncable, _Activatable, ParameterGroup): + def activate(self: Self) -> Self: + super().activate() + self.desync() + return self + + def deactivate(self: Self) -> Self: + super().deactivate() + self.desync() + return self + + +class _ConfigWrapperCreator(_ConfigWrapper): + @property + def config(self: Self) -> ConfigData: + return self._data + + def change_data(self: Self, new_data: ConfigData) -> ConfigData: + self._data = new_data + return self._data + + +class ObjectConfigWrapper(ParameterGroup, _ConfigWrapperCreator): ... + + +class HostGroupConfigWrapper(ParameterGroupHG, _ConfigWrapperCreator): ... 
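+
+
+# A minimal usage sketch of the wrappers above (illustration only): it assumes an
+# object whose schema defines a "Main Section" group with a "String In Group"
+# parameter and an activatable "Optional Section" group; these display names are
+# examples, not guaranteed to exist in any real bundle.
+def _example_wrapper_semantics(object_root: ParameterGroup, host_group_root: ParameterGroupHG) -> None:
+    # plain wrappers: set() only changes the value in the underlying config data
+    object_root["Main Section", ParameterGroup]["String In Group", Parameter].set("new value")
+
+    # host group wrappers: set() additionally desyncs the parameter,
+    # and activatable groups desync on activate()/deactivate()
+    host_group_root["Main Section", ParameterGroupHG]["String In Group", ParameterHG].set("per-group value")
+    host_group_root["Optional Section", ActivatableParameterGroupHG].activate()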
+ + +type ConfigEntry = Parameter | ParameterGroup | ActivatableParameterGroup +type ConfigEntryHG = ParameterHG | ParameterGroupHG | ActivatableParameterGroupHG + +# API Objects + + +class _GeneralConfig[T: _ConfigWrapperCreator]: + __slots__ = ("_schema", "_parent", "_initial_config", "_current_config", "_wrapper_class") + + _wrapper_class: type[T] + + def __init__(self: Self, config: ConfigData, schema: ConfigSchema, parent: ConfigOwner) -> None: + self._schema = schema + self._initial_config: ConfigData = self._parse_json_fields_inplace_safe(config) + self._current_config = self._wrapper_class(data=deepcopy(self._initial_config), schema=self._schema, name=()) + self._parent = parent + + # Public Interface (for End User) + + @property + def id(self: Self) -> int: + return self._initial_config.id + + @property + def description(self: Self) -> str: + return self._initial_config.description + + def reset(self: Self) -> Self: + self._current_config.change_data(new_data=deepcopy(self._initial_config)) + return self + + def difference(self: Self, other: Self, *, other_is_previous: bool = True) -> ConfigDifference: + if self.schema != other.schema: + message = f"Schema of configuration {other.id} doesn't match schema of {self.id}" + raise ConfigComparisonError(message) + + if other_is_previous: + previous = other + current = self + else: + previous = self + current = other + + return find_config_difference(previous=previous.data, current=current.data, schema=self._schema) + + async def save(self: Self, description: str = "") -> Self: + config_to_save = self._current_config.config + self._serialize_json_fields_inplace_safe(config_to_save) + payload = {"description": description, "config": config_to_save.values, "adcmMeta": config_to_save.attributes} + + try: + response = await self._parent.requester.post(*self._parent.get_own_path(), "configs", data=payload) + except RequesterError: + # config isn't saved, no data update is in play, + # returning "pre-saved" parsed values + self._parse_json_fields_inplace_safe(config_to_save) + else: + new_config = ConfigData.from_v2_response(data_in_v2_format=response.as_dict()) + self._initial_config = self._parse_json_fields_inplace_safe(new_config) + self.reset() + + return self + + # Public For Internal Use Only + + @property + def schema(self: Self) -> ConfigSchema: + return self._schema + + @property + def data(self: Self) -> ConfigData: + return self._current_config.config + + # Private + def _parse_json_fields_inplace_safe(self: Self, config: ConfigData) -> ConfigData: + return self._apply_to_all_json_fields(func=json.loads, when=lambda value: isinstance(value, str), config=config) + + def _serialize_json_fields_inplace_safe(self: Self, config: ConfigData) -> ConfigData: + return self._apply_to_all_json_fields(func=json.dumps, when=lambda value: value is not None, config=config) + + def _apply_to_all_json_fields( + self: Self, func: Callable, when: Callable[[Any], bool], config: ConfigData + ) -> ConfigData: + for parameter_name in self._schema.json_fields: + input_value = config.get_value(parameter_name) + if when(input_value): + parsed_value = func(input_value) + config.set_value(parameter_name, parsed_value) + + return config + + async def _retrieve_current_config(self: Self) -> ConfigData: + configs_path = (*self._parent.get_own_path(), "configs") + + history_response = await self._parent.requester.get( + *configs_path, query={"ordering": "-id", "limit": 5, "offset": 0} + ) + + current_config_entry = 
get_current_config(results=history_response.as_dict()["results"]) + config_id = current_config_entry["id"] + + if config_id == self.id: + return self._initial_config + + config_response = await self._parent.requester.get(*configs_path, config_id) + + config_data = ConfigData.from_v2_response(data_in_v2_format=config_response.as_dict()) + + return self._parse_json_fields_inplace_safe(config_data) + + +class _RefreshableConfig[T: _ConfigWrapperCreator](_GeneralConfig[T]): + async def refresh(self: Self, strategy: MergeStrategy = apply_local_changes) -> Self: + remote_config = await retrieve_current_config( + parent=self._parent, get_schema=partial(retrieve_schema, parent=self._parent) + ) + if self.schema != remote_config.schema: + message = "Can't refresh configuration after upgrade: schema is different for local and remote configs" + raise ConfigComparisonError(message) + + local = LocalConfigs(initial=self._initial_config, changed=self._current_config.config) + merged_config = strategy(local=local, remote=remote_config.data, schema=self._schema) + + self._initial_config = remote_config.data + self._current_config.change_data(new_data=merged_config) + + return self + + +class ActionConfig(_GeneralConfig[ObjectConfigWrapper]): + _wrapper_class = ObjectConfigWrapper + + @overload + def __getitem__[ExpectedType: ConfigEntry]( + self: Self, item: tuple[AnyParameterName, type[ExpectedType]] + ) -> ExpectedType: ... + + @overload + def __getitem__(self: Self, item: AnyParameterName) -> ConfigEntry: ... + + def __getitem__[ExpectedType: ConfigEntry]( + self: Self, item: AnyParameterName | tuple[AnyParameterName, type[ExpectedType]] + ) -> ConfigEntry: + return self._current_config[item] + + +class ObjectConfig(_RefreshableConfig[ObjectConfigWrapper]): + _wrapper_class = ObjectConfigWrapper + + # todo fix typing copy-paste + @overload + def __getitem__[ExpectedType: ConfigEntry]( + self: Self, item: tuple[AnyParameterName, type[ExpectedType]] + ) -> ExpectedType: ... + + @overload + def __getitem__(self: Self, item: AnyParameterName) -> ConfigEntry: ... + + def __getitem__[ExpectedType: ConfigEntry]( + self: Self, item: AnyParameterName | tuple[AnyParameterName, type[ExpectedType]] + ) -> ConfigEntry: + return self._current_config[item] + + +class HostGroupConfig(_RefreshableConfig[HostGroupConfigWrapper]): + _wrapper_class = HostGroupConfigWrapper + + @overload + def __getitem__[ExpectedType: ConfigEntryHG]( + self: Self, item: tuple[AnyParameterName, type[ExpectedType]] + ) -> ExpectedType: ... + + @overload + def __getitem__(self: Self, item: AnyParameterName) -> ConfigEntryHG: ... 
+
+
+    def __getitem__[ExpectedType: ConfigEntryHG](
+        self: Self, item: AnyParameterName | tuple[AnyParameterName, type[ExpectedType]]
+    ) -> "ConfigEntryHG":
+        return self._current_config[item]
+
+
+class ConfigHistoryNode:
+    def __init__(self: Self, parent: ConfigOwner) -> None:
+        self._schema: ConfigSchema | None = None
+        self._parent = parent
+
+    async def current(self: Self) -> ObjectConfig:
+        return await retrieve_current_config(parent=self._parent, get_schema=self._ensure_schema)
+
+    async def __getitem__(self: Self, position: int) -> ObjectConfig:
+        # since we don't have a date here, we sort by id
+        ordering = "id"
+        offset = position
+        if offset < 0:
+            ordering = "-id"
+            # `-1` is the same as `0` in reverse order
+            offset = abs(offset) - 1
+
+        query = {"limit": 1, "offset": offset, "ordering": ordering}
+
+        return await retrieve_config(
+            parent=self._parent, get_schema=self._ensure_schema, query=query, choose_suitable_config=get_first_result
+        )
+
+    async def _ensure_schema(self: Self) -> ConfigSchema:
+        if self._schema is not None:
+            return self._schema
+
+        self._schema = await retrieve_schema(parent=self._parent)
+
+        return self._schema
+
+
+type GetSchemaFunc = Callable[[], Coroutine[Any, Any, ConfigSchema]]
+
+
+async def retrieve_schema(parent: ConfigOwner) -> ConfigSchema:
+    response = await parent.requester.get(*parent.get_own_path(), "config-schema")
+    return ConfigSchema(spec_as_jsonschema=response.as_dict())
+
+
+async def retrieve_current_config(parent: ConfigOwner, get_schema: GetSchemaFunc) -> ObjectConfig:
+    # we rely on the current configuration being
+    # one of the most recently created
+    query = {"ordering": "-id", "limit": 10, "offset": 0}
+    return await retrieve_config(
+        parent=parent, get_schema=get_schema, query=query, choose_suitable_config=get_current_config
+    )
+
+
+async def retrieve_config(
+    parent: ConfigOwner,
+    get_schema: GetSchemaFunc,
+    query: dict,
+    choose_suitable_config: Callable[[list[dict]], dict],
+) -> ObjectConfig:
+    schema_task = asyncio.create_task(get_schema())
+
+    path = (*parent.get_own_path(), "configs")
+
+    config_records_response = await parent.requester.get(*path, query=query)
+    config_record = choose_suitable_config(config_records_response.as_dict()["results"])
+
+    config_data_response = await parent.requester.get(*path, config_record["id"])
+    config_data = ConfigData.from_v2_response(data_in_v2_format=config_data_response.as_dict())
+
+    schema = await schema_task
+
+    return ObjectConfig(config=config_data, schema=schema, parent=parent)
+
+
+def get_first_result(results: list[dict]) -> dict:
+    try:
+        return results[0]
+    except IndexError as e:
+        message = "Configuration can't be found"
+        raise RuntimeError(message) from e
+
+
+def get_current_config(results: list[dict]) -> dict:
+    for config in results:
+        if config["isCurrent"]:
+            return config
+
+    message = "Failed to determine current configuration"
+    raise RuntimeError(message)
diff --git a/adcm_aio_client/core/config/_operations.py b/adcm_aio_client/core/config/_operations.py
new file mode 100644
index 00000000..d7994af4
--- /dev/null
+++ b/adcm_aio_client/core/config/_operations.py
@@ -0,0 +1,56 @@
+from adcm_aio_client.core.config.types import (
+    ConfigDifference,
+    ConfigSchema,
+    GenericConfigData,
+    LevelNames,
+    ValueChange,
+    full_name_to_level_names,
+)
+
+
+# Difference
+def find_config_difference(
+    previous: GenericConfigData, current: GenericConfigData, schema: ConfigSchema
+) -> ConfigDifference:
+    diff = ConfigDifference(schema=schema)
+
+    
_fill_values_diff_at_level(level=(), diff=diff, previous=previous.values, current=current.values) + _fill_attributes_diff(diff=diff, previous=previous.attributes, current=current.attributes) + + return diff + + +def _fill_values_diff_at_level(level: LevelNames, diff: ConfigDifference, previous: dict, current: dict) -> None: + missing = object() + for key, cur_value in current.items(): + level_names = (*level, key) + prev_value = previous.get(key, missing) + + if prev_value is missing: + # there may be collision between two None's, but for now we'll consider it a "special case" + diff.values[level_names] = ValueChange(previous=None, current=cur_value) + continue + + if cur_value == prev_value: + continue + + if not (diff.schema.is_group(level_names) and isinstance(prev_value, dict) and (isinstance(cur_value, dict))): + diff.values[level_names] = ValueChange(previous=prev_value, current=cur_value) + continue + + _fill_values_diff_at_level(diff=diff, level=level_names, previous=prev_value, current=cur_value) + + +def _fill_attributes_diff(diff: ConfigDifference, previous: dict, current: dict) -> None: + missing = object() + for full_name, cur_value in current.items(): + prev_value = previous.get(full_name, missing) + if cur_value == prev_value: + continue + + level_names = full_name_to_level_names(full_name) + + if prev_value is missing: + prev_value = None + + diff.attributes[level_names] = ValueChange(previous=prev_value, current=cur_value) diff --git a/adcm_aio_client/core/config/merge.py b/adcm_aio_client/core/config/merge.py new file mode 100644 index 00000000..56f2ec90 --- /dev/null +++ b/adcm_aio_client/core/config/merge.py @@ -0,0 +1,70 @@ +from adcm_aio_client.core.config._operations import find_config_difference +from adcm_aio_client.core.config.types import ConfigData, ConfigSchema, LocalConfigs + + +def apply_local_changes(local: LocalConfigs, remote: ConfigData, schema: ConfigSchema) -> ConfigData: + if local.initial.id == remote.id: + return local.changed + + local_diff = find_config_difference(previous=local.initial, current=local.changed, schema=schema) + if local_diff.is_empty: + # no changed, nothing to apply + return remote + + for parameter_name, value_change in local_diff.values.items(): + remote.set_value(parameter=parameter_name, value=value_change.current) + + for parameter_name, attribute_change in local_diff.attributes.items(): + if not isinstance(attribute_change, dict): + message = f"Can't apply attribute changes of type {type(attribute_change)}, expected dict-like" + raise TypeError(message) + + for attribute_name, value in attribute_change.current.items(): + remote.set_attribute(parameter=parameter_name, attribute=attribute_name, value=value) + + return remote + + +def apply_remote_changes(local: LocalConfigs, remote: ConfigData, schema: ConfigSchema) -> ConfigData: + if local.initial.id == remote.id: + return remote + + local_diff = find_config_difference(previous=local.initial, current=local.changed, schema=schema) + if local_diff.is_empty: + return remote + + remote_diff = find_config_difference(previous=local.initial, current=remote, schema=schema) + + locally_changed = set(local_diff.values.keys()) + changed_in_both = locally_changed.intersection(remote_diff.values.keys()) + changed_locally_only = locally_changed.difference(remote_diff.values.keys()) + + for parameter_name in changed_in_both: + remote.set_value(parameter=parameter_name, value=remote_diff.values[parameter_name].current) + + for parameter_name in changed_locally_only: + 
remote.set_value(parameter=parameter_name, value=local_diff.values[parameter_name].current) + + locally_changed = set(local_diff.attributes.keys()) + changed_in_both = locally_changed.intersection(remote_diff.attributes.keys()) + changed_locally_only = locally_changed.difference(remote_diff.attributes.keys()) + + for parameter_name in changed_in_both: + attribute_change = remote_diff.attributes[parameter_name] + if not isinstance(attribute_change, dict): + message = f"Can't apply attribute changes of type {type(attribute_change)}, expected dict-like" + raise TypeError(message) + + for attribute_name, value in attribute_change.current.items(): + remote.set_attribute(parameter=parameter_name, attribute=attribute_name, value=value) + + for parameter_name in changed_locally_only: + attribute_change = local_diff.attributes[parameter_name] + if not isinstance(attribute_change, dict): + message = f"Can't apply attribute changes of type {type(attribute_change)}, expected dict-like" + raise TypeError(message) + + for attribute_name, value in attribute_change.current.items(): + remote.set_attribute(parameter=parameter_name, attribute=attribute_name, value=value) + + return remote diff --git a/adcm_aio_client/core/config/types.py b/adcm_aio_client/core/config/types.py new file mode 100644 index 00000000..bea50b77 --- /dev/null +++ b/adcm_aio_client/core/config/types.py @@ -0,0 +1,277 @@ +from abc import ABC +from collections import defaultdict +from dataclasses import dataclass, field +from functools import reduce +from typing import Any, Callable, Iterable, NamedTuple, Protocol, Self + +# External Section +# these functions are heavily inspired by configuration rework in ADCM (ADCM-6034) + + +type ParameterName = str +type ParameterDisplayName = str +type AnyParameterName = ParameterName | ParameterDisplayName + +type LevelNames = tuple[ParameterName, ...] 
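+# Example of the naming scheme used throughout this module: a parameter
+# "inner_str" nested in a group "main" is addressed as the LevelNames tuple
+# ("main", "inner_str") and as the full name "/main/inner_str"
+# (see level_names_to_full_name / full_name_to_level_names below).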
+type ParameterFullName = str +""" +Name inclusing all level names joined with (and prefixed by) `/` +""" + +ROOT_PREFIX = "/" + + +def set_nested_config_value[T](config: dict[str, Any], level_names: LevelNames, value: T) -> T: + group, level_name = get_group_with_value(config=config, level_names=level_names) + group[level_name] = value + return value + + +def change_nested_config_value[T](config: dict[str, Any], level_names: LevelNames, func: Callable[[Any], T]) -> T: + group, level_name = get_group_with_value(config=config, level_names=level_names) + group[level_name] = func(group[level_name]) + return group[level_name] + + +def get_nested_config_value(config: dict[str, Any], level_names: LevelNames) -> Any: # noqa: ANN401 + group, level_name = get_group_with_value(config=config, level_names=level_names) + return group[level_name] + + +def get_group_with_value(config: dict[str, Any], level_names: LevelNames) -> tuple[dict[str, Any], ParameterName]: + return _get_group_with_value(config=config, level_names=level_names) + + +def _get_group_with_value( + config: dict[str, Any], level_names: Iterable[ParameterName] +) -> tuple[dict[str, Any], ParameterName]: + level_name, *rest = level_names + if not rest: + return config, level_name + + return _get_group_with_value(config=config[level_name], level_names=rest) + + +def level_names_to_full_name(levels: LevelNames) -> str: + return ensure_full_name("/".join(levels)) + + +def full_name_to_level_names(full: ParameterFullName) -> tuple[ParameterName, ...]: + return tuple(filter(bool, full.split("/"))) + + +def ensure_full_name(name: str) -> str: + if not name.startswith(ROOT_PREFIX): + return f"{ROOT_PREFIX}{name}" + + return name + + +# External Section End + + +class GenericConfigData(ABC): # noqa: B024 + __slots__ = ("_values", "_attributes") + + def __init__(self: Self, values: dict, attributes: dict) -> None: + self._values = values + self._attributes = attributes + + @property + def values(self: Self) -> dict: + return self._values + + @property + def attributes(self: Self) -> dict: + return self._attributes + + def get_value(self: Self, parameter: LevelNames) -> Any: # noqa: ANN401 + return get_nested_config_value(config=self._values, level_names=parameter) + + def set_value[T](self: Self, parameter: LevelNames, value: T) -> T: + return set_nested_config_value(config=self._values, level_names=parameter, value=value) + + def get_attribute(self: Self, parameter: LevelNames, attribute: str) -> bool: + full_name = level_names_to_full_name(parameter) + return self._attributes[full_name][attribute] + + def set_attribute(self: Self, parameter: LevelNames, attribute: str, value: bool) -> bool: # noqa: FBT001 + full_name = level_names_to_full_name(parameter) + self._attributes[full_name][attribute] = value + return value + + +class ActionConfigData(GenericConfigData): + __slots__ = GenericConfigData.__slots__ + + +class ConfigData(GenericConfigData): + __slots__ = ("id", "description", "_values", "_attributes") + + def __init__(self: Self, id: int, description: str, values: dict, attributes: dict) -> None: # noqa: A002 + self.id = id + self.description = description + super().__init__(values=values, attributes=attributes) + + @classmethod + def from_v2_response(cls: type[Self], data_in_v2_format: dict) -> Self: + return cls( + id=int(data_in_v2_format["id"]), + description=str(data_in_v2_format["description"]), + values=data_in_v2_format["config"], + attributes=data_in_v2_format["adcmMeta"], + ) + + +@dataclass(slots=True) +class ValueChange: + 
previous: Any
+    current: Any
+
+
+def recursive_defaultdict() -> defaultdict:
+    return defaultdict(recursive_defaultdict)
+
+
+@dataclass(slots=True)
+class ConfigDifference:
+    schema: "ConfigSchema"
+    values: dict[LevelNames, ValueChange] = field(default_factory=dict)
+    attributes: dict[LevelNames, ValueChange] = field(default_factory=dict)
+
+    @property
+    def is_empty(self: Self) -> bool:
+        return not (self.values or self.attributes)
+
+    def __str__(self: Self) -> str:
+        values_nested = self._to_nested_dict(self.values)
+        attributes_nested = self._to_nested_dict(self.attributes)
+
+        if not (values_nested or attributes_nested):
+            return "No Changes"
+
+        values_repr = f"Changed Values:\n{values_nested}" if values_nested else ""
+        attributes_repr = f"Changed Attributes:\n{attributes_nested}" if attributes_nested else ""
+
+        return "\n\n".join((values_repr, attributes_repr))
+
+    def _to_nested_dict(self: Self, changes: dict[LevelNames, ValueChange]) -> dict:
+        result = recursive_defaultdict()
+
+        for names, change in changes.items():
+            changes_tuple = (change.previous, change.current)
+
+            if len(names) == 1:
+                result[names[0]] = changes_tuple
+                continue
+
+            *groups, name = names
+            group_node = reduce(dict.__getitem__, groups, result)
+            group_node[name] = changes_tuple
+
+        return result
+
+
+class ConfigSchema:
+    def __init__(self: Self, spec_as_jsonschema: dict) -> None:
+        self._raw = spec_as_jsonschema
+
+        self._jsons: set[LevelNames] = set()
+        self._groups: set[LevelNames] = set()
+        self._activatable_groups: set[LevelNames] = set()
+        self._display_name_map: dict[tuple[LevelNames, ParameterDisplayName], ParameterName] = {}
+
+        self._analyze_schema()
+
+    def __eq__(self: Self, value: object) -> bool:
+        if not isinstance(value, ConfigSchema):
+            return NotImplemented
+
+        this_name_type_mapping = self._retrieve_name_type_mapping()
+        other_name_type_mapping = value._retrieve_name_type_mapping()
+
+        return this_name_type_mapping == other_name_type_mapping
+
+    @property
+    def json_fields(self: Self) -> set[LevelNames]:
+        return self._jsons
+
+    def is_group(self: Self, parameter_name: LevelNames) -> bool:
+        return parameter_name in self._groups
+
+    def is_activatable_group(self: Self, parameter_name: LevelNames) -> bool:
+        return parameter_name in self._activatable_groups
+
+    def get_level_name(self: Self, group: LevelNames, display_name: ParameterDisplayName) -> ParameterName | None:
+        key = (group, display_name)
+        return self._display_name_map.get(key)
+
+    def _analyze_schema(self: Self) -> None:
+        for level_names, param_spec in self._iterate_parameters(object_schema=self._raw):
+            if is_group_v2(param_spec):
+                self._groups.add(level_names)
+
+                if is_activatable_v2(param_spec):
+                    self._activatable_groups.add(level_names)
+
+            elif is_json_v2(param_spec):
+                self._jsons.add(level_names)
+
+            *group, own_level_name = level_names
+            display_name = param_spec["title"]
+            self._display_name_map[tuple(group), display_name] = own_level_name
+
+    def _retrieve_name_type_mapping(self: Self) -> dict[LevelNames, str]:
+        return {
+            level_names: param_spec.get("type", "enum")
+            for level_names, param_spec in self._iterate_parameters(object_schema=self._raw)
+        }
+
+    def _iterate_parameters(self: Self, object_schema: dict) -> Iterable[tuple[LevelNames, dict]]:
+        for level_name, optional_attrs in object_schema["properties"].items():
+            attributes = self._unwrap_optional(optional_attrs)
+
+            yield (level_name,), attributes
+
+            if is_group_v2(attributes):
+                for inner_level, inner_optional_attrs in 
self._iterate_parameters(attributes): + inner_attributes = self._unwrap_optional(inner_optional_attrs) + yield (level_name, *inner_level), inner_attributes + + def _unwrap_optional(self: Self, attributes: dict) -> dict: + if "oneOf" not in attributes: + return attributes + + # bald search, a lot may fail, + # but for more precise work with spec if require incapsulation in a separate handler class + return next(entry for entry in attributes["oneOf"] if entry.get("type") != "null") + + +def is_group_v2(attributes: dict) -> bool: + # todo need to check group-like structures, because they are almost impossible to distinct from groups + return ( + attributes.get("type") == "object" + and attributes.get("additionalProperties") is False + and attributes.get("default") == {} + ) + + +def is_activatable_v2(attributes: dict) -> bool: + return (attributes["adcmMeta"].get("activation") or {}).get("isAllowChange", False) + + +def is_json_v2(attributes: dict) -> bool: + return attributes.get("format") == "json" + + +class LocalConfigs(NamedTuple): + initial: ConfigData + changed: ConfigData + + +class MergeStrategy(Protocol): + def __call__(self: Self, local: LocalConfigs, remote: ConfigData, schema: ConfigSchema) -> ConfigData: + """ + `remote` may be changed according to strategy, so it shouldn't be "read-only"/"initial" + """ + ... diff --git a/adcm_aio_client/core/errors.py b/adcm_aio_client/core/errors.py index 775e6fb5..33bf5935 100644 --- a/adcm_aio_client/core/errors.py +++ b/adcm_aio_client/core/errors.py @@ -87,3 +87,12 @@ class ObjectDoesNotExistError(AccessorError): class OperationError(AccessorError): pass + + +# Config + + +class ConfigError(ADCMClientError): ... + + +class ConfigComparisonError(ConfigError): ... diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index c8e53d42..aa84731b 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -6,10 +6,10 @@ from asyncstdlib.functools import CachedProperty from adcm_aio_client.core.requesters import Requester -from adcm_aio_client.core.types import AwareOfOwnPath, Endpoint, WithRequester +from adcm_aio_client.core.types import AwareOfOwnPath, Endpoint, WithProtectedRequester, WithRequesterProperty -class InteractiveObject(WithRequester, AwareOfOwnPath): +class InteractiveObject(WithProtectedRequester, WithRequesterProperty, AwareOfOwnPath): _delete_on_refresh: deque[str] def __init_subclass__(cls: type[Self]) -> None: @@ -28,6 +28,10 @@ def __init__(self: Self, requester: Requester, data: dict[str, Any]) -> None: self._data = data @property + def requester(self: Self) -> Requester: + return self._requester + + @cached_property def id(self: Self) -> int: # it's the default behavior, without id many things can't be done return int(self._data["id"]) @@ -35,7 +39,6 @@ def id(self: Self) -> int: async def refresh(self: Self) -> Self: response = await self._requester.get(*self.get_own_path()) self._data = response.as_dict() - # todo drop caches self._clear_cache() return self @@ -62,7 +65,6 @@ def get_own_path(self: Self) -> Endpoint: # change here return self._build_own_path(self.id) - # let's add this one @classmethod async def with_id(cls: type[Self], requester: Requester, object_id: int) -> Self: object_path = cls._build_own_path(object_id) diff --git a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py index f9a9ba37..739eaf0d 100644 --- a/adcm_aio_client/core/objects/_common.py +++ b/adcm_aio_client/core/objects/_common.py @@ 
@@ -1,45 +1,49 @@
 from functools import cached_property
 from typing import Self
 
-from adcm_aio_client.core.objects._base import AwareOfOwnPath, WithRequester
+from adcm_aio_client.core.config import ConfigHistoryNode, ObjectConfig
+from adcm_aio_client.core.config._objects import ConfigOwner
+from adcm_aio_client.core.objects._base import AwareOfOwnPath, WithProtectedRequester
 from adcm_aio_client.core.types import ADCMEntityStatus
 
 
-class Deletable(WithRequester, AwareOfOwnPath):
+class Deletable(WithProtectedRequester, AwareOfOwnPath):
     async def delete(self: Self) -> None:
         await self._requester.delete(*self.get_own_path())
 
 
-class WithStatus(WithRequester, AwareOfOwnPath):
+class WithStatus(WithProtectedRequester, AwareOfOwnPath):
     async def get_status(self: Self) -> ADCMEntityStatus:
         response = await self._requester.get(*self.get_own_path())
         return ADCMEntityStatus(response.as_dict()["status"])
 
 
 # todo whole section lacking implementation (and maybe code move is required)
-class WithConfig(WithRequester, AwareOfOwnPath):
+class WithConfig(ConfigOwner):
     @cached_property
-    def config(self: Self) -> ...: ...
+    async def config(self: Self) -> ObjectConfig:
+        return await self.config_history.current()
 
     @cached_property
-    def config_history(self: Self) -> ...: ...
+    def config_history(self: Self) -> ConfigHistoryNode:
+        return ConfigHistoryNode(parent=self)
 
 
-class WithActions(WithRequester, AwareOfOwnPath):
+class WithActions(WithProtectedRequester, AwareOfOwnPath):
     @cached_property
     def actions(self: Self) -> ...: ...
 
 
-class WithUpgrades(WithRequester, AwareOfOwnPath):
+class WithUpgrades(WithProtectedRequester, AwareOfOwnPath):
     @cached_property
     def upgrades(self: Self) -> ...: ...
 
 
-class WithConfigGroups(WithRequester, AwareOfOwnPath):
+class WithConfigGroups(WithProtectedRequester, AwareOfOwnPath):
     @cached_property
     def config_groups(self: Self) -> ...: ...
 
 
-class WithActionHostGroups(WithRequester, AwareOfOwnPath):
+class WithActionHostGroups(WithProtectedRequester, AwareOfOwnPath):
     @cached_property
     def action_host_groups(self: Self) -> ...: ...
diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py
index 6a88139a..656835a3 100644
--- a/adcm_aio_client/core/objects/cm.py
+++ b/adcm_aio_client/core/objects/cm.py
@@ -27,7 +27,7 @@ class ADCM(InteractiveObject, WithActions, WithConfig):
-    @property
+    @cached_property
     def id(self: Self) -> int:
         return 1
diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py
index 0762c9b8..5b8376fe 100644
--- a/adcm_aio_client/core/types.py
+++ b/adcm_aio_client/core/types.py
@@ -62,10 +62,16 @@ async def delete(self: Self, *path: PathPart) -> RequesterResponse: ...
 
 # Objects
 
 
-class WithRequester(Protocol):
+class WithProtectedRequester(Protocol):
     _requester: Requester
 
 
+class WithRequesterProperty(Protocol):
+    # linter check ignored as a quick fix: annotating `self: Self` here breaks type checking
+    @property
+    def requester(self) -> Requester: ...  # noqa: ANN101
+
+
 class AwareOfOwnPath(Protocol):
     def get_own_path(self: Self) -> Endpoint: ...
diff --git a/tests/unit/bundles/config_example_v1/config.yaml b/tests/unit/bundles/config_example_v1/config.yaml
new file mode 100644
index 00000000..04edb7da
--- /dev/null
+++ b/tests/unit/bundles/config_example_v1/config.yaml
@@ -0,0 +1,69 @@
+- type: cluster
+  name: Cluster With Config Example
+  version: 1
+  description: |
+    This bundle is designed to provide a sample of config,
+    not necessarily including all config types or combinations.
+    Don't change configs of existing objects in it;
+    add a new service / component if you need one.
+- type: service
+  name: with_json_fields_and_groups
+  version: 1.0
+
+  config:
+    - name: root_int
+      display_name: Integer At Root
+      type: integer
+      default: 100
+    - name: root_list
+      display_name: List At Root
+      type: list
+      default: ["first", "second", "third"]
+    - name: root_dict
+      display_name: Map At Root
+      type: map
+      default: {"k1": "v1", "k2": "v2"}
+      required: false
+    - name: duplicate
+      display_name: Duplicate
+      type: string
+      default: "hehe"
+    - name: root_json
+      display_name: JSON At Root
+      type: json
+      default: {}
+    - name: main
+      display_name: Main Section
+      type: group
+      subs:
+        - name: inner_str
+          display_name: String In Group
+          type: string
+          default: "evil"
+        - name: inner_dict
+          display_name: Map In Group
+          type: map
+          default: {"a": "b"}
+        - name: inner_json
+          display_name: JSON In Group
+          type: json
+          default: {"complex": [], "jsonfield": 23, "server": "bestever"}
+        - name: duplicate
+          display_name: Duplicate
+          type: integer
+          default: 44
+    - name: optional_group
+      display_name: Optional Section
+      type: group
+      activatable: true
+      active: false
+      subs:
+        - name: param
+          display_name: Param In Activatable Group
+          type: float
+          default: 44.44
+          required: false
+    - name: root_str
+      display_name: String At Root
+      type: string
+      required: false
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
index 6de824ce..3df732e6 100644
--- a/tests/unit/conftest.py
+++ b/tests/unit/conftest.py
@@ -1,7 +1,12 @@
+from pathlib import Path
+
 import pytest
 
 from tests.unit.mocks.requesters import QueueRequester
 
+FILES = Path(__file__).parent / "files"
+RESPONSES = FILES / "responses"
+
 
 @pytest.fixture()
 def queue_requester() -> QueueRequester:
diff --git a/tests/unit/files/responses/.description
new file mode 100644
index 00000000..837b946b
--- /dev/null
+++ b/tests/unit/files/responses/.description
@@ -0,0 +1,2 @@
+This directory has samples of responses from ADCM
+to use them as mock responses in unit tests.
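For illustration only: the RESPONSES constant added to conftest.py above is how the unit tests later in this series load these stored responses. A minimal sketch of that pattern (the fixture name here is illustrative, not part of the patch):

import json

import pytest

from tests.unit.conftest import RESPONSES


@pytest.fixture()
def example_config_response() -> dict:
    # read a stored ADCM response and hand it to tests as plain dict data
    return json.loads((RESPONSES / "test_config_example_config.json").read_text())
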
diff --git a/tests/unit/files/responses/test_config_example_config.json b/tests/unit/files/responses/test_config_example_config.json new file mode 100644 index 00000000..9a50a74a --- /dev/null +++ b/tests/unit/files/responses/test_config_example_config.json @@ -0,0 +1,37 @@ +{ + "id": 3, + "isCurrent": true, + "creationTime": "2024-11-21T06:38:58.517310Z", + "config": { + "main": { + "duplicate": 44, + "inner_str": "evil", + "inner_dict": { + "a": "b" + }, + "inner_json": "{\"server\": \"bestever\", \"complex\": [], \"jsonfield\": 23}" + }, + "root_int": 100, + "root_str": null, + "duplicate": "hehe", + "root_dict": { + "k1": "v1", + "k2": "v2" + }, + "root_json": "{}", + "root_list": [ + "first", + "second", + "third" + ], + "optional_group": { + "param": 44.44 + } + }, + "adcmMeta": { + "/optional_group": { + "isActive": false + } + }, + "description": "init" +} diff --git a/tests/unit/files/responses/test_config_example_config_schema.json b/tests/unit/files/responses/test_config_example_config_schema.json new file mode 100644 index 00000000..21981799 --- /dev/null +++ b/tests/unit/files/responses/test_config_example_config_schema.json @@ -0,0 +1,324 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "title": "Configuration", + "description": "", + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "nullValue": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + }, + "type": "object", + "properties": { + "root_int": { + "title": "Integer At Root", + "type": "integer", + "description": "", + "default": 100, + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + } + }, + "root_list": { + "title": "List At Root", + "type": "array", + "description": "", + "default": [ + "first", + "second", + "third" + ], + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + }, + "items": { + "type": "string", + "title": "", + "description": "", + "default": null, + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "nullValue": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + } + }, + "minItems": 1 + }, + "root_dict": { + "oneOf": [ + { + "title": "Map At Root", + "type": "object", + "description": "", + "default": { + "k1": "v1", + "k2": "v2" + }, + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + }, + "additionalProperties": true, + "properties": {} + }, + { + "type": "null" + } + ] + }, + "duplicate": { + "title": "Duplicate", + "type": "string", + "description": "", + "default": "hehe", + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": { + "isMultiline": false + }, + "enumExtra": null + }, + "minLength": 1 + }, + "root_json": { + "title": "JSON At Root", + "type": "string", + "description": "", + "default": "{}", + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + 
"synchronization": null, + "isSecret": false, + "stringExtra": { + "isMultiline": true + }, + "enumExtra": null + }, + "format": "json", + "minLength": 1 + }, + "main": { + "title": "Main Section", + "type": "object", + "description": "", + "default": {}, + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + }, + "additionalProperties": false, + "properties": { + "inner_str": { + "title": "String In Group", + "type": "string", + "description": "", + "default": "evil", + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": { + "isMultiline": false + }, + "enumExtra": null + }, + "minLength": 1 + }, + "inner_dict": { + "title": "Map In Group", + "type": "object", + "description": "", + "default": { + "a": "b" + }, + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + }, + "additionalProperties": true, + "properties": {}, + "minProperties": 1 + }, + "inner_json": { + "title": "JSON In Group", + "type": "string", + "description": "", + "default": "{\"complex\": [], \"jsonfield\": 23, \"server\": \"bestever\"}", + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": { + "isMultiline": true + }, + "enumExtra": null + }, + "format": "json", + "minLength": 1 + }, + "duplicate": { + "title": "Duplicate", + "type": "integer", + "description": "", + "default": 44, + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + } + } + }, + "required": [ + "inner_str", + "inner_dict", + "inner_json", + "duplicate" + ] + }, + "optional_group": { + "title": "Optional Section", + "type": "object", + "description": "", + "default": {}, + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": { + "isAllowChange": true + }, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + }, + "additionalProperties": false, + "properties": { + "param": { + "oneOf": [ + { + "title": "Param In Activatable Group", + "type": "number", + "description": "", + "default": 44.44, + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": null, + "enumExtra": null + } + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "param" + ] + }, + "root_str": { + "oneOf": [ + { + "title": "String At Root", + "type": "string", + "description": "", + "default": null, + "readOnly": false, + "adcmMeta": { + "isAdvanced": false, + "isInvisible": false, + "activation": null, + "synchronization": null, + "isSecret": false, + "stringExtra": { + "isMultiline": false + }, + "enumExtra": null + } + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "root_int", + "root_list", + "root_dict", + "duplicate", + "root_json", + "main", + "optional_group", + "root_str" + ] +} diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py new file mode 100644 index 
00000000..d2ffc9ec --- /dev/null +++ b/tests/unit/test_config.py @@ -0,0 +1,166 @@ +from copy import deepcopy +from typing import Self +import json + +import pytest + +from adcm_aio_client.core.config._objects import ( + ActivatableParameterGroup, + ConfigOwner, + ObjectConfig, + Parameter, + ParameterGroup, +) +from adcm_aio_client.core.config.types import ConfigData, ConfigSchema +from adcm_aio_client.core.objects._base import InteractiveObject +from adcm_aio_client.core.types import Endpoint, Requester +from tests.unit.conftest import RESPONSES + + +class DummyParent(InteractiveObject): + def get_own_path(self: Self) -> Endpoint: + return ("dummy",) + + +@pytest.fixture() +def example_config() -> tuple[dict, dict]: + config = json.loads((RESPONSES / "test_config_example_config.json").read_text()) + + schema = json.loads((RESPONSES / "test_config_example_config_schema.json").read_text()) + + return config, schema + + +@pytest.fixture() +def dummy_parent(queue_requester: Requester) -> ConfigOwner: + return DummyParent(data={"id": 4}, requester=queue_requester) + + +@pytest.fixture() +def object_config(example_config: tuple[dict, dict], dummy_parent: ConfigOwner) -> ObjectConfig: + config_data, schema_data = example_config + + data = ConfigData.from_v2_response(data_in_v2_format=deepcopy(config_data)) + schema = ConfigSchema(spec_as_jsonschema=schema_data) + + return ObjectConfig(config=data, schema=schema, parent=dummy_parent) + + +def test_edit_config(example_config: tuple[dict, dict], object_config: ObjectConfig) -> None: + data, _ = example_config + + initial_parsed_data = deepcopy(data) + initial_parsed_data["config"]["root_json"] = json.loads(initial_parsed_data["config"]["root_json"]) + initial_parsed_data["config"]["main"]["inner_json"] = json.loads( + initial_parsed_data["config"]["main"]["inner_json"] + ) + + new_inner_json = { + "complex": [], + "jsonfield": 23, + "link": "do i look like a link to you?", + "arguments": ["-q", "something"], + } + new_root_json = ["now", "I am", "cool"] + + new_config = { + "root_int": 430, + "root_list": ["first", "second", "third", "best thing there is"], + "root_dict": None, + "duplicate": "hehe", + "root_json": new_root_json, + "main": { + "inner_str": "not the worst at least", + "inner_dict": {"a": "b", "additional": "keys", "are": "welcome"}, + "inner_json": new_inner_json, + "duplicate": 44, + }, + "optional_group": {"param": 44.44}, + "root_str": "newstring", + } + + # todo: + # - check no POST requests are performed + + config = object_config + + assert config.data.values == initial_parsed_data["config"] + assert config.data.attributes == initial_parsed_data["adcmMeta"] + + # Edit "root" values + + config["root_int", Parameter].set(new_config["root_int"]) + + # inner type won't be checked (list), + # but here we pretend "to be 100% sure" it's `list`, not `None` + config["root_list", Parameter].set([*config["root_list", Parameter[list]].value, new_config["root_list"][-1]]) + + root_dict = config["root_dict"] + assert isinstance(root_dict, Parameter) + assert isinstance(root_dict.value, dict) + root_dict.set(None) + assert root_dict.value is None + assert config["root_dict", Parameter].value is None + + # Edit group ("nested") values + + assert isinstance(config["main"], ParameterGroup) + # if we don't want type checker to bother us, we can yolo like that + config["main"]["inner_str"].set(new_config["main"]["inner_str"]) # type: ignore + + main_group = config["main"] + assert isinstance(main_group, ParameterGroup) + main_group["inner_dict", 
Parameter].set( + {**main_group["inner_dict", Parameter[dict]].value, "additional": "keys", "are": "welcome"} + ) + + activatable_group = config["optional_group"] + assert isinstance(activatable_group, ActivatableParameterGroup) + activatable_group.activate() + + # Edit JSON field + + # change value separately and set + json_field = main_group["inner_json"] + assert isinstance(json_field, Parameter) + assert isinstance(json_field.value, dict) + new_value = deepcopy(json_field.value) + new_value.pop("server") + new_value |= {"link": "do i look like a link to you?", "arguments": ["-q", "something"]} + json_field.set(new_value) + + # swap value type with direct set + assert isinstance(config["root_json"].value, dict) # type: ignore + config["root_json"].set(["now", "I am", "cool"]) # type: ignore + + # Type change specifics + + param = config["root_str"] + assert isinstance(param, Parameter) + assert param.value is None + + param.set("newstring") + assert isinstance(config["root_str"].value, str) # type: ignore + + # Check all values are changed + + config_for_save = config.data + assert config_for_save.values == new_config + assert config_for_save.attributes == {"/optional_group": {"isActive": True}} + + +def test_display_name_search(object_config: ObjectConfig) -> None: + # only display name search + assert object_config["Map At Root", Parameter].value == {"k1": "v1", "k2": "v2"} + assert object_config["Main Section", ParameterGroup]["String In Group", Parameter].value == "evil" + + # name and display name search mixed + assert object_config["root_int"] is object_config["Integer At Root"] + + value_1 = object_config["optional_group"]["Param In Activatable Group"] # type: ignore + value_2 = object_config["Optional Section"]["param"] # type: ignore + assert value_1 is value_2 + + # duplication at different levels + assert object_config["Duplicate", Parameter].value == "hehe" + assert object_config["Main Section", ParameterGroup]["Duplicate", Parameter].value == 44 From 3699a52e3d05f45e441462cdd83270dd9ac8cdab Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Fri, 29 Nov 2024 12:46:12 +0300 Subject: [PATCH 17/46] Revert integration tests to running (#27) --- .github/CODEOWNERS | 2 +- .github/workflows/on_push_to_pull_request.yaml | 7 ------- tests/integration/test_dummy.py | 1 - 3 files changed, 1 insertion(+), 9 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 26d4916e..f169c6ba 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,2 +1,2 @@ -*.* @a-alferov +*.* @a-alferov @Sealwing *.py @Sealwing @DanBalalan @Starovoitov diff --git a/.github/workflows/on_push_to_pull_request.yaml b/.github/workflows/on_push_to_pull_request.yaml index 3099e751..f80ed36a 100644 --- a/.github/workflows/on_push_to_pull_request.yaml +++ b/.github/workflows/on_push_to_pull_request.yaml @@ -14,10 +14,3 @@ jobs: with: target: tests/unit description: Unit - - integration_tests: - name: Run integration tests - uses: ./.github/workflows/step_test_from_dir.yaml - with: - target: tests/integration - description: Integration diff --git a/tests/integration/test_dummy.py b/tests/integration/test_dummy.py index 264e56e2..24ad75af 100644 --- a/tests/integration/test_dummy.py +++ b/tests/integration/test_dummy.py @@ -8,7 +8,6 @@ @pytest.mark.asyncio -@pytest.mark.skip(reason="the docker hub is unavailable currently") async def test_clusters_page(adcm_client: ADCMClient) -> None: clusters = await adcm_client.clusters.list() From 4d02c261761d64503a68c365d7c1f988d765118e Mon Sep 17 00:00:00 
2001 From: Artem Starovoitov Date: Fri, 29 Nov 2024 12:55:32 +0300 Subject: [PATCH 18/46] ADCM-6138: Implement Bundle object (#20) Co-authored-by: astarovo --- adcm_aio_client/core/client.py | 10 ++++-- adcm_aio_client/core/objects/cm.py | 50 ++++++++++++++++++++++++++---- 2 files changed, 51 insertions(+), 9 deletions(-) diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index e9a538f9..b26254b0 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -13,9 +13,9 @@ from functools import cached_property from typing import Self -from adcm_aio_client.core.objects.cm import ADCM, ClustersNode, HostProvidersNode, HostsAccessor -from adcm_aio_client.core.requesters import DefaultRequester -from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Requester, Verify +from adcm_aio_client.core.objects.cm import ADCM, BundlesNode, ClustersNode, HostProvidersNode, HostsAccessor +from adcm_aio_client.core.requesters import DefaultRequester, Requester +from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Verify class ADCMClient: @@ -38,6 +38,10 @@ def hostproviders(self: Self) -> HostProvidersNode: def adcm(self: Self) -> ADCM: return ADCM(requester=self._requester, data={}) + @cached_property + def bundles(self: Self) -> BundlesNode: + return BundlesNode(path=("bundles",), requester=self._requester) + async def build_client( url: str, diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 656835a3..337c88b1 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -1,8 +1,8 @@ from functools import cached_property -from typing import Iterable, Self +from typing import Iterable, Literal, Self import asyncio -from asyncstdlib.functools import cached_property as async_cached_property +from asyncstdlib.functools import cached_property as async_cached_property # noqa: N813 from adcm_aio_client.core.errors import NotFoundError, OperationError, ResponseError from adcm_aio_client.core.objects._accessors import ( @@ -41,7 +41,48 @@ def get_own_path(self: Self) -> Endpoint: return ("adcm",) -class Bundle(Deletable, InteractiveObject): ... +class License(InteractiveObject): ... 
+ + +class Bundle(Deletable, RootInteractiveObject): + PATH_PREFIX = "bundles" + + @property + def name(self: Self) -> str: + return str(self._data["name"]) + + @property + def display_name(self: Self) -> str: + return str(self._data["display_name"]) + + @property + def version(self: Self) -> str: + return str(self._data["version"]) + + @property + def edition(self: Self) -> Literal["community", "enterprise"]: + return self._data["edition"] + + @property + def signature_status(self: Self) -> Literal["invalid", "valid", "absent"]: + return self._data["signatureStatus"] + + @property + def _type(self: Self) -> Literal["cluster", "provider"]: + return self._data["mainPrototype"]["type"] + + def license(self: Self) -> License: + return self._construct(what=License, from_data=self._data["mainPrototype"]["license"]) + + def get_own_path(self: Self) -> Endpoint: + return self.PATH_PREFIX, self.id + + +class BundlesNode(PaginatedAccessor[Bundle, None]): + class_type = Bundle + + def get_own_path(self: Self) -> Endpoint: + return ("bundles",) class Cluster( @@ -112,9 +153,6 @@ def get_own_path(self: Self) -> Endpoint: class ClustersNode(PaginatedAccessor[Cluster, None]): class_type = Cluster - def get_own_path(self: Self) -> Endpoint: - return ("clusters",) - class Service( WithStatus, From 50acda0d2754cf42f3554bff8895874809c6aa91 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Fri, 29 Nov 2024 16:08:20 +0300 Subject: [PATCH 19/46] ADCM-6157: Implement missing methods for HostProviderNode (#22) Co-authored-by: astarovo Co-authored-by: Artem Starovoitov --- adcm_aio_client/core/client.py | 4 ++-- adcm_aio_client/core/objects/cm.py | 11 +++++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index b26254b0..e993fbdf 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -14,8 +14,8 @@ from typing import Self from adcm_aio_client.core.objects.cm import ADCM, BundlesNode, ClustersNode, HostProvidersNode, HostsAccessor -from adcm_aio_client.core.requesters import DefaultRequester, Requester -from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Verify +from adcm_aio_client.core.requesters import DefaultRequester +from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Requester, Verify class ADCMClient: diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 337c88b1..691e3b08 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -77,6 +77,10 @@ def license(self: Self) -> License: def get_own_path(self: Self) -> Endpoint: return self.PATH_PREFIX, self.id + @cached_property + def _main_prototype_id(self: Self) -> int: + return self._data["mainPrototype"]["id"] + class BundlesNode(PaginatedAccessor[Bundle, None]): class_type = Bundle @@ -264,6 +268,13 @@ def get_own_path(self: Self) -> Endpoint: class HostProvidersNode(PaginatedAccessor[HostProvider, None]): class_type = HostProvider + async def create(self: Self, bundle: Bundle, name: str, description: str = "") -> HostProvider: + response = await self._requester.post( + "hostproviders", data={"prototypeId": bundle._main_prototype_id, "name": name, "description": description} + ) + + return HostProvider(requester=self._requester, data=response.as_dict()) + class Host(Deletable, RootInteractiveObject): PATH_PREFIX = "hosts" From f68dbe4b9d360c3fcd4f65f2e6cfddc5b244c2c3 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Mon, 2 Dec 2024 
10:51:52 +0300 Subject: [PATCH 20/46] ADCM-6134: Implement node for getting actions (#21) --- adcm_aio_client/core/actions/__init__.py | 3 ++ adcm_aio_client/core/actions/actions.py | 59 ++++++++++++++++++++++++ adcm_aio_client/core/objects/_base.py | 6 ++- adcm_aio_client/core/objects/_common.py | 15 +++--- adcm_aio_client/core/objects/cm.py | 19 +------- adcm_aio_client/core/types.py | 5 ++ 6 files changed, 82 insertions(+), 25 deletions(-) create mode 100644 adcm_aio_client/core/actions/__init__.py create mode 100644 adcm_aio_client/core/actions/actions.py diff --git a/adcm_aio_client/core/actions/__init__.py b/adcm_aio_client/core/actions/__init__.py new file mode 100644 index 00000000..3ea14f1d --- /dev/null +++ b/adcm_aio_client/core/actions/__init__.py @@ -0,0 +1,3 @@ +from adcm_aio_client.core.actions.actions import ActionsAccessor + +__all__ = ["ActionsAccessor"] diff --git a/adcm_aio_client/core/actions/actions.py b/adcm_aio_client/core/actions/actions.py new file mode 100644 index 00000000..b64737e4 --- /dev/null +++ b/adcm_aio_client/core/actions/actions.py @@ -0,0 +1,59 @@ +from functools import cached_property +from typing import Any, Self + +from asyncstdlib import cached_property as async_cached_property + +from adcm_aio_client.core.objects._accessors import NonPaginatedChildAccessor +from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject +from adcm_aio_client.core.types import Requester + + +class Action(InteractiveChildObject): + PATH_PREFIX = "actions" + + def __init__(self: Self, parent: InteractiveObject, requester: Requester, data: dict[str, Any]) -> None: + super().__init__(parent, requester, data) + self._verbose = False + + @cached_property + def name(self: Self) -> str: + return self._data["name"] + + @cached_property + def display_name(self: Self) -> str: + return self._data["displayName"] + + async def run(self: Self) -> dict: # TODO: implement Task, return Task + return (await self._requester.post(*self.get_own_path(), "run", data={"isVerbose": self._verbose})).as_dict() + + @async_cached_property + async def _mapping_rule(self: Self) -> list[dict]: + return (await self._rich_data)["hostComponentMapRules"] + + @async_cached_property + async def mapping(self: Self) -> "ActionMapping": + return ActionMapping() + + def set_verbose(self: Self) -> Self: + self._verbose = True + return self + + def validate(self: Self) -> None: ... # TODO: implement + + @async_cached_property # TODO: Config class + async def config(self: Self) -> ...: + return (await self._rich_data)["configuration"] + + @async_cached_property + async def _rich_data(self: Self) -> dict: + return (await self._requester.get(*self.get_own_path())).as_dict() + + +class ActionsAccessor(NonPaginatedChildAccessor): + class_type = Action + + +class ActionMapping: + def add(self: Self) -> ...: ... + + def remove(self: Self) -> ...: ... 
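A rough usage sketch for the actions node introduced above (hedged: the all() lookup assumes the accessor interface outlined earlier in the series; only set_verbose() and run() are taken verbatim from the Action class):

from adcm_aio_client.core.objects.cm import Host


async def run_action_by_name(host: Host, name: str) -> dict:
    # `host.actions` is the ActionsAccessor provided by the WithActions mixin
    for action in await host.actions.all():
        if action.name == name:
            # run() currently returns the raw task payload; a Task object is still a TODO
            return await action.set_verbose().run()
    msg = f"no action named {name!r}"
    raise RuntimeError(msg)
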
diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index aa84731b..e6fd910f 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -10,6 +10,7 @@ class InteractiveObject(WithProtectedRequester, WithRequesterProperty, AwareOfOwnPath): + PATH_PREFIX: str _delete_on_refresh: deque[str] def __init_subclass__(cls: type[Self]) -> None: @@ -76,7 +77,10 @@ def _build_own_path(cls: type[Self], object_id: int) -> Endpoint: return cls.PATH_PREFIX, object_id -class InteractiveChildObject[Parent](InteractiveObject): +class InteractiveChildObject[Parent: InteractiveObject](InteractiveObject): def __init__(self: Self, parent: Parent, requester: Requester, data: dict[str, Any]) -> None: super().__init__(requester=requester, data=data) self._parent = parent + + def get_own_path(self: Self) -> Endpoint: + return *self._parent.get_own_path(), self.PATH_PREFIX, self.id diff --git a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py index 739eaf0d..85e71e4c 100644 --- a/adcm_aio_client/core/objects/_common.py +++ b/adcm_aio_client/core/objects/_common.py @@ -1,10 +1,10 @@ from functools import cached_property from typing import Self +from adcm_aio_client.core.actions import ActionsAccessor from adcm_aio_client.core.config import ConfigHistoryNode, ObjectConfig from adcm_aio_client.core.config._objects import ConfigOwner -from adcm_aio_client.core.objects._base import AwareOfOwnPath, WithProtectedRequester -from adcm_aio_client.core.types import ADCMEntityStatus +from adcm_aio_client.core.types import ADCMEntityStatus, AwareOfOwnPath, WithProtectedRequester class Deletable(WithProtectedRequester, AwareOfOwnPath): @@ -18,6 +18,12 @@ async def get_status(self: Self) -> ADCMEntityStatus: return ADCMEntityStatus(response.as_dict()["status"]) +class WithActions(WithProtectedRequester, AwareOfOwnPath): + @cached_property + def actions(self: Self) -> ActionsAccessor: + return ActionsAccessor(parent=self, path=(*self.get_own_path(), "actions"), requester=self._requester) + + # todo whole section lacking implementation (and maybe code move is required) class WithConfig(ConfigOwner): @cached_property @@ -29,11 +35,6 @@ def config_history(self: Self) -> ConfigHistoryNode: return ConfigHistoryNode(parent=self) -class WithActions(WithProtectedRequester, AwareOfOwnPath): - @cached_property - def actions(self: Self) -> ...: ... - - class WithUpgrades(WithProtectedRequester, AwareOfOwnPath): @cached_property def upgrades(self: Self) -> ...: ... 
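For orientation, a minimal sketch of how the reworked WithConfig mixin is meant to be consumed (the parameter name comes from the example bundle in this series and is purely illustrative; persisting changes is not covered by this patch):

from adcm_aio_client.core.config._objects import ObjectConfig, Parameter
from adcm_aio_client.core.objects._common import WithConfig


async def show_root_int(owner: WithConfig) -> None:
    # `config` resolves the current version through ConfigHistoryNode.current()
    config: ObjectConfig = await owner.config
    print(config["root_int", Parameter].value)
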
diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 691e3b08..d120d00b 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -150,9 +150,6 @@ def hosts(self: Self) -> "HostsInClusterNode": def imports(self: Self) -> ClusterImports: return ClusterImports() - def get_own_path(self: Self) -> Endpoint: - return self.PATH_PREFIX, self.id - class ClustersNode(PaginatedAccessor[Cluster, None]): class_type = Cluster @@ -181,9 +178,6 @@ def display_name(self: Self) -> str: def cluster(self: Self) -> Cluster: return self._parent - def get_own_path(self: Self) -> Endpoint: - return *self._parent.get_own_path(), self.PATH_PREFIX, self.id - @cached_property def components(self: Self) -> "ComponentsNode": return ComponentsNode(parent=self, path=(*self.get_own_path(), "components"), requester=self._requester) @@ -231,9 +225,6 @@ def hosts(self: Self) -> "HostsAccessor": accessor_filter={"componentId": self.id}, ) - def get_own_path(self: Self) -> Endpoint: - return *self._parent.get_own_path(), self.PATH_PREFIX, self.id - class ComponentsNode(PaginatedChildAccessor[Service, Component, None]): class_type = Component @@ -261,9 +252,6 @@ def hosts(self: Self) -> "HostsAccessor": path=("hosts",), requester=self._requester, accessor_filter={"hostproviderName": self.name} ) - def get_own_path(self: Self) -> Endpoint: - return self.PATH_PREFIX, self.id - class HostProvidersNode(PaginatedAccessor[HostProvider, None]): class_type = HostProvider @@ -276,7 +264,7 @@ async def create(self: Self, bundle: Bundle, name: str, description: str = "") - return HostProvider(requester=self._requester, data=response.as_dict()) -class Host(Deletable, RootInteractiveObject): +class Host(Deletable, WithActions, RootInteractiveObject): PATH_PREFIX = "hosts" @property @@ -301,14 +289,11 @@ async def cluster(self: Self) -> Cluster | None: async def hostprovider(self: Self) -> HostProvider: return await HostProvider.with_id(requester=self._requester, object_id=self._data["hostprovider"]["id"]) - def get_own_path(self: Self) -> Endpoint: - return self.PATH_PREFIX, self.id - def __str__(self: Self) -> str: return f"<{self.__class__.__name__} #{self.id} {self.name}>" -class HostsAccessor(PaginatedAccessor[Host, dict | None]): +class HostsAccessor(PaginatedAccessor[Host, None]): class_type = Host diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index 5b8376fe..2cd89226 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -79,3 +79,8 @@ def get_own_path(self: Self) -> Endpoint: ... 
class ADCMEntityStatus(str, Enum): UP = "up" DOWN = "down" + + +class MappingOperation(str, Enum): + ADD = "add" + REMOVE = "remove" From 31b11f81d3e2eb79038d58820e5c3bd86865f8f2 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Tue, 3 Dec 2024 10:36:45 +0300 Subject: [PATCH 21/46] ADCM-6156: Implement missing methods for BundleNode (#26) Co-authored-by: astarovo --- adcm_aio_client/core/client.py | 11 ++++++----- adcm_aio_client/core/objects/cm.py | 30 ++++++++++++++++++++++++++++-- adcm_aio_client/core/requesters.py | 24 ++++++++++++++++++++++-- adcm_aio_client/core/types.py | 4 ++++ 4 files changed, 60 insertions(+), 9 deletions(-) diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index e993fbdf..98ad5c66 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -14,13 +14,14 @@ from typing import Self from adcm_aio_client.core.objects.cm import ADCM, BundlesNode, ClustersNode, HostProvidersNode, HostsAccessor -from adcm_aio_client.core.requesters import DefaultRequester -from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Requester, Verify +from adcm_aio_client.core.requesters import BundleRetriever, BundleRetrieverInterface, DefaultRequester, Requester +from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Verify class ADCMClient: - def __init__(self: Self, requester: Requester) -> None: + def __init__(self: Self, requester: Requester, bundle_retriever: BundleRetrieverInterface) -> None: self._requester = requester + self.bundle_retriever = bundle_retriever @cached_property def clusters(self: Self) -> ClustersNode: @@ -40,7 +41,7 @@ def adcm(self: Self) -> ADCM: @cached_property def bundles(self: Self) -> BundlesNode: - return BundlesNode(path=("bundles",), requester=self._requester) + return BundlesNode(path=("bundles",), requester=self._requester, retriever=self.bundle_retriever) async def build_client( @@ -55,4 +56,4 @@ async def build_client( ) -> ADCMClient: requester = DefaultRequester(base_url=url, retries=retries, retry_interval=retry_interval, timeout=timeout) await requester.login(credentials=Credentials(username="admin", password="admin")) # noqa: S106 - return ADCMClient(requester=requester) + return ADCMClient(requester=requester, bundle_retriever=BundleRetriever()) diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index d120d00b..f6b6126e 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -1,4 +1,5 @@ from functools import cached_property +from pathlib import Path from typing import Iterable, Literal, Self import asyncio @@ -21,7 +22,8 @@ ) from adcm_aio_client.core.objects._imports import ClusterImports from adcm_aio_client.core.objects._mapping import ClusterMapping -from adcm_aio_client.core.types import ADCMEntityStatus, Endpoint +from adcm_aio_client.core.requesters import BundleRetrieverInterface +from adcm_aio_client.core.types import ADCMEntityStatus, Endpoint, Requester, UrlPath type Filter = object # TODO: implement @@ -41,7 +43,10 @@ def get_own_path(self: Self) -> Endpoint: return ("adcm",) -class License(InteractiveObject): ... +class License(InteractiveObject): + state: str + + def accept(self: Self) -> None: ... 
class Bundle(Deletable, RootInteractiveObject): @@ -71,6 +76,7 @@ def signature_status(self: Self) -> Literal["invalid", "valid", "absent"]: def _type(self: Self) -> Literal["cluster", "provider"]: return self._data["mainPrototype"]["type"] + @property def license(self: Self) -> License: return self._construct(what=License, from_data=self._data["mainPrototype"]["license"]) @@ -85,6 +91,26 @@ def _main_prototype_id(self: Self) -> int: class BundlesNode(PaginatedAccessor[Bundle, None]): class_type = Bundle + def __init__(self: Self, path: Endpoint, requester: Requester, retriever: BundleRetrieverInterface) -> None: + super().__init__(path, requester) + self.retriever = retriever + + async def create(self: Self, source: Path | UrlPath, accept_license: bool = False) -> Bundle: # noqa: FBT001, FBT002 + if isinstance(source, UrlPath): + file_content = await self.retriever.download_external_bundle(source) + files = {"file": file_content} + else: + files = {"file": Path(source).read_bytes()} + + response = await self._requester.post("bundles", data=files) + + bundle = Bundle(requester=self._requester, data=response.as_dict()) + + if accept_license and bundle.license.state == "unaccepted": + bundle.license.accept() + + return bundle + def get_own_path(self: Self) -> Endpoint: return ("bundles",) diff --git a/adcm_aio_client/core/requesters.py b/adcm_aio_client/core/requesters.py index 80247a29..d693945e 100644 --- a/adcm_aio_client/core/requesters.py +++ b/adcm_aio_client/core/requesters.py @@ -9,7 +9,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +from abc import ABC, abstractmethod from asyncio import sleep from contextlib import suppress from dataclasses import dataclass @@ -27,6 +27,7 @@ LoginError, NoCredentialsError, NotFoundError, + OperationError, ResponseDataConversionError, ResponseError, RetryRequestError, @@ -34,7 +35,7 @@ UnauthorizedError, WrongCredentialsError, ) -from adcm_aio_client.core.types import Credentials, PathPart, QueryParameters, Requester +from adcm_aio_client.core.types import Credentials, PathPart, QueryParameters, Requester, UrlPath Json: TypeAlias = Any Params = ParamSpec("Params") @@ -183,3 +184,22 @@ def _ensure_credentials(self: Self) -> Credentials: raise NoCredentialsError return self._credentials + + +class BundleRetrieverInterface(ABC): + @abstractmethod + async def download_external_bundle(self: Self, url: UrlPath) -> bytes: + pass + + +class BundleRetriever(BundleRetrieverInterface): + async def download_external_bundle(self: Self, url: UrlPath) -> bytes: + try: + async with httpx.AsyncClient() as client: + response = await client.get(url) + response.raise_for_status() + return response.content + except ValueError as err: + raise OperationError(f"Failed to download the bundle {url}") from err + except httpx.HTTPStatusError as err: + raise OperationError(f"HTTP error occurred: {err}") from err diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index 2cd89226..57fec10a 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -84,3 +84,7 @@ class ADCMEntityStatus(str, Enum): class MappingOperation(str, Enum): ADD = "add" REMOVE = "remove" + + +class UrlPath(str): + pass From a5e612a9fe173b72512a0a00578b260dd8e3257e Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Tue, 3 Dec 2024 13:07:02 +0300 Subject: [PATCH 22/46] ADCM-6154: Implement missing methods for ClusterNode (#25) 
Co-authored-by: astarovo Co-authored-by: Artem Starovoitov --- adcm_aio_client/core/objects/cm.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index f6b6126e..beabe9f1 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -80,9 +80,6 @@ def _type(self: Self) -> Literal["cluster", "provider"]: def license(self: Self) -> License: return self._construct(what=License, from_data=self._data["mainPrototype"]["license"]) - def get_own_path(self: Self) -> Endpoint: - return self.PATH_PREFIX, self.id - @cached_property def _main_prototype_id(self: Self) -> int: return self._data["mainPrototype"]["id"] @@ -180,6 +177,13 @@ def imports(self: Self) -> ClusterImports: class ClustersNode(PaginatedAccessor[Cluster, None]): class_type = Cluster + async def create(self: Self, bundle: Bundle, name: str, description: str = "") -> Cluster: + response = await self._requester.post( + "clusters", data={"prototypeId": bundle._main_prototype_id, "name": name, "description": description} + ) + + return Cluster(requester=self._requester, data=response.as_dict()) + class Service( WithStatus, From 9e6838ddd663a36048e36a09ef0bc2cac2391814 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Tue, 3 Dec 2024 13:07:54 +0300 Subject: [PATCH 23/46] ADCM-6139: Implement License object (#19) Co-authored-by: astarovo --- adcm_aio_client/core/objects/cm.py | 26 ++++++++++++++++++++------ 1 file changed, 20 insertions(+), 6 deletions(-) diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index beabe9f1..a1c6f441 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -23,7 +23,7 @@ from adcm_aio_client.core.objects._imports import ClusterImports from adcm_aio_client.core.objects._mapping import ClusterMapping from adcm_aio_client.core.requesters import BundleRetrieverInterface -from adcm_aio_client.core.types import ADCMEntityStatus, Endpoint, Requester, UrlPath +from adcm_aio_client.core.types import ADCMEntityStatus, Endpoint, Requester, UrlPath, WithProtectedRequester type Filter = object # TODO: implement @@ -43,10 +43,24 @@ def get_own_path(self: Self) -> Endpoint: return ("adcm",) -class License(InteractiveObject): - state: str +class License(WithProtectedRequester): + def __init__(self: Self, requester: Requester, prototypes_data: dict) -> None: + self._license_prototype_id = prototypes_data["id"] + self._data = prototypes_data["license"] + self._requester = requester - def accept(self: Self) -> None: ... 
+ @property + def text(self: Self) -> str: + return str(self._data["text"]) + + @property + def state(self: Self) -> Literal["absent", "accepted", "unaccepted"]: + return self._data["status"] + + async def accept(self: Self) -> str: + await self._requester.post("prototypes", self._license_prototype_id, "license", "accept", data={}) + self._data["status"] = "accepted" + return self._data["status"] class Bundle(Deletable, RootInteractiveObject): @@ -78,7 +92,7 @@ def _type(self: Self) -> Literal["cluster", "provider"]: @property def license(self: Self) -> License: - return self._construct(what=License, from_data=self._data["mainPrototype"]["license"]) + return License(self._requester, self._data["mainPrototype"]) @cached_property def _main_prototype_id(self: Self) -> int: @@ -104,7 +118,7 @@ async def create(self: Self, source: Path | UrlPath, accept_license: bool = Fals bundle = Bundle(requester=self._requester, data=response.as_dict()) if accept_license and bundle.license.state == "unaccepted": - bundle.license.accept() + await bundle.license.accept() return bundle From de419f6bd0778e740d1a972a7299a6d2534776c3 Mon Sep 17 00:00:00 2001 From: Aleksandr Alferov Date: Wed, 4 Dec 2024 11:07:07 +0300 Subject: [PATCH 24/46] Fix CODEOWNERS file (#33) --- .github/CODEOWNERS | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f169c6ba..d3847246 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,2 +1,2 @@ -*.* @a-alferov @Sealwing -*.py @Sealwing @DanBalalan @Starovoitov +* @a-alferov @Sealwing +*.py @a-alferov @Sealwing @DanBalalan @Starovoitov From fee702f0651a24732f35952c1773401ff199e531 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Wed, 4 Dec 2024 13:11:30 +0500 Subject: [PATCH 25/46] ADCM-6126 Mapping management (#29) --- adcm_aio_client/core/actions/__init__.py | 2 +- adcm_aio_client/core/actions/_objects.py | 87 +++++++++ adcm_aio_client/core/actions/actions.py | 59 ------ adcm_aio_client/core/config/_objects.py | 6 +- .../core/config/{merge.py => refresh.py} | 2 +- adcm_aio_client/core/config/types.py | 2 +- adcm_aio_client/core/errors.py | 9 + adcm_aio_client/core/mapping/__init__.py | 3 + adcm_aio_client/core/mapping/_objects.py | 181 ++++++++++++++++++ adcm_aio_client/core/mapping/refresh.py | 37 ++++ adcm_aio_client/core/mapping/types.py | 28 +++ adcm_aio_client/core/objects/_accessors.py | 24 +-- adcm_aio_client/core/objects/_base.py | 6 +- adcm_aio_client/core/objects/_mapping.py | 3 - adcm_aio_client/core/objects/cm.py | 13 +- adcm_aio_client/core/types.py | 7 + 16 files changed, 380 insertions(+), 89 deletions(-) create mode 100644 adcm_aio_client/core/actions/_objects.py delete mode 100644 adcm_aio_client/core/actions/actions.py rename adcm_aio_client/core/config/{merge.py => refresh.py} (99%) create mode 100644 adcm_aio_client/core/mapping/__init__.py create mode 100644 adcm_aio_client/core/mapping/_objects.py create mode 100644 adcm_aio_client/core/mapping/refresh.py create mode 100644 adcm_aio_client/core/mapping/types.py delete mode 100644 adcm_aio_client/core/objects/_mapping.py diff --git a/adcm_aio_client/core/actions/__init__.py b/adcm_aio_client/core/actions/__init__.py index 3ea14f1d..25395066 100644 --- a/adcm_aio_client/core/actions/__init__.py +++ b/adcm_aio_client/core/actions/__init__.py @@ -1,3 +1,3 @@ -from adcm_aio_client.core.actions.actions import ActionsAccessor +from adcm_aio_client.core.actions._objects import ActionsAccessor __all__ = ["ActionsAccessor"] diff --git 
a/adcm_aio_client/core/actions/_objects.py b/adcm_aio_client/core/actions/_objects.py new file mode 100644 index 00000000..f864013d --- /dev/null +++ b/adcm_aio_client/core/actions/_objects.py @@ -0,0 +1,87 @@ +from __future__ import annotations + +from functools import cached_property +from typing import TYPE_CHECKING, Any, Self + +from asyncstdlib import cached_property as async_cached_property + +from adcm_aio_client.core.errors import HostNotInClusterError, NoMappingRulesForActionError +from adcm_aio_client.core.mapping import ActionMapping +from adcm_aio_client.core.objects._accessors import NonPaginatedChildAccessor +from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject + +if TYPE_CHECKING: + from adcm_aio_client.core.objects.cm import Cluster + + +class Action(InteractiveChildObject): + PATH_PREFIX = "actions" + + def __init__(self: Self, parent: InteractiveObject, data: dict[str, Any]) -> None: + super().__init__(parent, data) + self._verbose = False + + @cached_property + def name(self: Self) -> str: + return self._data["name"] + + @cached_property + def display_name(self: Self) -> str: + return self._data["displayName"] + + async def run(self: Self) -> dict: # TODO: implement Task, return Task + return (await self._requester.post(*self.get_own_path(), "run", data={"isVerbose": self._verbose})).as_dict() + + @async_cached_property + async def _mapping_rule(self: Self) -> list[dict] | None: + return (await self._rich_data)["hostComponentMapRules"] + + @async_cached_property + async def mapping(self: Self) -> ActionMapping: + mapping_change_allowed = await self._mapping_rule + if not mapping_change_allowed: + message = f"Action {self.display_name} doesn't allow mapping changes" + raise NoMappingRulesForActionError(message) + + cluster = await detect_cluster(owner=self._parent) + mapping = await cluster.mapping + entries = mapping.all() + + return ActionMapping(owner=self._parent, cluster=cluster, entries=entries) + + def set_verbose(self: Self) -> Self: + self._verbose = True + return self + + @async_cached_property # TODO: Config class + async def config(self: Self) -> ...: + return (await self._rich_data)["configuration"] + + @async_cached_property + async def _rich_data(self: Self) -> dict: + return (await self._requester.get(*self.get_own_path())).as_dict() + + +class ActionsAccessor(NonPaginatedChildAccessor): + class_type = Action + + +async def detect_cluster(owner: InteractiveObject) -> Cluster: + from adcm_aio_client.core.objects.cm import Cluster, Component, Host, Service + + if isinstance(owner, Cluster): + return owner + + if isinstance(owner, (Service, Component)): + return owner.cluster + + if isinstance(owner, Host): + cluster = await owner.cluster + if cluster is None: + message = f"Host {owner.name} isn't bound to cluster " "or it's not refreshed" + raise HostNotInClusterError(message) + + return cluster + + message = f"No cluster in hierarchy for {owner}" + raise RuntimeError(message) diff --git a/adcm_aio_client/core/actions/actions.py b/adcm_aio_client/core/actions/actions.py deleted file mode 100644 index b64737e4..00000000 --- a/adcm_aio_client/core/actions/actions.py +++ /dev/null @@ -1,59 +0,0 @@ -from functools import cached_property -from typing import Any, Self - -from asyncstdlib import cached_property as async_cached_property - -from adcm_aio_client.core.objects._accessors import NonPaginatedChildAccessor -from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject -from 
adcm_aio_client.core.types import Requester - - -class Action(InteractiveChildObject): - PATH_PREFIX = "actions" - - def __init__(self: Self, parent: InteractiveObject, requester: Requester, data: dict[str, Any]) -> None: - super().__init__(parent, requester, data) - self._verbose = False - - @cached_property - def name(self: Self) -> str: - return self._data["name"] - - @cached_property - def display_name(self: Self) -> str: - return self._data["displayName"] - - async def run(self: Self) -> dict: # TODO: implement Task, return Task - return (await self._requester.post(*self.get_own_path(), "run", data={"isVerbose": self._verbose})).as_dict() - - @async_cached_property - async def _mapping_rule(self: Self) -> list[dict]: - return (await self._rich_data)["hostComponentMapRules"] - - @async_cached_property - async def mapping(self: Self) -> "ActionMapping": - return ActionMapping() - - def set_verbose(self: Self) -> Self: - self._verbose = True - return self - - def validate(self: Self) -> None: ... # TODO: implement - - @async_cached_property # TODO: Config class - async def config(self: Self) -> ...: - return (await self._rich_data)["configuration"] - - @async_cached_property - async def _rich_data(self: Self) -> dict: - return (await self._requester.get(*self.get_own_path())).as_dict() - - -class ActionsAccessor(NonPaginatedChildAccessor): - class_type = Action - - -class ActionMapping: - def add(self: Self) -> ...: ... - - def remove(self: Self) -> ...: ... diff --git a/adcm_aio_client/core/config/_objects.py b/adcm_aio_client/core/config/_objects.py index b5c21d9e..d959a465 100644 --- a/adcm_aio_client/core/config/_objects.py +++ b/adcm_aio_client/core/config/_objects.py @@ -5,15 +5,15 @@ import asyncio from adcm_aio_client.core.config._operations import find_config_difference -from adcm_aio_client.core.config.merge import apply_local_changes +from adcm_aio_client.core.config.refresh import apply_local_changes from adcm_aio_client.core.config.types import ( AnyParameterName, ConfigData, ConfigDifference, + ConfigRefreshStrategy, ConfigSchema, LevelNames, LocalConfigs, - MergeStrategy, ) from adcm_aio_client.core.errors import ConfigComparisonError, RequesterError from adcm_aio_client.core.types import AwareOfOwnPath, WithRequesterProperty @@ -313,7 +313,7 @@ async def _retrieve_current_config(self: Self) -> ConfigData: class _RefreshableConfig[T: _ConfigWrapperCreator](_GeneralConfig[T]): - async def refresh(self: Self, strategy: MergeStrategy = apply_local_changes) -> Self: + async def refresh(self: Self, strategy: ConfigRefreshStrategy = apply_local_changes) -> Self: remote_config = await retrieve_current_config( parent=self._parent, get_schema=partial(retrieve_schema, parent=self._parent) ) diff --git a/adcm_aio_client/core/config/merge.py b/adcm_aio_client/core/config/refresh.py similarity index 99% rename from adcm_aio_client/core/config/merge.py rename to adcm_aio_client/core/config/refresh.py index 56f2ec90..c67062b9 100644 --- a/adcm_aio_client/core/config/merge.py +++ b/adcm_aio_client/core/config/refresh.py @@ -27,7 +27,7 @@ def apply_local_changes(local: LocalConfigs, remote: ConfigData, schema: ConfigS def apply_remote_changes(local: LocalConfigs, remote: ConfigData, schema: ConfigSchema) -> ConfigData: if local.initial.id == remote.id: - return remote + return local.changed local_diff = find_config_difference(previous=local.initial, current=local.changed, schema=schema) if local_diff.is_empty: diff --git a/adcm_aio_client/core/config/types.py 
b/adcm_aio_client/core/config/types.py index bea50b77..8dd817fa 100644 --- a/adcm_aio_client/core/config/types.py +++ b/adcm_aio_client/core/config/types.py @@ -269,7 +269,7 @@ class LocalConfigs(NamedTuple): changed: ConfigData -class MergeStrategy(Protocol): +class ConfigRefreshStrategy(Protocol): def __call__(self: Self, local: LocalConfigs, remote: ConfigData, schema: ConfigSchema) -> ConfigData: """ `remote` may be changed according to strategy, so it shouldn't be "read-only"/"initial" diff --git a/adcm_aio_client/core/errors.py b/adcm_aio_client/core/errors.py index 33bf5935..d71207e9 100644 --- a/adcm_aio_client/core/errors.py +++ b/adcm_aio_client/core/errors.py @@ -89,6 +89,9 @@ class OperationError(AccessorError): pass +class HostNotInClusterError(ADCMClientError): ... + + # Config @@ -96,3 +99,9 @@ class ConfigError(ADCMClientError): ... class ConfigComparisonError(ConfigError): ... + + +# Mapping + + +class NoMappingRulesForActionError(ADCMClientError): ... diff --git a/adcm_aio_client/core/mapping/__init__.py b/adcm_aio_client/core/mapping/__init__.py new file mode 100644 index 00000000..8f707b69 --- /dev/null +++ b/adcm_aio_client/core/mapping/__init__.py @@ -0,0 +1,3 @@ +from adcm_aio_client.core.mapping._objects import ActionMapping, ClusterMapping + +__all__ = ["ActionMapping", "ClusterMapping"] diff --git a/adcm_aio_client/core/mapping/_objects.py b/adcm_aio_client/core/mapping/_objects.py new file mode 100644 index 00000000..2d6ac6b3 --- /dev/null +++ b/adcm_aio_client/core/mapping/_objects.py @@ -0,0 +1,181 @@ +from __future__ import annotations + +from collections.abc import Generator +from copy import copy +from functools import cached_property +from typing import TYPE_CHECKING, Any, Iterable, Self +import asyncio + +from adcm_aio_client.core.mapping.refresh import apply_local_changes, apply_remote_changes +from adcm_aio_client.core.mapping.types import LocalMappings, MappingEntry, MappingPair, MappingRefreshStrategy +from adcm_aio_client.core.objects._accessors import NonPaginatedAccessor +from adcm_aio_client.core.types import ComponentID, HostID, Requester + +if TYPE_CHECKING: + from adcm_aio_client.core.objects.cm import Cluster, Component, Host, HostsAccessor, Service + + +class ComponentsMappingNode(NonPaginatedAccessor["Component", None]): + def __new__(cls: type[Self], cluster: Cluster, requester: Requester) -> Self: + _ = cluster, requester + + if not hasattr(cls, "class_type"): + from adcm_aio_client.core.objects.cm import Component + + cls.class_type = Component + + return super().__new__(cls) + + def __init__(self: Self, cluster: Cluster, requester: Requester) -> None: + path = (*cluster.get_own_path(), "mapping", "components") + super().__init__(path=path, requester=requester, accessor_filter=None) + self._cluster = cluster + + def _create_object(self: Self, data: dict[str, Any]) -> Component: + from adcm_aio_client.core.objects.cm import Service + + # service data here should be enough, + # when not, we should use lazy objects + # or request services (means it should be async) + caches + service = Service(parent=self._cluster, data=data["service"]) + return self.class_type(parent=service, data=data) + + +class ActionMapping: + def __init__( + self: Self, owner: Cluster | Service | Component | Host, cluster: Cluster, entries: Iterable[MappingPair] + ) -> None: + self._owner = owner + self._cluster = cluster + self._requester = self._owner.requester + + self._components: dict[ComponentID, Component] = {} + self._hosts: dict[HostID, Host] = {} + + 
self._initial: set[MappingEntry] = set() + + for component, host in entries: + self._components[component.id] = component + self._hosts[host.id] = host + self._initial.add(MappingEntry(host_id=host.id, component_id=component.id)) + + self._current: set[MappingEntry] = copy(self._initial) + + def empty(self: Self) -> Self: + self._current.clear() + return self + + def all(self: Self) -> list[MappingPair]: + return list(self.iter()) + + def iter(self: Self) -> Generator[MappingPair, None, None]: + for entry in self._current: + yield (self._components[entry.component_id], self._hosts[entry.host_id]) + + async def add(self: Self, component: Component | Iterable[Component], host: Host | Iterable[Host]) -> Self: + components, hosts = self._ensure_collections(component=component, host=host) + to_add = self._to_entries(components=components, hosts=hosts) + + self._current |= to_add + + return self + + async def remove(self: Self, component: Component | Iterable[Component], host: Host | Iterable[Host]) -> Self: + components, hosts = self._ensure_collections(component=component, host=host) + to_remove = self._to_entries(components=components, hosts=hosts) + + self._current -= to_remove + + return self + + @cached_property + def components(self: Self) -> ComponentsMappingNode: + return ComponentsMappingNode(cluster=self._cluster, requester=self._owner.requester) + + @cached_property + def hosts(self: Self) -> HostsAccessor: + from adcm_aio_client.core.objects.cm import HostsAccessor + + cluster_path = self._cluster.get_own_path() + + return HostsAccessor(path=cluster_path, requester=self._owner.requester) + + def _ensure_collections( + self: Self, component: Component | Iterable[Component], host: Host | Iterable[Host] + ) -> tuple[Iterable[Component], Iterable[Host]]: + if isinstance(component, Component): + component = (component,) + + if isinstance(host, Host): + host = (host,) + + return component, host + + def _to_entries(self: Self, components: Iterable[Component], hosts: Iterable[Host]) -> set[MappingEntry]: + return {MappingEntry(host_id=host.id, component_id=component.id) for host in hosts for component in components} + + def _to_payload(self: Self) -> list[dict]: + return [{"componentId": entry.component_id, "hostId": entry.host_id} for entry in self._current] + + +class ClusterMapping(ActionMapping): + def __init__(self: Self, owner: Cluster, entries: Iterable[MappingPair]) -> None: + super().__init__(owner=owner, cluster=owner, entries=entries) + + @classmethod + async def for_cluster(cls: type[Self], owner: Cluster) -> Self: + instance = cls(owner=owner, entries=()) + await instance.refresh(strategy=apply_remote_changes) + return instance + + async def save(self: Self) -> Self: + data = self._to_payload() + + await self._requester.post(*self._cluster.get_own_path(), "mapping", data=data) + + self._initial = copy(self._current) + + return self + + async def refresh(self: Self, strategy: MappingRefreshStrategy = apply_local_changes) -> Self: + response = await self._requester.get(*self._cluster.get_own_path(), "mapping") + remote = {MappingEntry(**entry) for entry in response.as_list()} + + local = LocalMappings(initial=self._initial, current=self._current) + merged_mapping = strategy(local=local, remote=remote) + + self._initial = merged_mapping + self._current = copy(merged_mapping) + + await self._fill_missing_objects() + + return self + + async def _fill_missing_objects(self: Self) -> None: + missing_hosts = set() + missing_components = set() + + for entry in self._current | 
self._initial: + if entry.host_id not in self._hosts: + missing_hosts.add(entry.host_id) + + if entry.component_id not in self._components: + missing_components.add(entry.component_id) + + hosts_task = None + if missing_hosts: + hosts_task = asyncio.create_task( + self.hosts.list(query={"id__in": missing_hosts, "limit": len(missing_hosts)}) + ) + + components_task = None + if missing_components: + components_task = asyncio.create_task( + self.components.list(query={"id__in": missing_components, "limit": len(missing_components)}) + ) + + if hosts_task is not None: + self._hosts |= {host.id: host for host in await hosts_task} + + if components_task is not None: + self._components |= {component.id: component for component in await components_task} diff --git a/adcm_aio_client/core/mapping/refresh.py b/adcm_aio_client/core/mapping/refresh.py new file mode 100644 index 00000000..5048871d --- /dev/null +++ b/adcm_aio_client/core/mapping/refresh.py @@ -0,0 +1,37 @@ +from adcm_aio_client.core.mapping.types import LocalMappings, MappingData + +type Added = MappingData +type Removed = MappingData + + +def apply_local_changes(local: LocalMappings, remote: MappingData) -> MappingData: + if local.initial == remote: + return local.current + + local_added, local_removed = _find_difference(previous=local.initial, current=local.current) + + remote |= local_added + remote -= local_removed + + return remote + + +def apply_remote_changes(local: LocalMappings, remote: MappingData) -> MappingData: + local_added, local_removed = _find_difference(previous=local.initial, current=local.current) + + remote_added, remote_removed = _find_difference(previous=local.initial, current=remote) + + to_add = local_added - remote_removed + to_remove = local_removed - remote_added + + remote |= to_add + remote -= to_remove + + return remote + + +def _find_difference(previous: MappingData, current: MappingData) -> tuple[Added, Removed]: + added = current - previous + removed = previous - current + + return added, removed diff --git a/adcm_aio_client/core/mapping/types.py b/adcm_aio_client/core/mapping/types.py new file mode 100644 index 00000000..3e7bc67b --- /dev/null +++ b/adcm_aio_client/core/mapping/types.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, NamedTuple, Protocol + +from adcm_aio_client.core.types import ComponentID, HostID + +if TYPE_CHECKING: + from adcm_aio_client.core.objects.cm import Component, Host + + +type MappingPair = tuple[Component, Host] + + +class MappingEntry(NamedTuple): + host_id: HostID + component_id: ComponentID + + +type MappingData = set[MappingEntry] + + +class LocalMappings(NamedTuple): + initial: MappingData + current: MappingData + + +class MappingRefreshStrategy(Protocol): + def __call__(self, local: LocalMappings, remote: MappingData) -> MappingData: ... 
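Because mapping state is stored as plain sets of MappingEntry, both refresh strategies reduce to set algebra over what was added and removed locally versus the server's copy. A small worked example of apply_local_changes; the host and component ids are made up for illustration.

from adcm_aio_client.core.mapping.refresh import apply_local_changes
from adcm_aio_client.core.mapping.types import LocalMappings, MappingEntry

initial = {MappingEntry(host_id=1, component_id=10)}
# locally host 2 was mapped onto the same component, host 1 left in place
current = initial | {MappingEntry(host_id=2, component_id=10)}
# meanwhile the server replaced host 1 with host 3
remote = {MappingEntry(host_id=3, component_id=10)}

result = apply_local_changes(local=LocalMappings(initial=initial, current=current), remote=remote)
# the local addition survives, the untouched remote replacement is kept
assert result == {MappingEntry(host_id=2, component_id=10), MappingEntry(host_id=3, component_id=10)}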
# noqa: ANN101 diff --git a/adcm_aio_client/core/objects/_accessors.py b/adcm_aio_client/core/objects/_accessors.py index 3f93cda0..53c95d3a 100644 --- a/adcm_aio_client/core/objects/_accessors.py +++ b/adcm_aio_client/core/objects/_accessors.py @@ -60,8 +60,8 @@ async def all(self: Self) -> list[ReturnObject]: async def filter(self: Self) -> list[ReturnObject]: return [i async for i in self.iter()] - async def list(self: Self) -> list[ReturnObject]: - response = await self._request_endpoint(query={}) + async def list(self: Self, query: dict | None = None) -> list[ReturnObject]: + response = await self._request_endpoint(query=query or {}) results = self._extract_results_from_response(response) return [self._create_object(obj) for obj in results] @@ -99,16 +99,10 @@ def __init__( self._parent = parent def _create_object(self: Self, data: dict[str, Any]) -> Child: - return self.class_type(parent=self._parent, requester=self._requester, data=data) - + return self.class_type(parent=self._parent, data=data) -class NonPaginatedChildAccessor[Parent, Child: InteractiveChildObject, Filter](Accessor[Child, Filter]): - def __init__( - self: Self, parent: Parent, path: Endpoint, requester: Requester, accessor_filter: AccessorFilter = None - ) -> None: - super().__init__(path, requester, accessor_filter) - self._parent = parent +class NonPaginatedAccessor[Child: InteractiveObject, Filter](Accessor[Child, Filter]): async def iter(self: Self) -> AsyncGenerator[Child, None]: response = await self._request_endpoint(query={}) results = self._extract_results_from_response(response=response) @@ -118,5 +112,13 @@ async def iter(self: Self) -> AsyncGenerator[Child, None]: def _extract_results_from_response(self: Self, response: RequesterResponse) -> list[dict]: return response.as_list() + +class NonPaginatedChildAccessor[Parent, Child: InteractiveChildObject, Filter](NonPaginatedAccessor[Child, Filter]): + def __init__( + self: Self, parent: Parent, path: Endpoint, requester: Requester, accessor_filter: AccessorFilter = None + ) -> None: + super().__init__(path, requester, accessor_filter) + self._parent = parent + def _create_object(self: Self, data: dict[str, Any]) -> Child: - return self.class_type(parent=self._parent, requester=self._requester, data=data) + return self.class_type(parent=self._parent, data=data) diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index e6fd910f..b9e748c4 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -50,7 +50,7 @@ def _construct[Object: "InteractiveObject"](self: Self, what: type[Object], from def _construct_child[Child: "InteractiveChildObject"]( self: Self, what: type[Child], from_data: dict[str, Any] ) -> Child: - return what(requester=self._requester, data=from_data, parent=self) + return what(data=from_data, parent=self) def _clear_cache(self: Self) -> None: for name in self._delete_on_refresh: @@ -78,8 +78,8 @@ def _build_own_path(cls: type[Self], object_id: int) -> Endpoint: class InteractiveChildObject[Parent: InteractiveObject](InteractiveObject): - def __init__(self: Self, parent: Parent, requester: Requester, data: dict[str, Any]) -> None: - super().__init__(requester=requester, data=data) + def __init__(self: Self, parent: Parent, data: dict[str, Any]) -> None: + super().__init__(requester=parent.requester, data=data) self._parent = parent def get_own_path(self: Self) -> Endpoint: diff --git a/adcm_aio_client/core/objects/_mapping.py 
b/adcm_aio_client/core/objects/_mapping.py deleted file mode 100644 index d7b99c6f..00000000 --- a/adcm_aio_client/core/objects/_mapping.py +++ /dev/null @@ -1,3 +0,0 @@ -class ClusterMapping: - # todo lacking implementation - ... diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index a1c6f441..978a546d 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -6,6 +6,7 @@ from asyncstdlib.functools import cached_property as async_cached_property # noqa: N813 from adcm_aio_client.core.errors import NotFoundError, OperationError, ResponseError +from adcm_aio_client.core.mapping import ClusterMapping from adcm_aio_client.core.objects._accessors import ( PaginatedAccessor, PaginatedChildAccessor, @@ -21,7 +22,6 @@ WithUpgrades, ) from adcm_aio_client.core.objects._imports import ClusterImports -from adcm_aio_client.core.objects._mapping import ClusterMapping from adcm_aio_client.core.requesters import BundleRetrieverInterface from adcm_aio_client.core.types import ADCMEntityStatus, Endpoint, Requester, UrlPath, WithProtectedRequester @@ -137,6 +137,7 @@ class Cluster( RootInteractiveObject, ): PATH_PREFIX = "clusters" + # data-based properties @property @@ -149,9 +150,6 @@ def description(self: Self) -> str: # related/dynamic data access - # todo think how such properties will be invalidated when data is updated - # during `refresh()` / `reread()` calls. - # See cache invalidation or alternatives in documentation for `cached_property` @async_cached_property async def bundle(self: Self) -> Bundle: prototype_id = self._data["prototype"]["id"] @@ -163,6 +161,7 @@ async def bundle(self: Self) -> Bundle: return self._construct(what=Bundle, from_data=response.as_dict()) # object-specific methods + async def set_ansible_forks(self: Self, value: int) -> Self: await self._requester.post( *self.get_own_path(), "ansible-config", data={"config": {"defaults": {"forks": value}}, "adcmMeta": {}} @@ -171,9 +170,9 @@ async def set_ansible_forks(self: Self, value: int) -> Self: # nodes and managers to access - @cached_property - def mapping(self: Self) -> ClusterMapping: - return ClusterMapping() + @async_cached_property + async def mapping(self: Self) -> ClusterMapping: + return await ClusterMapping.for_cluster(owner=self) @cached_property def services(self: Self) -> "ServicesNode": diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index 57fec10a..c43bfdb1 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -61,6 +61,13 @@ async def delete(self: Self, *path: PathPart) -> RequesterResponse: ... 
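With Cluster.mapping now an awaitable cached property, editing the host-component mapping follows an await-mutate-save flow. A usage sketch; cluster, component and host are assumed to be objects fetched earlier through the client.

async def remap(cluster, component, host) -> None:
    mapping = await cluster.mapping                     # ClusterMapping.for_cluster() under the hood
    await mapping.add(component=component, host=host)   # only updates the local entry set
    await mapping.refresh()                             # optional: merge concurrent server-side edits
    await mapping.save()                                # POSTs the accumulated entries to .../mapping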
# Objects +type ComponentID = int +type HostID = int + + +class WithID(Protocol): + id: int + class WithProtectedRequester(Protocol): _requester: Requester From 6baf0ef77df5438ba6d1e6136d6d0f29b6547c69 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Thu, 5 Dec 2024 16:35:55 +0300 Subject: [PATCH 26/46] ADCM-6158: Implement missing methods for HostNode (#30) Co-authored-by: astarovo --- adcm_aio_client/core/client.py | 4 +-- adcm_aio_client/core/objects/_base.py | 41 +++++++++++++++++++++++-- adcm_aio_client/core/objects/_common.py | 17 +++++++++- adcm_aio_client/core/objects/cm.py | 25 ++++++++++----- adcm_aio_client/core/types.py | 6 ++++ 5 files changed, 81 insertions(+), 12 deletions(-) diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index 98ad5c66..71bd625c 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -13,7 +13,7 @@ from functools import cached_property from typing import Self -from adcm_aio_client.core.objects.cm import ADCM, BundlesNode, ClustersNode, HostProvidersNode, HostsAccessor +from adcm_aio_client.core.objects.cm import ADCM, BundlesNode, ClustersNode, HostProvidersNode, HostsAccessor, HostsNode from adcm_aio_client.core.requesters import BundleRetriever, BundleRetrieverInterface, DefaultRequester, Requester from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Verify @@ -29,7 +29,7 @@ def clusters(self: Self) -> ClustersNode: @cached_property def hosts(self: Self) -> HostsAccessor: - return HostsAccessor(path=("hosts",), requester=self._requester) + return HostsNode(path=("hosts",), requester=self._requester) @cached_property def hostproviders(self: Self) -> HostProvidersNode: diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index b9e748c4..578c2137 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -5,8 +5,14 @@ from asyncstdlib.functools import CachedProperty -from adcm_aio_client.core.requesters import Requester -from adcm_aio_client.core.types import AwareOfOwnPath, Endpoint, WithProtectedRequester, WithRequesterProperty +from adcm_aio_client.core.types import ( + AwareOfOwnPath, + Endpoint, + MaintenanceModeStatus, + Requester, + WithProtectedRequester, + WithRequesterProperty, +) class InteractiveObject(WithProtectedRequester, WithRequesterProperty, AwareOfOwnPath): @@ -84,3 +90,34 @@ def __init__(self: Self, parent: Parent, data: dict[str, Any]) -> None: def get_own_path(self: Self) -> Endpoint: return *self._parent.get_own_path(), self.PATH_PREFIX, self.id + + +class MaintenanceMode: + def __init__( + self: Self, maintenance_mode_status: MaintenanceModeStatus, requester: Requester, path: Endpoint + ) -> None: + self._maintenance_mode_status = maintenance_mode_status + self._requester = requester + self._path = path + + def __repr__(self: Self) -> MaintenanceModeStatus: + return self._maintenance_mode_status + + def __str__(self: Self) -> MaintenanceModeStatus: + return self._maintenance_mode_status + + @property + def value(self: Self) -> str: + return self._maintenance_mode_status.value + + async def on(self: Self) -> None: + current_mm_status = await self._requester.post( + *self._path, "maintenance-mode", data={"maintenanceMode": MaintenanceModeStatus.ON} + ) + self._maintenance_mode_status = current_mm_status.as_dict()["maintenanceMode"] + + async def off(self: Self) -> None: + current_mm_status = await self._requester.post( + *self._path, "maintenanceMode", data={"maintenanceMode": 
MaintenanceModeStatus.OFF} + ) + self._maintenance_mode_status = current_mm_status.as_dict()["maintenanceMode"] diff --git a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py index 85e71e4c..09aa93b6 100644 --- a/adcm_aio_client/core/objects/_common.py +++ b/adcm_aio_client/core/objects/_common.py @@ -1,10 +1,17 @@ from functools import cached_property from typing import Self +from asyncstdlib.functools import cached_property as async_cached_property # noqa: N813 + from adcm_aio_client.core.actions import ActionsAccessor from adcm_aio_client.core.config import ConfigHistoryNode, ObjectConfig from adcm_aio_client.core.config._objects import ConfigOwner -from adcm_aio_client.core.types import ADCMEntityStatus, AwareOfOwnPath, WithProtectedRequester +from adcm_aio_client.core.objects._base import MaintenanceMode +from adcm_aio_client.core.types import ( + ADCMEntityStatus, + AwareOfOwnPath, + WithProtectedRequester, +) class Deletable(WithProtectedRequester, AwareOfOwnPath): @@ -48,3 +55,11 @@ def config_groups(self: Self) -> ...: ... class WithActionHostGroups(WithProtectedRequester, AwareOfOwnPath): @cached_property def action_host_groups(self: Self) -> ...: ... + + +class WithMaintenanceMode(WithProtectedRequester, AwareOfOwnPath): + @async_cached_property + async def maintenance_mode(self: Self) -> MaintenanceMode: + maintenance_mode = MaintenanceMode(self._data["maintenanceMode"], self._requester, self.get_own_path()) # pyright: ignore[reportAttributeAccessIssue] + self._data["maintenanceMode"] = maintenance_mode.value # pyright: ignore[reportAttributeAccessIssue] + return maintenance_mode diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 978a546d..1b8b2f09 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -11,19 +11,24 @@ PaginatedAccessor, PaginatedChildAccessor, ) -from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject, RootInteractiveObject +from adcm_aio_client.core.objects._base import ( + InteractiveChildObject, + InteractiveObject, + RootInteractiveObject, +) from adcm_aio_client.core.objects._common import ( Deletable, WithActionHostGroups, WithActions, WithConfig, WithConfigGroups, + WithMaintenanceMode, WithStatus, WithUpgrades, ) from adcm_aio_client.core.objects._imports import ClusterImports from adcm_aio_client.core.requesters import BundleRetrieverInterface -from adcm_aio_client.core.types import ADCMEntityStatus, Endpoint, Requester, UrlPath, WithProtectedRequester +from adcm_aio_client.core.types import Endpoint, Requester, UrlPath, WithProtectedRequester type Filter = object # TODO: implement @@ -307,7 +312,7 @@ async def create(self: Self, bundle: Bundle, name: str, description: str = "") - return HostProvider(requester=self._requester, data=response.as_dict()) -class Host(Deletable, WithActions, RootInteractiveObject): +class Host(Deletable, WithActions, WithStatus, WithMaintenanceMode, RootInteractiveObject): PATH_PREFIX = "hosts" @property @@ -318,10 +323,6 @@ def name(self: Self) -> str: def description(self: Self) -> str: return str(self._data["description"]) - async def get_status(self: Self) -> ADCMEntityStatus: - response = await self._requester.get(*self.get_own_path()) - return ADCMEntityStatus(response.as_dict()["status"]) - @async_cached_property async def cluster(self: Self) -> Cluster | None: if not self._data["cluster"]: @@ -340,6 +341,16 @@ class HostsAccessor(PaginatedAccessor[Host, None]): class_type = 
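WithMaintenanceMode exposes the flag through an awaitable property that wraps the small MaintenanceMode helper, so toggling it is two awaits. A sketch, assuming host is a Host fetched earlier.

async def toggle_maintenance(host) -> None:
    mm = await host.maintenance_mode   # MaintenanceMode helper built from the host's data
    await mm.on()                      # sends {"maintenanceMode": "on"} to the maintenance-mode endpoint
    await mm.off()                     # and turns it back off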
Host +class HostsNode(HostsAccessor): + async def create( + self: Self, provider: HostProvider, name: str, description: str, cluster: Cluster | None = None + ) -> None: + data = {"hostproviderId": provider.id, "name": name, "description": description} + if cluster: + data["clusterId"] = cluster.id + await self._requester.post(*self._path, data=data) + + class HostsInClusterNode(HostsAccessor): async def add(self: Self, host: Host | Iterable[Host] | None = None, filters: Filter | None = None) -> None: hosts = await self._get_hosts_from_arg_or_filter(host=host, filters=filters) diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index c43bfdb1..2cc6e2b9 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -95,3 +95,9 @@ class MappingOperation(str, Enum): class UrlPath(str): pass + + +class MaintenanceModeStatus(str, Enum): + ON = "on" + OFF = "off" + CHANGING = "changing" From c194bfb3deaa50b60867fe3eeaddc855a9bf3629 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Thu, 5 Dec 2024 16:59:27 +0300 Subject: [PATCH 27/46] ADCM-6155: Implement missing methods for ServiceNode (#28) Co-authored-by: astarovo --- adcm_aio_client/core/filters.py | 1 + adcm_aio_client/core/objects/_common.py | 8 ++--- adcm_aio_client/core/objects/cm.py | 39 ++++++++++++++++++++++++- 3 files changed, 41 insertions(+), 7 deletions(-) diff --git a/adcm_aio_client/core/filters.py b/adcm_aio_client/core/filters.py index 27ed00f5..79b4c8de 100644 --- a/adcm_aio_client/core/filters.py +++ b/adcm_aio_client/core/filters.py @@ -11,3 +11,4 @@ # limitations under the License. # TODO: Prepare API for filters +class Filter: ... diff --git a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py index 09aa93b6..5a24300d 100644 --- a/adcm_aio_client/core/objects/_common.py +++ b/adcm_aio_client/core/objects/_common.py @@ -6,12 +6,8 @@ from adcm_aio_client.core.actions import ActionsAccessor from adcm_aio_client.core.config import ConfigHistoryNode, ObjectConfig from adcm_aio_client.core.config._objects import ConfigOwner -from adcm_aio_client.core.objects._base import MaintenanceMode -from adcm_aio_client.core.types import ( - ADCMEntityStatus, - AwareOfOwnPath, - WithProtectedRequester, -) +from adcm_aio_client.core.objects._base import AwareOfOwnPath, MaintenanceMode, WithProtectedRequester +from adcm_aio_client.core.types import ADCMEntityStatus class Deletable(WithProtectedRequester, AwareOfOwnPath): diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 1b8b2f09..5828d74b 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -28,7 +28,12 @@ ) from adcm_aio_client.core.objects._imports import ClusterImports from adcm_aio_client.core.requesters import BundleRetrieverInterface -from adcm_aio_client.core.types import Endpoint, Requester, UrlPath, WithProtectedRequester +from adcm_aio_client.core.types import ( + Endpoint, + Requester, + UrlPath, + WithProtectedRequester, +) type Filter = object # TODO: implement @@ -230,10 +235,42 @@ def cluster(self: Self) -> Cluster: def components(self: Self) -> "ComponentsNode": return ComponentsNode(parent=self, path=(*self.get_own_path(), "components"), requester=self._requester) + @property + def license(self: Self) -> License: + return License(self._requester, self._data) + class ServicesNode(PaginatedChildAccessor[Cluster, Service, None]): class_type = Service + def _get_ids_and_license_state_by_filter( + self: Self, + 
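HostsNode.create() only needs a provider and a name; passing a cluster binds the new host right away. A sketch, assuming client is a built ADCMClient and provider/cluster were fetched earlier; the host name is hypothetical.

async def register_host(client, provider, cluster=None) -> None:
    await client.hosts.create(
        provider=provider,
        name="worker-1.example.com",      # hypothetical FQDN
        description="managed by the client",
        cluster=cluster,                  # optional: adds clusterId to the payload
    )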
service_prototypes: dict, + ) -> dict[int, str]: + # todo: implement retrieving of ids when filter is implemented + if not service_prototypes: + raise NotFoundError + return {s["id"]: s["license"]["status"] for s in service_prototypes} + + async def add( + self: Self, + accept_license: bool = False, # noqa: FBT001, FBT002 + ) -> Service: + candidates_prototypes = ( + await self._requester.get(*self._parent.get_own_path(), "service-candidates") + ).as_dict() + services_data = self._get_ids_and_license_state_by_filter(candidates_prototypes) + if accept_license: + for prototype_id, license_status in services_data.items(): + if license_status == "unaccepted": + await self._requester.post("prototypes", prototype_id, "license", "accept", data={}) + + response = await self._requester.post( + "services", data=[{"prototypeId": prototype_id} for prototype_id in services_data] + ) + + return Service(data=response.as_dict(), parent=self._parent) + class Component( WithStatus, WithActions, WithConfig, WithActionHostGroups, WithConfigGroups, InteractiveChildObject[Service] From 8bdac722736b75bf1b27337637790a98bae69563 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Fri, 6 Dec 2024 16:07:49 +0300 Subject: [PATCH 28/46] ADCM-6176: Implement node for getting jobs (#34) Co-authored-by: astarovo --- adcm_aio_client/core/objects/_common.py | 8 +++- adcm_aio_client/core/objects/cm.py | 61 ++++++++++++++++++++++--- adcm_aio_client/core/types.py | 10 ++++ 3 files changed, 71 insertions(+), 8 deletions(-) diff --git a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py index 5a24300d..f60d018d 100644 --- a/adcm_aio_client/core/objects/_common.py +++ b/adcm_aio_client/core/objects/_common.py @@ -7,7 +7,7 @@ from adcm_aio_client.core.config import ConfigHistoryNode, ObjectConfig from adcm_aio_client.core.config._objects import ConfigOwner from adcm_aio_client.core.objects._base import AwareOfOwnPath, MaintenanceMode, WithProtectedRequester -from adcm_aio_client.core.types import ADCMEntityStatus +from adcm_aio_client.core.types import ADCMEntityStatus, JobStatus class Deletable(WithProtectedRequester, AwareOfOwnPath): @@ -59,3 +59,9 @@ async def maintenance_mode(self: Self) -> MaintenanceMode: maintenance_mode = MaintenanceMode(self._data["maintenanceMode"], self._requester, self.get_own_path()) # pyright: ignore[reportAttributeAccessIssue] self._data["maintenanceMode"] = maintenance_mode.value # pyright: ignore[reportAttributeAccessIssue] return maintenance_mode + + +class WithJobStatus(WithProtectedRequester, AwareOfOwnPath): + async def get_job_status(self: Self) -> JobStatus: + response = await self._requester.get(*self.get_own_path()) + return JobStatus(response.as_dict()["status"]) diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 5828d74b..e62d50f9 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -1,10 +1,12 @@ +from datetime import datetime from functools import cached_property from pathlib import Path -from typing import Iterable, Literal, Self +from typing import Callable, Iterable, Literal, Self import asyncio from asyncstdlib.functools import cached_property as async_cached_property # noqa: N813 +from adcm_aio_client.core.actions._objects import Action from adcm_aio_client.core.errors import NotFoundError, OperationError, ResponseError from adcm_aio_client.core.mapping import ClusterMapping from adcm_aio_client.core.objects._accessors import ( @@ -22,18 +24,14 @@ WithActions, 
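Until filters land, ServicesNode.add() works on the cluster's whole service-candidates list: with accept_license=True it accepts any still-unaccepted licenses prototype by prototype, then adds the services in a single request. A sketch, assuming cluster was fetched earlier.

async def add_all_candidate_services(cluster) -> None:
    # every candidate service is added; unaccepted licenses are accepted first
    await cluster.services.add(accept_license=True)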
WithConfig, WithConfigGroups, + WithJobStatus, WithMaintenanceMode, WithStatus, WithUpgrades, ) from adcm_aio_client.core.objects._imports import ClusterImports from adcm_aio_client.core.requesters import BundleRetrieverInterface -from adcm_aio_client.core.types import ( - Endpoint, - Requester, - UrlPath, - WithProtectedRequester, -) +from adcm_aio_client.core.types import Endpoint, JobStatus, Requester, UrlPath, WithProtectedRequester type Filter = object # TODO: implement @@ -422,3 +420,52 @@ async def _get_hosts_from_arg_or_filter( hosts = await self.filter(filters) # type: ignore # TODO return hosts + + +class Job[Object: "InteractiveObject"](WithStatus, WithActions, WithJobStatus, RootInteractiveObject): + PATH_PREFIX = "tasks" + + @property + def name(self: Self) -> str: + return str(self._data["name"]) + + @property + def start_time(self: Self) -> datetime: + return self._data["startTime"] + + @property + def finish_time(self: Self) -> datetime: + return self._data["endTime"] + + @property + def object(self: Self) -> Object: + obj_data = self._data["objects"][0] + obj_type = obj_data["type"] + + obj_dict = { + "host": Host, + "component": Component, + "provider": HostProvider, + "cluster": Cluster, + "service": Service, + "adcm": ADCM, + } + + return self._construct(what=obj_dict[obj_type], from_data=obj_data) + + @property + def action(self: Self) -> Action: + return self._construct(what=Action, from_data=self._data["action"]) + + async def wait(self: Self, status_predicate: Callable[[], bool], timeout: int = 30, poll: int = 5) -> None: + if self._data["status"] not in (JobStatus.RUNNING, JobStatus.CREATED): + return + + for _ in range(timeout // poll): + await asyncio.sleep(poll) + if status_predicate(): + self._data["status"] = self.get_status() + return + + async def terminate(self: Self) -> None: + await self._requester.post(*self.get_own_path(), "terminate", data={}) diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index 2cc6e2b9..4c5414c5 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -93,6 +93,16 @@ class MappingOperation(str, Enum): REMOVE = "remove" +class JobStatus(str, Enum): + CREATED = "created" + SUCCESS = "success" + FAILED = "failed" + RUNNING = "running" + LOCKED = "locked" + ABORTED = "aborted" + BROKEN = "broken" + + class UrlPath(str): pass From 82e0b1fb4797520c6895a202d10e14b9cb273188 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Fri, 6 Dec 2024 16:08:11 +0300 Subject: [PATCH 29/46] ADCM-6177: Implement node for getting upgrades (#37) Co-authored-by: astarovo --- adcm_aio_client/core/actions/__init__.py | 4 ++-- adcm_aio_client/core/actions/_objects.py | 23 ++++++++++++++++++++++- adcm_aio_client/core/objects/_common.py | 5 +++-- 3 files changed, 27 insertions(+), 5 deletions(-) diff --git a/adcm_aio_client/core/actions/__init__.py b/adcm_aio_client/core/actions/__init__.py index 25395066..10a45c9f 100644 --- a/adcm_aio_client/core/actions/__init__.py +++ b/adcm_aio_client/core/actions/__init__.py @@ -1,3 +1,3 @@ -from adcm_aio_client.core.actions._objects import ActionsAccessor +from adcm_aio_client.core.actions._objects import ActionsAccessor, UpgradeNode -__all__ = ["ActionsAccessor"] +__all__ = ["ActionsAccessor", "UpgradeNode"] diff --git a/adcm_aio_client/core/actions/_objects.py b/adcm_aio_client/core/actions/_objects.py index f864013d..bf0c11f5 100644 --- a/adcm_aio_client/core/actions/_objects.py +++ b/adcm_aio_client/core/actions/_objects.py @@ -11,7 +11,7 @@ from 
adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject if TYPE_CHECKING: - from adcm_aio_client.core.objects.cm import Cluster + from adcm_aio_client.core.objects.cm import Bundle, Cluster class Action(InteractiveChildObject): @@ -66,6 +66,27 @@ class ActionsAccessor(NonPaginatedChildAccessor): class_type = Action +class Upgrade(Action): + PATH_PREFIX = "upgrades" + + @property + def bundle(self: Self) -> Bundle: + from adcm_aio_client.core.objects.cm import Bundle + + return Bundle(requester=self._requester, data=self._data["bundle"]) + + @async_cached_property # TODO: Config class + async def config(self: Self) -> ...: + return (await self._rich_data)["configuration"] + + def validate(self: Self) -> bool: + return True + + +class UpgradeNode(NonPaginatedChildAccessor): + class_type = Upgrade + + async def detect_cluster(owner: InteractiveObject) -> Cluster: from adcm_aio_client.core.objects.cm import Cluster, Component, Host, Service diff --git a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py index f60d018d..0387a903 100644 --- a/adcm_aio_client/core/objects/_common.py +++ b/adcm_aio_client/core/objects/_common.py @@ -3,7 +3,7 @@ from asyncstdlib.functools import cached_property as async_cached_property # noqa: N813 -from adcm_aio_client.core.actions import ActionsAccessor +from adcm_aio_client.core.actions import ActionsAccessor, UpgradeNode from adcm_aio_client.core.config import ConfigHistoryNode, ObjectConfig from adcm_aio_client.core.config._objects import ConfigOwner from adcm_aio_client.core.objects._base import AwareOfOwnPath, MaintenanceMode, WithProtectedRequester @@ -40,7 +40,8 @@ def config_history(self: Self) -> ConfigHistoryNode: class WithUpgrades(WithProtectedRequester, AwareOfOwnPath): @cached_property - def upgrades(self: Self) -> ...: ... 
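WithUpgrades.upgrades now returns a real UpgradeNode, and since Upgrade subclasses Action, a fetched upgrade is launched with the inherited run(). A sketch, assuming cluster (or any other object exposing upgrades) was fetched earlier.

async def run_first_available_upgrade(cluster) -> None:
    upgrades = await cluster.upgrades.list()   # UpgradeNode -> list of Upgrade objects
    if upgrades:
        upgrade = upgrades[0]
        if upgrade.validate():                 # a stub that currently always returns True
            await upgrade.run()                # inherited from Action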
+ def upgrades(self: Self) -> UpgradeNode: + return UpgradeNode(parent=self, path=(*self.get_own_path(), "upgrades"), requester=self._requester) class WithConfigGroups(WithProtectedRequester, AwareOfOwnPath): From 42a2e7ff6281357a99b305f611e3540a9130b5f1 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Mon, 9 Dec 2024 10:39:45 +0300 Subject: [PATCH 30/46] ADCM-6161: Implement Config/ActionHostGroup objects with nodes (#36) --- adcm_aio_client/core/host_groups/__init__.py | 4 + adcm_aio_client/core/host_groups/_common.py | 97 +++++++++++++++++++ .../core/host_groups/action_group.py | 49 ++++++++++ .../core/host_groups/config_group.py | 45 +++++++++ adcm_aio_client/core/objects/_base.py | 12 +++ adcm_aio_client/core/objects/_common.py | 10 -- adcm_aio_client/core/objects/cm.py | 36 +++---- adcm_aio_client/core/utils.py | 17 ++++ 8 files changed, 238 insertions(+), 32 deletions(-) create mode 100644 adcm_aio_client/core/host_groups/__init__.py create mode 100644 adcm_aio_client/core/host_groups/_common.py create mode 100644 adcm_aio_client/core/host_groups/action_group.py create mode 100644 adcm_aio_client/core/host_groups/config_group.py create mode 100644 adcm_aio_client/core/utils.py diff --git a/adcm_aio_client/core/host_groups/__init__.py b/adcm_aio_client/core/host_groups/__init__.py new file mode 100644 index 00000000..90174a91 --- /dev/null +++ b/adcm_aio_client/core/host_groups/__init__.py @@ -0,0 +1,4 @@ +from adcm_aio_client.core.host_groups.action_group import WithActionHostGroups +from adcm_aio_client.core.host_groups.config_group import WithConfigHostGroups + +__all__ = ["WithActionHostGroups", "WithConfigHostGroups"] diff --git a/adcm_aio_client/core/host_groups/_common.py b/adcm_aio_client/core/host_groups/_common.py new file mode 100644 index 00000000..0bda191f --- /dev/null +++ b/adcm_aio_client/core/host_groups/_common.py @@ -0,0 +1,97 @@ +from typing import TYPE_CHECKING, Iterable, Self, Union + +from adcm_aio_client.core.objects._accessors import AccessorFilter, PaginatedAccessor, PaginatedChildAccessor +from adcm_aio_client.core.objects._base import InteractiveChildObject +from adcm_aio_client.core.types import Endpoint, QueryParameters, Requester, RequesterResponse +from adcm_aio_client.core.utils import safe_gather + +if TYPE_CHECKING: + from adcm_aio_client.core.host_groups.action_group import ActionHostGroup + from adcm_aio_client.core.host_groups.config_group import ConfigHostGroup + from adcm_aio_client.core.objects.cm import Cluster, Component, Host, HostProvider, Service + + +class Filter: ... 
# TODO: implement + + +class HostInHostGroupNode(PaginatedAccessor["Host", None]): + group_type: str + + def __new__(cls: type[Self], path: Endpoint, requester: Requester, accessor_filter: AccessorFilter = None) -> Self: + _ = path, requester, accessor_filter + if not hasattr(cls, "class_type"): + from adcm_aio_client.core.objects.cm import Host + + cls.class_type = Host + + return super().__new__(cls) + + async def add(self: Self, host: Union["Host", Iterable["Host"], Filter]) -> None: + hosts = await self._get_hosts_from_args(host=host) + error = await safe_gather( + coros=(self._requester.post(*self._path, data={"hostId": host.id}) for host in hosts), + msg=f"Some hosts can't be added to {self.group_type} host group", + ) + if error is not None: + raise error + + async def remove(self: Self, host: Union["Host", Iterable["Host"], Filter]) -> None: + hosts = await self._get_hosts_from_args(host=host) + error = await safe_gather( + coros=(self._requester.delete(*self._path, host.id) for host in hosts), + msg=f"Some hosts can't be removed from {self.group_type} host group", + ) + + if error is not None: + raise error + + async def set(self: Self, host: Union["Host", Iterable["Host"], Filter]) -> None: + hosts = await self._get_hosts_from_args(host=host) + in_group_ids = {host["id"] for host in (await super()._request_endpoint(query={})).as_list()} + + to_remove_ids = {host_id for host_id in in_group_ids if host_id not in (host.id for host in hosts)} + to_add_ids = {host.id for host in hosts if host.id not in in_group_ids} + + if to_remove_ids: + await self.remove(host=Filter(id__in=to_remove_ids)) # type: ignore # TODO: implement + if to_add_ids: + await self.add(host=Filter(id__in=to_add_ids)) # type: ignore # TODO: implement + + async def _get_hosts_from_args(self: Self, host: Union["Host", Iterable["Host"], Filter]) -> list["Host"]: + if isinstance(host, Filter): + return await self.filter(host) # type: ignore # TODO + + return list(host) if isinstance(host, Iterable) else [host] + + async def _request_endpoint(self: Self, query: QueryParameters) -> RequesterResponse: + """HostGroup/hosts response have too little information to construct Host""" + + data = (await super()._request_endpoint(query)).as_list() + ids = ",".join(str(host["id"]) for host in data) + query = {"id__in": ids} if ids else {"id__in": "-1"} # non-existent id to fetch 0 hosts + + return await self._requester.get("hosts", query=query) + + +class HostGroupNode[ + Parent: Cluster | Service | Component | HostProvider, + Child: ConfigHostGroup | ActionHostGroup, +](PaginatedChildAccessor[Parent, Child, None]): + async def create( # TODO: can create HG with subset of `hosts` if adding some of them leads to an error + self: Self, name: str, description: str = "", hosts: list["Host"] | None = None + ) -> InteractiveChildObject: + response = await self._requester.post(*self._path, data={"name": name, "description": description}) + host_group = self.class_type(parent=self._parent, data=response.as_dict()) + + if not hosts: + return host_group + + path = *host_group.get_own_path(), "hosts" + error = await safe_gather( + coros=(self._requester.post(*path, data={"hostId": host.id}) for host in hosts), + msg=f"Some hosts can't be added to {host_group}", + ) + if error is not None: + raise error + + return host_group diff --git a/adcm_aio_client/core/host_groups/action_group.py b/adcm_aio_client/core/host_groups/action_group.py new file mode 100644 index 00000000..33b8e0e5 --- /dev/null +++ 
b/adcm_aio_client/core/host_groups/action_group.py @@ -0,0 +1,49 @@ +from functools import cached_property +from typing import TYPE_CHECKING, Self, Union + +from adcm_aio_client.core.actions import ActionsAccessor +from adcm_aio_client.core.host_groups._common import HostGroupNode, HostInHostGroupNode +from adcm_aio_client.core.objects._base import InteractiveChildObject +from adcm_aio_client.core.objects._common import Deletable +from adcm_aio_client.core.types import AwareOfOwnPath, WithProtectedRequester + +if TYPE_CHECKING: + from adcm_aio_client.core.objects.cm import Cluster, Component, Service + + +class ActionHostGroup(InteractiveChildObject, Deletable): + PATH_PREFIX = "action-host-groups" + + @property + def name(self: Self) -> str: + return self._data["name"] + + @property + def description(self: Self) -> str: + return self._data["description"] + + @cached_property + def hosts(self: Self) -> "HostsInActionHostGroupNode": + return HostsInActionHostGroupNode(path=(*self.get_own_path(), "hosts"), requester=self._requester) + + @cached_property + def actions(self: Self) -> ActionsAccessor: + return ActionsAccessor(parent=self, path=(*self.get_own_path(), "actions"), requester=self._requester) + + +class ActionHostGroupNode(HostGroupNode[Union["Cluster", "Service", "Component"], ActionHostGroup]): + class_type = ActionHostGroup + + +class HostsInActionHostGroupNode(HostInHostGroupNode): + group_type = "action" + + +class WithActionHostGroups(WithProtectedRequester, AwareOfOwnPath): + @cached_property + def action_host_groups(self: Self) -> ActionHostGroupNode: + return ActionHostGroupNode( + parent=self, # pyright: ignore[reportArgumentType] easier to ignore than fix this typing + path=(*self.get_own_path(), "action-host-groups"), + requester=self._requester, + ) diff --git a/adcm_aio_client/core/host_groups/config_group.py b/adcm_aio_client/core/host_groups/config_group.py new file mode 100644 index 00000000..df919662 --- /dev/null +++ b/adcm_aio_client/core/host_groups/config_group.py @@ -0,0 +1,45 @@ +from functools import cached_property +from typing import TYPE_CHECKING, Self, Union + +from adcm_aio_client.core.host_groups._common import HostGroupNode, HostInHostGroupNode +from adcm_aio_client.core.objects._base import InteractiveChildObject +from adcm_aio_client.core.objects._common import Deletable, WithConfig +from adcm_aio_client.core.types import AwareOfOwnPath, WithProtectedRequester + +if TYPE_CHECKING: + from adcm_aio_client.core.objects.cm import Cluster, Component, Service + + +class ConfigHostGroup(InteractiveChildObject, Deletable, WithConfig): + PATH_PREFIX = "config-groups" + + @property + def name(self: Self) -> str: + return self._data["name"] + + @property + def description(self: Self) -> str: + return self._data["description"] + + @cached_property + def hosts(self: Self) -> "HostsInConfigHostGroupNode": + return HostsInConfigHostGroupNode(path=(*self.get_own_path(), "hosts"), requester=self._requester) + + +class ConfigHostGroupNode(HostGroupNode[Union["Cluster", "Service", "Component"], ConfigHostGroup]): + class_type = ConfigHostGroup + # TODO: create() with `config` arg + + +class HostsInConfigHostGroupNode(HostInHostGroupNode): + group_type = "config" + + +class WithConfigHostGroups(WithProtectedRequester, AwareOfOwnPath): + @cached_property + def config_host_groups(self: Self) -> ConfigHostGroupNode: + return ConfigHostGroupNode( + parent=self, # pyright: ignore[reportArgumentType] easier to ignore than fix this typing + path=(*self.get_own_path(), 
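Both group flavours share the same node API: create() makes the group (optionally seeding it with hosts), and the group's hosts sub-node supports add/remove/set. A sketch for an action host group, assuming cluster mixes in WithActionHostGroups (as the cm.py changes of this patch arrange) and the Host objects were fetched earlier; names are hypothetical.

async def prepare_action_group(cluster, host_a, host_b, host_c) -> None:
    group = await cluster.action_host_groups.create(
        name="restart-batch",                  # hypothetical group name
        description="hosts to restart first",
        hosts=[host_a, host_b],
    )
    await group.hosts.add(host_c)              # a single Host or an iterable of them
    await group.hosts.remove(host_a)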
"config-groups"), + requester=self._requester, + ) diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index 578c2137..684b3039 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -64,6 +64,18 @@ def _clear_cache(self: Self) -> None: with suppress(AttributeError): delattr(self, name) + def __str__(self: Self) -> str: + return self._repr + + def __repr__(self: Self) -> str: + return self._repr + + @property + def _repr(self: Self) -> str: + name = getattr(self, "name", None) + name = f" {name}" if isinstance(name, str) else "" + return f"<{self.__class__.__name__} #{self.id}{name}>" + class RootInteractiveObject(InteractiveObject): PATH_PREFIX: str diff --git a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py index 0387a903..9d95ea00 100644 --- a/adcm_aio_client/core/objects/_common.py +++ b/adcm_aio_client/core/objects/_common.py @@ -44,16 +44,6 @@ def upgrades(self: Self) -> UpgradeNode: return UpgradeNode(parent=self, path=(*self.get_own_path(), "upgrades"), requester=self._requester) -class WithConfigGroups(WithProtectedRequester, AwareOfOwnPath): - @cached_property - def config_groups(self: Self) -> ...: ... - - -class WithActionHostGroups(WithProtectedRequester, AwareOfOwnPath): - @cached_property - def action_host_groups(self: Self) -> ...: ... - - class WithMaintenanceMode(WithProtectedRequester, AwareOfOwnPath): @async_cached_property async def maintenance_mode(self: Self) -> MaintenanceMode: diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index e62d50f9..1ed0bc51 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -7,7 +7,8 @@ from asyncstdlib.functools import cached_property as async_cached_property # noqa: N813 from adcm_aio_client.core.actions._objects import Action -from adcm_aio_client.core.errors import NotFoundError, OperationError, ResponseError +from adcm_aio_client.core.errors import NotFoundError +from adcm_aio_client.core.host_groups import WithActionHostGroups, WithConfigHostGroups from adcm_aio_client.core.mapping import ClusterMapping from adcm_aio_client.core.objects._accessors import ( PaginatedAccessor, @@ -20,10 +21,8 @@ ) from adcm_aio_client.core.objects._common import ( Deletable, - WithActionHostGroups, WithActions, WithConfig, - WithConfigGroups, WithJobStatus, WithMaintenanceMode, WithStatus, @@ -32,6 +31,7 @@ from adcm_aio_client.core.objects._imports import ClusterImports from adcm_aio_client.core.requesters import BundleRetrieverInterface from adcm_aio_client.core.types import Endpoint, JobStatus, Requester, UrlPath, WithProtectedRequester +from adcm_aio_client.core.utils import safe_gather type Filter = object # TODO: implement @@ -141,7 +141,7 @@ class Cluster( WithUpgrades, WithConfig, WithActionHostGroups, - WithConfigGroups, + WithConfigHostGroups, RootInteractiveObject, ): PATH_PREFIX = "clusters" @@ -212,7 +212,7 @@ class Service( WithActions, WithConfig, WithActionHostGroups, - WithConfigGroups, + WithConfigHostGroups, InteractiveChildObject[Cluster], ): PATH_PREFIX = "services" @@ -271,7 +271,7 @@ async def add( class Component( - WithStatus, WithActions, WithConfig, WithActionHostGroups, WithConfigGroups, InteractiveChildObject[Service] + WithStatus, WithActions, WithConfig, WithActionHostGroups, WithConfigHostGroups, InteractiveChildObject[Service] ): PATH_PREFIX = "components" @@ -313,7 +313,7 @@ class ComponentsNode(PaginatedChildAccessor[Service, 
Component, None]): class_type = Component -class HostProvider(Deletable, WithActions, WithUpgrades, WithConfig, RootInteractiveObject): +class HostProvider(Deletable, WithActions, WithUpgrades, WithConfig, WithConfigHostGroups, RootInteractiveObject): PATH_PREFIX = "hostproviders" # data-based properties @@ -368,9 +368,6 @@ async def cluster(self: Self) -> Cluster | None: async def hostprovider(self: Self) -> HostProvider: return await HostProvider.with_id(requester=self._requester, object_id=self._data["hostprovider"]["id"]) - def __str__(self: Self) -> str: - return f"<{self.__class__.__name__} #{self.id} {self.name}>" - class HostsAccessor(PaginatedAccessor[Host, None]): class_type = Host @@ -395,27 +392,22 @@ async def add(self: Self, host: Host | Iterable[Host] | None = None, filters: Fi async def remove(self: Self, host: Host | Iterable[Host] | None = None, filters: Filter | None = None) -> None: hosts = await self._get_hosts_from_arg_or_filter(host=host, filters=filters) - results = await asyncio.gather( - *(self._requester.delete(*self._path, host_.id) for host_ in hosts), return_exceptions=True + error = await safe_gather( + coros=(self._requester.delete(*self._path, host_.id) for host_ in hosts), + msg="Some hosts can't be deleted from cluster", ) - errors = set() - for host_, result in zip(hosts, results): - if isinstance(result, ResponseError): - errors.add(str(host_)) - - if errors: - errors = ", ".join(errors) - raise OperationError(f"Some hosts can't be deleted from cluster: {errors}") + if error is not None: + raise error async def _get_hosts_from_arg_or_filter( self: Self, host: Host | Iterable[Host] | None = None, filters: Filter | None = None - ) -> Iterable[Host]: + ) -> list[Host]: if all((host, filters)): raise ValueError("`host` and `filters` arguments are mutually exclusive.") if host: - hosts = [host] if isinstance(host, Host) else host + hosts = list(host) if isinstance(host, Iterable) else [host] else: hosts = await self.filter(filters) # type: ignore # TODO diff --git a/adcm_aio_client/core/utils.py b/adcm_aio_client/core/utils.py new file mode 100644 index 00000000..86179cdb --- /dev/null +++ b/adcm_aio_client/core/utils.py @@ -0,0 +1,17 @@ +from typing import Awaitable, Iterable +import asyncio + +from adcm_aio_client.core.types import RequesterResponse + + +async def safe_gather(coros: Iterable[Awaitable[RequesterResponse]], msg: str) -> ExceptionGroup | None: # noqa: F821 + """ + Performs asyncio.gather() on coros, returns combined in ExceptionGroup errors + """ + results = await asyncio.gather(*coros, return_exceptions=True) + exceptions = [res for res in results if isinstance(res, Exception)] + + if exceptions: + return ExceptionGroup(msg, exceptions) # noqa: F821 # TODO: tool.ruff.target-version = "py312" & run linters + + return None From 4a75af2c99b1fbd89eeb711e3dd172c90634d602 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Mon, 9 Dec 2024 14:33:16 +0500 Subject: [PATCH 31/46] ADCM-6077 Implement basic filters (#35) --- adcm_aio_client/core/client.py | 4 +- adcm_aio_client/core/errors.py | 12 ++ adcm_aio_client/core/filters.py | 142 +++++++++++++++++- adcm_aio_client/core/host_groups/_common.py | 76 ++++++---- .../core/host_groups/action_group.py | 4 +- .../core/host_groups/config_group.py | 4 +- adcm_aio_client/core/mapping/_objects.py | 26 ++-- adcm_aio_client/core/objects/__init__.py | 3 - adcm_aio_client/core/objects/_accessors.py | 62 +++++--- adcm_aio_client/core/objects/cm.py | 133 +++++++++------- tests/unit/test_accessors.py | 46 +++++- 11 
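safe_gather() wraps asyncio.gather(..., return_exceptions=True) and folds any failures into a single ExceptionGroup that the caller re-raises. A sketch of the calling pattern, with requester and host_ids as assumed inputs.

from adcm_aio_client.core.utils import safe_gather


async def delete_many_hosts(requester, host_ids) -> None:
    error = await safe_gather(
        coros=(requester.delete("hosts", host_id) for host_id in host_ids),
        msg="Some hosts can't be deleted",
    )
    if error is not None:
        raise error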
files changed, 383 insertions(+), 129 deletions(-) diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index 71bd625c..bca3a5c6 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -13,7 +13,7 @@ from functools import cached_property from typing import Self -from adcm_aio_client.core.objects.cm import ADCM, BundlesNode, ClustersNode, HostProvidersNode, HostsAccessor, HostsNode +from adcm_aio_client.core.objects.cm import ADCM, BundlesNode, ClustersNode, HostProvidersNode, HostsNode from adcm_aio_client.core.requesters import BundleRetriever, BundleRetrieverInterface, DefaultRequester, Requester from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Verify @@ -28,7 +28,7 @@ def clusters(self: Self) -> ClustersNode: return ClustersNode(path=("clusters",), requester=self._requester) @cached_property - def hosts(self: Self) -> HostsAccessor: + def hosts(self: Self) -> HostsNode: return HostsNode(path=("hosts",), requester=self._requester) @cached_property diff --git a/adcm_aio_client/core/errors.py b/adcm_aio_client/core/errors.py index d71207e9..0a910330 100644 --- a/adcm_aio_client/core/errors.py +++ b/adcm_aio_client/core/errors.py @@ -105,3 +105,15 @@ class ConfigComparisonError(ConfigError): ... class NoMappingRulesForActionError(ADCMClientError): ... + + +# Filtering + + +class FilterError(ADCMClientError): ... + + +class FilterPreparationError(FilterError): ... + + +class InvalidFilterError(FilterError): ... diff --git a/adcm_aio_client/core/filters.py b/adcm_aio_client/core/filters.py index 79b4c8de..51444853 100644 --- a/adcm_aio_client/core/filters.py +++ b/adcm_aio_client/core/filters.py @@ -10,5 +10,143 @@ # See the License for the specific language governing permissions and # limitations under the License. -# TODO: Prepare API for filters -class Filter: ... +from collections import deque +from dataclasses import dataclass +from typing import Generator, Iterable, Self + +from adcm_aio_client.core.errors import InvalidFilterError +from adcm_aio_client.core.objects._base import InteractiveObject +from adcm_aio_client.core.types import QueryParameters + +# Filters +EQUAL_OPERATIONS = frozenset(("eq", "ieq")) +MULTI_OPERATIONS = frozenset(("in", "iin", "exclude", "iexclude")) + + +COMMON_OPERATIONS = frozenset(("eq", "ne", "in", "exclude")) +ALL_OPERATIONS = frozenset(("contains", "icontains", *COMMON_OPERATIONS, *tuple(f"i{op}" for op in COMMON_OPERATIONS))) + +type FilterSingleValue = str | int | InteractiveObject +type FilterValue = FilterSingleValue | Iterable[FilterSingleValue] +type SimplifiedValue = str | int | tuple[str | int, ...] + + +@dataclass(slots=True) +class Filter: + attr: str + op: str + value: FilterValue + + +@dataclass(slots=True, frozen=True) +class FilterBy: + attr: str + operations: set[str] | frozenset[str] | tuple[str, ...] 
+ single_input: type + + +class Filtering: + def __init__(self: Self, *allowed: FilterBy) -> None: + self._allowed = {entry.attr: entry for entry in allowed} + + def inline_filters_to_query(self: Self, filters: dict[str, FilterValue]) -> QueryParameters: + converted_filters = deque() + + for inline_filter, value in filters.items(): + attr, op = inline_filter.rsplit("__", maxsplit=1) + filter_ = Filter(attr=attr, op=op, value=value) + converted_filters.append(filter_) + + return self.to_query(filters=converted_filters) + + def to_query(self: Self, filters: Iterable[Filter]) -> QueryParameters: + query = {} + + for filter_ in filters: + # make value persistent + if isinstance(filter_.value, Generator): + filter_.value = tuple(filter_.value) + + self._check_allowed(filter_) + + name = self._attribute_name_to_camel_case(name=filter_.attr) + simplified_value = self._simplify_value(value=filter_.value) + self._check_no_operation_value_conflict(operation=filter_.op, value=simplified_value) + operation = filter_.op + value = self._prepare_query_param_value(value=simplified_value) + + query[f"{name}__{operation}"] = value + + return query + + def _check_allowed(self: Self, filter_: Filter) -> None: + allowed_filter = self._allowed.get(filter_.attr) + if not allowed_filter: + message = f"Filter by {filter_.attr} is not allowed. Allowed: {', '.join(self._allowed)}" + raise InvalidFilterError(message) + + if filter_.op not in allowed_filter.operations: + message = f"Operation {filter_.op} is not allowed. Allowed: {', '.join(sorted(allowed_filter.operations))}" + raise InvalidFilterError(message) + + expected_type = allowed_filter.single_input + if isinstance(filter_.value, Iterable): + if not all(isinstance(entry, expected_type) for entry in filter_.value): + message = f"At least one entry is not {expected_type}: {filter_.value}" + raise InvalidFilterError(message) + else: + if not isinstance(filter_.value, expected_type): + message = f"Value {filter_.value} is not {expected_type}" + raise InvalidFilterError(message) + + def _attribute_name_to_camel_case(self: Self, name: str) -> str: + first, *rest = name.split("_") + return f"{first}{''.join(map(str.capitalize, rest))}" + + def _simplify_value(self: Self, value: FilterValue) -> SimplifiedValue: + if isinstance(value, (str, int)): + return value + + if isinstance(value, InteractiveObject): + return value.id + + simplified_collection = deque() + + for entry in value: + if isinstance(entry, (str, int)): + simplified_collection.append(entry) + elif isinstance(entry, InteractiveObject): + simplified_collection.append(entry.id) + else: + message = f"Failed to simplify: {entry}" + raise TypeError(message) + + return tuple(simplified_collection) + + def _check_no_operation_value_conflict(self: Self, operation: str, value: SimplifiedValue) -> None: + is_collection = isinstance(value, tuple) + + if operation in MULTI_OPERATIONS: + if not is_collection: + message = f"Multiple values expected for {operation}" + raise InvalidFilterError(message) + + if not value: + message = "Collection for filter shouldn't be empty" + raise InvalidFilterError(message) + + else: + if is_collection: + message = f"Only one value is expected for {operation}" + raise InvalidFilterError(message) + + def _prepare_query_param_value(self: Self, value: SimplifiedValue) -> str: + if isinstance(value, tuple): + return ",".join(map(str, value)) + + return str(value) + + +FilterByName = FilterBy("name", ALL_OPERATIONS, str) +FilterByDisplayName = FilterBy("display_name", ALL_OPERATIONS, str) 
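Filtering is the per-accessor allow-list: each FilterBy names an attribute, its permitted operations and the expected scalar type, while to_query() validates Filter objects and turns them into camelCase query parameters. A worked example:

from adcm_aio_client.core.filters import Filter, FilterByDisplayName, FilterByName, Filtering

filtering = Filtering(FilterByName, FilterByDisplayName)

query = filtering.to_query(filters=[Filter(attr="display_name", op="iin", value=["Kafka", "Spark"])])
assert query == {"displayName__iin": "Kafka,Spark"}

# an attribute or operation outside the allow-list raises InvalidFilterError,
# e.g. Filter(attr="status", op="eq", value="up") with this Filtering instance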
+FilterByStatus = FilterBy("status", COMMON_OPERATIONS, str) diff --git a/adcm_aio_client/core/host_groups/_common.py b/adcm_aio_client/core/host_groups/_common.py index 0bda191f..185c7261 100644 --- a/adcm_aio_client/core/host_groups/_common.py +++ b/adcm_aio_client/core/host_groups/_common.py @@ -1,8 +1,17 @@ -from typing import TYPE_CHECKING, Iterable, Self, Union - -from adcm_aio_client.core.objects._accessors import AccessorFilter, PaginatedAccessor, PaginatedChildAccessor +from functools import partial +from typing import TYPE_CHECKING, Any, Iterable, Self, Union + +from adcm_aio_client.core.filters import Filter +from adcm_aio_client.core.objects._accessors import ( + DefaultQueryParams as AccessorFilter, +) +from adcm_aio_client.core.objects._accessors import ( + PaginatedAccessor, + PaginatedChildAccessor, + filters_to_inline, +) from adcm_aio_client.core.objects._base import InteractiveChildObject -from adcm_aio_client.core.types import Endpoint, QueryParameters, Requester, RequesterResponse +from adcm_aio_client.core.types import Endpoint, HostID, QueryParameters, Requester, RequesterResponse from adcm_aio_client.core.utils import safe_gather if TYPE_CHECKING: @@ -11,39 +20,26 @@ from adcm_aio_client.core.objects.cm import Cluster, Component, Host, HostProvider, Service -class Filter: ... # TODO: implement - - -class HostInHostGroupNode(PaginatedAccessor["Host", None]): +class HostsInHostGroupNode(PaginatedAccessor["Host"]): group_type: str def __new__(cls: type[Self], path: Endpoint, requester: Requester, accessor_filter: AccessorFilter = None) -> Self: _ = path, requester, accessor_filter if not hasattr(cls, "class_type"): - from adcm_aio_client.core.objects.cm import Host + from adcm_aio_client.core.objects.cm import Host, HostsAccessor cls.class_type = Host + cls.filtering = HostsAccessor.filtering return super().__new__(cls) async def add(self: Self, host: Union["Host", Iterable["Host"], Filter]) -> None: hosts = await self._get_hosts_from_args(host=host) - error = await safe_gather( - coros=(self._requester.post(*self._path, data={"hostId": host.id}) for host in hosts), - msg=f"Some hosts can't be added to {self.group_type} host group", - ) - if error is not None: - raise error + await self._add_hosts_to_group(h.id for h in hosts) async def remove(self: Self, host: Union["Host", Iterable["Host"], Filter]) -> None: hosts = await self._get_hosts_from_args(host=host) - error = await safe_gather( - coros=(self._requester.delete(*self._path, host.id) for host in hosts), - msg=f"Some hosts can't be removed from {self.group_type} host group", - ) - - if error is not None: - raise error + await self._remove_hosts_from_group(h.id for h in hosts) async def set(self: Self, host: Union["Host", Iterable["Host"], Filter]) -> None: hosts = await self._get_hosts_from_args(host=host) @@ -53,20 +49,44 @@ async def set(self: Self, host: Union["Host", Iterable["Host"], Filter]) -> None to_add_ids = {host.id for host in hosts if host.id not in in_group_ids} if to_remove_ids: - await self.remove(host=Filter(id__in=to_remove_ids)) # type: ignore # TODO: implement + await self._remove_hosts_from_group(ids=to_remove_ids) if to_add_ids: - await self.add(host=Filter(id__in=to_add_ids)) # type: ignore # TODO: implement + await self._add_hosts_to_group(ids=to_add_ids) + + async def _add_hosts_to_group(self: Self, ids: Iterable[HostID]) -> None: + add_by_id = partial(self._requester.post, *self._path) + add_coros = map(add_by_id, ({"hostId": id_} for id_ in ids)) + error = await safe_gather( + 
coros=add_coros, + msg=f"Some hosts can't be added to {self.group_type} host group", + ) + if error is not None: + raise error + + async def _remove_hosts_from_group(self: Self, ids: Iterable[HostID]) -> None: + delete_by_id = partial(self._requester.delete, *self._path) + delete_coros = map(delete_by_id, ids) + error = await safe_gather( + coros=delete_coros, + msg=f"Some hosts can't be removed from {self.group_type} host group", + ) + + if error is not None: + raise error async def _get_hosts_from_args(self: Self, host: Union["Host", Iterable["Host"], Filter]) -> list["Host"]: if isinstance(host, Filter): - return await self.filter(host) # type: ignore # TODO + inline_filters = filters_to_inline(host) + return await self.filter(**inline_filters) return list(host) if isinstance(host, Iterable) else [host] - async def _request_endpoint(self: Self, query: QueryParameters) -> RequesterResponse: + async def _request_endpoint( + self: Self, query: QueryParameters, filters: dict[str, Any] | None = None + ) -> RequesterResponse: """HostGroup/hosts response have too little information to construct Host""" - data = (await super()._request_endpoint(query)).as_list() + data = (await super()._request_endpoint(query, filters)).as_list() ids = ",".join(str(host["id"]) for host in data) query = {"id__in": ids} if ids else {"id__in": "-1"} # non-existent id to fetch 0 hosts @@ -76,7 +96,7 @@ async def _request_endpoint(self: Self, query: QueryParameters) -> RequesterResp class HostGroupNode[ Parent: Cluster | Service | Component | HostProvider, Child: ConfigHostGroup | ActionHostGroup, -](PaginatedChildAccessor[Parent, Child, None]): +](PaginatedChildAccessor[Parent, Child]): async def create( # TODO: can create HG with subset of `hosts` if adding some of them leads to an error self: Self, name: str, description: str = "", hosts: list["Host"] | None = None ) -> InteractiveChildObject: diff --git a/adcm_aio_client/core/host_groups/action_group.py b/adcm_aio_client/core/host_groups/action_group.py index 33b8e0e5..13b39008 100644 --- a/adcm_aio_client/core/host_groups/action_group.py +++ b/adcm_aio_client/core/host_groups/action_group.py @@ -2,7 +2,7 @@ from typing import TYPE_CHECKING, Self, Union from adcm_aio_client.core.actions import ActionsAccessor -from adcm_aio_client.core.host_groups._common import HostGroupNode, HostInHostGroupNode +from adcm_aio_client.core.host_groups._common import HostGroupNode, HostsInHostGroupNode from adcm_aio_client.core.objects._base import InteractiveChildObject from adcm_aio_client.core.objects._common import Deletable from adcm_aio_client.core.types import AwareOfOwnPath, WithProtectedRequester @@ -35,7 +35,7 @@ class ActionHostGroupNode(HostGroupNode[Union["Cluster", "Service", "Component"] class_type = ActionHostGroup -class HostsInActionHostGroupNode(HostInHostGroupNode): +class HostsInActionHostGroupNode(HostsInHostGroupNode): group_type = "action" diff --git a/adcm_aio_client/core/host_groups/config_group.py b/adcm_aio_client/core/host_groups/config_group.py index df919662..aae59cd0 100644 --- a/adcm_aio_client/core/host_groups/config_group.py +++ b/adcm_aio_client/core/host_groups/config_group.py @@ -1,7 +1,7 @@ from functools import cached_property from typing import TYPE_CHECKING, Self, Union -from adcm_aio_client.core.host_groups._common import HostGroupNode, HostInHostGroupNode +from adcm_aio_client.core.host_groups._common import HostGroupNode, HostsInHostGroupNode from adcm_aio_client.core.objects._base import InteractiveChildObject from 
adcm_aio_client.core.objects._common import Deletable, WithConfig from adcm_aio_client.core.types import AwareOfOwnPath, WithProtectedRequester @@ -31,7 +31,7 @@ class ConfigHostGroupNode(HostGroupNode[Union["Cluster", "Service", "Component"] # TODO: create() with `config` arg -class HostsInConfigHostGroupNode(HostInHostGroupNode): +class HostsInConfigHostGroupNode(HostsInHostGroupNode): group_type = "config" diff --git a/adcm_aio_client/core/mapping/_objects.py b/adcm_aio_client/core/mapping/_objects.py index 2d6ac6b3..ed5cd6b2 100644 --- a/adcm_aio_client/core/mapping/_objects.py +++ b/adcm_aio_client/core/mapping/_objects.py @@ -6,16 +6,19 @@ from typing import TYPE_CHECKING, Any, Iterable, Self import asyncio +from adcm_aio_client.core.filters import Filter, FilterByDisplayName, FilterByName, FilterByStatus, Filtering from adcm_aio_client.core.mapping.refresh import apply_local_changes, apply_remote_changes from adcm_aio_client.core.mapping.types import LocalMappings, MappingEntry, MappingPair, MappingRefreshStrategy -from adcm_aio_client.core.objects._accessors import NonPaginatedAccessor +from adcm_aio_client.core.objects._accessors import NonPaginatedAccessor, filters_to_inline from adcm_aio_client.core.types import ComponentID, HostID, Requester if TYPE_CHECKING: from adcm_aio_client.core.objects.cm import Cluster, Component, Host, HostsAccessor, Service -class ComponentsMappingNode(NonPaginatedAccessor["Component", None]): +class ComponentsMappingNode(NonPaginatedAccessor["Component"]): + filtering = Filtering(FilterByName, FilterByDisplayName, FilterByStatus) + def __new__(cls: type[Self], cluster: Cluster, requester: Requester) -> Self: _ = cluster, requester @@ -28,7 +31,7 @@ def __new__(cls: type[Self], cluster: Cluster, requester: Requester) -> Self: def __init__(self: Self, cluster: Cluster, requester: Requester) -> None: path = (*cluster.get_own_path(), "mapping", "components") - super().__init__(path=path, requester=requester, accessor_filter=None) + super().__init__(path=path, requester=requester, default_query=None) self._cluster = cluster def _create_object(self: Self, data: dict[str, Any]) -> Component: @@ -72,16 +75,18 @@ def iter(self: Self) -> Generator[MappingPair, None, None]: for entry in self._current: yield (self._components[entry.component_id], self._hosts[entry.host_id]) - async def add(self: Self, component: Component | Iterable[Component], host: Host | Iterable[Host]) -> Self: - components, hosts = self._ensure_collections(component=component, host=host) + async def add(self: Self, component: Component | Iterable[Component], host: Host | Iterable[Host] | Filter) -> Self: + components, hosts = await self._get_components_and_hosts(component=component, host=host) to_add = self._to_entries(components=components, hosts=hosts) self._current |= to_add return self - async def remove(self: Self, component: Component | Iterable[Component], host: Host | Iterable[Host]) -> Self: - components, hosts = self._ensure_collections(component=component, host=host) + async def remove( + self: Self, component: Component | Iterable[Component], host: Host | Iterable[Host] | Filter + ) -> Self: + components, hosts = await self._get_components_and_hosts(component=component, host=host) to_remove = self._to_entries(components=components, hosts=hosts) self._current -= to_remove @@ -100,14 +105,17 @@ def hosts(self: Self) -> HostsAccessor: return HostsAccessor(path=cluster_path, requester=self._owner.requester) - def _ensure_collections( - self: Self, component: Component | 
Iterable[Component], host: Host | Iterable[Host] + async def _get_components_and_hosts( + self: Self, component: Component | Iterable[Component], host: Host | Iterable[Host] | Filter ) -> tuple[Iterable[Component], Iterable[Host]]: if isinstance(component, Component): component = (component,) if isinstance(host, Host): host = (host,) + elif isinstance(host, Filter): + inline_filters = filters_to_inline(host) + host = await self.hosts.filter(**inline_filters) return component, host diff --git a/adcm_aio_client/core/objects/__init__.py b/adcm_aio_client/core/objects/__init__.py index 76d14def..e69de29b 100644 --- a/adcm_aio_client/core/objects/__init__.py +++ b/adcm_aio_client/core/objects/__init__.py @@ -1,3 +0,0 @@ -from adcm_aio_client.core.objects.cm import Bundle, Cluster, Service - -__all__ = ["Bundle", "Cluster", "Service"] diff --git a/adcm_aio_client/core/objects/_accessors.py b/adcm_aio_client/core/objects/_accessors.py index 53c95d3a..dc161c0e 100644 --- a/adcm_aio_client/core/objects/_accessors.py +++ b/adcm_aio_client/core/objects/_accessors.py @@ -15,29 +15,35 @@ from typing import Any, AsyncGenerator, Self from adcm_aio_client.core.errors import MultipleObjectsReturnedError, ObjectDoesNotExistError +from adcm_aio_client.core.filters import Filter, Filtering, FilterValue from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject from adcm_aio_client.core.types import Endpoint, QueryParameters, Requester, RequesterResponse # filter for narrowing response objects -type AccessorFilter = QueryParameters | None +type DefaultQueryParams = QueryParameters | None -class Accessor[ReturnObject: InteractiveObject, Filter](ABC): +def filters_to_inline(*filters: Filter) -> dict: + return {f"{f.attr}__{f.op}": f.value for f in filters} + + +class Accessor[ReturnObject: InteractiveObject](ABC): class_type: type[ReturnObject] + filtering: Filtering - def __init__(self: Self, path: Endpoint, requester: Requester, accessor_filter: AccessorFilter = None) -> None: + def __init__(self: Self, path: Endpoint, requester: Requester, default_query: DefaultQueryParams = None) -> None: self._path = path self._requester = requester - self._accessor_filter = accessor_filter or {} + self._default_query = default_query or {} @abstractmethod - async def iter(self: Self) -> AsyncGenerator[ReturnObject, None]: ... + async def iter(self: Self, **filters: FilterValue) -> AsyncGenerator[ReturnObject, None]: ... @abstractmethod def _extract_results_from_response(self: Self, response: RequesterResponse) -> list[dict]: ... 
- async def get(self: Self) -> ReturnObject: - response = await self._request_endpoint(query={"offset": 0, "limit": 2}) + async def get(self: Self, **filters: FilterValue) -> ReturnObject: + response = await self._request_endpoint(query={"offset": 0, "limit": 2}, filters=filters) results = self._extract_results_from_response(response=response) if not results: @@ -48,35 +54,41 @@ async def get(self: Self) -> ReturnObject: return self._create_object(results[0]) - async def get_or_none(self: Self) -> ReturnObject | None: + async def get_or_none(self: Self, **filters: FilterValue) -> ReturnObject | None: with suppress(ObjectDoesNotExistError): - return await self.get() + return await self.get(**filters) return None async def all(self: Self) -> list[ReturnObject]: return await self.filter() - async def filter(self: Self) -> list[ReturnObject]: - return [i async for i in self.iter()] + async def filter(self: Self, **filters: FilterValue) -> list[ReturnObject]: + return [i async for i in self.iter(**filters)] async def list(self: Self, query: dict | None = None) -> list[ReturnObject]: response = await self._request_endpoint(query=query or {}) results = self._extract_results_from_response(response) return [self._create_object(obj) for obj in results] - async def _request_endpoint(self: Self, query: QueryParameters) -> RequesterResponse: - return await self._requester.get(*self._path, query={**query, **self._accessor_filter}) + async def _request_endpoint( + self: Self, query: QueryParameters, filters: dict[str, Any] | None = None + ) -> RequesterResponse: + filters_query = self.filtering.inline_filters_to_query(filters=filters or {}) + + final_query = filters_query | query | self._default_query + + return await self._requester.get(*self._path, query=final_query) def _create_object(self: Self, data: dict[str, Any]) -> ReturnObject: return self.class_type(requester=self._requester, data=data) -class PaginatedAccessor[ReturnObject: InteractiveObject, Filter](Accessor[ReturnObject, Filter]): - async def iter(self: Self) -> AsyncGenerator[ReturnObject, None]: +class PaginatedAccessor[ReturnObject: InteractiveObject](Accessor[ReturnObject]): + async def iter(self: Self, **filters: FilterValue) -> AsyncGenerator[ReturnObject, None]: start, step = 0, 10 while True: - response = await self._request_endpoint(query={"offset": start, "limit": step}) + response = await self._request_endpoint(query={"offset": start, "limit": step}, filters=filters) results = self._extract_results_from_response(response=response) if not results: @@ -91,20 +103,20 @@ def _extract_results_from_response(self: Self, response: RequesterResponse) -> l return response.as_dict()["results"] -class PaginatedChildAccessor[Parent, Child: InteractiveChildObject, Filter](PaginatedAccessor[Child, Filter]): +class PaginatedChildAccessor[Parent, Child: InteractiveChildObject](PaginatedAccessor[Child]): def __init__( - self: Self, parent: Parent, path: Endpoint, requester: Requester, accessor_filter: AccessorFilter = None + self: Self, parent: Parent, path: Endpoint, requester: Requester, default_query: DefaultQueryParams = None ) -> None: - super().__init__(path, requester, accessor_filter) + super().__init__(path, requester, default_query) self._parent = parent def _create_object(self: Self, data: dict[str, Any]) -> Child: return self.class_type(parent=self._parent, data=data) -class NonPaginatedAccessor[Child: InteractiveObject, Filter](Accessor[Child, Filter]): - async def iter(self: Self) -> AsyncGenerator[Child, None]: - response = await 
self._request_endpoint(query={}) +class NonPaginatedAccessor[Child: InteractiveObject](Accessor[Child]): + async def iter(self: Self, **filters: FilterValue) -> AsyncGenerator[Child, None]: + response = await self._request_endpoint(query={}, filters=filters) results = self._extract_results_from_response(response=response) for record in results: yield self._create_object(record) @@ -113,11 +125,11 @@ def _extract_results_from_response(self: Self, response: RequesterResponse) -> l return response.as_list() -class NonPaginatedChildAccessor[Parent, Child: InteractiveChildObject, Filter](NonPaginatedAccessor[Child, Filter]): +class NonPaginatedChildAccessor[Parent, Child: InteractiveChildObject](NonPaginatedAccessor[Child]): def __init__( - self: Self, parent: Parent, path: Endpoint, requester: Requester, accessor_filter: AccessorFilter = None + self: Self, parent: Parent, path: Endpoint, requester: Requester, default_query: DefaultQueryParams = None ) -> None: - super().__init__(path, requester, accessor_filter) + super().__init__(path, requester, default_query) self._parent = parent def _create_object(self: Self, data: dict[str, Any]) -> Child: diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 1ed0bc51..db75833a 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -1,3 +1,4 @@ +from collections import deque from datetime import datetime from functools import cached_property from pathlib import Path @@ -8,11 +9,22 @@ from adcm_aio_client.core.actions._objects import Action from adcm_aio_client.core.errors import NotFoundError +from adcm_aio_client.core.filters import ( + ALL_OPERATIONS, + COMMON_OPERATIONS, + Filter, + FilterBy, + FilterByDisplayName, + FilterByName, + FilterByStatus, + Filtering, +) from adcm_aio_client.core.host_groups import WithActionHostGroups, WithConfigHostGroups from adcm_aio_client.core.mapping import ClusterMapping from adcm_aio_client.core.objects._accessors import ( PaginatedAccessor, PaginatedChildAccessor, + filters_to_inline, ) from adcm_aio_client.core.objects._base import ( InteractiveChildObject, @@ -33,8 +45,6 @@ from adcm_aio_client.core.types import Endpoint, JobStatus, Requester, UrlPath, WithProtectedRequester from adcm_aio_client.core.utils import safe_gather -type Filter = object # TODO: implement - class ADCM(InteractiveObject, WithActions, WithConfig): @cached_property @@ -107,16 +117,22 @@ def _main_prototype_id(self: Self) -> int: return self._data["mainPrototype"]["id"] -class BundlesNode(PaginatedAccessor[Bundle, None]): +class BundlesNode(PaginatedAccessor[Bundle]): class_type = Bundle + filtering = Filtering( + FilterByName, + FilterByDisplayName, + FilterBy("version", ALL_OPERATIONS, str), + FilterBy("edition", ALL_OPERATIONS, str), + ) def __init__(self: Self, path: Endpoint, requester: Requester, retriever: BundleRetrieverInterface) -> None: super().__init__(path, requester) - self.retriever = retriever + self._bundle_retriever = retriever async def create(self: Self, source: Path | UrlPath, accept_license: bool = False) -> Bundle: # noqa: FBT001, FBT002 if isinstance(source, UrlPath): - file_content = await self.retriever.download_external_bundle(source) + file_content = await self._bundle_retriever.download_external_bundle(source) files = {"file": file_content} else: files = {"file": Path(source).read_bytes()} @@ -195,8 +211,12 @@ def imports(self: Self) -> ClusterImports: return ClusterImports() -class ClustersNode(PaginatedAccessor[Cluster, None]): 
+FilterByBundle = FilterBy("bundle", COMMON_OPERATIONS, Bundle) + + +class ClustersNode(PaginatedAccessor[Cluster]): class_type = Cluster + filtering = Filtering(FilterByName, FilterByBundle, FilterByStatus) async def create(self: Self, bundle: Bundle, name: str, description: str = "") -> Cluster: response = await self._requester.post( @@ -238,36 +258,46 @@ def license(self: Self) -> License: return License(self._requester, self._data) -class ServicesNode(PaginatedChildAccessor[Cluster, Service, None]): +class ServicesNode(PaginatedChildAccessor[Cluster, Service]): class_type = Service + filtering = Filtering(FilterByName, FilterByDisplayName, FilterByStatus) + service_add_filtering = Filtering(FilterByName, FilterByDisplayName) + + async def add(self: Self, filter_: Filter, *, accept_license: bool = False) -> list[Service]: + candidates = await self._retrieve_service_candidates(filter_=filter_) + + if not candidates: + message = "No services to add by given filters" + raise NotFoundError(message) - def _get_ids_and_license_state_by_filter( - self: Self, - service_prototypes: dict, - ) -> dict[int, str]: - # todo: implement retrieving of ids when filter is implemented - if not service_prototypes: - raise NotFoundError - return {s["id"]: s["license"]["status"] for s in service_prototypes} - - async def add( - self: Self, - accept_license: bool = False, # noqa: FBT001, FBT002 - ) -> Service: - candidates_prototypes = ( - await self._requester.get(*self._parent.get_own_path(), "service-candidates") - ).as_dict() - services_data = self._get_ids_and_license_state_by_filter(candidates_prototypes) if accept_license: - for prototype_id, license_status in services_data.items(): - if license_status == "unaccepted": - await self._requester.post("prototypes", prototype_id, "license", "accept", data={}) + await self._accept_licenses_safe(candidates) - response = await self._requester.post( - "services", data=[{"prototypeId": prototype_id} for prototype_id in services_data] - ) + return await self._add_services(candidates) + + async def _retrieve_service_candidates(self: Self, filter_: Filter) -> list[dict]: + query = self.service_add_filtering.to_query(filters=(filter_,)) + response = await self._requester.get(*self._parent.get_own_path(), "service-candidates", query=query) + return response.as_list() - return Service(data=response.as_dict(), parent=self._parent) + async def _accept_licenses_safe(self: Self, candidates: list[dict]) -> None: + unaccepted: deque[int] = deque() + + for candidate in candidates: + if candidate["license"]["status"] == "unaccepted": + unaccepted.append(candidate["id"]) + + if unaccepted: + tasks = ( + self._requester.post("prototypes", prototype_id, "license", "accept", data={}) + for prototype_id in unaccepted + ) + await asyncio.gather(*tasks) + + async def _add_services(self: Self, candidates: list[dict]) -> list[Service]: + data = [{"prototypeId": candidate["id"]} for candidate in candidates] + response = await self._requester.post(*self._parent.get_own_path(), "services", data=data) + return [Service(data=entry, parent=self._parent) for entry in response.as_list()] class Component( @@ -305,16 +335,19 @@ def hosts(self: Self) -> "HostsAccessor": return HostsAccessor( path=(*self.cluster.get_own_path(), "hosts"), requester=self._requester, - accessor_filter={"componentId": self.id}, + default_query={"componentId": self.id}, ) -class ComponentsNode(PaginatedChildAccessor[Service, Component, None]): +class ComponentsNode(PaginatedChildAccessor[Service, Component]): 
class_type = Component + filtering = Filtering(FilterByName, FilterByDisplayName, FilterByStatus) class HostProvider(Deletable, WithActions, WithUpgrades, WithConfig, WithConfigHostGroups, RootInteractiveObject): PATH_PREFIX = "hostproviders" + filtering = Filtering(FilterByName, FilterByBundle) + # data-based properties @property @@ -331,12 +364,10 @@ def display_name(self: Self) -> str: @cached_property def hosts(self: Self) -> "HostsAccessor": - return HostsAccessor( - path=("hosts",), requester=self._requester, accessor_filter={"hostproviderName": self.name} - ) + return HostsAccessor(path=("hosts",), requester=self._requester, default_query={"hostproviderName": self.name}) -class HostProvidersNode(PaginatedAccessor[HostProvider, None]): +class HostProvidersNode(PaginatedAccessor[HostProvider]): class_type = HostProvider async def create(self: Self, bundle: Bundle, name: str, description: str = "") -> HostProvider: @@ -362,6 +393,7 @@ def description(self: Self) -> str: async def cluster(self: Self) -> Cluster | None: if not self._data["cluster"]: return None + return await Cluster.with_id(requester=self._requester, object_id=self._data["cluster"]["id"]) @async_cached_property @@ -369,8 +401,9 @@ async def hostprovider(self: Self) -> HostProvider: return await HostProvider.with_id(requester=self._requester, object_id=self._data["hostprovider"]["id"]) -class HostsAccessor(PaginatedAccessor[Host, None]): +class HostsAccessor(PaginatedAccessor[Host]): class_type = Host + filtering = Filtering(FilterByName, FilterByStatus) class HostsNode(HostsAccessor): @@ -384,13 +417,13 @@ async def create( class HostsInClusterNode(HostsAccessor): - async def add(self: Self, host: Host | Iterable[Host] | None = None, filters: Filter | None = None) -> None: - hosts = await self._get_hosts_from_arg_or_filter(host=host, filters=filters) + async def add(self: Self, host: Host | Iterable[Host] | Filter) -> None: + hosts = await self._get_hosts(host=host) await self._requester.post(*self._path, data=[{"hostId": host.id} for host in hosts]) - async def remove(self: Self, host: Host | Iterable[Host] | None = None, filters: Filter | None = None) -> None: - hosts = await self._get_hosts_from_arg_or_filter(host=host, filters=filters) + async def remove(self: Self, host: Host | Iterable[Host] | Filter) -> None: + hosts = await self._get_hosts(host=host) error = await safe_gather( coros=(self._requester.delete(*self._path, host_.id) for host_ in hosts), @@ -400,16 +433,14 @@ async def remove(self: Self, host: Host | Iterable[Host] | None = None, filters: if error is not None: raise error - async def _get_hosts_from_arg_or_filter( - self: Self, host: Host | Iterable[Host] | None = None, filters: Filter | None = None - ) -> list[Host]: - if all((host, filters)): - raise ValueError("`host` and `filters` arguments are mutually exclusive.") - - if host: - hosts = list(host) if isinstance(host, Iterable) else [host] + async def _get_hosts(self: Self, host: Host | Iterable[Host] | Filter) -> Iterable[Host]: + if isinstance(host, Host): + hosts = [host] + elif isinstance(host, Filter): + inline_filters = filters_to_inline(host) + hosts = await self.filter(**inline_filters) else: - hosts = await self.filter(filters) # type: ignore # TODO + hosts = host return hosts diff --git a/tests/unit/test_accessors.py b/tests/unit/test_accessors.py index c1faa8b0..489ed38c 100644 --- a/tests/unit/test_accessors.py +++ b/tests/unit/test_accessors.py @@ -2,7 +2,8 @@ import pytest -from adcm_aio_client.core.errors import 
MultipleObjectsReturnedError, ObjectDoesNotExistError +from adcm_aio_client.core.errors import InvalidFilterError, MultipleObjectsReturnedError, ObjectDoesNotExistError +from adcm_aio_client.core.filters import FilterBy, FilterByName, Filtering from adcm_aio_client.core.objects._accessors import ( Accessor, NonPaginatedChildAccessor, @@ -17,6 +18,9 @@ pytestmark = [pytest.mark.asyncio] +no_validation = Filtering() + + class _OwnPath: def get_own_path(self: Self) -> Endpoint: return () @@ -28,16 +32,24 @@ class Dummy(_OwnPath, InteractiveObject): ... class DummyChild(_OwnPath, InteractiveChildObject): ... -class DummyPaginatedAccessor(PaginatedAccessor[Dummy, None]): +class DummyPaginatedAccessor(PaginatedAccessor[Dummy]): class_type = Dummy + filtering = no_validation -class DummyChildPaginatedAccessor(PaginatedChildAccessor[Dummy, DummyChild, None]): +class DummyChildPaginatedAccessor(PaginatedChildAccessor[Dummy, DummyChild]): class_type = DummyChild + filtering = no_validation -class DummyChildNonPaginatedAccessor(NonPaginatedChildAccessor[Dummy, DummyChild, None]): +class DummyChildNonPaginatedAccessor(NonPaginatedChildAccessor[Dummy, DummyChild]): class_type = DummyChild + filtering = no_validation + + +class DummyAccessorWithFilter(PaginatedAccessor[Dummy]): + class_type = Dummy + filtering = Filtering(FilterByName, FilterBy("custom", {"eq"}, Dummy)) def create_paginated_response(amount: int) -> dict: @@ -100,7 +112,6 @@ async def test_non_paginated_child(queue_requester: QueueRequester) -> None: with pytest.raises(ObjectDoesNotExistError): await accessor.get() - requester.flush().queue_responses(create_response(2)) with pytest.raises(MultipleObjectsReturnedError): @@ -267,3 +278,28 @@ async def _test_paginated_accessor_common_methods[T: dict | list]( # now all results are read assert len(requester.queue) == 0 + + +async def test_filter_validation(queue_requester: QueueRequester) -> None: + accessor = DummyAccessorWithFilter(requester=queue_requester, path=()) + + with pytest.raises(InvalidFilterError, match="by notexist is not allowed"): + await accessor.get(notexist__eq="sd") + + with pytest.raises(InvalidFilterError, match="Operation in is not allowed"): + await accessor.get(custom__in=["sd"]) + + with pytest.raises(InvalidFilterError, match="At least one entry is not"): + await accessor.get(name__iin=("sdlfkj", 1)) + + with pytest.raises(InvalidFilterError, match=f"1 is not {str}"): + await accessor.get(name__eq=1) + + with pytest.raises(InvalidFilterError, match="Multiple values expected for exclude"): + await accessor.get(name__exclude="sd") + + with pytest.raises(InvalidFilterError, match="Collection for filter shouldn't be empty"): + await accessor.get(name__exclude=[]) + + with pytest.raises(InvalidFilterError, match="Only one value is expected for icontains"): + await accessor.get(name__icontains={"sldkfj"}) From 3bb3802fd0a6d0e1a0cbb0a5d241dfb6f10c5128 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Wed, 11 Dec 2024 18:05:57 +0500 Subject: [PATCH 32/46] ADCM-6187 Added basic test on mapping with real ADCM (#39) --- adcm_aio_client/core/client.py | 6 +- adcm_aio_client/core/mapping/_objects.py | 55 ++++-- adcm_aio_client/core/mapping/refresh.py | 2 + adcm_aio_client/core/objects/cm.py | 12 +- adcm_aio_client/core/requesters.py | 14 +- adcm_aio_client/core/types.py | 2 +- pyproject.toml | 3 + tests/integration/bundle.py | 12 ++ .../bundles/complex_cluster/config.yaml | 133 +++++++++++++++ .../bundles/complex_cluster/license.txt | 11 ++ 
.../bundles/complex_cluster/schema.yaml | 17 ++ .../bundles/complex_cluster/text.txt | 11 ++ .../bundles/simple_hostprovider/config.yaml | 7 + tests/integration/conftest.py | 28 +-- tests/integration/setup_environment.py | 99 ++++++----- tests/integration/test_config.py | 35 ++++ tests/integration/test_dummy.py | 14 -- tests/integration/test_mapping.py | 159 ++++++++++++++++++ tests/unit/mocks/requesters.py | 4 +- tests/unit/test_requesters.py | 1 + 20 files changed, 523 insertions(+), 102 deletions(-) create mode 100644 tests/integration/bundle.py create mode 100644 tests/integration/bundles/complex_cluster/config.yaml create mode 100644 tests/integration/bundles/complex_cluster/license.txt create mode 100644 tests/integration/bundles/complex_cluster/schema.yaml create mode 100644 tests/integration/bundles/complex_cluster/text.txt create mode 100644 tests/integration/bundles/simple_hostprovider/config.yaml create mode 100644 tests/integration/test_config.py delete mode 100644 tests/integration/test_dummy.py create mode 100644 tests/integration/test_mapping.py diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index bca3a5c6..4264c257 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -15,7 +15,7 @@ from adcm_aio_client.core.objects.cm import ADCM, BundlesNode, ClustersNode, HostProvidersNode, HostsNode from adcm_aio_client.core.requesters import BundleRetriever, BundleRetrieverInterface, DefaultRequester, Requester -from adcm_aio_client.core.types import AuthToken, Cert, Credentials, Verify +from adcm_aio_client.core.types import Cert, Credentials, Verify class ADCMClient: @@ -46,7 +46,7 @@ def bundles(self: Self) -> BundlesNode: async def build_client( url: str, - credentials: Credentials | AuthToken, # noqa: ARG001 + credentials: Credentials, *, verify: Verify | None = None, # noqa: ARG001 cert: Cert | None = None, # noqa: ARG001 @@ -55,5 +55,5 @@ async def build_client( retry_interval: float = 5.0, ) -> ADCMClient: requester = DefaultRequester(base_url=url, retries=retries, retry_interval=retry_interval, timeout=timeout) - await requester.login(credentials=Credentials(username="admin", password="admin")) # noqa: S106 + await requester.login(credentials=credentials) return ADCMClient(requester=requester, bundle_retriever=BundleRetriever()) diff --git a/adcm_aio_client/core/mapping/_objects.py b/adcm_aio_client/core/mapping/_objects.py index ed5cd6b2..a72b348a 100644 --- a/adcm_aio_client/core/mapping/_objects.py +++ b/adcm_aio_client/core/mapping/_objects.py @@ -3,7 +3,7 @@ from collections.abc import Generator from copy import copy from functools import cached_property -from typing import TYPE_CHECKING, Any, Iterable, Self +from typing import TYPE_CHECKING, Any, Callable, Coroutine, Iterable, Self import asyncio from adcm_aio_client.core.filters import Filter, FilterByDisplayName, FilterByName, FilterByStatus, Filtering @@ -73,10 +73,12 @@ def all(self: Self) -> list[MappingPair]: def iter(self: Self) -> Generator[MappingPair, None, None]: for entry in self._current: - yield (self._components[entry.component_id], self._hosts[entry.host_id]) + yield self._components[entry.component_id], self._hosts[entry.host_id] async def add(self: Self, component: Component | Iterable[Component], host: Host | Iterable[Host] | Filter) -> Self: - components, hosts = await self._get_components_and_hosts(component=component, host=host) + components, hosts = await self._resolve_components_and_hosts(component=component, host=host) + 
self._cache_components_and_hosts(components, hosts) + to_add = self._to_entries(components=components, hosts=hosts) self._current |= to_add @@ -86,7 +88,9 @@ async def add(self: Self, component: Component | Iterable[Component], host: Host async def remove( self: Self, component: Component | Iterable[Component], host: Host | Iterable[Host] | Filter ) -> Self: - components, hosts = await self._get_components_and_hosts(component=component, host=host) + components, hosts = await self._resolve_components_and_hosts(component=component, host=host) + self._cache_components_and_hosts(components, hosts) + to_remove = self._to_entries(components=components, hosts=hosts) self._current -= to_remove @@ -101,13 +105,15 @@ def components(self: Self) -> ComponentsMappingNode: def hosts(self: Self) -> HostsAccessor: from adcm_aio_client.core.objects.cm import HostsAccessor - cluster_path = self._cluster.get_own_path() + cluster_hosts_path = (*self._cluster.get_own_path(), "hosts") - return HostsAccessor(path=cluster_path, requester=self._owner.requester) + return HostsAccessor(path=cluster_hosts_path, requester=self._owner.requester) - async def _get_components_and_hosts( + async def _resolve_components_and_hosts( self: Self, component: Component | Iterable[Component], host: Host | Iterable[Host] | Filter ) -> tuple[Iterable[Component], Iterable[Host]]: + from adcm_aio_client.core.objects.cm import Component, Host + if isinstance(component, Component): component = (component,) @@ -119,6 +125,10 @@ async def _get_components_and_hosts( return component, host + def _cache_components_and_hosts(self: Self, components: Iterable[Component], hosts: Iterable[Host]) -> None: + self._components |= {component.id: component for component in components} + self._hosts |= {host.id: host for host in hosts} + def _to_entries(self: Self, components: Iterable[Component], hosts: Iterable[Host]) -> set[MappingEntry]: return {MappingEntry(host_id=host.id, component_id=component.id) for host in hosts for component in components} @@ -147,7 +157,9 @@ async def save(self: Self) -> Self: async def refresh(self: Self, strategy: MappingRefreshStrategy = apply_local_changes) -> Self: response = await self._requester.get(*self._cluster.get_own_path(), "mapping") - remote = {MappingEntry(**entry) for entry in response.as_list()} + remote = { + MappingEntry(component_id=entry["componentId"], host_id=entry["hostId"]) for entry in response.as_list() + } local = LocalMappings(initial=self._initial, current=self._current) merged_mapping = strategy(local=local, remote=remote) @@ -170,20 +182,27 @@ async def _fill_missing_objects(self: Self) -> None: if entry.component_id not in self._components: missing_components.add(entry.component_id) - hosts_task = None - if missing_hosts: - hosts_task = asyncio.create_task( - self.hosts.list(query={"id__in": missing_hosts, "limit": len(missing_hosts)}) - ) + hosts_task = self._run_task_if_objects_are_missing(method=self.hosts.list, missing_objects=missing_hosts) - components_task = None - if missing_components: - components_task = asyncio.create_task( - self.components.list(query={"id__in": missing_components, "limit": len(missing_components)}) - ) + components_task = self._run_task_if_objects_are_missing( + method=self.components.list, missing_objects=missing_components + ) if hosts_task is not None: self._hosts |= {host.id: host for host in await hosts_task} if components_task is not None: self._components |= {component.id: component for component in await components_task} + + def 
_run_task_if_objects_are_missing( + self: Self, method: Callable[[dict], Coroutine], missing_objects: set[int] + ) -> asyncio.Task | None: + if not missing_objects: + return None + + ids_str = ",".join(map(str, missing_objects)) + # limit in case there are more than 1 page of objects + records_amount = len(missing_objects) + query = {"id__in": ids_str, "limit": records_amount} + + return asyncio.create_task(method(query)) diff --git a/adcm_aio_client/core/mapping/refresh.py b/adcm_aio_client/core/mapping/refresh.py index 5048871d..3aa2c1f5 100644 --- a/adcm_aio_client/core/mapping/refresh.py +++ b/adcm_aio_client/core/mapping/refresh.py @@ -21,6 +21,8 @@ def apply_remote_changes(local: LocalMappings, remote: MappingData) -> MappingDa remote_added, remote_removed = _find_difference(previous=local.initial, current=remote) + # `to_add` feels impossible, because remote can't remove what we haven't added, + # yet it's general rule for this strategy, so we'll keep it for a time being to_add = local_added - remote_removed to_remove = local_removed - remote_added diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index db75833a..832a3dea 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -132,12 +132,12 @@ def __init__(self: Self, path: Endpoint, requester: Requester, retriever: Bundle async def create(self: Self, source: Path | UrlPath, accept_license: bool = False) -> Bundle: # noqa: FBT001, FBT002 if isinstance(source, UrlPath): - file_content = await self._bundle_retriever.download_external_bundle(source) - files = {"file": file_content} + file = await self._bundle_retriever.download_external_bundle(source) else: - files = {"file": Path(source).read_bytes()} + file = Path(source).read_bytes() - response = await self._requester.post("bundles", data=files) + data = {"file": file} + response = await self._requester.post("bundles", data=data, as_files=True) bundle = Bundle(requester=self._requester, data=response.as_dict()) @@ -408,9 +408,9 @@ class HostsAccessor(PaginatedAccessor[Host]): class HostsNode(HostsAccessor): async def create( - self: Self, provider: HostProvider, name: str, description: str, cluster: Cluster | None = None + self: Self, hostprovider: HostProvider, name: str, description: str = "", cluster: Cluster | None = None ) -> None: - data = {"hostproviderId": provider.id, "name": name, "description": description} + data = {"hostproviderId": hostprovider.id, "name": name, "description": description} if cluster: data["clusterId"] = cluster.id await self._requester.post(*self._path, data=data) diff --git a/adcm_aio_client/core/requesters.py b/adcm_aio_client/core/requesters.py index d693945e..618bf423 100644 --- a/adcm_aio_client/core/requesters.py +++ b/adcm_aio_client/core/requesters.py @@ -92,7 +92,13 @@ def convert_exceptions(func: DoRequestFunc) -> DoRequestFunc: async def wrapper(*arg: Params.args, **kwargs: Params.kwargs) -> httpx.Response: response = await func(*arg, **kwargs) if response.status_code >= 300: - raise STATUS_ERRORS_MAP.get(response.status_code, ResponseError) + error_cls = STATUS_ERRORS_MAP.get(response.status_code, ResponseError) + # not safe, because can be not json + try: + message = response.json() + except JSONDecodeError: + message = f"Request failed with > 300 response code: {response.content.decode('utf-8')}" + raise error_cls(message) return response @@ -150,12 +156,16 @@ async def login(self: Self, credentials: Credentials) -> Self: raise LoginError(message) self._credentials 
= credentials + self.client.headers["X-CSRFToken"] = response.cookies["csrftoken"] return self async def get(self: Self, *path: PathPart, query: QueryParameters | None = None) -> HTTPXRequesterResponse: return await self.request(*path, method=self.client.get, params=query or {}) - async def post(self: Self, *path: PathPart, data: dict | list) -> HTTPXRequesterResponse: + async def post(self: Self, *path: PathPart, data: dict | list, as_files: bool = False) -> HTTPXRequesterResponse: + if as_files: + return await self.request(*path, method=self.client.post, files=data) + return await self.request(*path, method=self.client.post, json=data) async def patch(self: Self, *path: PathPart, data: dict | list) -> HTTPXRequesterResponse: diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index 4c5414c5..efefbc6e 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -52,7 +52,7 @@ async def login(self: Self, credentials: Credentials) -> Self: ... async def get(self: Self, *path: PathPart, query: QueryParameters | None = None) -> RequesterResponse: ... - async def post(self: Self, *path: PathPart, data: dict | list) -> RequesterResponse: ... + async def post(self: Self, *path: PathPart, data: dict | list, as_files: bool = False) -> RequesterResponse: ... async def patch(self: Self, *path: PathPart, data: dict | list) -> RequesterResponse: ... diff --git a/pyproject.toml b/pyproject.toml index 09ac94ed..585ee7ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,9 @@ testcontainers = "^4.8.2" requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" +[tool.pytest.ini_options] +asyncio_default_fixture_loop_scope = "function" + [tool.ruff] line-length = 120 diff --git a/tests/integration/bundle.py b/tests/integration/bundle.py new file mode 100644 index 00000000..33697e1b --- /dev/null +++ b/tests/integration/bundle.py @@ -0,0 +1,12 @@ +from pathlib import Path +from tarfile import TarFile + + +def pack_bundle(from_dir: Path, to: Path) -> Path: + archive = (to / from_dir.name).with_suffix(".tgz") + + with TarFile(name=archive, mode="w") as tar: + for entry in from_dir.iterdir(): + tar.add(entry) + + return archive diff --git a/tests/integration/bundles/complex_cluster/config.yaml b/tests/integration/bundles/complex_cluster/config.yaml new file mode 100644 index 00000000..df31eca5 --- /dev/null +++ b/tests/integration/bundles/complex_cluster/config.yaml @@ -0,0 +1,133 @@ +- type: cluster + name: Some Cluster + version: 1 + +- type: service + name: example_1 + display_name: First Example + version: 1.0 + + config: + - name: root_int + display_name: Integer At Root + type: integer + default: 100 + + components: &example_c + first: + display_name: First Component + second: + display_name: Second Component + third_one: + display_name: This Is Different + +- type: service + name: example_2 + version: "4.23.456" + + components: *example_c + +- type: service + name: complex_config + version: 0.3 + config_group_customization: yes + + config: + - &complexity_level + name: complexity_level + display_name: Complexity Level + type: integer + default: 4 + required: no + + - name: very_important_flag + display_name: Set me + type: float + required: true + + - name: cant_find + type: string + default: "cantCme" + ui_options: + invisible: true + + - &country_codes + name: country_codes + type: structure + yspec: ./schema.yaml + default: + - country: Greece + code: 30 + - country: France + code: 33 + - country: Spain + code: 34 + + - &a_lot_of_text + 
name: a_lot_of_text + display_name: A lot of text + type: group + subs: + - name: cant_find + type: string + default: "cantCme" + ui_options: + invisible: true + - name: simple_string + type: string + required: no + default: "simple string here" + - name: big_text + type: text + required: no + default: "" + - name: license + description: "Edit license as you want :3" + read_only: any + type: file + default: ./text.txt + - name: pass + type: password + required: no + + - &from_doc + name: from_doc + display_name: Examples from documentation + type: group + ui_options: + advanced: true + subs: + - name: memory_size + type: integer + default: 16 + min: 2 + max: 64 + ui_options: + invinsible: true + - name: cluster_host + type: variant + required: false + source: + type: builtin + name: host_in_cluster + - name: secret_mapping + display_name: Map Secrets + type: secretmap + required: no + - name: mount_points + type: list + default: + - /dev/rdisk0s1 + - /dev/rdisk0s2 + - /dev/rdisk0s3 + read_only: any + - name: person + type: map + default: + name: Joe + age: "24" + sex: m + - name: protocol + type: option + option: { http: "80", https: "443" } + default: "80" diff --git a/tests/integration/bundles/complex_cluster/license.txt b/tests/integration/bundles/complex_cluster/license.txt new file mode 100644 index 00000000..51fca54c --- /dev/null +++ b/tests/integration/bundles/complex_cluster/license.txt @@ -0,0 +1,11 @@ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/tests/integration/bundles/complex_cluster/schema.yaml b/tests/integration/bundles/complex_cluster/schema.yaml new file mode 100644 index 00000000..c6cb5736 --- /dev/null +++ b/tests/integration/bundles/complex_cluster/schema.yaml @@ -0,0 +1,17 @@ + +--- +root: + match: list + item: country_code + +country_code: + match: dict + items: + country: string + code: integer + +string: + match: string + +integer: + match: int diff --git a/tests/integration/bundles/complex_cluster/text.txt b/tests/integration/bundles/complex_cluster/text.txt new file mode 100644 index 00000000..51fca54c --- /dev/null +++ b/tests/integration/bundles/complex_cluster/text.txt @@ -0,0 +1,11 @@ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
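A brief sketch of how these bundle directories are meant to be consumed (based on the integration-test fixtures added later in this patch; pack_bundle, BUNDLES, adcm_client and tmp_path come from the test helpers, and service names come from config.yaml above):

    # Pack the directory into a .tgz, upload it, create a cluster from it,
    # then add its services by filter.
    bundle_path = pack_bundle(from_dir=BUNDLES / "complex_cluster", to=tmp_path)
    bundle = await adcm_client.bundles.create(source=bundle_path, accept_license=True)
    cluster = await adcm_client.clusters.create(bundle=bundle, name="Awesome Cluster")
    await cluster.services.add(filter_=Filter(attr="name", op="contains", value="example"))
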
diff --git a/tests/integration/bundles/simple_hostprovider/config.yaml b/tests/integration/bundles/simple_hostprovider/config.yaml new file mode 100644 index 00000000..56628762 --- /dev/null +++ b/tests/integration/bundles/simple_hostprovider/config.yaml @@ -0,0 +1,7 @@ +- type: provider + name: simple_provider + version: 4 + +- type: host + name: simple_host + version: 2 diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index c9e1445a..aa7a82f6 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,4 +1,7 @@ +from pathlib import Path from typing import AsyncGenerator, Generator +import random +import string from testcontainers.core.network import Network import pytest @@ -7,15 +10,16 @@ from adcm_aio_client.core.client import ADCMClient, build_client from adcm_aio_client.core.types import Credentials from tests.integration.setup_environment import ( + DB_USER, ADCMContainer, ADCMPostgresContainer, + DatabaseInfo, adcm_image_name, - db_name, - db_password, - db_user, postgres_image_name, ) +BUNDLES = Path(__file__).parent / "bundles" + @pytest.fixture(scope="session") def network() -> Generator[Network, None, None]: @@ -23,21 +27,25 @@ def network() -> Generator[Network, None, None]: yield network -@pytest.fixture(scope="function") +@pytest.fixture(scope="session") def postgres(network: Network) -> Generator[ADCMPostgresContainer, None, None]: - with ADCMPostgresContainer(postgres_image_name, network) as container: - container.setup_postgres(db_user, db_password, db_name) + with ADCMPostgresContainer(image=postgres_image_name, network=network) as container: yield container @pytest.fixture(scope="function") -def adcm(postgres: ADCMPostgresContainer) -> Generator[ADCMContainer, None, None]: - with ADCMContainer(adcm_image_name, postgres.network, postgres.adcm_env_kwargs) as container: - container.setup_container() +def adcm(network: Network, postgres: ADCMPostgresContainer) -> Generator[ADCMContainer, None, None]: + suffix = "".join(random.sample(string.ascii_letters, k=6)).lower() + db = DatabaseInfo(name=f"adcm_{suffix}", host=postgres.name) + postgres.execute_statement(f"CREATE DATABASE {db.name} OWNER {DB_USER}") + + with ADCMContainer(image=adcm_image_name, network=network, db=db) as container: yield container + postgres.execute_statement(f"DROP DATABASE {db.name}") + @pytest_asyncio.fixture(scope="function") async def adcm_client(adcm: ADCMContainer) -> AsyncGenerator[ADCMClient, None]: credentials = Credentials(username="admin", password="admin") # noqa: S106 - yield await build_client(url=adcm.url, credentials=credentials, retries=3, retry_interval=15, timeout=30) + yield await build_client(url=adcm.url, credentials=credentials, retries=1, retry_interval=1, timeout=10) diff --git a/tests/integration/setup_environment.py b/tests/integration/setup_environment.py index c5eebdc9..b0a8c63b 100644 --- a/tests/integration/setup_environment.py +++ b/tests/integration/setup_environment.py @@ -1,3 +1,4 @@ +from dataclasses import dataclass from typing import Self import socket @@ -5,17 +6,24 @@ from testcontainers.core.container import DockerContainer from testcontainers.core.network import Network from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for_logs -from testcontainers.postgres import PostgresContainer +from testcontainers.postgres import DbContainer, PostgresContainer postgres_image_name = "postgres:latest" -adcm_image_name = "hub.adsw.io/adcm/adcm:develop" -adcm_port_range = (8000, 8010) 
-postgres_port_range = (5432, 5442) +adcm_image_name = "hub.adsw.io/adcm/adcm:feature_ADCM-6181" adcm_container_name = "test_adcm" postgres_name = "test_pg_db" -db_user = "adcm" -db_name = "adcm" -db_password = "password" # noqa: S105 + +# for now runtime relies that those values are always used for their purpose +DB_USER = "adcm" +DB_PASSWORD = "password" # noqa: S105 + + +@dataclass(slots=True) +class DatabaseInfo: + name: str + + host: str + port: int = 5432 def find_free_port(start: int, end: int) -> int: @@ -30,61 +38,60 @@ def find_free_port(start: int, end: int) -> int: class ADCMPostgresContainer(PostgresContainer): def __init__(self: Self, image: str, network: Network) -> None: super().__init__(image) - self.adcm_env_kwargs = {"STATISTICS_ENABLED": 0} - self.network = network - - def setup_postgres(self: Self, username: str, password: str, adcm_db_name: str) -> None: - postgres_port = find_free_port(postgres_port_range[0], postgres_port_range[1]) - - self.adcm_env_kwargs = self.adcm_env_kwargs | { - "DB_HOST": f"{postgres_name}_{postgres_port}", - "DB_USER": db_user, - "DB_NAME": db_name, - "DB_PASS": db_password, - "DB_PORT": str(postgres_port), - } - - self.with_name(f"{postgres_name}_{postgres_port}") - self.password = password - self.with_network(self.network) - self.with_bind_ports(postgres_port, postgres_port) - - self.start() - wait_container_is_ready(self) + self.name = postgres_name + self.with_name(self.name) + self.with_network(network) + + def execute_statement(self: Self, statement: str) -> None: + exit_code, out = self.exec(f'psql --username test --dbname test -c "{statement}"') + if exit_code != 0: + output = out.decode("utf-8") + message = f"Failed to execute psql statement: {output}" + raise RuntimeError(message) - self.exec( - f"psql --username test --dbname postgres " - f"-c \"CREATE USER {username} WITH ENCRYPTED PASSWORD '{db_password}';\"" - ) - self.exec(f"psql --username test --dbname postgres " f'-c "CREATE DATABASE {adcm_db_name} OWNER {username};"') + def start(self: Self) -> DbContainer: + super().start() + wait_container_is_ready(self) wait_for_logs(self, "database system is ready to accept connections") + self.execute_statement(f"CREATE USER {DB_USER} WITH ENCRYPTED PASSWORD '{DB_PASSWORD}'") + + return self + class ADCMContainer(DockerContainer): url: str - def __init__(self: Self, image: str, network: Network, env_kwargs: dict) -> None: + def __init__(self: Self, image: str, network: Network, db: DatabaseInfo) -> None: super().__init__(image) - self.postgres_env_kwargs = {} - self.network = network - self.adcm_env_kwarg = env_kwargs + self._db = db - def setup_container(self: Self) -> None: - adcm_port = find_free_port(adcm_port_range[0], adcm_port_range[1]) - self.with_name(f"{adcm_container_name}_{adcm_port}") - self.with_network(self.network) - self.with_bind_ports(adcm_port, adcm_port) + self.with_network(network) - for key, value in self.postgres_env_kwargs.items(): - self.with_env(key, value) + self.with_env("STATISTICS_ENABLED", "0") + self.with_env("DB_USER", DB_USER) + self.with_env("DB_PASS", DB_PASSWORD) + self.with_env("DB_NAME", self._db.name) + self.with_env("DB_HOST", self._db.host) + self.with_env("DB_PORT", str(self._db.port)) - self.start() + def start(self: Self) -> Self: + adcm_port = find_free_port(start=8000, end=8080) + self.with_bind_ports(8000, adcm_port) + + self.with_name(f"{adcm_container_name}_{adcm_port}") + + super().start() wait_container_is_ready(self) wait_for_logs(self, "Run Nginx ...") - self.url = 
f"http://{self.get_container_host_ip()}:{self.get_exposed_port(adcm_port)}" + ip = self.get_container_host_ip() + port = self.get_exposed_port(8000) + self.url = f"http://{ip}:{port}" + + return self class DockerContainerError(DockerException): diff --git a/tests/integration/test_config.py b/tests/integration/test_config.py new file mode 100644 index 00000000..d9f68752 --- /dev/null +++ b/tests/integration/test_config.py @@ -0,0 +1,35 @@ +from pathlib import Path + +import pytest +import pytest_asyncio + +from adcm_aio_client.core.client import ADCMClient +from adcm_aio_client.core.config import Parameter +from adcm_aio_client.core.filters import Filter +from adcm_aio_client.core.objects.cm import Bundle, Cluster +from tests.integration.bundle import pack_bundle +from tests.integration.conftest import BUNDLES + +pytestmark = [pytest.mark.asyncio] + + +@pytest_asyncio.fixture() +async def cluster_bundle(adcm_client: ADCMClient, tmp_path: Path) -> Bundle: + bundle_path = pack_bundle(from_dir=BUNDLES / "complex_cluster", to=tmp_path) + return await adcm_client.bundles.create(source=bundle_path, accept_license=True) + + +@pytest_asyncio.fixture() +async def cluster(adcm_client: ADCMClient, cluster_bundle: Bundle) -> Cluster: + cluster = await adcm_client.clusters.create(bundle=cluster_bundle, name="Awesome Cluster") + await cluster.services.add(filter_=Filter(attr="name", op="eq", value="complex_config")) + return cluster + + +async def test_config(cluster: Cluster) -> None: + service = await cluster.services.get() + + config = await service.config + + field = config["Complexity Level"] + assert isinstance(field, Parameter) diff --git a/tests/integration/test_dummy.py b/tests/integration/test_dummy.py deleted file mode 100644 index 24ad75af..00000000 --- a/tests/integration/test_dummy.py +++ /dev/null @@ -1,14 +0,0 @@ -import logging - -import pytest - -from adcm_aio_client.core.client import ADCMClient - -logging.basicConfig(level=logging.DEBUG) - - -@pytest.mark.asyncio -async def test_clusters_page(adcm_client: ADCMClient) -> None: - clusters = await adcm_client.clusters.list() - - assert len(clusters) == 0 diff --git a/tests/integration/test_mapping.py b/tests/integration/test_mapping.py new file mode 100644 index 00000000..6fac943d --- /dev/null +++ b/tests/integration/test_mapping.py @@ -0,0 +1,159 @@ +from collections.abc import Iterable +from itertools import chain +from pathlib import Path +import asyncio + +import pytest +import pytest_asyncio + +from adcm_aio_client.core.client import ADCMClient +from adcm_aio_client.core.filters import Filter +from adcm_aio_client.core.mapping.refresh import apply_local_changes, apply_remote_changes +from adcm_aio_client.core.mapping.types import MappingPair +from adcm_aio_client.core.objects.cm import Bundle, Cluster, Host +from tests.integration.bundle import pack_bundle +from tests.integration.conftest import BUNDLES + +pytestmark = [pytest.mark.asyncio] + +type FiveHosts = tuple[Host, Host, Host, Host, Host] + + +def build_name_mapping(*iterables: Iterable[MappingPair]) -> set[tuple[str, str, str]]: + return {(c.service.name, c.name, h.name) for c, h in chain.from_iterable(iterables)} + + +@pytest_asyncio.fixture() +async def cluster_bundle(adcm_client: ADCMClient, tmp_path: Path) -> Bundle: + bundle_path = pack_bundle(from_dir=BUNDLES / "complex_cluster", to=tmp_path) + return await adcm_client.bundles.create(source=bundle_path, accept_license=True) + + +@pytest_asyncio.fixture() +async def hostprovider_bundle(adcm_client: ADCMClient, 
tmp_path: Path) -> Bundle: + bundle_path = pack_bundle(from_dir=BUNDLES / "simple_hostprovider", to=tmp_path) + return await adcm_client.bundles.create(source=bundle_path, accept_license=True) + + +@pytest_asyncio.fixture() +async def cluster(adcm_client: ADCMClient, cluster_bundle: Bundle) -> Cluster: + cluster = await adcm_client.clusters.create(bundle=cluster_bundle, name="Awesome Cluster") + await cluster.services.add(filter_=Filter(attr="name", op="contains", value="example")) + return cluster + + +@pytest_asyncio.fixture() +async def hosts(adcm_client: ADCMClient, hostprovider_bundle: Bundle) -> FiveHosts: + hp = await adcm_client.hostproviders.create(bundle=hostprovider_bundle, name="Awesome HostProvider") + coros = (adcm_client.hosts.create(hostprovider=hp, name=f"host-{i+1}") for i in range(5)) + await asyncio.gather(*coros) + hosts = await adcm_client.hosts.all() + return tuple(hosts) # type: ignore[reportReturnType] + + +async def test_cluster_mapping(adcm_client: ADCMClient, cluster: Cluster, hosts: FiveHosts) -> None: + mapping = await cluster.mapping + + assert len(mapping.all()) == 0 + assert len(await mapping.hosts.all()) == 0 + assert len(await mapping.components.all()) == 6 + + await cluster.hosts.add(host=hosts) + host_1, host_2, host_3, host_4, host_5 = await mapping.hosts.all() + + service_1 = await cluster.services.get(display_name__eq="First Example") + service_2 = await cluster.services.get(name__eq="example_2") + + component_1_s1 = await service_1.components.get(name__eq="first") + component_2_s2 = await service_2.components.get(display_name__in=["Second Component"]) + + # local mapping editing + + await mapping.add(component=component_1_s1, host=host_1) + assert len(tuple(mapping.iter())) == 1 + + await mapping.add(component=(component_1_s1, component_2_s2), host=(host_1, host_3, host_4)) + assert len(mapping.all()) == 6 + + await mapping.remove(component=component_2_s2, host=(host_2, host_3)) + assert len(mapping.all()) == 5 + + await mapping.remove(component=(component_1_s1, component_2_s2), host=host_1) + assert len(mapping.all()) == 3 + + await mapping.add( + component=await mapping.components.filter(display_name__icontains="different"), + host=Filter(attr="name", op="in", value=(host_2.name, host_5.name)), + ) + assert len(mapping.all()) == 7 + + await mapping.remove( + component=await mapping.components.filter(display_name__icontains="different"), + host=Filter(attr="name", op="in", value=(host_2.name, host_5.name)), + ) + assert len(mapping.all()) == 3 + + mapping.empty() + assert mapping.all() == [] + + # saving + + all_components = await mapping.components.all() + + await mapping.add(component=all_components, host=host_5) + await mapping.add(component=component_1_s1, host=(host_2, host_3)) + await mapping.save() + + expected_mapping = build_name_mapping( + ((c, host_5) for c in all_components), ((component_1_s1, h) for h in (host_2, host_3)) + ) + actual_mapping = build_name_mapping(mapping.iter()) + assert actual_mapping == expected_mapping + + # refreshing + + cluster_alt = await adcm_client.clusters.get(name__eq=cluster.name) + mapping_alt = await cluster_alt.mapping + + assert build_name_mapping(mapping.iter()) == build_name_mapping(mapping_alt.iter()) + + component_3_s2 = await service_2.components.get(name__eq="third_one") + components_except_3_s2 = tuple(c for c in all_components if c.id != component_3_s2.id) + + await mapping_alt.remove(component_1_s1, host_3) + await mapping_alt.add(component_3_s2, (host_2, host_4)) + + await 
mapping.add((component_1_s1, component_3_s2), host_1) + await mapping.remove(component_3_s2, host_5) + + await mapping_alt.save() + + await mapping.refresh(strategy=apply_remote_changes) + + expected_mapping = build_name_mapping( + ((c, host_5) for c in components_except_3_s2), + ((component_1_s1, h) for h in (host_1, host_2)), + ((component_3_s2, h) for h in (host_1, host_2, host_4)), + ) + actual_mapping = build_name_mapping(mapping.iter()) + assert actual_mapping == expected_mapping + + # drop cached mapping and apply the same local changes + await cluster.refresh() + mapping = await cluster.mapping + + await mapping.add((component_1_s1, component_3_s2), host_1) + await mapping.remove(component_3_s2, host_5) + + await mapping.refresh(strategy=apply_local_changes) + + expected_mapping = ( + # base is remote, but with local changes + build_name_mapping(mapping_alt.iter()) + # remove what's removed locally + - build_name_mapping(((component_3_s2, host_5),)) + # add what's added locally + | build_name_mapping(((component_1_s1, host_1), (component_3_s2, host_1))) + ) + actual_mapping = build_name_mapping(mapping.iter()) + assert actual_mapping == expected_mapping diff --git a/tests/unit/mocks/requesters.py b/tests/unit/mocks/requesters.py index 5adadf68..41fe23e4 100644 --- a/tests/unit/mocks/requesters.py +++ b/tests/unit/mocks/requesters.py @@ -39,8 +39,8 @@ async def get(self: Self, *path: PathPart, query: QueryParameters | None = None) _ = path, query return self._return_next_response() - async def post(self: Self, *path: PathPart, data: dict | list) -> RequesterResponse: - _ = path, data + async def post(self: Self, *path: PathPart, data: dict | list, **_kw) -> RequesterResponse: # noqa: ANN003 + _ = path, data, _kw return self._return_next_response() async def patch(self: Self, *path: PathPart, data: dict | list) -> RequesterResponse: diff --git a/tests/unit/test_requesters.py b/tests/unit/test_requesters.py index 28798f28..5a2cce7c 100644 --- a/tests/unit/test_requesters.py +++ b/tests/unit/test_requesters.py @@ -15,6 +15,7 @@ class HTTPXLikeResponse: status_code: int = 200 data: str = "{}" + content: bytes = b"" def json(self: Self) -> Any: # noqa: ANN401 return json.loads(self.data) From 54ddc122f6763fa92440c07a25c63a64bca49852 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Thu, 12 Dec 2024 12:06:47 +0300 Subject: [PATCH 33/46] ADCM-6195: Add supported ADCM version in client (#41) --- adcm_aio_client/core/client.py | 40 +++++++++++++++++++++++++++--- adcm_aio_client/core/errors.py | 11 ++++++++ adcm_aio_client/core/objects/cm.py | 14 ++++++----- poetry.lock | 27 +++++++++++++++++++- pyproject.toml | 1 + 5 files changed, 82 insertions(+), 11 deletions(-) diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index 4264c257..61d91eea 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -12,16 +12,26 @@ from functools import cached_property from typing import Self +from urllib.parse import urljoin +from adcm_version import compare_adcm_versions +import httpx + +from adcm_aio_client.core.errors import NotSupportedVersionError, VersionRetrievalError from adcm_aio_client.core.objects.cm import ADCM, BundlesNode, ClustersNode, HostProvidersNode, HostsNode from adcm_aio_client.core.requesters import BundleRetriever, BundleRetrieverInterface, DefaultRequester, Requester from adcm_aio_client.core.types import Cert, Credentials, Verify +MIN_ADCM_VERSION = "2.5.0" + class ADCMClient: - def __init__(self: Self, requester: Requester, 
bundle_retriever: BundleRetrieverInterface) -> None: + def __init__( + self: Self, requester: Requester, bundle_retriever: BundleRetrieverInterface, adcm_version: str + ) -> None: self._requester = requester self.bundle_retriever = bundle_retriever + self._adcm_version = adcm_version @cached_property def clusters(self: Self) -> ClustersNode: @@ -37,7 +47,7 @@ def hostproviders(self: Self) -> HostProvidersNode: @cached_property def adcm(self: Self) -> ADCM: - return ADCM(requester=self._requester, data={}) + return ADCM(requester=self._requester, data={}, version=self._adcm_version) @cached_property def bundles(self: Self) -> BundlesNode: @@ -50,10 +60,32 @@ async def build_client( *, verify: Verify | None = None, # noqa: ARG001 cert: Cert | None = None, # noqa: ARG001 - timeout: float = 0.5, + timeout: float = 1.5, retries: int = 5, retry_interval: float = 5.0, ) -> ADCMClient: + adcm_version = await _get_and_check_adcm_version(url=url, timeout=timeout) requester = DefaultRequester(base_url=url, retries=retries, retry_interval=retry_interval, timeout=timeout) await requester.login(credentials=credentials) - return ADCMClient(requester=requester, bundle_retriever=BundleRetriever()) + return ADCMClient(requester=requester, bundle_retriever=BundleRetriever(), adcm_version=adcm_version) + + +async def _get_and_check_adcm_version(url: str, timeout: float) -> str: + try: + adcm_version = await _get_adcm_version(url=url, timeout=timeout) + except VersionRetrievalError as e: + message = f"Can't get ADCM version for {url}. Most likely ADCM version is lesser than {MIN_ADCM_VERSION}" + raise NotSupportedVersionError(message) from e + + if compare_adcm_versions(adcm_version, MIN_ADCM_VERSION) < 0: + message = f"Minimal supported ADCM version is {MIN_ADCM_VERSION}. Got {adcm_version}" + raise NotSupportedVersionError(message) + + return adcm_version + + +async def _get_adcm_version(url: str, timeout: float) -> str: + try: + return (await httpx.AsyncClient(timeout=timeout).get(urljoin(url, "versions/"))).json()["adcm"]["version"] + except Exception as e: + raise VersionRetrievalError from e diff --git a/adcm_aio_client/core/errors.py b/adcm_aio_client/core/errors.py index 0a910330..e3cf9eff 100644 --- a/adcm_aio_client/core/errors.py +++ b/adcm_aio_client/core/errors.py @@ -117,3 +117,14 @@ class FilterPreparationError(FilterError): ... class InvalidFilterError(FilterError): ... 
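# Illustrative usage sketch (not part of this patch): how the version gate added
# in this commit surfaces to callers. build_client() now probes /versions/ before
# logging in and raises NotSupportedVersionError when the target ADCM is older
# than MIN_ADCM_VERSION ("2.5.0") or the version cannot be read; the URL and
# credentials below are placeholders.
from adcm_aio_client.core.client import ADCMClient, build_client
from adcm_aio_client.core.errors import NotSupportedVersionError
from adcm_aio_client.core.types import Credentials


async def connect_checked(url: str = "http://127.0.0.1:8000") -> ADCMClient:
    credentials = Credentials(username="admin", password="admin")  # placeholder credentials
    try:
        return await build_client(url=url, credentials=credentials)
    except NotSupportedVersionError:
        # The gate fires before login, so no authenticated requester is left behind.
        raise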
+ + +# Version + + +class VersionRetrievalError(ADCMClientError): + pass + + +class NotSupportedVersionError(ADCMClientError): + pass diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 832a3dea..68a2c565 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -2,7 +2,7 @@ from datetime import datetime from functools import cached_property from pathlib import Path -from typing import Callable, Iterable, Literal, Self +from typing import Any, Callable, Iterable, Literal, Self import asyncio from asyncstdlib.functools import cached_property as async_cached_property # noqa: N813 @@ -47,15 +47,17 @@ class ADCM(InteractiveObject, WithActions, WithConfig): + def __init__(self: Self, requester: Requester, data: dict[str, Any], version: str) -> None: + super().__init__(requester=requester, data=data) + self._version = version + @cached_property def id(self: Self) -> int: return 1 - @async_cached_property - async def version(self: Self) -> str: - # TODO: override root_path for being without /api/v2 - response = await self._requester.get("versions") - return response.as_dict()["adcm"]["version"] + @property + def version(self: Self) -> str: + return self._version def get_own_path(self: Self) -> Endpoint: return ("adcm",) diff --git a/poetry.lock b/poetry.lock index f6b76599..a6368ee6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,19 @@ # This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +[[package]] +name = "adcm-version" +version = "1.0.3" +description = "" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "adcm_version-1.0.3-py3-none-any.whl", hash = "sha256:e59bc0e6ed23ee0bc870a5a6b5c1a6ea7c671914e43c2cee6a5fb0d1e3c314ab"}, + {file = "adcm_version-1.0.3.tar.gz", hash = "sha256:2052d7c17ef72f1e32971e939ec972426978025e5c052320062f4ed3a2c90bc5"}, +] + +[package.dependencies] +version-utils = ">=0.3.2,<0.4.0" + [[package]] name = "anyio" version = "4.6.2.post1" @@ -537,6 +551,17 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "version-utils" +version = "0.3.2" +description = "Library for parsing system package strings and comparing package versions" +optional = false +python-versions = "*" +files = [ + {file = "version_utils-0.3.2-py2.py3-none-any.whl", hash = "sha256:4e0f3dff669d7a081dd66d8b616752dc309e7246a7b2c5ac800dde5ec0d9a555"}, + {file = "version_utils-0.3.2.tar.gz", hash = "sha256:308191f111395ac19ec5ef4650764af29962a6415d8391785027ae5328579299"}, +] + [[package]] name = "wrapt" version = "1.17.0" @@ -614,4 +639,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "1ef31ee0a5363bd5fe7d3cdaf63fd956a3d84471cdbaf206b3f792b9fc173bce" +content-hash = "68d447594e6bae71334a44c39ed3738483686abe91bf476dbf4eddc4ad6326f5" diff --git a/pyproject.toml b/pyproject.toml index 585ee7ba..78bb7b64 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,6 +10,7 @@ readme = "README.md" python = "^3.12" httpx = "^0.27.2" asyncstdlib = "^3.13.0" +adcm-version = "^1.0.3" [tool.poetry.group.dev] optional = true From d350478d876bc661df3f7713fa27275b9c1aeb11 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 13 Dec 2024 12:14:40 +0500 Subject: [PATCH 34/46] ADCM-6187 Basic test on config with real ADCM (#42) --- adcm_aio_client/core/config/_objects.py | 12 ++ adcm_aio_client/core/config/types.py | 2 +- adcm_aio_client/core/objects/cm.py | 1 + 
.../bundles/complex_cluster/config.yaml | 14 ++ .../bundles/complex_cluster/sag.yaml | 17 +++ tests/integration/test_config.py | 130 +++++++++++++++++- 6 files changed, 173 insertions(+), 3 deletions(-) create mode 100644 tests/integration/bundles/complex_cluster/sag.yaml diff --git a/adcm_aio_client/core/config/_objects.py b/adcm_aio_client/core/config/_objects.py index d959a465..45400426 100644 --- a/adcm_aio_client/core/config/_objects.py +++ b/adcm_aio_client/core/config/_objects.py @@ -38,6 +38,9 @@ def __init__( self._schema = schema self._data = data + def _on_data_change(self: Self) -> None: + pass + class _Group(_ConfigWrapper): __slots__ = ("_name", "_schema", "_data", "_wrappers_cache") @@ -78,6 +81,12 @@ def _find_and_wrap_config_entry[ValueW: _ConfigWrapper, GroupW: _ConfigWrapper, return wrapper + def _on_data_change(self: Self) -> None: + # need to drop caches when data is changed, + # because each entry may already point to a different data + # and return incorrect nodes for a seach (=> can't be edited too) + self._wrappers_cache = {} + class Parameter[T](_ConfigWrapper): @property @@ -192,6 +201,7 @@ def config(self: Self) -> ConfigData: def change_data(self: Self, new_data: ConfigData) -> ConfigData: self._data = new_data + self._on_data_change() return self._data @@ -257,6 +267,8 @@ async def save(self: Self, description: str = "") -> Self: # config isn't saved, no data update is in play, # returning "pre-saved" parsed values self._parse_json_fields_inplace_safe(config_to_save) + + raise else: new_config = ConfigData.from_v2_response(data_in_v2_format=response.as_dict()) self._initial_config = self._parse_json_fields_inplace_safe(new_config) diff --git a/adcm_aio_client/core/config/types.py b/adcm_aio_client/core/config/types.py index 8dd817fa..cb121bb0 100644 --- a/adcm_aio_client/core/config/types.py +++ b/adcm_aio_client/core/config/types.py @@ -141,7 +141,7 @@ class ConfigDifference: @property def is_empty(self: Self) -> bool: - return bool(self.values or self.attributes) + return not bool(self.values or self.attributes) def __str__(self: Self) -> str: values_nested = self._to_nested_dict(self.values) diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 68a2c565..d06033e7 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -371,6 +371,7 @@ def hosts(self: Self) -> "HostsAccessor": class HostProvidersNode(PaginatedAccessor[HostProvider]): class_type = HostProvider + filtering = Filtering(FilterByName, FilterByBundle) async def create(self: Self, bundle: Bundle, name: str, description: str = "") -> HostProvider: response = await self._requester.post( diff --git a/tests/integration/bundles/complex_cluster/config.yaml b/tests/integration/bundles/complex_cluster/config.yaml index df31eca5..cea3391d 100644 --- a/tests/integration/bundles/complex_cluster/config.yaml +++ b/tests/integration/bundles/complex_cluster/config.yaml @@ -89,6 +89,11 @@ - name: pass type: password required: no + - name: sag + display_name: Group-like stucture + type: structure + yspec: ./sag.yaml + required: false - &from_doc name: from_doc @@ -131,3 +136,12 @@ type: option option: { http: "80", https: "443" } default: "80" + - name: agroup + display_name: Optional + type: group + activatable: true + active: false + subs: + - name: justhere + type: integer + required: false diff --git a/tests/integration/bundles/complex_cluster/sag.yaml b/tests/integration/bundles/complex_cluster/sag.yaml new file mode 100644 index 
00000000..928ae644 --- /dev/null +++ b/tests/integration/bundles/complex_cluster/sag.yaml @@ -0,0 +1,17 @@ +root: + match: dict + items: + nested: inner_group + quantity: integer + +inner_group: + match: dict + items: + attr: string + op: string + +integer: + match: int + +string: + match: string diff --git a/tests/integration/test_config.py b/tests/integration/test_config.py index d9f68752..9e5326a9 100644 --- a/tests/integration/test_config.py +++ b/tests/integration/test_config.py @@ -4,7 +4,8 @@ import pytest_asyncio from adcm_aio_client.core.client import ADCMClient -from adcm_aio_client.core.config import Parameter +from adcm_aio_client.core.config import ActivatableParameterGroup, Parameter, ParameterGroup +from adcm_aio_client.core.config.refresh import apply_local_changes, apply_remote_changes from adcm_aio_client.core.filters import Filter from adcm_aio_client.core.objects.cm import Bundle, Cluster from tests.integration.bundle import pack_bundle @@ -27,9 +28,134 @@ async def cluster(adcm_client: ADCMClient, cluster_bundle: Bundle) -> Cluster: async def test_config(cluster: Cluster) -> None: + # save two configs for later refresh usage service = await cluster.services.get() + config_1 = await service.config + service = await cluster.services.get() + config_2 = await service.config + + # change and save + service = await cluster.services.get() config = await service.config - field = config["Complexity Level"] + required_value = 100 + codes_value = [{"country": "Unknown", "code": 32}] + multiline_value = "A lot of text\nOn multiple lines\n\tAnd it's perfectly fine\n" + secret_map_value = {"pass1": "verysecret", "pass2": "evenmoresecret"} + + field = config["Set me"] + assert isinstance(field, Parameter) + assert field.value is None + field.set(required_value) + assert field.value == required_value + assert field.value == config["very_important_flag", Parameter].value + + field = config["country_codes"] + # structure with "list" root is a parameter + assert isinstance(field, Parameter) + assert isinstance(field.value, list) + assert all(isinstance(e, dict) for e in field.value) + field.set(codes_value) + + group = config["A lot of text"] + assert isinstance(group, ParameterGroup) + + # group_like = group["Group-like structure"] + # # structure with "dict" root is a group + # assert isinstance(group_like, ParameterGroup) + # assert isinstance(group_like["quantity"], Parameter) + # nested_group = group_like["nested"] + # assert isinstance(nested_group, ParameterGroup) + # nested_group["attr", Parameter].set("something") + # nested_group["op", Parameter].set("good") + + field = group["big_text"] + assert isinstance(field, Parameter) + assert field.value is None + field.set(multiline_value) + assert field.value == multiline_value + + field = config["from_doc", ParameterGroup]["Map Secrets"] assert isinstance(field, Parameter) + assert field.value is None + field.set(secret_map_value) + + config["agroup", ActivatableParameterGroup].activate() + + pre_save_id = config.id + + await config.save() + + assert config.id != pre_save_id + assert config_1.id == pre_save_id + assert config_2.id == pre_save_id + + # check values are updated, so values are encrypted coming from server + field = config["from_doc", ParameterGroup]["Map Secrets"] + assert field.value.keys() == secret_map_value.keys() # type: ignore + assert field.value.values() != secret_map_value.values() # type: ignore + + # refresh + + non_conflicting_value_1 = 200 + non_conflicting_value_2 = "megapass" + conflict_value_1 = "very 
fun\n" + conflict_value_2 = 43.2 + + for config_ in (config_1, config_2): + config_["Complexity Level", Parameter].set(non_conflicting_value_1) + group_ = config_["a_lot_of_text", ParameterGroup] + group_["pass", Parameter].set(non_conflicting_value_2) + group_["big_text", Parameter].set(conflict_value_1) + config_["Set me", Parameter].set(conflict_value_2) + + await config_1.refresh(strategy=apply_local_changes) + + config_ = config_1 + assert config_.id == config.id + assert config_["Complexity Level", Parameter].value == non_conflicting_value_1 + assert config_["Set me", Parameter].value == conflict_value_2 + group_ = config_["a_lot_of_text", ParameterGroup] + assert group_["pass", Parameter].value == non_conflicting_value_2 + assert group_["big_text", Parameter].value == conflict_value_1 + secret_map = config_["from_doc", ParameterGroup]["Map Secrets", Parameter] + assert isinstance(secret_map.value, dict) + assert secret_map.value.keys() == secret_map_value.keys() + assert config_["country_codes", Parameter].value == codes_value + + await config_2.refresh(strategy=apply_remote_changes) + + config_ = config_2 + assert config_.id == config.id + assert config_.id == config.id + assert config_["Complexity Level", Parameter].value == non_conflicting_value_1 + assert config_["Set me", Parameter].value == required_value + group_ = config_["a_lot_of_text", ParameterGroup] + assert group_["pass", Parameter].value == non_conflicting_value_2 + assert group_["big_text", Parameter].value == multiline_value + secret_map = config_["from_doc", ParameterGroup]["Map Secrets", Parameter] + assert isinstance(secret_map.value, dict) + assert secret_map.value.keys() == secret_map_value.keys() + assert config_["country_codes", Parameter].value == codes_value + + # history + + config_1["agroup", ActivatableParameterGroup].deactivate() + + await config_1.save() + + assert config_1.id != config.id + + latest_config = await service.config_history[-1] + earliest_config = await service.config_history[0] + + assert latest_config.id == config_1.id + assert earliest_config.id == pre_save_id + + diff = latest_config.difference(earliest_config) + # group was activated, then deactivated, so returned to initial state + # => no diff + assert len(diff.attributes) == 0 + # field values changed from earliest to latest + assert len(diff.values) == 6 From 19442223725f89a46dfc6d3261e857b9aac97ada Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Fri, 13 Dec 2024 10:39:09 +0300 Subject: [PATCH 35/46] ADCM-6151: Make status an arbitraty string for all objects (#40) --- adcm_aio_client/core/objects/_base.py | 8 ++++---- adcm_aio_client/core/objects/_common.py | 9 ++++----- adcm_aio_client/core/objects/cm.py | 4 ++-- adcm_aio_client/core/types.py | 15 --------------- 4 files changed, 10 insertions(+), 26 deletions(-) diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index 684b3039..169a2315 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -112,11 +112,11 @@ def __init__( self._requester = requester self._path = path - def __repr__(self: Self) -> MaintenanceModeStatus: - return self._maintenance_mode_status + def __repr__(self: Self) -> str: + return self._maintenance_mode_status.value - def __str__(self: Self) -> MaintenanceModeStatus: - return self._maintenance_mode_status + def __str__(self: Self) -> str: + return self._maintenance_mode_status.value @property def value(self: Self) -> str: diff --git 
a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py index 9d95ea00..b57c77d7 100644 --- a/adcm_aio_client/core/objects/_common.py +++ b/adcm_aio_client/core/objects/_common.py @@ -7,7 +7,6 @@ from adcm_aio_client.core.config import ConfigHistoryNode, ObjectConfig from adcm_aio_client.core.config._objects import ConfigOwner from adcm_aio_client.core.objects._base import AwareOfOwnPath, MaintenanceMode, WithProtectedRequester -from adcm_aio_client.core.types import ADCMEntityStatus, JobStatus class Deletable(WithProtectedRequester, AwareOfOwnPath): @@ -16,9 +15,9 @@ async def delete(self: Self) -> None: class WithStatus(WithProtectedRequester, AwareOfOwnPath): - async def get_status(self: Self) -> ADCMEntityStatus: + async def get_status(self: Self) -> str: response = await self._requester.get(*self.get_own_path()) - return ADCMEntityStatus(response.as_dict()["status"]) + return response.as_dict()["status"] class WithActions(WithProtectedRequester, AwareOfOwnPath): @@ -53,6 +52,6 @@ async def maintenance_mode(self: Self) -> MaintenanceMode: class WithJobStatus(WithProtectedRequester, AwareOfOwnPath): - async def get_job_status(self: Self) -> JobStatus: + async def get_job_status(self: Self) -> str: response = await self._requester.get(*self.get_own_path()) - return JobStatus(response.as_dict()["status"]) + return response.as_dict()["status"] diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index d06033e7..943e80fe 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -42,7 +42,7 @@ ) from adcm_aio_client.core.objects._imports import ClusterImports from adcm_aio_client.core.requesters import BundleRetrieverInterface -from adcm_aio_client.core.types import Endpoint, JobStatus, Requester, UrlPath, WithProtectedRequester +from adcm_aio_client.core.types import Endpoint, Requester, UrlPath, WithProtectedRequester from adcm_aio_client.core.utils import safe_gather @@ -484,7 +484,7 @@ def action(self: Self) -> Action: return self._construct(what=Action, from_data=self._data["action"]) async def wait(self: Self, status_predicate: Callable[[], bool], timeout: int = 30, poll: int = 5) -> None: - if self._data["status"] not in (JobStatus.RUNNING, JobStatus.CREATED): + if self._data["status"] not in ("running", "created"): return for _ in range(timeout // poll): diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index efefbc6e..c853a976 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -83,26 +83,11 @@ class AwareOfOwnPath(Protocol): def get_own_path(self: Self) -> Endpoint: ... 
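# Illustrative sketch (not part of this patch): with ADCM-6151 object and job
# statuses become plain strings, so callers compare against literals such as
# "up", "down" or "running" instead of the removed ADCMEntityStatus / JobStatus
# enums; `cluster` here is assumed to come from ADCMClient.clusters.
from adcm_aio_client.core.objects.cm import Cluster


async def ensure_cluster_is_up(cluster: Cluster) -> None:
    status = await cluster.get_status()  # plain string, e.g. "up" or "down"
    if status != "up":
        message = f"Cluster {cluster.name} reports status {status!r}"
        raise RuntimeError(message)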
-class ADCMEntityStatus(str, Enum): - UP = "up" - DOWN = "down" - - class MappingOperation(str, Enum): ADD = "add" REMOVE = "remove" -class JobStatus(str, Enum): - CREATED = "created" - SUCCESS = "success" - FAILED = "failed" - RUNNING = "running" - LOCKED = "locked" - ABORTED = "aborted" - BROKEN = "broken" - - class UrlPath(str): pass From b9db24124f00b24a077a5051b4f47e786d983386 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Fri, 13 Dec 2024 17:45:53 +0500 Subject: [PATCH 36/46] ADCM-6219 Change defaults for builder; fix hosts addition to cluster; load bundle from URL as plain str (#44) --- adcm_aio_client/core/client.py | 6 +++--- adcm_aio_client/core/objects/cm.py | 34 ++++++++++++++++++------------ adcm_aio_client/core/requesters.py | 6 +++--- adcm_aio_client/core/types.py | 3 +-- 4 files changed, 28 insertions(+), 21 deletions(-) diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index 61d91eea..e83689a3 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -60,9 +60,9 @@ async def build_client( *, verify: Verify | None = None, # noqa: ARG001 cert: Cert | None = None, # noqa: ARG001 - timeout: float = 1.5, - retries: int = 5, - retry_interval: float = 5.0, + timeout: float = 600.0, + retries: int = 3, + retry_interval: float = 1.0, ) -> ADCMClient: adcm_version = await _get_and_check_adcm_version(url=url, timeout=timeout) requester = DefaultRequester(base_url=url, retries=retries, retry_interval=retry_interval, timeout=timeout) diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 943e80fe..201c5fee 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -2,7 +2,7 @@ from datetime import datetime from functools import cached_property from pathlib import Path -from typing import Any, Callable, Iterable, Literal, Self +from typing import Any, Awaitable, Callable, Iterable, Literal, Self import asyncio from asyncstdlib.functools import cached_property as async_cached_property # noqa: N813 @@ -42,7 +42,7 @@ ) from adcm_aio_client.core.objects._imports import ClusterImports from adcm_aio_client.core.requesters import BundleRetrieverInterface -from adcm_aio_client.core.types import Endpoint, Requester, UrlPath, WithProtectedRequester +from adcm_aio_client.core.types import Endpoint, Requester, URLStr, WithProtectedRequester from adcm_aio_client.core.utils import safe_gather @@ -132,11 +132,11 @@ def __init__(self: Self, path: Endpoint, requester: Requester, retriever: Bundle super().__init__(path, requester) self._bundle_retriever = retriever - async def create(self: Self, source: Path | UrlPath, accept_license: bool = False) -> Bundle: # noqa: FBT001, FBT002 - if isinstance(source, UrlPath): - file = await self._bundle_retriever.download_external_bundle(source) - else: + async def create(self: Self, source: Path | URLStr, *, accept_license: bool = False) -> Bundle: + if isinstance(source, Path): file = Path(source).read_bytes() + else: + file = await self._bundle_retriever.download_external_bundle(source) data = {"file": file} response = await self._requester.post("bundles", data=data, as_files=True) @@ -206,7 +206,7 @@ def services(self: Self) -> "ServicesNode": @cached_property def hosts(self: Self) -> "HostsInClusterNode": - return HostsInClusterNode(path=(*self.get_own_path(), "hosts"), requester=self._requester) + return HostsInClusterNode(cluster=self) @cached_property def imports(self: Self) -> ClusterImports: @@ -420,13 +420,19 @@ async def create( 
class HostsInClusterNode(HostsAccessor): + def __init__(self: Self, cluster: Cluster) -> None: + path = (*cluster.get_own_path(), "hosts") + super().__init__(path=path, requester=cluster.requester) + + self._root_host_filter = HostsAccessor(path=("hosts",), requester=cluster.requester).filter + async def add(self: Self, host: Host | Iterable[Host] | Filter) -> None: - hosts = await self._get_hosts(host=host) + hosts = await self._get_hosts(host=host, filter_func=self._root_host_filter) await self._requester.post(*self._path, data=[{"hostId": host.id} for host in hosts]) async def remove(self: Self, host: Host | Iterable[Host] | Filter) -> None: - hosts = await self._get_hosts(host=host) + hosts = await self._get_hosts(host=host, filter_func=self.filter) error = await safe_gather( coros=(self._requester.delete(*self._path, host_.id) for host_ in hosts), @@ -436,16 +442,18 @@ async def remove(self: Self, host: Host | Iterable[Host] | Filter) -> None: if error is not None: raise error - async def _get_hosts(self: Self, host: Host | Iterable[Host] | Filter) -> Iterable[Host]: + async def _get_hosts( + self: Self, host: Host | Iterable[Host] | Filter, filter_func: Callable[..., Awaitable[list[Host]]] + ) -> tuple[Host, ...]: if isinstance(host, Host): - hosts = [host] + hosts = (host,) elif isinstance(host, Filter): inline_filters = filters_to_inline(host) - hosts = await self.filter(**inline_filters) + hosts = await filter_func(**inline_filters) else: hosts = host - return hosts + return tuple(hosts) class Job[Object: "InteractiveObject"](WithStatus, WithActions, WithJobStatus, RootInteractiveObject): diff --git a/adcm_aio_client/core/requesters.py b/adcm_aio_client/core/requesters.py index 618bf423..d5c2a5a7 100644 --- a/adcm_aio_client/core/requesters.py +++ b/adcm_aio_client/core/requesters.py @@ -35,7 +35,7 @@ UnauthorizedError, WrongCredentialsError, ) -from adcm_aio_client.core.types import Credentials, PathPart, QueryParameters, Requester, UrlPath +from adcm_aio_client.core.types import Credentials, PathPart, QueryParameters, Requester, URLStr Json: TypeAlias = Any Params = ParamSpec("Params") @@ -198,12 +198,12 @@ def _ensure_credentials(self: Self) -> Credentials: class BundleRetrieverInterface(ABC): @abstractmethod - async def download_external_bundle(self: Self, url: UrlPath) -> bytes: + async def download_external_bundle(self: Self, url: URLStr) -> bytes: pass class BundleRetriever(BundleRetrieverInterface): - async def download_external_bundle(self: Self, url: UrlPath) -> bytes: + async def download_external_bundle(self: Self, url: URLStr) -> bytes: try: async with httpx.AsyncClient() as client: response = await client.get(url) diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index c853a976..25a754aa 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -88,8 +88,7 @@ class MappingOperation(str, Enum): REMOVE = "remove" -class UrlPath(str): - pass +type URLStr = str class MaintenanceModeStatus(str, Enum): From 307fa2e96d8770e5411ec7678e2c78ba7e43060a Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Fri, 13 Dec 2024 16:33:15 +0300 Subject: [PATCH 37/46] ADCM-6200: Implement wait() method for Job (#43) --- adcm_aio_client/core/objects/_base.py | 7 +++-- adcm_aio_client/core/objects/cm.py | 39 ++++++++++++++++++--------- adcm_aio_client/core/types.py | 3 +++ 3 files changed, 34 insertions(+), 15 deletions(-) diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index 169a2315..f1acfb95 
100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -44,12 +44,15 @@ def id(self: Self) -> int: return int(self._data["id"]) async def refresh(self: Self) -> Self: - response = await self._requester.get(*self.get_own_path()) - self._data = response.as_dict() + self._data = await self._retrieve_data() self._clear_cache() return self + async def _retrieve_data(self: Self) -> dict: + response = await self._requester.get(*self.get_own_path()) + return response.as_dict() + def _construct[Object: "InteractiveObject"](self: Self, what: type[Object], from_data: dict[str, Any]) -> Object: return what(requester=self._requester, data=from_data) diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 201c5fee..df66690f 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -1,5 +1,5 @@ from collections import deque -from datetime import datetime +from datetime import datetime, timedelta from functools import cached_property from pathlib import Path from typing import Any, Awaitable, Callable, Iterable, Literal, Self @@ -35,14 +35,19 @@ Deletable, WithActions, WithConfig, - WithJobStatus, WithMaintenanceMode, WithStatus, WithUpgrades, ) from adcm_aio_client.core.objects._imports import ClusterImports from adcm_aio_client.core.requesters import BundleRetrieverInterface -from adcm_aio_client.core.types import Endpoint, Requester, URLStr, WithProtectedRequester +from adcm_aio_client.core.types import ( + DEFAULT_JOB_TERMINAL_STATUSES, + Endpoint, + Requester, + URLStr, + WithProtectedRequester, +) from adcm_aio_client.core.utils import safe_gather @@ -456,7 +461,11 @@ async def _get_hosts( return tuple(hosts) -class Job[Object: "InteractiveObject"](WithStatus, WithActions, WithJobStatus, RootInteractiveObject): +def default_exit_condition(job: "Job") -> bool: + return job.get_status() in DEFAULT_JOB_TERMINAL_STATUSES + + +class Job[Object: "InteractiveObject"](WithStatus, WithActions, RootInteractiveObject): PATH_PREFIX = "tasks" @property @@ -491,15 +500,19 @@ def object(self: Self) -> Object: def action(self: Self) -> Action: return self._construct(what=Action, from_data=self._data["action"]) - async def wait(self: Self, status_predicate: Callable[[], bool], timeout: int = 30, poll: int = 5) -> None: - if self._data["status"] not in ("running", "created"): - return - - for _ in range(timeout // poll): - await asyncio.sleep(poll) - if status_predicate(): - self._data["status"] = self.get_status() - return + async def wait( + self: Self, + timeout: int | None = None, + poll_interval: int = 10, + exit_condition: Callable[[Self], bool] = default_exit_condition, + ) -> Self: + timeout_condition = datetime.max if timeout is None else (datetime.now() + timedelta(seconds=timeout)) # noqa: DTZ005 + while datetime.now() < timeout_condition: # noqa: DTZ005 + if exit_condition(self): + return self + await asyncio.sleep(poll_interval) + + raise TimeoutError async def terminate(self: Self) -> None: await self._requester.post(*self.get_own_path(), "terminate", data={}) diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index 25a754aa..e9149b22 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -91,6 +91,9 @@ class MappingOperation(str, Enum): type URLStr = str +DEFAULT_JOB_TERMINAL_STATUSES = frozenset(("broken", "aborted", "failed", "success")) + + class MaintenanceModeStatus(str, Enum): ON = "on" OFF = "off" From 
481906af99d1aca8e169aaf7f43f20d44cec7775 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Mon, 16 Dec 2024 17:31:14 +0500 Subject: [PATCH 38/46] ADCM-6218 Implement `ADCMSession` (#46) --- adcm_aio_client/__init__.py | 4 + adcm_aio_client/_session.py | 143 +++++++++++++++++++++++++++++ adcm_aio_client/core/client.py | 51 +--------- adcm_aio_client/core/errors.py | 33 ++++--- adcm_aio_client/core/objects/cm.py | 3 +- adcm_aio_client/core/requesters.py | 83 ++++++++++------- adcm_aio_client/core/types.py | 36 +++++++- tests/integration/conftest.py | 7 +- tests/unit/mocks/requesters.py | 8 +- tests/unit/test_requesters.py | 13 ++- 10 files changed, 278 insertions(+), 103 deletions(-) create mode 100644 adcm_aio_client/_session.py diff --git a/adcm_aio_client/__init__.py b/adcm_aio_client/__init__.py index 4d9a9249..ab4d2777 100644 --- a/adcm_aio_client/__init__.py +++ b/adcm_aio_client/__init__.py @@ -9,3 +9,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + +from adcm_aio_client._session import ADCMSession + +__all__ = ["ADCMSession"] diff --git a/adcm_aio_client/_session.py b/adcm_aio_client/_session.py new file mode 100644 index 00000000..824e3d66 --- /dev/null +++ b/adcm_aio_client/_session.py @@ -0,0 +1,143 @@ +from json import JSONDecodeError +from types import TracebackType +from typing import Self + +import httpx +import adcm_version + +from adcm_aio_client.core.client import ADCMClient +from adcm_aio_client.core.errors import ClientInitError, NotSupportedVersionError +from adcm_aio_client.core.requesters import BundleRetriever, DefaultRequester +from adcm_aio_client.core.types import Cert, ConnectionSecurity, Credentials, RequestPolicy, RetryPolicy, SessionInfo + +MIN_ADCM_VERSION = "2.5.0" + + +class ADCMSession: + def __init__( + self: Self, + # basics + url: str, + credentials: Credentials, + *, + # security + verify: str | bool = True, + cert: Cert | None = None, + # requesting behavior + timeout: int = 600, + retry_attempts: int = 3, + retry_interval: int = 1, + ) -> None: + self._session_info = SessionInfo( + url=url, credentials=credentials, security=ConnectionSecurity(verify=verify, certificate=cert) + ) + self._request_policy = RequestPolicy( + timeout=timeout, retry=RetryPolicy(attempts=retry_attempts, interval=retry_interval) + ) + + self._http_client = None + self._requester = None + self._adcm_client = None + + # Context Manager + + async def __aenter__(self: Self) -> ADCMClient: + self._http_client = await self._prepare_http_client_for_running_adcm() + adcm_version_ = await _ensure_adcm_version_is_supported(client=self._http_client) + + try: + self._requester = self._prepare_api_v2_requester() + await self._requester.login(self._session_info.credentials) + except Exception as e: + await self.__aexit__(exc_type=type(e), exc_value=e) + raise + + self._adcm_client = self._prepare_adcm_client(version=adcm_version_) + return self._adcm_client + + async def __aexit__( + self: Self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + await self.__close_requester_safe(exc_type, exc_value, traceback) + await self.__close_http_client_safe(exc_type, exc_value, traceback) + + async def __close_requester_safe( + self: Self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) 
-> None: + if self._requester: + try: + await self._requester.logout() + except: + await self.__close_http_client_safe(exc_type, exc_value, traceback) + + raise + + async def __close_http_client_safe( + self: Self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + if self._http_client: + await self._http_client.__aexit__(exc_type, exc_value, traceback) + + # Steps + + async def _prepare_http_client_for_running_adcm(self: Self) -> httpx.AsyncClient: + client = httpx.AsyncClient( + base_url=self._session_info.url, + timeout=self._request_policy.timeout, + verify=self._session_info.security.verify, + cert=self._session_info.security.certificate, + ) + + try: + await client.head("/") + except httpx.NetworkError as e: + await client.__aexit__(type(e), e) + message = f"Failed to connect to ADCM at URL {self._session_info.url}" + raise ClientInitError(message) from e + + return client + + def _prepare_api_v2_requester(self: Self) -> DefaultRequester: + if self._http_client is None: + message = "Failed to prepare requester: HTTP client is not initialized" + raise RuntimeError(message) + + return DefaultRequester(http_client=self._http_client, retries=self._request_policy.retry) + + def _prepare_adcm_client(self: Self, version: str) -> ADCMClient: + if self._requester is None: + message = "Failed to prepare ADCM client: requester is not initialized" + raise RuntimeError(message) + + bundle_retriever = BundleRetriever() + + return ADCMClient(requester=self._requester, bundle_retriever=bundle_retriever, adcm_version=version) + + +async def _ensure_adcm_version_is_supported(client: httpx.AsyncClient) -> str: + try: + # todo check for VERY old versions if that request will raise error + response = await client.get("/versions/") + data = response.json() + version = str(data["adcm"]["version"]) + except (JSONDecodeError, KeyError) as e: + message = ( + f"Failed to detect ADCM version at {client.base_url}. " + f"Most likely ADCM version is lesser than {MIN_ADCM_VERSION}" + ) + raise NotSupportedVersionError(message) from e + + if adcm_version.compare_adcm_versions(version, MIN_ADCM_VERSION) < 0: + message = f"Minimal supported ADCM version is {MIN_ADCM_VERSION}. 
Got {adcm_version}" + raise NotSupportedVersionError(message) + + return version diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index e83689a3..5fbdf20f 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -12,15 +12,9 @@ from functools import cached_property from typing import Self -from urllib.parse import urljoin -from adcm_version import compare_adcm_versions -import httpx - -from adcm_aio_client.core.errors import NotSupportedVersionError, VersionRetrievalError from adcm_aio_client.core.objects.cm import ADCM, BundlesNode, ClustersNode, HostProvidersNode, HostsNode -from adcm_aio_client.core.requesters import BundleRetriever, BundleRetrieverInterface, DefaultRequester, Requester -from adcm_aio_client.core.types import Cert, Credentials, Verify +from adcm_aio_client.core.requesters import BundleRetrieverInterface, Requester MIN_ADCM_VERSION = "2.5.0" @@ -30,7 +24,7 @@ def __init__( self: Self, requester: Requester, bundle_retriever: BundleRetrieverInterface, adcm_version: str ) -> None: self._requester = requester - self.bundle_retriever = bundle_retriever + self._retrieve_bundle_from_remote_url = bundle_retriever self._adcm_version = adcm_version @cached_property @@ -51,41 +45,6 @@ def adcm(self: Self) -> ADCM: @cached_property def bundles(self: Self) -> BundlesNode: - return BundlesNode(path=("bundles",), requester=self._requester, retriever=self.bundle_retriever) - - -async def build_client( - url: str, - credentials: Credentials, - *, - verify: Verify | None = None, # noqa: ARG001 - cert: Cert | None = None, # noqa: ARG001 - timeout: float = 600.0, - retries: int = 3, - retry_interval: float = 1.0, -) -> ADCMClient: - adcm_version = await _get_and_check_adcm_version(url=url, timeout=timeout) - requester = DefaultRequester(base_url=url, retries=retries, retry_interval=retry_interval, timeout=timeout) - await requester.login(credentials=credentials) - return ADCMClient(requester=requester, bundle_retriever=BundleRetriever(), adcm_version=adcm_version) - - -async def _get_and_check_adcm_version(url: str, timeout: float) -> str: - try: - adcm_version = await _get_adcm_version(url=url, timeout=timeout) - except VersionRetrievalError as e: - message = f"Can't get ADCM version for {url}. Most likely ADCM version is lesser than {MIN_ADCM_VERSION}" - raise NotSupportedVersionError(message) from e - - if compare_adcm_versions(adcm_version, MIN_ADCM_VERSION) < 0: - message = f"Minimal supported ADCM version is {MIN_ADCM_VERSION}. 
Got {adcm_version}" - raise NotSupportedVersionError(message) - - return adcm_version - - -async def _get_adcm_version(url: str, timeout: float) -> str: - try: - return (await httpx.AsyncClient(timeout=timeout).get(urljoin(url, "versions/"))).json()["adcm"]["version"] - except Exception as e: - raise VersionRetrievalError from e + return BundlesNode( + path=("bundles",), requester=self._requester, retriever=self._retrieve_bundle_from_remote_url + ) diff --git a/adcm_aio_client/core/errors.py b/adcm_aio_client/core/errors.py index e3cf9eff..9d519571 100644 --- a/adcm_aio_client/core/errors.py +++ b/adcm_aio_client/core/errors.py @@ -15,6 +15,24 @@ class ADCMClientError(Exception): pass +# Session + + +class ClientInitError(ADCMClientError): + pass + + +# Version + + +class VersionRetrievalError(ADCMClientError): + pass + + +class NotSupportedVersionError(ADCMClientError): + pass + + # Requester @@ -34,6 +52,10 @@ class LoginError(RequesterError): pass +class LogoutError(RequesterError): + pass + + class RetryRequestError(RequesterError): pass @@ -117,14 +139,3 @@ class FilterPreparationError(FilterError): ... class InvalidFilterError(FilterError): ... - - -# Version - - -class VersionRetrievalError(ADCMClientError): - pass - - -class NotSupportedVersionError(ADCMClientError): - pass diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index df66690f..d5f95658 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -143,8 +143,7 @@ async def create(self: Self, source: Path | URLStr, *, accept_license: bool = Fa else: file = await self._bundle_retriever.download_external_bundle(source) - data = {"file": file} - response = await self._requester.post("bundles", data=data, as_files=True) + response = await self._requester.post_files("bundles", files={"file": file}) bundle = Bundle(requester=self._requester, data=response.as_dict()) diff --git a/adcm_aio_client/core/requesters.py b/adcm_aio_client/core/requesters.py index d5c2a5a7..8b405371 100644 --- a/adcm_aio_client/core/requesters.py +++ b/adcm_aio_client/core/requesters.py @@ -25,6 +25,7 @@ ConflictError, ForbiddenError, LoginError, + LogoutError, NoCredentialsError, NotFoundError, OperationError, @@ -35,7 +36,7 @@ UnauthorizedError, WrongCredentialsError, ) -from adcm_aio_client.core.types import Credentials, PathPart, QueryParameters, Requester, URLStr +from adcm_aio_client.core.types import Credentials, PathPart, QueryParameters, Requester, RetryPolicy, URLStr Json: TypeAlias = Any Params = ParamSpec("Params") @@ -108,64 +109,83 @@ async def wrapper(*arg: Params.args, **kwargs: Params.kwargs) -> httpx.Response: def retry_request(request_func: RequestFunc) -> RequestFunc: @wraps(request_func) async def wrapper(self: "DefaultRequester", *args: Params.args, **kwargs: Params.kwargs) -> HTTPXRequesterResponse: - for attempt in range(self.retries): + retries = self._retries + for attempt in range(retries.attempts): try: response = await request_func(self, *args, **kwargs) except (UnauthorizedError, httpx.NetworkError, httpx.TransportError): - if attempt >= self.retries - 1: + if attempt >= retries.attempts - 1: continue - await sleep(self.retry_interval) + + await sleep(retries.interval) + with suppress(httpx.NetworkError, httpx.TransportError): await self.login(self._ensure_credentials()) else: break else: - message = f"Request failed in {self.retries} attempts" + message = f"Request failed in {retries.interval} attempts" raise RetryRequestError(message) + return response 
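        # (illustrative note) the retry policy consumed here is the RetryPolicy that
        # ADCMSession builds from its retry_attempts / retry_interval arguments: on
        # UnauthorizedError or httpx network/transport errors the wrapper sleeps
        # `interval` seconds, re-logins with the stored credentials and retries,
        # up to `attempts` times before raising RetryRequestError.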
return wrapper class DefaultRequester(Requester): - __slots__ = ("_credentials", "api_root", "client", "retries", "retry_interval") - - def __init__( - self: Self, - base_url: str, - root_path: str = "/api/v2/", - timeout: float = 5.0, - retries: int = 5, - retry_interval: float = 5.0, - ) -> None: - self.retries = retries - self.retry_interval = retry_interval - self.api_root = self._make_url(root_path, base=base_url) - self.client = httpx.AsyncClient(timeout=timeout) + __slots__ = ("_credentials", "_client", "_retries", "_prefix") + + def __init__(self: Self, http_client: httpx.AsyncClient, retries: RetryPolicy) -> None: + self._retries = retries + self._client = http_client + self._prefix = "/api/v2/" + self._credentials = None + + @property + def client(self: Self) -> httpx.AsyncClient: + return self._client async def login(self: Self, credentials: Credentials) -> Self: - login_url = self._make_url("login", base=self.api_root) + login_url = self._make_url("login") try: response = await self._do_request(self.client.post(url=login_url, data=credentials.dict())) except UnauthorizedError as e: - raise WrongCredentialsError from e - - if response.status_code != 200: - message = f"Authentication error: {response.status_code} for url: {login_url}" - raise LoginError(message) + message = ( + f"Login to ADCM at {self.client.base_url} has failed for " + f"user {credentials.username} most likely due to incorrect credentials" + ) + raise WrongCredentialsError(message) from e + except ResponseError as e: + message = f"Login to ADCM at {self.client.base_url} has failed for user {credentials.username}: {e}" + raise LoginError(message) from e self._credentials = credentials self.client.headers["X-CSRFToken"] = response.cookies["csrftoken"] + + return self + + async def logout(self: Self) -> Self: + logout_url = self._make_url("logout") + + try: + request_coro = self.client.post(url=logout_url, data={}) + await self._do_request(request_coro) + except ResponseError as e: + message = f"Logout from ADCM at {self.client.base_url} has failed" + raise LogoutError(message) from e + + self.client.headers.pop("X-CSRFToken", None) + return self async def get(self: Self, *path: PathPart, query: QueryParameters | None = None) -> HTTPXRequesterResponse: return await self.request(*path, method=self.client.get, params=query or {}) - async def post(self: Self, *path: PathPart, data: dict | list, as_files: bool = False) -> HTTPXRequesterResponse: - if as_files: - return await self.request(*path, method=self.client.post, files=data) + async def post_files(self: Self, *path: PathPart, files: dict) -> HTTPXRequesterResponse: + return await self.request(*path, method=self.client.post, files=files) + async def post(self: Self, *path: PathPart, data: dict | list) -> HTTPXRequesterResponse: return await self.request(*path, method=self.client.post, json=data) async def patch(self: Self, *path: PathPart, data: dict | list) -> HTTPXRequesterResponse: @@ -176,14 +196,13 @@ async def delete(self: Self, *path: PathPart) -> HTTPXRequesterResponse: @retry_request async def request(self: Self, *path: PathPart, method: Callable, **kwargs: dict) -> HTTPXRequesterResponse: - url = self._make_url(*path, base=self.api_root) + url = self._make_url(*path) response = await self._do_request(method(url, **kwargs)) return HTTPXRequesterResponse(response=response) - @staticmethod - def _make_url(*path: PathPart, base: str) -> str: - return urljoin(base, "/".join(map(str, (*path, "")))) + def _make_url(self: Self, *path: PathPart) -> str: + return 
urljoin(self._prefix, "/".join(map(str, (*path, "")))) @convert_exceptions async def _do_request(self: Self, request_coro: Coroutine[Any, Any, httpx.Response]) -> httpx.Response: diff --git a/adcm_aio_client/core/types.py b/adcm_aio_client/core/types.py index e9149b22..ea7100d4 100644 --- a/adcm_aio_client/core/types.py +++ b/adcm_aio_client/core/types.py @@ -17,7 +17,7 @@ # Init / Authorization type AuthToken = str -type Cert = str | tuple[str, Optional[str], Optional[str]] | None +type Cert = str | tuple[str, Optional[str], Optional[str]] type Verify = str | bool @@ -48,17 +48,45 @@ def as_dict(self: Self) -> dict: ... class Requester(Protocol): - async def login(self: Self, credentials: Credentials) -> Self: ... - async def get(self: Self, *path: PathPart, query: QueryParameters | None = None) -> RequesterResponse: ... - async def post(self: Self, *path: PathPart, data: dict | list, as_files: bool = False) -> RequesterResponse: ... + async def post_files(self: Self, *path: PathPart, files: dict) -> RequesterResponse: ... + + async def post(self: Self, *path: PathPart, data: dict | list) -> RequesterResponse: ... async def patch(self: Self, *path: PathPart, data: dict | list) -> RequesterResponse: ... async def delete(self: Self, *path: PathPart) -> RequesterResponse: ... +# Session + + +@dataclass(slots=True) +class ConnectionSecurity: + verify: str | bool + certificate: Cert | None + + +@dataclass(slots=True) +class SessionInfo: + url: str + credentials: Credentials + security: ConnectionSecurity + + +@dataclass(slots=True) +class RetryPolicy: + attempts: int + interval: int + + +@dataclass(slots=True) +class RequestPolicy: + timeout: int + retry: RetryPolicy + + # Objects type ComponentID = int diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index aa7a82f6..cfbcbcc3 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -7,7 +7,8 @@ import pytest import pytest_asyncio -from adcm_aio_client.core.client import ADCMClient, build_client +from adcm_aio_client._session import ADCMSession +from adcm_aio_client.core.client import ADCMClient from adcm_aio_client.core.types import Credentials from tests.integration.setup_environment import ( DB_USER, @@ -48,4 +49,6 @@ def adcm(network: Network, postgres: ADCMPostgresContainer) -> Generator[ADCMCon @pytest_asyncio.fixture(scope="function") async def adcm_client(adcm: ADCMContainer) -> AsyncGenerator[ADCMClient, None]: credentials = Credentials(username="admin", password="admin") # noqa: S106 - yield await build_client(url=adcm.url, credentials=credentials, retries=1, retry_interval=1, timeout=10) + url = adcm.url + async with ADCMSession(url=url, credentials=credentials, timeout=10, retry_interval=1, retry_attempts=1) as client: + yield client diff --git a/tests/unit/mocks/requesters.py b/tests/unit/mocks/requesters.py index 41fe23e4..adba9357 100644 --- a/tests/unit/mocks/requesters.py +++ b/tests/unit/mocks/requesters.py @@ -39,8 +39,12 @@ async def get(self: Self, *path: PathPart, query: QueryParameters | None = None) _ = path, query return self._return_next_response() - async def post(self: Self, *path: PathPart, data: dict | list, **_kw) -> RequesterResponse: # noqa: ANN003 - _ = path, data, _kw + async def post_files(self: Self, *path: PathPart, files: dict | list) -> RequesterResponse: + _ = path, files + return self._return_next_response() + + async def post(self: Self, *path: PathPart, data: dict | list) -> RequesterResponse: + _ = path, data return self._return_next_response() async 
def patch(self: Self, *path: PathPart, data: dict | list) -> RequesterResponse: diff --git a/tests/unit/test_requesters.py b/tests/unit/test_requesters.py index 5a2cce7c..0fc1d42e 100644 --- a/tests/unit/test_requesters.py +++ b/tests/unit/test_requesters.py @@ -1,12 +1,15 @@ from dataclasses import dataclass from functools import partial -from typing import Any, Self +from typing import Any, AsyncGenerator, Self import json +from httpx import AsyncClient import pytest +import pytest_asyncio from adcm_aio_client.core.errors import ResponseDataConversionError, ResponseError from adcm_aio_client.core.requesters import DefaultRequester, HTTPXRequesterResponse +from adcm_aio_client.core.types import RetryPolicy pytestmark = [pytest.mark.asyncio] @@ -29,9 +32,11 @@ async def return_response(*a, **kw) -> HTTPXLikeResponse: # noqa: ANN002, ANN00 return return_response -@pytest.fixture() -def httpx_requester() -> DefaultRequester: - return DefaultRequester(base_url="dummy", retries=1, retry_interval=0) +@pytest_asyncio.fixture() +async def httpx_requester() -> AsyncGenerator[DefaultRequester, None]: + retry_policy = RetryPolicy(1, 1) + async with AsyncClient() as dummy_client: + yield DefaultRequester(http_client=dummy_client, retries=retry_policy) @pytest.mark.parametrize( From 1be9a0032533c620b9b07df2896fe1ecb913ebe3 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Tue, 17 Dec 2024 11:59:27 +0300 Subject: [PATCH 39/46] ADCM-6189: Implement missing methods for imports (#38) Co-authored-by: Aleksandr Alferov --- adcm_aio_client/core/objects/_common.py | 7 ++++ adcm_aio_client/core/objects/_imports.py | 48 +++++++++++++++++++++++- adcm_aio_client/core/objects/cm.py | 8 ++-- 3 files changed, 57 insertions(+), 6 deletions(-) diff --git a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py index b57c77d7..e51d7b4a 100644 --- a/adcm_aio_client/core/objects/_common.py +++ b/adcm_aio_client/core/objects/_common.py @@ -7,6 +7,7 @@ from adcm_aio_client.core.config import ConfigHistoryNode, ObjectConfig from adcm_aio_client.core.config._objects import ConfigOwner from adcm_aio_client.core.objects._base import AwareOfOwnPath, MaintenanceMode, WithProtectedRequester +from adcm_aio_client.core.objects._imports import Imports class Deletable(WithProtectedRequester, AwareOfOwnPath): @@ -55,3 +56,9 @@ class WithJobStatus(WithProtectedRequester, AwareOfOwnPath): async def get_job_status(self: Self) -> str: response = await self._requester.get(*self.get_own_path()) return response.as_dict()["status"] + + +class WithImports(WithProtectedRequester, AwareOfOwnPath): + @async_cached_property + async def imports(self: Self) -> Imports: + return Imports(requester=self._requester, path=(*self.get_own_path(), "imports")) diff --git a/adcm_aio_client/core/objects/_imports.py b/adcm_aio_client/core/objects/_imports.py index 31dda734..97a16821 100644 --- a/adcm_aio_client/core/objects/_imports.py +++ b/adcm_aio_client/core/objects/_imports.py @@ -1 +1,47 @@ -class ClusterImports: ... 
+from typing import TYPE_CHECKING, Collection, Iterable, Self, Union + +from adcm_aio_client.core.types import Endpoint, Requester + +if TYPE_CHECKING: + from adcm_aio_client.core.objects.cm import Cluster, Service + + +class Imports: + def __init__(self: Self, requester: Requester, path: Endpoint) -> None: + self._requester = requester + self._path = path + + async def _get_source_binds(self: Self) -> set[tuple[int, str]]: + response = await self._requester.get(*self._path) + data_binds = set() + + for import_data in response.as_dict()["results"]: + binds = import_data.get("binds", []) + for bind in binds: + bind_id = int(bind["source"]["id"]) + bind_type = bind["source"]["type"] + data_binds.add((bind_id, bind_type)) + + return data_binds + + def _create_post_data(self: Self, binds: Iterable[tuple[int, str]]) -> list[dict[str, dict[str, int | str]]]: + return [{"source": {"id": source[0], "type": source[1]}} for source in binds] + + def _sources_to_binds(self: Self, sources: Collection[Union["Cluster", "Service"]]) -> set[tuple[int, str]]: + return {(s.id, s.__class__.__name__.lower()) for s in sources} + + async def add(self: Self, sources: Collection[Union["Cluster", "Service"]]) -> None: + current_binds = await self._get_source_binds() + sources_binds = self._sources_to_binds(sources) + binds_to_set = current_binds.union(sources_binds) + await self._requester.post(*self._path, data=self._create_post_data(binds_to_set)) + + async def set(self: Self, sources: Collection[Union["Cluster", "Service"]]) -> None: + binds_to_set = self._sources_to_binds(sources) + await self._requester.post(*self._path, data=self._create_post_data(binds_to_set)) + + async def remove(self: Self, sources: Collection[Union["Cluster", "Service"]]) -> None: + current_binds = await self._get_source_binds() + sources_binds = self._sources_to_binds(sources) + binds_to_set = current_binds.difference(sources_binds) + await self._requester.post(*self._path, data=self._create_post_data(binds_to_set)) diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index d5f95658..c2e6151c 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -35,11 +35,11 @@ Deletable, WithActions, WithConfig, + WithImports, WithMaintenanceMode, WithStatus, WithUpgrades, ) -from adcm_aio_client.core.objects._imports import ClusterImports from adcm_aio_client.core.requesters import BundleRetrieverInterface from adcm_aio_client.core.types import ( DEFAULT_JOB_TERMINAL_STATUSES, @@ -162,6 +162,7 @@ class Cluster( WithActions, WithUpgrades, WithConfig, + WithImports, WithActionHostGroups, WithConfigHostGroups, RootInteractiveObject, @@ -212,10 +213,6 @@ def services(self: Self) -> "ServicesNode": def hosts(self: Self) -> "HostsInClusterNode": return HostsInClusterNode(cluster=self) - @cached_property - def imports(self: Self) -> ClusterImports: - return ClusterImports() - FilterByBundle = FilterBy("bundle", COMMON_OPERATIONS, Bundle) @@ -237,6 +234,7 @@ class Service( Deletable, WithActions, WithConfig, + WithImports, WithActionHostGroups, WithConfigHostGroups, InteractiveChildObject[Cluster], From a5b5d0ded78db2d9fd33e900ccbd6e6575a8a516 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Tue, 17 Dec 2024 18:19:16 +0500 Subject: [PATCH 40/46] ADCM-6184 Improve interactions with config (#47) --- adcm_aio_client/core/config/_objects.py | 55 ++++++++++-- adcm_aio_client/core/config/_operations.py | 10 +-- adcm_aio_client/core/config/types.py | 90 ++++++++++++++++--- 
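# Illustrative usage sketch (not part of these patches): the Imports node added in
# ADCM-6189 above is reached through `await <object>.imports`; the sources passed to
# add()/set()/remove() are existing Cluster or Service objects.
from adcm_aio_client.core.objects.cm import Cluster, Service


async def rebind_imports(cluster: Cluster, source_cluster: Cluster, source_service: Service) -> None:
    imports = await cluster.imports
    await imports.add([source_cluster, source_service])  # merge with the current binds
    await imports.set([source_service])                  # replace every bind with the given sources
    await imports.remove([source_service])               # drop the given sources, keep the rest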
adcm_aio_client/core/errors.py | 3 + .../bundles/complex_cluster/config.yaml | 2 +- .../bundles/complex_cluster/sag.yaml | 2 + tests/integration/test_config.py | 84 +++++++++++++---- 7 files changed, 205 insertions(+), 41 deletions(-) diff --git a/adcm_aio_client/core/config/_objects.py b/adcm_aio_client/core/config/_objects.py index 45400426..c36773a6 100644 --- a/adcm_aio_client/core/config/_objects.py +++ b/adcm_aio_client/core/config/_objects.py @@ -15,7 +15,7 @@ LevelNames, LocalConfigs, ) -from adcm_aio_client.core.errors import ConfigComparisonError, RequesterError +from adcm_aio_client.core.errors import ConfigComparisonError, ConfigNoParameterError, RequesterError from adcm_aio_client.core.types import AwareOfOwnPath, WithRequesterProperty @@ -71,6 +71,12 @@ def _find_and_wrap_config_entry[ValueW: _ConfigWrapper, GroupW: _ConfigWrapper, parameter_full_name = (*self._name, level_name) + if not self._schema.is_visible_parameter(parameter_full_name): + message = f"No parameter named {name}" + if self._name: + message = f"{message} in group {'/'.join(self._name)}" + raise ConfigNoParameterError(message) + class_ = value_class if self._schema.is_group(parameter_full_name): class_ = a_group_class if self._schema.is_activatable_group(parameter_full_name) else group_class @@ -84,7 +90,7 @@ def _find_and_wrap_config_entry[ValueW: _ConfigWrapper, GroupW: _ConfigWrapper, def _on_data_change(self: Self) -> None: # need to drop caches when data is changed, # because each entry may already point to a different data - # and return incorrect nodes for a seach (=> can't be edited too) + # and return incorrect nodes for a search (=> can't be edited too) self._wrappers_cache = {} @@ -92,12 +98,50 @@ class Parameter[T](_ConfigWrapper): @property def value(self: Self) -> T: # todo probably want to return read-only proxies for list/dict - return self._data.get_value(parameter=self._name) + try: + return self._data.get_value(parameter=self._name) + except (TypeError, KeyError): + if len(self._name) == 1: + # not in any sort of group, should continue with exception + raise + + return self._schema.get_default(self._name) def set(self: Self, value: Any) -> Self: # noqa: ANN401 - self._data.set_value(parameter=self._name, value=value) + try: + self._data.set_value(parameter=self._name, value=value) + except (TypeError, KeyError) as err: + if len(self._name) == 1: + # not in any sort of group, should continue with exception + raise + + self._set_parent_groups_to_defaults(err=err) + self._data.set_value(parameter=self._name, value=value) + return self + def _set_parent_groups_to_defaults(self: Self, err: Exception) -> None: + # find first `None` group + root_group_name, *rest = self._name[:-1] + group = (root_group_name,) + + while rest: + value_ = self._data.get_value(group) + if value_ is None: + break + + next_group_name, *rest = rest + group = (*group, next_group_name) + + value_ = self._data.get_value(group) + if value_ is not None: + # error was legit and not about None group + raise err + + # actually build defaults + defaults = self._schema.get_default(group) + self._data.set_value(group, defaults) + class _Desyncable(_ConfigWrapper): def sync(self: Self) -> Self: @@ -254,7 +298,8 @@ def difference(self: Self, other: Self, *, other_is_previous: bool = True) -> Co previous = self current = other - return find_config_difference(previous=previous.data, current=current.data, schema=self._schema) + full_diff = find_config_difference(previous=previous.data, current=current.data, schema=self._schema) + 
return ConfigDifference.from_full_format(full_diff) async def save(self: Self, description: str = "") -> Self: config_to_save = self._current_config.config diff --git a/adcm_aio_client/core/config/_operations.py b/adcm_aio_client/core/config/_operations.py index d7994af4..5cf639a4 100644 --- a/adcm_aio_client/core/config/_operations.py +++ b/adcm_aio_client/core/config/_operations.py @@ -1,6 +1,6 @@ from adcm_aio_client.core.config.types import ( - ConfigDifference, ConfigSchema, + FullConfigDifference, GenericConfigData, LevelNames, ValueChange, @@ -11,8 +11,8 @@ # Difference def find_config_difference( previous: GenericConfigData, current: GenericConfigData, schema: ConfigSchema -) -> ConfigDifference: - diff = ConfigDifference(schema=schema) +) -> FullConfigDifference: + diff = FullConfigDifference(schema=schema) _fill_values_diff_at_level(level=(), diff=diff, previous=previous.values, current=current.values) _fill_attributes_diff(diff=diff, previous=previous.attributes, current=current.attributes) @@ -20,7 +20,7 @@ def find_config_difference( return diff -def _fill_values_diff_at_level(level: LevelNames, diff: ConfigDifference, previous: dict, current: dict) -> None: +def _fill_values_diff_at_level(level: LevelNames, diff: FullConfigDifference, previous: dict, current: dict) -> None: missing = object() for key, cur_value in current.items(): level_names = (*level, key) @@ -41,7 +41,7 @@ def _fill_values_diff_at_level(level: LevelNames, diff: ConfigDifference, previo _fill_values_diff_at_level(diff=diff, level=level_names, previous=prev_value, current=cur_value) -def _fill_attributes_diff(diff: ConfigDifference, previous: dict, current: dict) -> None: +def _fill_attributes_diff(diff: FullConfigDifference, previous: dict, current: dict) -> None: missing = object() for full_name, cur_value in current.items(): prev_value = previous.get(full_name, missing) diff --git a/adcm_aio_client/core/config/types.py b/adcm_aio_client/core/config/types.py index cb121bb0..0a7d782d 100644 --- a/adcm_aio_client/core/config/types.py +++ b/adcm_aio_client/core/config/types.py @@ -134,7 +134,7 @@ def recursive_defaultdict() -> defaultdict: @dataclass(slots=True) -class ConfigDifference: +class FullConfigDifference: schema: "ConfigSchema" values: dict[LevelNames, ValueChange] = field(default_factory=dict) attributes: dict[LevelNames, ValueChange] = field(default_factory=dict) @@ -143,9 +143,29 @@ class ConfigDifference: def is_empty(self: Self) -> bool: return not bool(self.values or self.attributes) + +class ConfigDifference: + __slots__ = ("_schema", "_values", "_attributes") + + def __init__( + self: Self, + schema: "ConfigSchema", + values: dict[LevelNames, ValueChange], + attributes: dict[LevelNames, ValueChange], + ) -> None: + self._schema = schema + self._values = values + self._attributes = attributes + + @classmethod + def from_full_format(cls: type[Self], diff: FullConfigDifference) -> Self: + visible_value_changes = {k: v for k, v in diff.values.items() if not diff.schema.is_invisible(k)} + visible_attr_changes = {k: v for k, v in diff.attributes.items() if not diff.schema.is_invisible(k)} + return cls(schema=diff.schema, values=visible_value_changes, attributes=visible_attr_changes) + def __str__(self: Self) -> str: - values_nested = self._to_nested_dict(self.values) - attributes_nested = self._to_nested_dict(self.attributes) + values_nested = self._to_nested_dict(self._values) + attributes_nested = self._to_nested_dict(self._attributes) if not (values_nested or attributes_nested): return "No 
Changes" @@ -159,17 +179,47 @@ def _to_nested_dict(self: Self, changes: dict[LevelNames, ValueChange]) -> dict: result = recursive_defaultdict() for names, change in changes.items(): - changes_tuple = (change.previous, change.current) + changes_repr = self._prepare_change(change) if len(names) == 1: - result[names[0]] = changes_tuple + result[names[0]] = changes_repr continue *groups, name = names group_node = reduce(dict.__getitem__, groups, result) - group_node[name] = changes_tuple + group_node[name] = changes_repr + + # get rid of `defaultdict` in favor of `dict` + # may be not optimal + return self._simplify_dict(result) + + def _prepare_change(self: Self, change: ValueChange) -> tuple | dict: + if not (isinstance(change.previous, dict) and isinstance(change.current, dict)): + return (change.previous, change.current) + + dict_diff = {} - return result + for key, cur_value in change.current.items(): + prev_value = change.previous.get(key) + if prev_value != cur_value: + dict_diff[key] = self._prepare_change(change=ValueChange(previous=prev_value, current=cur_value)) + + missing_in_current = set(change.previous.keys()).difference(change.current.keys()) + for key in missing_in_current: + dict_diff[key] = self._prepare_change(change=ValueChange(previous=change.previous[key], current=None)) + + return dict_diff + + def _simplify_dict(self: Self, dd: dict) -> dict: + simplified = {} + + for k, v in dd.items(): + if isinstance(v, dict): + v = self._simplify_dict(v) + + simplified[k] = v + + return simplified class ConfigSchema: @@ -179,7 +229,9 @@ def __init__(self: Self, spec_as_jsonschema: dict) -> None: self._jsons: set[LevelNames] = set() self._groups: set[LevelNames] = set() self._activatable_groups: set[LevelNames] = set() + self._invisible_fields: set[LevelNames] = set() self._display_name_map: dict[tuple[LevelNames, ParameterDisplayName], ParameterName] = {} + self._param_map: dict[LevelNames, dict] = {} self._analyze_schema() @@ -202,10 +254,23 @@ def is_group(self: Self, parameter_name: LevelNames) -> bool: def is_activatable_group(self: Self, parameter_name: LevelNames) -> bool: return parameter_name in self._activatable_groups + def is_invisible(self: Self, parameter_name: LevelNames) -> bool: + return parameter_name in self._invisible_fields + + def is_visible_parameter(self: Self, parameter_name: LevelNames) -> bool: + return parameter_name in self._param_map and not self.is_invisible(parameter_name) + def get_level_name(self: Self, group: LevelNames, display_name: ParameterDisplayName) -> ParameterName | None: key = (group, display_name) return self._display_name_map.get(key) + def get_default(self: Self, parameter_name: LevelNames) -> Any: # noqa: ANN401 + param_spec = self._param_map[parameter_name] + if not self.is_group(parameter_name): + return param_spec.get("default", None) + + return {child_name: self.get_default((*parameter_name, child_name)) for child_name in param_spec["properties"]} + def _analyze_schema(self: Self) -> None: for level_names, param_spec in self._iterate_parameters(object_schema=self._raw): if is_group_v2(param_spec): @@ -217,9 +282,13 @@ def _analyze_schema(self: Self) -> None: elif is_json_v2(param_spec): self._jsons.add(level_names) + if param_spec.get("adcmMeta", {}).get("isInvisible"): + self._invisible_fields.add(level_names) + *group, own_level_name = level_names display_name = param_spec["title"] self._display_name_map[tuple(group), display_name] = own_level_name + self._param_map[level_names] = param_spec def 
_retrieve_name_type_mapping(self: Self) -> dict[LevelNames, str]: return { @@ -248,12 +317,7 @@ def _unwrap_optional(self: Self, attributes: dict) -> dict: def is_group_v2(attributes: dict) -> bool: - # todo need to check group-like structures, because they are almost impossible to distinct from groups - return ( - attributes.get("type") == "object" - and attributes.get("additionalProperties") is False - and attributes.get("default") == {} - ) + return attributes.get("type") == "object" and attributes.get("additionalProperties") is False def is_activatable_v2(attributes: dict) -> bool: diff --git a/adcm_aio_client/core/errors.py b/adcm_aio_client/core/errors.py index 9d519571..a92a6c33 100644 --- a/adcm_aio_client/core/errors.py +++ b/adcm_aio_client/core/errors.py @@ -123,6 +123,9 @@ class ConfigError(ADCMClientError): ... class ConfigComparisonError(ConfigError): ... +class ConfigNoParameterError(ConfigError): ... + + # Mapping diff --git a/tests/integration/bundles/complex_cluster/config.yaml b/tests/integration/bundles/complex_cluster/config.yaml index cea3391d..02ebca07 100644 --- a/tests/integration/bundles/complex_cluster/config.yaml +++ b/tests/integration/bundles/complex_cluster/config.yaml @@ -90,7 +90,7 @@ type: password required: no - name: sag - display_name: Group-like stucture + display_name: Group-like structure type: structure yspec: ./sag.yaml required: false diff --git a/tests/integration/bundles/complex_cluster/sag.yaml b/tests/integration/bundles/complex_cluster/sag.yaml index 928ae644..2420bc5e 100644 --- a/tests/integration/bundles/complex_cluster/sag.yaml +++ b/tests/integration/bundles/complex_cluster/sag.yaml @@ -9,6 +9,8 @@ inner_group: items: attr: string op: string + tech: string + invisible_items: [ "tech" ] integer: match: int diff --git a/tests/integration/test_config.py b/tests/integration/test_config.py index 9e5326a9..c4d24220 100644 --- a/tests/integration/test_config.py +++ b/tests/integration/test_config.py @@ -6,8 +6,9 @@ from adcm_aio_client.core.client import ADCMClient from adcm_aio_client.core.config import ActivatableParameterGroup, Parameter, ParameterGroup from adcm_aio_client.core.config.refresh import apply_local_changes, apply_remote_changes +from adcm_aio_client.core.errors import ConfigNoParameterError from adcm_aio_client.core.filters import Filter -from adcm_aio_client.core.objects.cm import Bundle, Cluster +from adcm_aio_client.core.objects.cm import Bundle, Cluster, Service from tests.integration.bundle import pack_bundle from tests.integration.conftest import BUNDLES @@ -27,16 +28,74 @@ async def cluster(adcm_client: ADCMClient, cluster_bundle: Bundle) -> Cluster: return cluster +async def get_service_with_config(cluster: Cluster) -> Service: + return await cluster.services.get(name__eq="complex_config") + + +async def test_invisible_fields(cluster: Cluster) -> None: + expected_error = ConfigNoParameterError + + service = await get_service_with_config(cluster) + config = await service.config + + # invisible fields can't be found via `__getitem__` interface + + with pytest.raises(expected_error): + config["cant_find"] + + group = config["A lot of text", ParameterGroup] + with pytest.raises(expected_error): + group["cantCme"] + + # non initialized structure-based group + structure_group = group["sag", ParameterGroup] + inner_group = structure_group["nested", ParameterGroup] + with pytest.raises(expected_error): + inner_group["tech"] + + # they aren't displayed in difference + + # this change uses "internal" implementation + # and isn't 
supposed to be used in production code + data = config.data._values + data["very_important_flag"] = 2 + data["cant_find"] = "changed value" + data["a_lot_of_text"]["cant_find"] = "also changed" + + await config.save() + + first_config = await service.config_history[0] + second_config = await service.config_history[-1] + + diff = first_config.difference(second_config) + assert len(diff._values) == 1 + assert ("very_important_flag",) in diff._values + assert first_config.data._values["cant_find"] != second_config.data._values["cant_find"] + + +async def test_structure_groups(cluster: Cluster) -> None: + service = await get_service_with_config(cluster) + config = await service.config + group = config["A lot of text"] + assert isinstance(group, ParameterGroup) + group_like = group["Group-like structure"] + # structure with "dict" root is a group + assert isinstance(group_like, ParameterGroup) + assert isinstance(group_like["quantity"], Parameter) + nested_group = group_like["nested"] + assert isinstance(nested_group, ParameterGroup) + nested_group["attr", Parameter].set("something") + nested_group["op", Parameter].set("good") + + async def test_config(cluster: Cluster) -> None: # save two configs for later refresh usage - service = await cluster.services.get() - config_1 = await service.config - service = await cluster.services.get() - config_2 = await service.config + service = await get_service_with_config(cluster) + config_1 = await service.config_history.current() + config_2 = await service.config_history.current() # change and save - service = await cluster.services.get() config = await service.config required_value = 100 @@ -61,15 +120,6 @@ async def test_config(cluster: Cluster) -> None: group = config["A lot of text"] assert isinstance(group, ParameterGroup) - # group_like = group["Group-like structure"] - # # structure with "dict" root is a group - # assert isinstance(group_like, ParameterGroup) - # assert isinstance(group_like["quantity"], Parameter) - # nested_group = group_like["nested"] - # assert isinstance(nested_group, ParameterGroup) - # nested_group["attr", Parameter].set("something") - # nested_group["op", Parameter].set("good") - field = group["big_text"] assert isinstance(field, Parameter) assert field.value is None @@ -156,6 +206,6 @@ async def test_config(cluster: Cluster) -> None: diff = latest_config.difference(earliest_config) # group was activated, then deactivated, so returned to initial state # => no diff - assert len(diff.attributes) == 0 + assert len(diff._attributes) == 0 # field values changed from earliest to latest - assert len(diff.values) == 6 + assert len(diff._values) == 6 From 547037b943010815a9c0f1048dbf9a6f46ccea7d Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Wed, 18 Dec 2024 18:34:43 +0500 Subject: [PATCH 41/46] ADCM-6232 Add more info on filter parsing error and only clean http client on requester init errors (#50) --- adcm_aio_client/_session.py | 2 +- adcm_aio_client/core/errors.py | 3 --- adcm_aio_client/core/filters.py | 11 ++++++++++- adcm_aio_client/core/requesters.py | 1 + 4 files changed, 12 insertions(+), 5 deletions(-) diff --git a/adcm_aio_client/_session.py b/adcm_aio_client/_session.py index 824e3d66..ec5cc16b 100644 --- a/adcm_aio_client/_session.py +++ b/adcm_aio_client/_session.py @@ -49,7 +49,7 @@ async def __aenter__(self: Self) -> ADCMClient: self._requester = self._prepare_api_v2_requester() await self._requester.login(self._session_info.credentials) except Exception as e: - await self.__aexit__(exc_type=type(e), 
exc_value=e) + await self.__close_http_client_safe(exc_type=type(e), exc_value=e) raise self._adcm_client = self._prepare_adcm_client(version=adcm_version_) diff --git a/adcm_aio_client/core/errors.py b/adcm_aio_client/core/errors.py index a92a6c33..f9fd4aa8 100644 --- a/adcm_aio_client/core/errors.py +++ b/adcm_aio_client/core/errors.py @@ -138,7 +138,4 @@ class NoMappingRulesForActionError(ADCMClientError): ... class FilterError(ADCMClientError): ... -class FilterPreparationError(FilterError): ... - - class InvalidFilterError(FilterError): ... diff --git a/adcm_aio_client/core/filters.py b/adcm_aio_client/core/filters.py index 51444853..eb7c5f2e 100644 --- a/adcm_aio_client/core/filters.py +++ b/adcm_aio_client/core/filters.py @@ -53,7 +53,16 @@ def inline_filters_to_query(self: Self, filters: dict[str, FilterValue]) -> Quer converted_filters = deque() for inline_filter, value in filters.items(): - attr, op = inline_filter.rsplit("__", maxsplit=1) + try: + attr, op = inline_filter.rsplit("__", maxsplit=1) + except ValueError: + message = ( + f"Invalid inline filter format: {inline_filter}. " + "Attribute and operation should be joined with `__` for inline filters. " + f"Maybe you've meant `{inline_filter}__eq={value}`" + ) + raise InvalidFilterError(message) from None + filter_ = Filter(attr=attr, op=op, value=value) converted_filters.append(filter_) diff --git a/adcm_aio_client/core/requesters.py b/adcm_aio_client/core/requesters.py index 8b405371..91e82c60 100644 --- a/adcm_aio_client/core/requesters.py +++ b/adcm_aio_client/core/requesters.py @@ -162,6 +162,7 @@ async def login(self: Self, credentials: Credentials) -> Self: self._credentials = credentials self.client.headers["X-CSRFToken"] = response.cookies["csrftoken"] + self.client.headers["Referer"] = str(self.client.base_url) return self From 822c4012d27b169ab61a2ea375b0cf4bb9252392 Mon Sep 17 00:00:00 2001 From: Artem Starovoitov Date: Tue, 24 Dec 2024 11:34:30 +0300 Subject: [PATCH 42/46] ADCM-6212: Add tests for Bundle/HostProvider/Host public API (#49) --- adcm_aio_client/core/objects/_base.py | 6 +- adcm_aio_client/core/objects/cm.py | 14 +- poetry.lock | 64 +++++++- pyproject.toml | 1 + tests/integration/bundle.py | 55 +++++++ .../cluster_requires_component/config.yaml | 40 +++++ .../bundles/cluster_with_license/EULA.txt | 68 +++++++++ .../bundles/cluster_with_license/config.yaml | 138 ++++++++++++++++++ .../bundles/cluster_with_license/license.txt | 1 + .../cluster_with_license/playbook.yaml | 10 ++ .../bundles/complex_provider/config.yaml | 89 +++++++++++ .../bundles/complex_provider/playbook.yaml | 10 ++ .../bundles/complex_provider/schema.yaml | 15 ++ tests/integration/setup_environment.py | 2 +- tests/integration/test_bundle.py | 134 +++++++++++++++++ tests/integration/test_host.py | 138 ++++++++++++++++++ tests/integration/test_hostprovider.py | 102 +++++++++++++ 17 files changed, 880 insertions(+), 7 deletions(-) create mode 100644 tests/integration/bundles/cluster_requires_component/config.yaml create mode 100644 tests/integration/bundles/cluster_with_license/EULA.txt create mode 100644 tests/integration/bundles/cluster_with_license/config.yaml create mode 100644 tests/integration/bundles/cluster_with_license/license.txt create mode 100644 tests/integration/bundles/cluster_with_license/playbook.yaml create mode 100644 tests/integration/bundles/complex_provider/config.yaml create mode 100644 tests/integration/bundles/complex_provider/playbook.yaml create mode 100755 
tests/integration/bundles/complex_provider/schema.yaml create mode 100644 tests/integration/test_bundle.py create mode 100644 tests/integration/test_host.py create mode 100644 tests/integration/test_hostprovider.py diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index f1acfb95..3d3a1566 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -116,14 +116,14 @@ def __init__( self._path = path def __repr__(self: Self) -> str: - return self._maintenance_mode_status.value + return self._maintenance_mode_status def __str__(self: Self) -> str: - return self._maintenance_mode_status.value + return self._maintenance_mode_status @property def value(self: Self) -> str: - return self._maintenance_mode_status.value + return self._maintenance_mode_status async def on(self: Self) -> None: current_mm_status = await self._requester.post( diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index c2e6151c..f7408cdf 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -223,7 +223,12 @@ class ClustersNode(PaginatedAccessor[Cluster]): async def create(self: Self, bundle: Bundle, name: str, description: str = "") -> Cluster: response = await self._requester.post( - "clusters", data={"prototypeId": bundle._main_prototype_id, "name": name, "description": description} + "clusters", + data={ + "prototypeId": bundle._main_prototype_id, + "name": name, + "description": description, + }, ) return Cluster(requester=self._requester, data=response.as_dict()) @@ -377,7 +382,12 @@ class HostProvidersNode(PaginatedAccessor[HostProvider]): async def create(self: Self, bundle: Bundle, name: str, description: str = "") -> HostProvider: response = await self._requester.post( - "hostproviders", data={"prototypeId": bundle._main_prototype_id, "name": name, "description": description} + "hostproviders", + data={ + "prototypeId": bundle._main_prototype_id, + "name": name, + "description": description, + }, ) return HostProvider(requester=self._requester, data=response.as_dict()) diff --git a/poetry.lock b/poetry.lock index a6368ee6..73fffa47 100644 --- a/poetry.lock +++ b/poetry.lock @@ -412,6 +412,68 @@ files = [ {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + [[package]] name = "requests" version = "2.32.3" @@ -639,4 +701,4 @@ files = [ 
[metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "68d447594e6bae71334a44c39ed3738483686abe91bf476dbf4eddc4ad6326f5" +content-hash = "87a74f8686fa2e421979ffe2ffd12e46a54176e4852f184fd5782f252d117c1b" diff --git a/pyproject.toml b/pyproject.toml index 78bb7b64..8492d355 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,6 +26,7 @@ optional = true pytest = "^8.3.3" pytest-asyncio = "^0.24.0" testcontainers = "^4.8.2" +pyyaml = "^6.0.2" [build-system] requires = ["poetry-core"] diff --git a/tests/integration/bundle.py b/tests/integration/bundle.py index 33697e1b..3c52456f 100644 --- a/tests/integration/bundle.py +++ b/tests/integration/bundle.py @@ -1,5 +1,12 @@ from pathlib import Path from tarfile import TarFile +from typing import Any +import shutil + +import yaml + +from adcm_aio_client.core.client import ADCMClient +from adcm_aio_client.core.objects.cm import Bundle def pack_bundle(from_dir: Path, to: Path) -> Path: @@ -10,3 +17,51 @@ def pack_bundle(from_dir: Path, to: Path) -> Path: tar.add(entry) return archive + + +def modify_yaml_field( + yaml_content: list[dict[str, Any]], target_name: str, field_to_modify: str, new_value: str | int +) -> dict[str, Any]: + for entry in yaml_content: + # Check if the entry matches the specified 'name' + if entry.get(target_name) == field_to_modify: + entry[target_name] = new_value + return entry + raise ValueError(f"Field '{field_to_modify}' not found in config.yaml") + + +async def create_bundles_by_template( + adcm_client: ADCMClient, + tmp_path: Path, + path_to_template_bundle: Path, + target_name: str, + field_to_modify: str, + new_value: str, + number_of_bundles: int, +) -> list[Bundle]: + created_bundles = [] + for i in range(number_of_bundles): + # Create a new path for the temporary bundle + new_bundle_path = tmp_path / f"{path_to_template_bundle.name}_{i}" + + # Copy the whole directory of the template bundle to the new path + shutil.copytree(path_to_template_bundle, new_bundle_path) + + # Update the yaml field in the new config + new_config_path = new_bundle_path / "config.yaml" + with Path.open(new_config_path) as file: + new_config_data = yaml.safe_load(file) + + modify_yaml_field( + new_config_data, target_name=target_name, field_to_modify=field_to_modify, new_value=f"{new_value}_{i}" + ) + + with Path.open(new_config_path, "w") as file: + yaml.dump(new_config_data, file) + + (tmp_path / f"{new_bundle_path.name}_packed").mkdir() + bundle_path = pack_bundle(from_dir=new_bundle_path, to=(tmp_path / f"{new_bundle_path.name}_packed")) + created_bundle = await adcm_client.bundles.create(source=bundle_path, accept_license=False) + created_bundles.append(created_bundle) + + return created_bundles diff --git a/tests/integration/bundles/cluster_requires_component/config.yaml b/tests/integration/bundles/cluster_requires_component/config.yaml new file mode 100644 index 00000000..97c8aa8a --- /dev/null +++ b/tests/integration/bundles/cluster_requires_component/config.yaml @@ -0,0 +1,40 @@ +--- +- type: cluster + name: cluster_requires_component + version: '1.0' + edition: community + +- type: service + name: hbase + display_name: HBase + version: "2.2.7" + components: + master: + display_name: "HBase Master Server" + requires: + - service: zookeeper + component: SERVER + - service: hdfs + component: namenode + constraint: [ 1,+ ] + +- type: service + name: zookeeper + display_name: Zookeeper + version: 3.5.10 + components: + SERVER: + display_name: Zookeeper Server + constraint: [ odd ] + +- type: service + name: hdfs + 
display_name: HDFS + version: "3.1.2" + components: + namenode: + display_name: "HDFS NameNode" + requires: + - service: zookeeper + component: SERVER + constraint: [ 2,+ ] diff --git a/tests/integration/bundles/cluster_with_license/EULA.txt b/tests/integration/bundles/cluster_with_license/EULA.txt new file mode 100644 index 00000000..0ecb87a1 --- /dev/null +++ b/tests/integration/bundles/cluster_with_license/EULA.txt @@ -0,0 +1,68 @@ + +LICENSE AGREEMENT + + +1. DEFINITIONS. THE TERMS AND DEFINITIONS USED IN THE AGREEMENT ARE DESCRIBED IN SECTION 1. + 1.1. "Confidential Information" shall mean all disclosed information with regard to this Agreement or the Product whether furnished in oral, written or other tangible or intangible form. The Confidential Information shall include, but not limited to, components of business plans, products, inventions, design plans, financial plans, customer related information, strategies and other information of similar nature. + 1.2. "Documentation" shall mean all user manuals and administrator guides as well as other technical documents. + 1.3. "Product" shall mean any version of software for computers and data bases, including, but not limited to, the computer software Arenadata Hadoop Platform, that are built on an open SW basis managed by the Apache Software Foundation licensed under the terms and conditions of the Apache 2.0 License (http://www.apache.org/licenses/LICENSE-2.0). + 1.4. "Intellectual Property" shall mean all systems, methods, algorithms, structures, libraries, applications (supplementary software), components/parts of the Product (including texts and fonts), all modules, other elements of the Product copied and/or incorporated in all working software, any copies, documentation, authorship, logos and other information included in the Product. + 1.5. Right Owner - Arenadata Software LLC. + +2. INTELLECTUAL PROPERTY EXCLUSIVE RIGHTS + 2.1. The Product, systems, methods, algorithms, structures, libraries, applications (supplementary software), components/parts of the Product (including texts and fonts), all modules, other elements of the Product copied and/or incorporated in all working software, any copies, documentation, authorship, logos and other information included in the Product shall be objects of intellectual property and commercial secret of the Right Owner, i.e. Arenadata Software LLC and/or its affiliates and shall be protected according to the Russian Federation effective legislation on intellectual property, commercial secret as well as the provisions of this Agreement. + 2.2. The Right Owner shall guarantee that it has relevant rights to use the ARENADATA name and the ARENADATA logo protected under the Russian Federation legislation on copyrights. + 2.3. The use of the Product in violation of the terms and conditions hereof shall be deemed to be a breach of the Russian Federation legislation on copyrights and shall constitute a sufficient ground for depriving the Licensee of the rights granted in respect of the Product. + 2.4. The Right Owner shall assist the Licensee in defending against all claims filed by third parties in respect of the intellectual and industrial ownership of the Product package. Should any such claim be filed the Licensee shall immediately inform the Right Owner about all complaints made by the third party and provide all necessary information regarding the dispute. + 2.5. 
The Right Owner represents that as of the time of the transfer of the right to use the Product, to the best of its knowledge, there are no third party's rights that could be infringed upon through granting the Licensee with the non-exclusive right to use the Product hereunder. + 2.6. Within the term of this Agreement the Right Owner shall refrain from any actions that can hinder the Licensee from exercising the right to use the result of intellectual activity granted to it within the limits set forth in this Agreement. + +3. SUBJECT MATTER OF THE AGREEMENT + 3.1. The subject matter of this Agreement shall be Right Owner's provision to the Licensee (provided that the Licensee meets all technical requirements described in the technical and user documentation, and all terms and conditions and restrictions set forth herein) of non-exclusive rights to use the Product within the limits and by the methods specified herein. The description of the Product and instructions regarding the use thereof shall be included in the Product suite and may be provided to the Licensee upon request in hard copies. + 3.2. The terms and conditions and the procedure of paying remuneration for provision of the right to use the Product shall be set forth in separate agreements with the Right Owner or its authorized representatives/partners. + +4. SCOPE OF THE RIGHTS TO BE TRANSFERRED AND METHODS OF USE + 4.1. The Licensee shall be provided with the non-exclusive right to use the Product through installing and launching the Product on the relevant number of processors and through copying thereof only for the purpose of generating a back-up or archival copy of the Product. The number of processors shall be determined by separate agreements with the Right Owner or partners of the Right Owner. + 4.2. The right to use the Product granted to the Licensee shall not include: + 4.2.1. The right to use the Product or relevant documentation thereto for any purposes other than those permitted hereby. + 4.2.2. The right to modify, hide, delete or enter any changes into the trademarks, trade names, marks or notes made onto the software or constituting an inseparable part of the software or the relevant documentation thereto. While producing above mentioned copies the Licensee shall reproduce onto the copy/copies all information regarding the copyright or other marks made on the software or the relevant documentation thereto. + 4.2.3. The right to modify, supplement, decompile, reverse-engineer, disassemble, translate, adapt, reorganize, make corrections or enter any other changes to the Product, components of the Product or relevant documentation thereto. + 4.2.4. The right to assign the granted right to use the Product, including the media and documentation, to legal entities and individuals through selling, renting, letting, lending or otherwise alienating, including, but not limited to, the provision of the software as a "cloud" service. + 4.2.5. The right to transfer to third parties the activation and access codes (or copies thereof) for the Product. + 4.2.6. The right to use the Product on behalf of any third party. + 4.2.7. The right to make any actions with regard to the Product that violate the Russian and international norms of legislation on copyrights and use of software. + 4.3. The right to use the Product to be granted to the Licensee shall be in effect within the time period set forth in the SW Specifications (Annex No 1 to the Contract) and within the boundaries of the Russian Federation. + 4.4. 
The use of the Product shall be allowed within the standard SW suite, it shall also be allowed to build the Product into software created anew or existing. + 4.5. It shall be allowed to build in and produce derivative products subject to the rules of application and use of the free Apache SW. + +5. TRANSFER PROCEDURE + 5.1. A copy of the Product shall be furnished to the Licensee on an electronic medium. The user documentation shall be provided to the Licensee on an electronic medium unless set forth otherwise in the contract with the Licensee. + 5.2. Any changes, amendments and other actions related to the transfer, activation of, provision of access to the Product shall be made only by the Right Owner either directly or through its authorized representatives. + 5.2.1. The license agreement for the Product may be re-registered to another Licensee only by the Right Owner on the basis of the current Licensee's written application. The transfer shall be applicable to the entire Product (including all components, media, printed materials and updates). + 5.2.2. The person who obtained the Product as a result of such all-at-once transfer shall accept all terms and conditions of this Agreement, including the obligation not to assign the rights to the Product and this Agreement to any third parties. A written confirmation of acceptance of this condition shall be forwarded to the Right Owner. + 5.2.3. When the right to the Product is assigned to another person (a new Licensee), the Licensee under this Agreement shall delete all back-up copies, if any, of the Product to be transferred. + +6. WARRANTY OBLIGATIONS + 6.1. The Right Owner shall warrant that no copyrights, allied rights or any other rights of third parties are infringed upon through transferring the rights to the Product. + 6.2. The Product with the rights to use thereof specified in this Agreement shall be provided "as is" without any obligations on the part of the Right Owner regarding its fitness for the Licensee's purposes or for the use together with certain software. + 6.3. The Right Owner shall provide no warranties with regard to the software and hardware of other manufactures that may be supplied to the Licensee together with the Product or as an integral part of the Product or may be attached to the Product. + 6.4. The Right Owner may provide support services with regard to the Product under separate agreements made by the Licensee with the Right Owner or its partners according to the current terms and conditions of support services of the Right Owner. + +7. LIABILITY + 7.1. The Right Owner and its affiliates shall bear no liability and shall not reimburse any direct or indirect losses, including lost profits, loss of confidential information of the Licensee caused by faults and/or errors made in operating the Product, improper conduct of the personnel of the Licensee or third parties, or breakdowns of technical means and failures of electrical equipment. + 7.2. The Right Owner and its affiliates shall neither be liable for nor make any warranties with regard to any performance characteristics of the Product other than those committed and described in the user documentation, unless the Licensee bought the Product from the Right Owner or its authorized representatives. + 7.3. The Right Owner and its affiliates shall neither make any implicit warranties of the merchantability of the Product or its fitness for a particular purpose. 
This software shall be provided on an "as is" basis and Arenadata Software LLC shall not be obliged to provide any maintenance, support, updating, extension or modification services with regard to the Product. + 7.4. The Licensee shall bear liability for reimbursing any damages arising out of or caused by the use of the Product and the information contained therein or generated by the Product and resulting from its interaction (or a failure to interact in a proper manner) with any other hardware, array of systems or software provided by the Right Owner and/or third parties. + 7.5. The Licensee shall undertake to compensate the Right Owner for any costs, including legal expenses, attorney fees and hold the Right Owner harmless against any claims, disputes, litigations, losses, damages, expenses, costs, any other liability caused by any unlawful, illegal use of the Product (including the use by any party relating to the Licensee, as well as by persons who were authorized to act on behalf of the Licensee explicitly in violation of this Agreement and effective legislation). + +8. SPECIAL, ADDITIONAL TERMS AND CONDITIONS + 8.1. The rights to use the Product shall be in effect only if the Product is genuine. The Product bought lawfully shall be supplied with the unique identification number and the Licensee's data specified when the rights to use the Product are acquired. The Licensee shall bear full liability for correctness of the data transferred directly to the Right Owner or its authorized representative. The Licensee shall advise the Right Owner or its authorized representative of any deficiencies it finds in the provided data, changes in its address and banking details. + 8.2. This Agreement as described above shall be deemed to have been entered into and shall come into legal force from the time of commencement of the installation and use of the Product and shall constitute an entire agreement between the Licensee and the Right Owner regarding its terms and conditions. Should a competent court hold any provisions of this Agreement null and void, unlawful, all other provisions of the Agreement shall remain in force and effect. All disputes and contradictions of the parties to this Agreement shall be settled by means of negotiations, and if the negotiations fail to settle the dispute, such disputes shall be submitted to the Court of Arbitration of Moscow according to the Russian Federation effective legislation. + 8.3. Violation of the terms and conditions of this Agreement shall be subject to a liability according to the Russian Federation effective legislation and this Agreement. Without prejudice to any of its rights the Right Owner shall be entitled to unilaterally terminate this Agreement, should the Licensee fail to observe the terms, conditions and restrictions set forth herein. The money paid by the Licensee for the use of the Product shall not be returned. + 8.4. The Product may include software or any other code distributable subject to the terms and conditions of licenses of third party suppliers. The Customer shall accept and agree with the terms and conditions of such third party licensees applicable to the third party software and shall acknowledge that such third party suppliers disclaim any representations and warranties with regard to the products or any part thereof and assume no liability for any claims that may arise with regard to the products, as a result of the usage thereof by the customer or due to the impossibility to use thereof. 
Should the Licensee become aware of any information indicating the infringement of the Right Owner's copyright (unlawful resale of the Product, links to pirate websites) please forward such information to: https://support.arenadata.io/. + 8.5. For any additional information regarding the issues relating to this Agreement, any further explanations with regard to the use of the Product the Licensee may apply to the address below: https://support.arenadata.io/. + +9. AGREEMENT TERMINATION + 9.1. The term of this Agreement shall commence from the date of its being signed by the accepting party and shall be in effect for the effective period of the non-exclusive right to use the SW, unless it is terminated due to the Licensee's failure to observe any provision of this Agreement. If any provision of this Agreement is violated, the Right Owner may terminate this Agreement immediately. + 9.2. Upon termination of this Agreement all rights granted to the Licensee by this Agreement shall be forthwith terminated and the Licensee shall immediately return all confidential information to the Right Owner and then delete all confidential information on its side. + 9.3. This clause and sections 2, 3, 4, 6, 7, 8 shall survive any termination of this Agreement. \ No newline at end of file diff --git a/tests/integration/bundles/cluster_with_license/config.yaml b/tests/integration/bundles/cluster_with_license/config.yaml new file mode 100644 index 00000000..a8151915 --- /dev/null +++ b/tests/integration/bundles/cluster_with_license/config.yaml @@ -0,0 +1,138 @@ +--- +- type: cluster + name: cluster_with_license + # display_name: Cluster With License + version: &version '2.0' + edition: enterprise + config_group_customization: true + license: ./EULA.txt + upgrade: + - name: upgrade + versions: &correct_versions + min: '1.0' + max: '2.0' + states: &always_available + available: any + + - name: upgrade_via_action_simple + versions: *correct_versions + states: *always_available + scripts: &upgrade_scripts + - name: pre + script: ./playbook.yaml + script_type: ansible + - name: switch + script: bundle_switch + script_type: internal + - name: post + script: ./playbook.yaml + script_type: ansible + + - name: upgrade_via_action_complex + versions: *correct_versions + states: *always_available + scripts: *upgrade_scripts + config: + - name: simple + type: string + required: false + - name: grouped + type: group + subs: + - name: simple + type: integer + default: 4 + - name: second + type: float + default: 4.3 + - name: after + type: list + default: + - "1" + - "woohoo" + hc_acl: + - action: add + service: service_1 + component: component_1 + - action: remove + service: service_1 + component: component_2 + ui_options: + disclaimer: "Cool upgrade" + + - name: unfit_version + versions: + min_strict: '0' + max: '0.3' + states: *always_available + + - name: unfit_state + versions: *correct_versions + states: + available: + - doesnotexist + + config: &config + - name: string + type: string + required: false + default: string + - name: group + type: group + subs: + - name: string + type: string + required: false + default: string + actions: + action: &action + type: job + script: ./playbook.yaml + script_type: ansible + states: + available: any + +- name: service_1 + type: service + version: *version + config: *config + license: ./license.txt + actions: + action: *action + components: + component_1: + constraint: [ 0, + ] + config: *config + actions: + action_1_comp_1: *action + action_2_comp_1: *action + component_2: + 
constraint: [ 0, + ] + config: *config + actions: + action_1_comp_2: *action + +- name: service_2 + type: service + version: *version + config: *config + actions: + action_1_service_2: *action + action_2_service_2: *action + +- name: service_3_manual_add + type: service + version: *version + config: *config + + +- name: service_with_bound_to + type: service + version: "hehe" + + components: + will_have_bound_to: + description: This component will have `bound_to` constraint after upgrade + bound_to: + service: service_1 + component: component_1 diff --git a/tests/integration/bundles/cluster_with_license/license.txt b/tests/integration/bundles/cluster_with_license/license.txt new file mode 100644 index 00000000..2831d9e6 --- /dev/null +++ b/tests/integration/bundles/cluster_with_license/license.txt @@ -0,0 +1 @@ +License diff --git a/tests/integration/bundles/cluster_with_license/playbook.yaml b/tests/integration/bundles/cluster_with_license/playbook.yaml new file mode 100644 index 00000000..be7ea537 --- /dev/null +++ b/tests/integration/bundles/cluster_with_license/playbook.yaml @@ -0,0 +1,10 @@ +--- +- name: sleep + hosts: all + connection: local + gather_facts: no + + tasks: + - name: sleep + pause: + seconds: 1 diff --git a/tests/integration/bundles/complex_provider/config.yaml b/tests/integration/bundles/complex_provider/config.yaml new file mode 100644 index 00000000..cafef698 --- /dev/null +++ b/tests/integration/bundles/complex_provider/config.yaml @@ -0,0 +1,89 @@ +--- +- type: provider + name: complex_provider + version: &version '1.0' + config_group_customization: true + + config: + - name: json + type: json + required: false + default: + key: value + - name: group + type: group + subs: + - name: map + type: map + required: false + default: + integer_key: '10' + string_key: string + - name: activatable_group + type: group + activatable: True + active: True + subs: + - name: secretmap + type: secretmap + required: false + default: + integer_key: '10' + string_key: string + actions: + provider_action: + type: job + script: ./playbook.yaml + script_type: ansible + states: + available: any + +- type: host + name: host + version: *version + config: + - name: structure + type: structure + required: false + yspec: ./schema.yaml + default: + - integer: 1 + string: string1 + - integer: 2 + string: string2 + - name: variant + type: variant + source: + type: config + name: group/list + default: value1 + - name: group + type: group + subs: + - name: list + type: list + required: false + default: + - value1 + - value2 + - value3 + - name: activatable_group + type: group + activatable: True + active: True + subs: + - name: option + type: option + required: false + option: + string1: string1 + string2: string2 + default: string1 + actions: + host_action: + display_name: "host_action" + type: job + script: ./playbook.yaml + script_type: ansible + states: + available: any diff --git a/tests/integration/bundles/complex_provider/playbook.yaml b/tests/integration/bundles/complex_provider/playbook.yaml new file mode 100644 index 00000000..be7ea537 --- /dev/null +++ b/tests/integration/bundles/complex_provider/playbook.yaml @@ -0,0 +1,10 @@ +--- +- name: sleep + hosts: all + connection: local + gather_facts: no + + tasks: + - name: sleep + pause: + seconds: 1 diff --git a/tests/integration/bundles/complex_provider/schema.yaml b/tests/integration/bundles/complex_provider/schema.yaml new file mode 100755 index 00000000..ad629d13 --- /dev/null +++ b/tests/integration/bundles/complex_provider/schema.yaml 
@@ -0,0 +1,15 @@ +--- +root: + match: list + item: variable +variable: + match: dict + items: + string: string + integer: integer + required_items: + - string +string: + match: string +integer: + match: int diff --git a/tests/integration/setup_environment.py b/tests/integration/setup_environment.py index b0a8c63b..1b7d5196 100644 --- a/tests/integration/setup_environment.py +++ b/tests/integration/setup_environment.py @@ -9,7 +9,7 @@ from testcontainers.postgres import DbContainer, PostgresContainer postgres_image_name = "postgres:latest" -adcm_image_name = "hub.adsw.io/adcm/adcm:feature_ADCM-6181" +adcm_image_name = "hub.adsw.io/adcm/adcm:develop" adcm_container_name = "test_adcm" postgres_name = "test_pg_db" diff --git a/tests/integration/test_bundle.py b/tests/integration/test_bundle.py new file mode 100644 index 00000000..80d6b48e --- /dev/null +++ b/tests/integration/test_bundle.py @@ -0,0 +1,134 @@ +from pathlib import Path +from unittest.mock import AsyncMock +import os + +import pytest +import pytest_asyncio + +from adcm_aio_client.core.client import ADCMClient +from adcm_aio_client.core.errors import ObjectDoesNotExistError +from adcm_aio_client.core.objects.cm import Bundle +from adcm_aio_client.core.requesters import BundleRetrieverInterface, DefaultRequester +from tests.integration.bundle import create_bundles_by_template, pack_bundle +from tests.integration.conftest import BUNDLES + +pytestmark = [pytest.mark.asyncio] + + +@pytest_asyncio.fixture() +async def load_bundles(adcm_client: ADCMClient, tmp_path: Path) -> list[Bundle]: + created_bundles = [] + for folder_path in BUNDLES.iterdir(): + folder_path = BUNDLES / folder_path + if folder_path.is_dir(): + (tmp_path / folder_path.name).mkdir() + bundle_path = pack_bundle(from_dir=folder_path, to=(tmp_path / folder_path)) + created_bundle = await adcm_client.bundles.create(source=bundle_path, accept_license=False) + created_bundles.append(created_bundle) + + return created_bundles + + +async def test_bundle(adcm_client: ADCMClient, load_bundles: list[Bundle], tmp_path: Path) -> None: # noqa: ARG001 + await _test_bundle_create_delete(adcm_client, tmp_path) + await _test_bundle_properties(adcm_client) + await _test_bundle_accessors(adcm_client) + await _test_pagination(adcm_client, tmp_path) + + +async def _test_bundle_create_delete(adcm_client: ADCMClient, tmp_path: Path) -> None: + bundle = await adcm_client.bundles.get(name__eq="cluster_with_license") + assert bundle.license.state == "unaccepted" + await bundle.delete() + + bundle_path = pack_bundle(from_dir=BUNDLES / "cluster_with_license", to=tmp_path) + bundle = await adcm_client.bundles.create(source=bundle_path, accept_license=True) + + assert bundle.license.state == "accepted" + + await _test_download_external_bundle_success() + + +async def _test_bundle_accessors(adcm_client: ADCMClient) -> None: + bundle = await adcm_client.bundles.get(name__eq="cluster_with_license") + assert isinstance(bundle, Bundle) + assert bundle.name == "cluster_with_license" + + with pytest.raises(ObjectDoesNotExistError): + await adcm_client.bundles.get(name__eq="fake_bundle") + + assert not await adcm_client.bundles.get_or_none(name__eq="fake_bundle") + assert isinstance(await adcm_client.bundles.get_or_none(name__contains="cluster_with"), Bundle) + + bundles_list = await adcm_client.bundles.list() + bundles_all = await adcm_client.bundles.all() + assert isinstance(bundles_list, list) + assert len(bundles_all) == len(bundles_list) == len(os.listdir(BUNDLES)) + + bundles_list = await 
adcm_client.bundles.list(query={"limit": 2, "offset": 1}) + assert isinstance(bundles_list, list) + assert len(bundles_list) == 2 + + bundles_list = await adcm_client.bundles.list(query={"offset": len(os.listdir(BUNDLES)) + 1}) + assert isinstance(bundles_list, list) + assert len(bundles_list) == 0 + + async for b in adcm_client.bundles.iter(name__icontains="cluster"): + assert isinstance(b, Bundle) + assert "cluster" in b.name.lower() + + assert len(await adcm_client.bundles.filter(name__icontains="cluster")) < len(os.listdir(BUNDLES)) + + +async def _test_bundle_properties(adcm_client: ADCMClient) -> None: + bundle = await adcm_client.bundles.get(name__eq="cluster_with_license") + assert bundle.name == "cluster_with_license" + assert bundle.license.state == "accepted" + assert "LICENSE AGREEMENT" in bundle.license.text + assert bundle.version == "2.0" + assert bundle.signature_status == "absent" + assert bundle.edition == "enterprise" + + await bundle.license.accept() + await bundle.refresh() + assert bundle.license.state == "accepted" + + +async def _test_download_external_bundle_success() -> None: + mock_requester = AsyncMock(spec=DefaultRequester) + mock_retriever = AsyncMock(spec=BundleRetrieverInterface) + url = "http://example.com/bundle.tar.gz" + + mock_retriever.download_external_bundle = AsyncMock(return_value=b"bundle content") + + adcm_client = ADCMClient(requester=mock_requester, bundle_retriever=mock_retriever, adcm_version="1.0") + + await adcm_client.bundles.create(source=url, accept_license=False) + + mock_retriever.download_external_bundle.assert_awaited_once_with(url) + + +async def _test_pagination(adcm_client: ADCMClient, tmp_path: Path) -> None: + await create_bundles_by_template( + adcm_client, + tmp_path, + BUNDLES / "simple_hostprovider", + target_name="name", + field_to_modify="simple_provider", + new_value="new_value", + number_of_bundles=55, + ) + bundles_list = await adcm_client.bundles.list() + assert len(bundles_list) == 50 + + bundles_list = await adcm_client.bundles.list(query={"offset": 55}) + assert len(bundles_list) == 5 + + bundles_list = await adcm_client.bundles.list(query={"offset": 60}) + assert len(bundles_list) == 0 + + bundles_list = await adcm_client.bundles.list(query={"limit": 10}) + assert len(bundles_list) == 10 + + assert len(await adcm_client.bundles.all()) == 60 + assert len(await adcm_client.bundles.filter()) == 60 diff --git a/tests/integration/test_host.py b/tests/integration/test_host.py new file mode 100644 index 00000000..47908317 --- /dev/null +++ b/tests/integration/test_host.py @@ -0,0 +1,138 @@ +from pathlib import Path + +import pytest +import pytest_asyncio + +from adcm_aio_client.core.actions import ActionsAccessor +from adcm_aio_client.core.client import ADCMClient +from adcm_aio_client.core.errors import MultipleObjectsReturnedError, ObjectDoesNotExistError +from adcm_aio_client.core.filters import Filter +from adcm_aio_client.core.objects.cm import ( + Bundle, + Cluster, + Host, + HostProvider, +) +from tests.integration.bundle import pack_bundle +from tests.integration.conftest import BUNDLES + +pytestmark = [pytest.mark.asyncio] + + +@pytest_asyncio.fixture() +async def cluster_bundle(adcm_client: ADCMClient, tmp_path: Path) -> Bundle: + bundle_path = pack_bundle(from_dir=BUNDLES / "complex_cluster", to=tmp_path) + return await adcm_client.bundles.create(source=bundle_path) + + +@pytest_asyncio.fixture() +async def cluster(adcm_client: ADCMClient, cluster_bundle: Bundle) -> Cluster: + return await 
adcm_client.clusters.create(bundle=cluster_bundle, name="Cluster", description="Cluster description") + + +@pytest_asyncio.fixture() +async def hostprovider_bundle(adcm_client: ADCMClient, tmp_path: Path) -> Bundle: + bundle_path = pack_bundle(from_dir=BUNDLES / "complex_provider", to=tmp_path) + return await adcm_client.bundles.create(source=bundle_path) + + +@pytest_asyncio.fixture() +async def hostprovider(adcm_client: ADCMClient, hostprovider_bundle: Bundle) -> HostProvider: + return await adcm_client.hostproviders.create( + bundle=hostprovider_bundle, name="Hostprovider name", description="Hostprovider description" + ) + + +async def test_host(adcm_client: ADCMClient, hostprovider: HostProvider, cluster: Cluster) -> None: + await _test_host_properties(adcm_client, hostprovider, cluster) + await _test_host_accessors(adcm_client, hostprovider, cluster) + await _test_pagination(adcm_client, hostprovider, cluster) + + +async def _test_host_properties(adcm_client: ADCMClient, hostprovider: HostProvider, cluster: Cluster) -> None: + await adcm_client.hosts.create(name="test-host", description="host description", hostprovider=hostprovider) + await cluster.hosts.add(host=await adcm_client.hosts.get(name__eq="test-host")) + + host = await adcm_client.hosts.get() + assert host.name == "test-host" + assert (await host.hostprovider).name == hostprovider.name + assert (await host.cluster).name == cluster.name # pyright: ignore[reportOptionalMemberAccess] + assert isinstance(host.actions, ActionsAccessor) + assert await host.get_status() == "down" + assert (await host.maintenance_mode).value == "off" + + +async def _test_host_accessors(adcm_client: ADCMClient, hostprovider: HostProvider, cluster: Cluster) -> None: + for new_host in ["host-1", "host-2", "host-3"]: + await adcm_client.hosts.create(name=new_host, description="host description", hostprovider=hostprovider) + + host = await adcm_client.hosts.get(name__eq="host-1") + assert isinstance(host, Host) + assert host.name == "host-1" + + with pytest.raises(ObjectDoesNotExistError): + await adcm_client.hosts.get(name__eq="fake_host") + + with pytest.raises(MultipleObjectsReturnedError): + await adcm_client.hosts.get(name__contains="host") + + assert not await adcm_client.hosts.get_or_none(name__eq="fake_host") + assert isinstance(await adcm_client.hosts.get_or_none(name__contains="-1"), Host) + + assert len(await adcm_client.hosts.all()) == len(await adcm_client.hosts.list()) == 4 + + hosts_list = await adcm_client.hosts.list(query={"limit": 2, "offset": 1}) + assert isinstance(hosts_list, list) + assert len(hosts_list) == 2 + + hosts_list = await adcm_client.hosts.list(query={"offset": 4}) + assert isinstance(hosts_list, list) + assert len(hosts_list) == 0 + + async for h in adcm_client.hosts.iter(): + assert isinstance(h, Host) + assert "host" in h.name + + await cluster.hosts.add(host=await adcm_client.hosts.get(name__eq="host-1")) + await cluster.hosts.add(host=Filter(attr="name", op="eq", value="host-2")) + + assert len(await cluster.hosts.all()) == 3 + + await cluster.hosts.remove(host=await adcm_client.hosts.get(name__eq="host-1")) + + assert len(await cluster.hosts.all()) == 2 + + host = await adcm_client.hosts.get(name__icontains="T-1") + await host.delete() + + +async def _test_pagination(adcm_client: ADCMClient, hostprovider: HostProvider, cluster: Cluster) -> None: + for i in range(55): + await adcm_client.hosts.create( + hostprovider=hostprovider, + cluster=cluster, + name=f"hostname-{i}", + ) + + hosts_list = await 
adcm_client.hosts.list() + cluster_hosts_list = await cluster.hosts.list() + assert len(hosts_list) == len(cluster_hosts_list) == 50 + + hosts_list = await adcm_client.hosts.list(query={"offset": 55}) + cluster_hosts_list = await cluster.hosts.list(query={"offset": 55}) + assert len(hosts_list) == 3 + assert len(cluster_hosts_list) == 2 + + hosts_list = await adcm_client.hosts.list(query={"offset": 60}) + cluster_hosts_list = await cluster.hosts.list(query={"offset": 60}) + assert len(hosts_list) == len(cluster_hosts_list) == 0 + + hosts_list = await adcm_client.hosts.list(query={"limit": 10}) + cluster_hosts_list = await cluster.hosts.list(query={"limit": 10}) + assert len(hosts_list) == len(cluster_hosts_list) == 10 + + assert len(await adcm_client.hosts.all()) == 58 + assert len(await cluster.hosts.all()) == 57 + + assert len(await adcm_client.hosts.filter()) == 58 + assert len(await cluster.hosts.filter()) == 57 diff --git a/tests/integration/test_hostprovider.py b/tests/integration/test_hostprovider.py new file mode 100644 index 00000000..bab65fcb --- /dev/null +++ b/tests/integration/test_hostprovider.py @@ -0,0 +1,102 @@ +from pathlib import Path + +import pytest +import pytest_asyncio + +from adcm_aio_client.core.actions import ActionsAccessor, UpgradeNode +from adcm_aio_client.core.client import ADCMClient +from adcm_aio_client.core.config import ( + ConfigHistoryNode, + ObjectConfig, +) +from adcm_aio_client.core.errors import MultipleObjectsReturnedError, ObjectDoesNotExistError +from adcm_aio_client.core.objects.cm import Bundle, HostProvider +from tests.integration.bundle import pack_bundle +from tests.integration.conftest import BUNDLES + +pytestmark = [pytest.mark.asyncio] + + +@pytest_asyncio.fixture() +async def hostprovider_bundle(adcm_client: ADCMClient, tmp_path: Path) -> Bundle: + bundle_path = pack_bundle(from_dir=BUNDLES / "complex_provider", to=tmp_path) + return await adcm_client.bundles.create(source=bundle_path) + + +async def test_hostprovider(adcm_client: ADCMClient, hostprovider_bundle: Bundle) -> None: + await _test_hostprovider_properties(adcm_client, hostprovider_bundle) + await _test_hostprovider_accessors(adcm_client, hostprovider_bundle) + await _test_pagination(adcm_client, hostprovider_bundle) + + +async def _test_hostprovider_properties(adcm_client: ADCMClient, hostprovider_bundle: Bundle) -> None: + hostprovider = await adcm_client.hostproviders.create( + bundle=hostprovider_bundle, name="Hostprovider name", description="Hostprovider description" + ) + assert hostprovider.display_name == "complex_provider" + assert hostprovider.name == "Hostprovider name" + assert hostprovider.description == "Hostprovider description" + assert isinstance(hostprovider.actions, ActionsAccessor) + assert isinstance(await hostprovider.config, ObjectConfig) + assert isinstance(hostprovider.config_history, ConfigHistoryNode) + assert isinstance(hostprovider.upgrades, UpgradeNode) + hosts = await hostprovider.hosts.all() + assert len(hosts) == 0 + + +async def _test_hostprovider_accessors(adcm_client: ADCMClient, hostprovider_bundle: Bundle) -> None: + for new_host_provider in ["hostprovider-1", "hostprovider-2", "hostprovider-3"]: + await adcm_client.hostproviders.create( + bundle=hostprovider_bundle, name=new_host_provider, description=new_host_provider + ) + + hostprovider = await adcm_client.hostproviders.get(name__eq="hostprovider-1") + assert isinstance(hostprovider, HostProvider) + assert hostprovider.name == "hostprovider-1" + + with 
pytest.raises(ObjectDoesNotExistError): + await adcm_client.hostproviders.get(name__eq="fake_hostprovider") + + with pytest.raises(MultipleObjectsReturnedError): + await adcm_client.hostproviders.get(name__icontains="pr") + + assert not await adcm_client.hostproviders.get_or_none(name__eq="fake_hostprovider") + assert isinstance(await adcm_client.hostproviders.get_or_none(name__contains="hostprovider-1"), HostProvider) + + assert len(await adcm_client.hostproviders.all()) == len(await adcm_client.hostproviders.list()) == 4 + + hostproviders_list = await adcm_client.hostproviders.list(query={"limit": 2, "offset": 1}) + assert isinstance(hostproviders_list, list) + assert len(hostproviders_list) == 2 + + hostproviders_list = await adcm_client.hostproviders.list(query={"offset": 4}) + assert isinstance(hostproviders_list, list) + assert len(hostproviders_list) == 0 + + async for hp in adcm_client.hostproviders.iter(): + assert isinstance(hp, HostProvider) + assert "hostprovider" in hp.name.lower() + + assert len(await adcm_client.hostproviders.filter(bundle__eq=hostprovider_bundle)) == 4 + + await hostprovider.delete() + + +async def _test_pagination(adcm_client: ADCMClient, bundle: Bundle) -> None: + for i in range(55): + await adcm_client.hostproviders.create(bundle=bundle, name=f"Hostprovider name {i}") + + hostproviders_list = await adcm_client.hostproviders.list() + assert len(hostproviders_list) == 50 + + hostproviders_list = await adcm_client.hostproviders.list(query={"offset": 55}) + assert len(hostproviders_list) == 3 + + hostproviders_list = await adcm_client.hostproviders.list(query={"offset": 60}) + assert len(hostproviders_list) == 0 + + hostproviders_list = await adcm_client.hostproviders.list(query={"limit": 10}) + assert len(hostproviders_list) == 10 + + assert len(await adcm_client.hostproviders.all()) == 58 + assert len(await adcm_client.hostproviders.filter()) == 58 From d0fa6979bd3d7f195677d7c340f3e081c8677809 Mon Sep 17 00:00:00 2001 From: Daniil Skrynnik Date: Tue, 24 Dec 2024 16:31:23 +0300 Subject: [PATCH 43/46] ADCM-6211: Add tests for Cluster/Service/Component public API (#55) --- adcm_aio_client/core/actions/_objects.py | 3 + .../core/host_groups/action_group.py | 2 + .../core/host_groups/config_group.py | 2 + adcm_aio_client/core/objects/cm.py | 17 +- .../bundles/complex_cluster/config.yaml | 5 + .../bundles/simple_cluster/config.yaml | 3 + tests/integration/conftest.py | 13 + tests/integration/test_bundle.py | 20 +- tests/integration/test_cluster.py | 233 ++++++++++++++++++ tests/integration/test_component.py | 167 +++++++++++++ tests/integration/test_service.py | 210 ++++++++++++++++ tests/integration/yaml.py | 16 ++ 12 files changed, 674 insertions(+), 17 deletions(-) create mode 100644 tests/integration/bundles/simple_cluster/config.yaml create mode 100644 tests/integration/test_cluster.py create mode 100644 tests/integration/test_component.py create mode 100644 tests/integration/test_service.py create mode 100644 tests/integration/yaml.py diff --git a/adcm_aio_client/core/actions/_objects.py b/adcm_aio_client/core/actions/_objects.py index bf0c11f5..a9a211f5 100644 --- a/adcm_aio_client/core/actions/_objects.py +++ b/adcm_aio_client/core/actions/_objects.py @@ -6,6 +6,7 @@ from asyncstdlib import cached_property as async_cached_property from adcm_aio_client.core.errors import HostNotInClusterError, NoMappingRulesForActionError +from adcm_aio_client.core.filters import FilterByDisplayName, FilterByName, Filtering from adcm_aio_client.core.mapping import 
ActionMapping from adcm_aio_client.core.objects._accessors import NonPaginatedChildAccessor from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject @@ -64,6 +65,7 @@ async def _rich_data(self: Self) -> dict: class ActionsAccessor(NonPaginatedChildAccessor): class_type = Action + filtering = Filtering(FilterByName, FilterByDisplayName) class Upgrade(Action): @@ -85,6 +87,7 @@ def validate(self: Self) -> bool: class UpgradeNode(NonPaginatedChildAccessor): class_type = Upgrade + filtering = Filtering(FilterByName, FilterByDisplayName) async def detect_cluster(owner: InteractiveObject) -> Cluster: diff --git a/adcm_aio_client/core/host_groups/action_group.py b/adcm_aio_client/core/host_groups/action_group.py index 13b39008..7ee4d90b 100644 --- a/adcm_aio_client/core/host_groups/action_group.py +++ b/adcm_aio_client/core/host_groups/action_group.py @@ -2,6 +2,7 @@ from typing import TYPE_CHECKING, Self, Union from adcm_aio_client.core.actions import ActionsAccessor +from adcm_aio_client.core.filters import FilterByName, Filtering from adcm_aio_client.core.host_groups._common import HostGroupNode, HostsInHostGroupNode from adcm_aio_client.core.objects._base import InteractiveChildObject from adcm_aio_client.core.objects._common import Deletable @@ -33,6 +34,7 @@ def actions(self: Self) -> ActionsAccessor: class ActionHostGroupNode(HostGroupNode[Union["Cluster", "Service", "Component"], ActionHostGroup]): class_type = ActionHostGroup + filtering = Filtering(FilterByName) class HostsInActionHostGroupNode(HostsInHostGroupNode): diff --git a/adcm_aio_client/core/host_groups/config_group.py b/adcm_aio_client/core/host_groups/config_group.py index aae59cd0..53ea4867 100644 --- a/adcm_aio_client/core/host_groups/config_group.py +++ b/adcm_aio_client/core/host_groups/config_group.py @@ -1,6 +1,7 @@ from functools import cached_property from typing import TYPE_CHECKING, Self, Union +from adcm_aio_client.core.filters import FilterByName, Filtering from adcm_aio_client.core.host_groups._common import HostGroupNode, HostsInHostGroupNode from adcm_aio_client.core.objects._base import InteractiveChildObject from adcm_aio_client.core.objects._common import Deletable, WithConfig @@ -28,6 +29,7 @@ def hosts(self: Self) -> "HostsInConfigHostGroupNode": class ConfigHostGroupNode(HostGroupNode[Union["Cluster", "Service", "Component"], ConfigHostGroup]): class_type = ConfigHostGroup + filtering = Filtering(FilterByName) # TODO: create() with `config` arg diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index f7408cdf..6c1d6b60 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -115,8 +115,8 @@ def signature_status(self: Self) -> Literal["invalid", "valid", "absent"]: def _type(self: Self) -> Literal["cluster", "provider"]: return self._data["mainPrototype"]["type"] - @property - def license(self: Self) -> License: + @async_cached_property + async def license(self: Self) -> License: return License(self._requester, self._data["mainPrototype"]) @cached_property @@ -147,8 +147,10 @@ async def create(self: Self, source: Path | URLStr, *, accept_license: bool = Fa bundle = Bundle(requester=self._requester, data=response.as_dict()) - if accept_license and bundle.license.state == "unaccepted": - await bundle.license.accept() + if accept_license: + license_ = await bundle.license + if license_.state == "unaccepted": + await license_.accept() return bundle @@ -262,9 +264,10 @@ def cluster(self: Self) -> Cluster: def 
components(self: Self) -> "ComponentsNode": return ComponentsNode(parent=self, path=(*self.get_own_path(), "components"), requester=self._requester) - @property - def license(self: Self) -> License: - return License(self._requester, self._data) + @async_cached_property + async def license(self: Self) -> License: + prototype_data = (await self.requester.get("prototypes", self._data["prototype"]["id"])).as_dict() + return License(self._requester, prototype_data) class ServicesNode(PaginatedChildAccessor[Cluster, Service]): diff --git a/tests/integration/bundles/complex_cluster/config.yaml b/tests/integration/bundles/complex_cluster/config.yaml index 02ebca07..8873389b 100644 --- a/tests/integration/bundles/complex_cluster/config.yaml +++ b/tests/integration/bundles/complex_cluster/config.yaml @@ -2,6 +2,11 @@ name: Some Cluster version: 1 + config: + - name: string_field + type: string + default: "string value" + - type: service name: example_1 display_name: First Example diff --git a/tests/integration/bundles/simple_cluster/config.yaml b/tests/integration/bundles/simple_cluster/config.yaml new file mode 100644 index 00000000..d0bb42ce --- /dev/null +++ b/tests/integration/bundles/simple_cluster/config.yaml @@ -0,0 +1,3 @@ +- type: cluster + name: Simple Cluster + version: 2 \ No newline at end of file diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index cfbcbcc3..767fdc8d 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -1,8 +1,10 @@ from pathlib import Path from typing import AsyncGenerator, Generator +from urllib.parse import urljoin import random import string +from httpx import AsyncClient from testcontainers.core.network import Network import pytest import pytest_asyncio @@ -52,3 +54,14 @@ async def adcm_client(adcm: ADCMContainer) -> AsyncGenerator[ADCMClient, None]: url = adcm.url async with ADCMSession(url=url, credentials=credentials, timeout=10, retry_interval=1, retry_attempts=1) as client: yield client + + +@pytest_asyncio.fixture() +async def httpx_client(adcm: ADCMContainer) -> AsyncGenerator[AsyncClient, None]: + client = AsyncClient(base_url=urljoin(adcm.url, "api/v2/")) + response = await client.post("login/", json={"username": "admin", "password": "admin"}) + client.headers["X-CSRFToken"] = response.cookies["csrftoken"] + + yield client + + await client.aclose() diff --git a/tests/integration/test_bundle.py b/tests/integration/test_bundle.py index 80d6b48e..29ba52b4 100644 --- a/tests/integration/test_bundle.py +++ b/tests/integration/test_bundle.py @@ -38,13 +38,13 @@ async def test_bundle(adcm_client: ADCMClient, load_bundles: list[Bundle], tmp_p async def _test_bundle_create_delete(adcm_client: ADCMClient, tmp_path: Path) -> None: bundle = await adcm_client.bundles.get(name__eq="cluster_with_license") - assert bundle.license.state == "unaccepted" + assert (await bundle.license).state == "unaccepted" await bundle.delete() bundle_path = pack_bundle(from_dir=BUNDLES / "cluster_with_license", to=tmp_path) bundle = await adcm_client.bundles.create(source=bundle_path, accept_license=True) - assert bundle.license.state == "accepted" + assert (await bundle.license).state == "accepted" await _test_download_external_bundle_success() @@ -83,15 +83,15 @@ async def _test_bundle_accessors(adcm_client: ADCMClient) -> None: async def _test_bundle_properties(adcm_client: ADCMClient) -> None: bundle = await adcm_client.bundles.get(name__eq="cluster_with_license") assert bundle.name == "cluster_with_license" - assert 
bundle.license.state == "accepted" - assert "LICENSE AGREEMENT" in bundle.license.text + assert (await bundle.license).state == "accepted" + assert "LICENSE AGREEMENT" in (await bundle.license).text assert bundle.version == "2.0" assert bundle.signature_status == "absent" assert bundle.edition == "enterprise" - await bundle.license.accept() + await (await bundle.license).accept() await bundle.refresh() - assert bundle.license.state == "accepted" + assert (await bundle.license).state == "accepted" async def _test_download_external_bundle_success() -> None: @@ -122,13 +122,13 @@ async def _test_pagination(adcm_client: ADCMClient, tmp_path: Path) -> None: assert len(bundles_list) == 50 bundles_list = await adcm_client.bundles.list(query={"offset": 55}) - assert len(bundles_list) == 5 + assert len(bundles_list) == 6 - bundles_list = await adcm_client.bundles.list(query={"offset": 60}) + bundles_list = await adcm_client.bundles.list(query={"offset": 61}) assert len(bundles_list) == 0 bundles_list = await adcm_client.bundles.list(query={"limit": 10}) assert len(bundles_list) == 10 - assert len(await adcm_client.bundles.all()) == 60 - assert len(await adcm_client.bundles.filter()) == 60 + assert len(await adcm_client.bundles.all()) == 61 + assert len(await adcm_client.bundles.filter()) == 61 diff --git a/tests/integration/test_cluster.py b/tests/integration/test_cluster.py new file mode 100644 index 00000000..812b89ab --- /dev/null +++ b/tests/integration/test_cluster.py @@ -0,0 +1,233 @@ +from pathlib import Path +from typing import Collection +import asyncio + +from httpx import AsyncClient +import pytest +import pytest_asyncio + +from adcm_aio_client.core.client import ADCMClient +from adcm_aio_client.core.config import ConfigHistoryNode, ObjectConfig +from adcm_aio_client.core.errors import MultipleObjectsReturnedError, ObjectDoesNotExistError +from adcm_aio_client.core.mapping import ClusterMapping +from adcm_aio_client.core.objects._imports import Imports +from adcm_aio_client.core.objects.cm import Bundle, Cluster +from tests.integration.bundle import pack_bundle +from tests.integration.conftest import BUNDLES + +pytestmark = [pytest.mark.asyncio] + + +async def get_ansible_forks(httpx_client: AsyncClient, cluster: Cluster) -> int: + ansible_cfg_url = f"clusters/{cluster.id}/ansible-config/" + response = await httpx_client.get(ansible_cfg_url) + assert response.status_code == 200 + + return response.json()["config"]["defaults"]["forks"] + + +async def update_cluster_name(httpx_client: AsyncClient, cluster: Cluster, new_name: str) -> None: + cluster_url = f"clusters/{cluster.id}/" + response = await httpx_client.patch(cluster_url, json={"name": new_name}) + assert response.status_code == 200 + + +async def assert_cluster(cluster: Cluster, expected: dict, httpx_client: AsyncClient) -> None: + cluster_url = f"clusters/{cluster.id}/" + response = await httpx_client.get(cluster_url) + assert response.status_code == 200 + + response = response.json() + for attr, value in expected.items(): + assert response[attr] == value + + await cluster.delete() + response = await httpx_client.get(cluster_url) + assert response.status_code == 404 + + +def assert_clusters_collection(clusters: Collection[Cluster], expected_amount: int) -> None: + assert all(isinstance(cluster, Cluster) for cluster in clusters) + assert len({cluster.id for cluster in clusters}) == expected_amount + assert len({id(cluster) for cluster in clusters}) == expected_amount + + +@pytest_asyncio.fixture() +async def 
complex_cluster_bundle(adcm_client: ADCMClient, tmp_path: Path) -> Bundle: + bundle_path = pack_bundle(from_dir=BUNDLES / "complex_cluster", to=tmp_path) + return await adcm_client.bundles.create(source=bundle_path, accept_license=True) + + +@pytest_asyncio.fixture() +async def many_complex_clusters(adcm_client: ADCMClient, complex_cluster_bundle: Bundle) -> int: + """ + Creates 51 clusters (2 pages, if response's page size is 50) + with name pattern `Test-cluster-N` and one `Very special cluster` + """ + + num_similar_clusters = 50 + coros = ( + adcm_client.clusters.create(bundle=complex_cluster_bundle, name=f"Test-cluster-{i + 1}") + for i in range(num_similar_clusters) + ) + special_cluster_coro = adcm_client.clusters.create(bundle=complex_cluster_bundle, name="Very special cluster") + await asyncio.gather(*coros, special_cluster_coro) + + return num_similar_clusters + 1 + + +@pytest_asyncio.fixture() +async def simple_cluster_bundle(adcm_client: ADCMClient, tmp_path: Path) -> Bundle: + bundle_path = pack_bundle(from_dir=BUNDLES / "simple_cluster", to=tmp_path) + return await adcm_client.bundles.create(source=bundle_path, accept_license=True) + + +@pytest_asyncio.fixture() +async def simple_cluster(adcm_client: ADCMClient, simple_cluster_bundle: Bundle) -> Cluster: + return await adcm_client.clusters.create(bundle=simple_cluster_bundle, name="Simple cluster") + + +async def test_cluster( + adcm_client: ADCMClient, + complex_cluster_bundle: Bundle, + many_complex_clusters: int, + simple_cluster_bundle: Bundle, + simple_cluster: Cluster, # for filtering by bundle + httpx_client: AsyncClient, +) -> None: + _ = simple_cluster + num_clusters = many_complex_clusters + 1 # + simple_cluster + + await _test_cluster_create_delete_api( + adcm_client=adcm_client, bundle=complex_cluster_bundle, httpx_client=httpx_client + ) + + await _test_clusters_node( + adcm_client=adcm_client, + complex_bundle=complex_cluster_bundle, + num_clusters=num_clusters, + simple_bundle=simple_cluster_bundle, + ) + + cluster = await adcm_client.clusters.get(name__eq="Very special cluster") + await _test_cluster_object_api(httpx_client=httpx_client, cluster=cluster, cluster_bundle=complex_cluster_bundle) + + +async def _test_cluster_create_delete_api(adcm_client: ADCMClient, bundle: Bundle, httpx_client: AsyncClient) -> None: + name = "Test-cluster" + description = "des\ncription" + cluster = await adcm_client.clusters.create(bundle=bundle, name=name, description=description) + + expected = {"id": cluster.id, "name": name, "description": description} + await assert_cluster(cluster, expected, httpx_client) + + # without optional arguments + name = "Another-test-cluster" + cluster = await adcm_client.clusters.create(bundle=bundle, name=name) + + expected = {"id": cluster.id, "name": name, "description": ""} + await assert_cluster(cluster, expected, httpx_client) + + +async def _test_clusters_node( + adcm_client: ADCMClient, complex_bundle: Bundle, num_clusters: int, simple_bundle: Bundle +) -> None: + no_objects_msg = "^No objects found with the given filter.$" + multiple_objects_msg = "^More than one object found.$" + + # get + assert isinstance(await adcm_client.clusters.get(name__eq="Very special cluster"), Cluster) + + with pytest.raises(ObjectDoesNotExistError, match=no_objects_msg): + await adcm_client.clusters.get(name__eq="Not so special cluster") + + with pytest.raises(MultipleObjectsReturnedError, match=multiple_objects_msg): + await adcm_client.clusters.get(name__in=["Test-cluster-1", "Test-cluster-2"]) + + # 
get_or_none + assert isinstance(await adcm_client.clusters.get_or_none(name__eq="Test-cluster-3"), Cluster) + + assert await adcm_client.clusters.get_or_none(name__eq="Not so special cluster") is None + + with pytest.raises(MultipleObjectsReturnedError, match=multiple_objects_msg): + await adcm_client.clusters.get_or_none(name__in=["Very special cluster", "Test-cluster-2"]) + + # all + all_clusters = await adcm_client.clusters.all() + assert_clusters_collection(clusters=all_clusters, expected_amount=num_clusters) + + # list + page_size = 50 + assert page_size < num_clusters, "check page_size or number of clusters" + + first_page_clusters = await adcm_client.clusters.list() + assert_clusters_collection(clusters=first_page_clusters, expected_amount=page_size) + + # iter + iter_clusters = set() + async for cluster in adcm_client.clusters.iter(): + iter_clusters.add(cluster) + assert_clusters_collection(clusters=iter_clusters, expected_amount=num_clusters) + + # filter + # complex_bundle: "Test-cluster-N" - 50; "Very special cluster" - 1; + # simple_bundle: "Simple cluster" - 1 + filters_data = { + ("bundle__eq", simple_bundle): 1, + ("bundle__in", (complex_bundle, simple_bundle)): num_clusters, + ("bundle__ne", complex_bundle): 1, + ("bundle__exclude", (simple_bundle, complex_bundle)): 0, + ("name__eq", "Very special cluster"): 1, + ("name__ieq", "VERY SPECIAL cluster"): 1, + ("name__ne", "Simple cluster"): num_clusters - 1, + ("name__ine", "SIMPLE CLUSTER"): num_clusters - 1, + ("name__in", ("Test-cluster-1", "Test-cluster-2", "TEST-cluster-3", "Not a cluster")): 2, + ("name__iin", ("TEST-cluster-1", "Test-CLUSTER-2", "SIMPLE CLUSTER")): 3, + ("name__exclude", ("Test-cluster-1", "Test-cluster-2", "Not a cluster")): num_clusters - 2, + ("name__iexclude", ("VERY special CLUSTER", "Not a cluster")): num_clusters - 1, + ("name__contains", "special"): 1, + ("name__icontains", "-ClUsTeR-"): num_clusters - 2, + ("status__eq", "up"): 0, + ("status__eq", "down"): num_clusters, + ("status__in", ("down", "some status")): num_clusters, + ("status__in", ("up", "some status")): 0, + ("status__ne", "down"): 0, + ("status__ne", "up"): num_clusters, + ("status__exclude", ("excluded_status", "down")): 0, + ("status__exclude", ("excluded_status", "up")): num_clusters, + ("status__exclude", ("up", "down")): 0, + } + for filter_, expected in filters_data.items(): + filter_value = {filter_[0]: filter_[1]} + clusters = await adcm_client.clusters.filter(**filter_value) + assert len(clusters) == expected, f"Filter: {filter_value}" + + +async def _test_cluster_object_api(httpx_client: AsyncClient, cluster: Cluster, cluster_bundle: Bundle) -> None: + assert isinstance(cluster.id, int) + assert isinstance(cluster.name, str) + assert isinstance(cluster.description, str) + + bundle = await cluster.bundle + assert isinstance(bundle, Bundle) + assert bundle.id == cluster_bundle.id + + assert isinstance(await cluster.get_status(), str) + assert isinstance(await cluster.actions.all(), list) + assert isinstance(await cluster.upgrades.all(), list) + assert isinstance(await cluster.config_host_groups.all(), list) + assert isinstance(await cluster.action_host_groups.all(), list) + assert isinstance(await cluster.config, ObjectConfig) + assert isinstance(cluster.config_history, ConfigHistoryNode) + assert isinstance(await cluster.mapping, ClusterMapping) + assert isinstance(await cluster.imports, Imports) + + initial_ansible_forks = await get_ansible_forks(httpx_client, cluster) + await 
cluster.set_ansible_forks(value=initial_ansible_forks + 5) + assert await get_ansible_forks(httpx_client, cluster) == initial_ansible_forks + 5 + + new_name = "New cluster name" + await update_cluster_name(httpx_client, cluster, new_name) + assert cluster.name != new_name + await cluster.refresh() + assert cluster.name == new_name diff --git a/tests/integration/test_component.py b/tests/integration/test_component.py new file mode 100644 index 00000000..ed76d5e2 --- /dev/null +++ b/tests/integration/test_component.py @@ -0,0 +1,167 @@ +from pathlib import Path +from typing import Collection +import random +import string + +import pytest +import pytest_asyncio + +from adcm_aio_client.core.client import ADCMClient +from adcm_aio_client.core.config import ConfigHistoryNode, ObjectConfig +from adcm_aio_client.core.errors import MultipleObjectsReturnedError, ObjectDoesNotExistError +from adcm_aio_client.core.filters import Filter +from adcm_aio_client.core.objects.cm import Cluster, Component, Service +from tests.integration.bundle import pack_bundle +from tests.integration.yaml import create_yaml + +pytestmark = [pytest.mark.asyncio] + + +def prepare_bundle_data() -> list[dict]: + config = [{"name": "string_field", "type": "string", "default": "string_field value"}] + + component_data = {"config": config} + fifty_components = {f"generated_component_{i + 1}": component_data for i in range(50)} + special_component = {"special_component": component_data} + + return [ + { + "type": "cluster", + "name": "Generated cluster", + "version": 1, + }, + { + "type": "service", + "name": "Service", + "version": 1.0, + "config": config, + "components": {**fifty_components, **special_component}, + }, + ] + + +def assert_components_collection(components: Collection[Component], expected_amount: int) -> None: + assert all(isinstance(component, Component) for component in components) + assert len({component.id for component in components}) == expected_amount + assert len({id(component) for component in components}) == expected_amount + + +@pytest_asyncio.fixture() +async def service_51_components(adcm_client: ADCMClient, tmp_path: Path) -> Service: + config_yaml_path = tmp_path / "".join(random.sample(string.ascii_letters, k=6)).lower() / "config.yaml" + create_yaml(data=prepare_bundle_data(), path=config_yaml_path) + + bundle_path = pack_bundle(from_dir=config_yaml_path.parent, to=tmp_path) + bundle = await adcm_client.bundles.create(source=bundle_path) + cluster = await adcm_client.clusters.create(bundle=bundle, name="Test cluster 52") + + return (await cluster.services.add(filter_=Filter(attr="name", op="eq", value="Service")))[0] + + +async def test_component_api(service_51_components: Service) -> None: + service = service_51_components + num_components = 51 + + await _test_component_node(service=service, num_components=num_components) + + component = await service.components.get(name__eq="special_component") + await _test_component_object_api(component=component, parent_service=service) + + +async def _test_component_node(service: Service, num_components: int) -> None: + no_objects_msg = "^No objects found with the given filter.$" + multiple_objects_msg = "^More than one object found.$" + + # get + assert isinstance(await service.components.get(name__eq="special_component"), Component) + + with pytest.raises(ObjectDoesNotExistError, match=no_objects_msg): + await service.components.get(name__eq="some_component") + + with pytest.raises(MultipleObjectsReturnedError, match=multiple_objects_msg): + await 
service.components.get(name__in=["generated_component_1", "generated_component_2"]) + + # get_or_none + assert isinstance(await service.components.get_or_none(name__eq="generated_component_30"), Component) + + assert await service.components.get_or_none(name__eq="some_component") is None + + with pytest.raises(MultipleObjectsReturnedError, match=multiple_objects_msg): + await service.components.get_or_none(name__in=["generated_component_1", "generated_component_11"]) + + # all + all_components = await service.components.all() + assert_components_collection(components=all_components, expected_amount=num_components) + + # list + page_size = 50 + assert page_size < num_components, "check page_size or number of components" + + first_page_components = await service.components.list() + assert_components_collection(components=first_page_components, expected_amount=page_size) + + # iter + iter_components = set() + async for component in service.components.iter(): + iter_components.add(component) + assert_components_collection(components=iter_components, expected_amount=num_components) + + # filter + name_filters_data = { + ("name__eq", "generated_component_8"): 1, + ("name__ieq", "gEnErAtEd_CoMpOnEnT_18"): 1, + ("name__ne", "generated_component_2"): num_components - 1, + ("name__ine", "GENERATED_component_2"): num_components - 1, + ( + "name__in", + ("generated_component_20", "generated_component_21", "GENERATED_COMPONENT_22", "Not a component"), + ): 2, + ( + "name__iin", + ("generated_component_20", "generated_component_21", "GENERATED_COMPONENT_22", "Not a component"), + ): 3, + ("name__exclude", ("generated_component_20", "generated_component_21", "Not a component")): num_components - 2, + ("name__iexclude", ("GENERATED_COMPONENT_22", "Not a component")): num_components - 1, + ("name__contains", "38"): 1, + ("name__contains", "omponen"): num_components, + ("name__icontains", "_coMPON"): num_components, + } + display_name_filters_data = { # display_names are the same as names + (f"display_{filter_[0]}", filter_[1]): expected for filter_, expected in name_filters_data.items() + } + + filters_data = { + **name_filters_data, + **display_name_filters_data, + ("status__eq", "up"): 0, + ("status__eq", "down"): num_components, + ("status__in", ("down", "some status")): num_components, + ("status__in", ("up", "some status")): 0, + ("status__ne", "down"): 0, + ("status__ne", "up"): num_components, + ("status__exclude", ("excluded_status", "down")): 0, + ("status__exclude", ("excluded_status", "up")): num_components, + ("status__exclude", ("up", "down")): 0, + } + for filter_, expected in filters_data.items(): + filter_value = {filter_[0]: filter_[1]} + components = await service.components.filter(**filter_value) + assert len(components) == expected, f"Filter: {filter_value}" + + +async def _test_component_object_api(component: Component, parent_service: Service) -> None: + assert isinstance(component.id, int) + assert isinstance(component.name, str) + assert isinstance(component.display_name, str) + assert isinstance(await component.constraint, list) + assert isinstance(component.service, Service) + assert isinstance(component.cluster, Cluster) + assert component.service.id == parent_service.id + assert component.cluster.id == parent_service.cluster.id + assert isinstance(await component.hosts.all(), list) + assert isinstance(await component.get_status(), str) + assert isinstance(await component.actions.all(), list) + assert isinstance(await component.config, ObjectConfig) + assert 
isinstance(component.config_history, ConfigHistoryNode) + assert isinstance(await component.config_host_groups.all(), list) + assert isinstance(await component.action_host_groups.all(), list) diff --git a/tests/integration/test_service.py b/tests/integration/test_service.py new file mode 100644 index 00000000..7933f9de --- /dev/null +++ b/tests/integration/test_service.py @@ -0,0 +1,210 @@ +from copy import copy +from pathlib import Path +from typing import Collection +import random +import string + +from httpx import AsyncClient +import pytest +import pytest_asyncio + +from adcm_aio_client.core.client import ADCMClient +from adcm_aio_client.core.config import ConfigHistoryNode, ObjectConfig +from adcm_aio_client.core.errors import ConflictError, MultipleObjectsReturnedError, ObjectDoesNotExistError +from adcm_aio_client.core.filters import Filter +from adcm_aio_client.core.objects._imports import Imports +from adcm_aio_client.core.objects.cm import Cluster, License, Service +from tests.integration.bundle import pack_bundle +from tests.integration.yaml import create_yaml + +pytestmark = [pytest.mark.asyncio] + + +def prepare_bundle_data() -> list[dict]: + config = [ + { + "name": "string_field", + "type": "string", + "default": "string_field value", + } + ] + + service = { + "type": "service", + "name": "Generated service", + "version": 1.0, + "config": config, + } + + service_manual_add = copy(service) + service_manual_add.update({"name": "Manual add", "license": "./service_license.txt"}) + + fifty_one_services = [] + for i in range(51): + service = copy(service) + service.update({"name": f"Generated service {i + 1}"}) + fifty_one_services.append(service) + + return [ + { + "type": "cluster", + "name": "Generated cluster", + "version": 1, + }, + *fifty_one_services, + service_manual_add, + ] + + +def assert_services_collection(services: Collection[Service], expected_amount: int) -> None: + assert all(isinstance(cluster, Service) for cluster in services) + assert len({service.id for service in services}) == expected_amount + assert len({id(service) for service in services}) == expected_amount + + +@pytest_asyncio.fixture() +async def cluster_52(adcm_client: ADCMClient, tmp_path: Path) -> Cluster: + """ + Cluster with 52 services, one not added + """ + + bundle_folder = tmp_path / "".join(random.sample(string.ascii_letters, k=6)).lower() + config_yaml_path = bundle_folder / "config.yaml" + create_yaml(data=prepare_bundle_data(), path=config_yaml_path) + + (bundle_folder / "service_license.txt").write_text("By using this test bundle, you agreeing to write tests well\n") + + bundle_path = pack_bundle(from_dir=bundle_folder, to=tmp_path) + bundle = await adcm_client.bundles.create(source=bundle_path) + + cluster = await adcm_client.clusters.create(bundle=bundle, name="Test cluster 52") + await cluster.services.add(filter_=Filter(attr="name", op="icontains", value="service")) + + return cluster + + +async def test_service_api(cluster_52: Cluster, httpx_client: AsyncClient) -> None: + cluster = cluster_52 + num_services = 51 + + await _test_service_create_delete_api(name="Manual add", cluster=cluster, httpx_client=httpx_client) + await _test_services_node(cluster=cluster, num_services=num_services) + await _test_service_object_api( + service=await cluster.services.get(name__eq="Generated service 1"), parent_cluster=cluster + ) + + +async def _test_service_create_delete_api(name: str, cluster: Cluster, httpx_client: AsyncClient) -> None: + target_service_filter = Filter(attr="name", op="eq", 
value=name) + + with pytest.raises(ConflictError, match="LICENSE_ERROR"): + await cluster.services.add(filter_=target_service_filter) + + service = await cluster.services.add(filter_=target_service_filter, accept_license=True) + assert len(service) == 1 + service = service[0] + + service_url_part = f"clusters/{cluster.id}/services/{service.id}/" + response = await httpx_client.get(service_url_part) + + assert response.status_code == 200 + service_data = response.json() + + assert service_data["id"] == service.id + assert service_data["name"] == name + + await service.delete() + response = await httpx_client.get(service_url_part) + assert response.status_code == 404 + + +async def _test_services_node(cluster: Cluster, num_services: int) -> None: + no_objects_msg = "^No objects found with the given filter.$" + multiple_objects_msg = "^More than one object found.$" + + # get + assert isinstance(await cluster.services.get(name__eq="Generated service 30"), Service) + + with pytest.raises(ObjectDoesNotExistError, match=no_objects_msg): + await cluster.services.get(name__eq="Non-existent service") + + with pytest.raises(MultipleObjectsReturnedError, match=multiple_objects_msg): + await cluster.services.get(name__in=["Generated service 1", "Generated service 2"]) + + # get_or_none + assert isinstance(await cluster.services.get_or_none(name__eq="Generated service 50"), Service) + + assert await cluster.services.get_or_none(name__eq="Non-existent service") is None + + with pytest.raises(MultipleObjectsReturnedError, match=multiple_objects_msg): + await cluster.services.get_or_none(name__in=["Generated service 1", "Generated service 2"]) + + # all + all_services = await cluster.services.all() + assert_services_collection(services=all_services, expected_amount=num_services) + + # list + page_size = 50 + assert page_size < num_services, "check page_size or number of services" + + first_page_services = await cluster.services.list() + assert_services_collection(services=first_page_services, expected_amount=page_size) + + # iter + iter_services = set() + async for service in cluster.services.iter(): + iter_services.add(service) + assert_services_collection(services=iter_services, expected_amount=num_services) + + # filter + name_filters_data = { + ("name__eq", "Generated service 8"): 1, + ("name__ieq", "GeNeRaTeD SeRvIcE 18"): 1, + ("name__ne", "Generated service 51"): num_services - 1, + ("name__ine", "GENERATED service 51"): num_services - 1, + ("name__in", ("Generated service 51", "Generated service 50", "GENERATED SERVICE 49", "Not a service")): 2, + ("name__iin", ("Generated service 51", "Generated service 50", "GENERATED SERVICE 49", "Not a service")): 3, + ("name__exclude", ("Generated service 1", "Generated service 2", "Not a service")): num_services - 2, + ("name__iexclude", ("GENERATED SERVICE 51", "Not a service")): num_services - 1, + ("name__contains", "38"): 1, + ("name__contains", "Generated"): num_services, + ("name__icontains", "TeD sErV"): num_services, + } + display_name_filters_data = { # display_names are the same as names + (f"display_{filter_[0]}", filter_[1]): expected for filter_, expected in name_filters_data.items() + } + + filters_data = { + **name_filters_data, + **display_name_filters_data, + ("status__eq", "up"): num_services, + ("status__eq", "down"): 0, + ("status__in", ("down", "some status")): 0, + ("status__in", ("up", "some status")): num_services, + ("status__ne", "down"): num_services, + ("status__ne", "up"): 0, + ("status__exclude", ("excluded_status", "down")): 
num_services, + ("status__exclude", ("excluded_status", "up")): 0, + ("status__exclude", ("up", "down")): 0, + } + for filter_, expected in filters_data.items(): + filter_value = {filter_[0]: filter_[1]} + services = await cluster.services.filter(**filter_value) + assert len(services) == expected, f"Filter: {filter_value}" + + +async def _test_service_object_api(service: Service, parent_cluster: Cluster) -> None: + assert isinstance(service.id, int) + assert isinstance(service.name, str) + assert isinstance(service.display_name, str) + assert isinstance(service.cluster, Cluster) + assert service.cluster.id == parent_cluster.id + assert isinstance(await service.license, License) + assert isinstance(await service.components.all(), list) + assert isinstance(await service.get_status(), str) + assert isinstance(await service.actions.all(), list) + assert isinstance(await service.config, ObjectConfig) + assert isinstance(service.config_history, ConfigHistoryNode) + assert isinstance(await service.imports, Imports) + assert isinstance(await service.config_host_groups.all(), list) + assert isinstance(await service.action_host_groups.all(), list) diff --git a/tests/integration/yaml.py b/tests/integration/yaml.py new file mode 100644 index 00000000..62725b2b --- /dev/null +++ b/tests/integration/yaml.py @@ -0,0 +1,16 @@ +from pathlib import Path + +import yaml + + +def create_yaml(data: list | dict, path: Path) -> None: + """ + :param data: desired .yaml file content + :param path: target .yaml path + """ + if path.suffix not in {".yaml", ".yml"}: + raise ValueError(f"Invalid .yaml/.yml path: {path}") + + path.parent.mkdir(parents=True, exist_ok=True) + with path.open("w") as yaml_file: + yaml.dump(data, yaml_file, default_flow_style=False) From ae30c1578cc014d73670fe7d248af30e95433826 Mon Sep 17 00:00:00 2001 From: Araslanov Egor Date: Wed, 25 Dec 2024 10:38:18 +0500 Subject: [PATCH 44/46] ADCM-6210 Test on Jobs and related fixes (#52) --- adcm_aio_client/core/actions/_objects.py | 122 +++++++--- adcm_aio_client/core/client.py | 6 +- adcm_aio_client/core/config/_objects.py | 52 +++-- adcm_aio_client/core/errors.py | 7 +- adcm_aio_client/core/filters.py | 5 +- adcm_aio_client/core/host_groups/_common.py | 3 +- .../core/host_groups/action_group.py | 9 +- adcm_aio_client/core/objects/_base.py | 8 +- adcm_aio_client/core/objects/_common.py | 5 +- adcm_aio_client/core/objects/cm.py | 171 +++++++++++--- adcm_aio_client/core/requesters.py | 11 +- poetry.lock | 16 +- pyproject.toml | 2 + .../bundles/complex_cluster/actions.yaml | 16 ++ .../bundles/complex_cluster/config.yaml | 47 ++++ .../bundles/simple_hostprovider/actions.yaml | 16 ++ .../bundles/simple_hostprovider/config.yaml | 13 ++ tests/integration/conftest.py | 20 +- tests/integration/test_jobs.py | 221 ++++++++++++++++++ 19 files changed, 647 insertions(+), 103 deletions(-) create mode 100644 tests/integration/bundles/complex_cluster/actions.yaml create mode 100644 tests/integration/bundles/simple_hostprovider/actions.yaml create mode 100644 tests/integration/test_jobs.py diff --git a/adcm_aio_client/core/actions/_objects.py b/adcm_aio_client/core/actions/_objects.py index a9a211f5..481feda6 100644 --- a/adcm_aio_client/core/actions/_objects.py +++ b/adcm_aio_client/core/actions/_objects.py @@ -5,14 +5,16 @@ from asyncstdlib import cached_property as async_cached_property -from adcm_aio_client.core.errors import HostNotInClusterError, NoMappingRulesForActionError +from adcm_aio_client.core.config._objects import ActionConfig +from 
adcm_aio_client.core.config.types import ConfigData +from adcm_aio_client.core.errors import HostNotInClusterError, NoConfigInActionError, NoMappingInActionError from adcm_aio_client.core.filters import FilterByDisplayName, FilterByName, Filtering from adcm_aio_client.core.mapping import ActionMapping from adcm_aio_client.core.objects._accessors import NonPaginatedChildAccessor from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject if TYPE_CHECKING: - from adcm_aio_client.core.objects.cm import Bundle, Cluster + from adcm_aio_client.core.objects.cm import Bundle, Cluster, Job class Action(InteractiveChildObject): @@ -21,6 +23,25 @@ class Action(InteractiveChildObject): def __init__(self: Self, parent: InteractiveObject, data: dict[str, Any]) -> None: super().__init__(parent, data) self._verbose = False + self._blocking = True + + @property + def verbose(self: Self) -> bool: + return self._verbose + + @verbose.setter + def verbose(self: Self, value: bool) -> bool: + self._verbose = value + return self._verbose + + @property + def blocking(self: Self) -> bool: + return self._blocking + + @blocking.setter + def blocking(self: Self, value: bool) -> bool: + self._blocking = value + return self._blocking @cached_property def name(self: Self) -> str: @@ -30,19 +51,29 @@ def name(self: Self) -> str: def display_name(self: Self) -> str: return self._data["displayName"] - async def run(self: Self) -> dict: # TODO: implement Task, return Task - return (await self._requester.post(*self.get_own_path(), "run", data={"isVerbose": self._verbose})).as_dict() + async def run(self: Self) -> Job: + from adcm_aio_client.core.objects.cm import Job - @async_cached_property - async def _mapping_rule(self: Self) -> list[dict] | None: - return (await self._rich_data)["hostComponentMapRules"] + await self._ensure_rich_data() + + data = {"isVerbose": self._verbose, "shouldBlockObject": self._blocking} + if self._has_mapping: + mapping = await self.mapping + data |= {"hostComponentMap": mapping._to_payload()} + if self._has_config: + config = await self.config + data |= {"configuration": config._to_payload()} + + response = await self._requester.post(*self.get_own_path(), "run", data=data) + return Job(requester=self._requester, data=response.as_dict()) @async_cached_property async def mapping(self: Self) -> ActionMapping: - mapping_change_allowed = await self._mapping_rule - if not mapping_change_allowed: + await self._ensure_rich_data() + + if not self._has_mapping: message = f"Action {self.display_name} doesn't allow mapping changes" - raise NoMappingRulesForActionError(message) + raise NoMappingInActionError(message) cluster = await detect_cluster(owner=self._parent) mapping = await cluster.mapping @@ -50,20 +81,64 @@ async def mapping(self: Self) -> ActionMapping: return ActionMapping(owner=self._parent, cluster=cluster, entries=entries) - def set_verbose(self: Self) -> Self: - self._verbose = True - return self + @async_cached_property + async def config(self: Self) -> ActionConfig: + await self._ensure_rich_data() - @async_cached_property # TODO: Config class - async def config(self: Self) -> ...: - return (await self._rich_data)["configuration"] + if not self._has_config: + message = f"Action {self.display_name} doesn't allow config changes" + raise NoConfigInActionError(message) - @async_cached_property - async def _rich_data(self: Self) -> dict: - return (await self._requester.get(*self.get_own_path())).as_dict() + configuration = self._configuration + data = 
ConfigData.from_v2_response(data_in_v2_format=configuration) + schema = configuration["configSchema"] + + return ActionConfig(schema=schema, config=data, parent=self) + @property + def _is_full_data_loaded(self: Self) -> bool: + return "hostComponentMapRules" in self._data -class ActionsAccessor(NonPaginatedChildAccessor): + @property + def _has_mapping(self: Self) -> bool: + return bool(self._mapping_rule) + + @property + def _has_config(self: Self) -> bool: + return bool(self._configuration) + + @property + def _mapping_rule(self: Self) -> list[dict]: + try: + return self._data["hostComponentMapRules"] + except KeyError as e: + message = ( + "Failed to retrieve mapping rules. " + "Most likely action was initialized with partial data." + " Need to load all data" + ) + raise KeyError(message) from e + + @property + def _configuration(self: Self) -> dict: + try: + return self._data["configuration"] + except KeyError as e: + message = ( + "Failed to retrieve configuration section. " + "Most likely action was initialized with partial data." + " Need to load all data" + ) + raise KeyError(message) from e + + async def _ensure_rich_data(self: Self) -> None: + if self._is_full_data_loaded: + return + + self._data = await self._retrieve_data() + + +class ActionsAccessor[Parent: InteractiveObject](NonPaginatedChildAccessor[Parent, Action]): class_type = Action filtering = Filtering(FilterByName, FilterByDisplayName) @@ -77,13 +152,6 @@ def bundle(self: Self) -> Bundle: return Bundle(requester=self._requester, data=self._data["bundle"]) - @async_cached_property # TODO: Config class - async def config(self: Self) -> ...: - return (await self._rich_data)["configuration"] - - def validate(self: Self) -> bool: - return True - class UpgradeNode(NonPaginatedChildAccessor): class_type = Upgrade diff --git a/adcm_aio_client/core/client.py b/adcm_aio_client/core/client.py index 5fbdf20f..2992fc63 100644 --- a/adcm_aio_client/core/client.py +++ b/adcm_aio_client/core/client.py @@ -13,7 +13,7 @@ from functools import cached_property from typing import Self -from adcm_aio_client.core.objects.cm import ADCM, BundlesNode, ClustersNode, HostProvidersNode, HostsNode +from adcm_aio_client.core.objects.cm import ADCM, BundlesNode, ClustersNode, HostProvidersNode, HostsNode, JobsNode from adcm_aio_client.core.requesters import BundleRetrieverInterface, Requester MIN_ADCM_VERSION = "2.5.0" @@ -48,3 +48,7 @@ def bundles(self: Self) -> BundlesNode: return BundlesNode( path=("bundles",), requester=self._requester, retriever=self._retrieve_bundle_from_remote_url ) + + @cached_property + def jobs(self: Self) -> JobsNode: + return JobsNode(path=("tasks",), requester=self._requester) diff --git a/adcm_aio_client/core/config/_objects.py b/adcm_aio_client/core/config/_objects.py index c36773a6..a8463ab5 100644 --- a/adcm_aio_client/core/config/_objects.py +++ b/adcm_aio_client/core/config/_objects.py @@ -301,26 +301,6 @@ def difference(self: Self, other: Self, *, other_is_previous: bool = True) -> Co full_diff = find_config_difference(previous=previous.data, current=current.data, schema=self._schema) return ConfigDifference.from_full_format(full_diff) - async def save(self: Self, description: str = "") -> Self: - config_to_save = self._current_config.config - self._serialize_json_fields_inplace_safe(config_to_save) - payload = {"description": description, "config": config_to_save.values, "adcmMeta": config_to_save.attributes} - - try: - response = await self._parent.requester.post(*self._parent.get_own_path(), "configs", 
data=payload) - except RequesterError: - # config isn't saved, no data update is in play, - # returning "pre-saved" parsed values - self._parse_json_fields_inplace_safe(config_to_save) - - raise - else: - new_config = ConfigData.from_v2_response(data_in_v2_format=response.as_dict()) - self._initial_config = self._parse_json_fields_inplace_safe(new_config) - self.reset() - - return self - # Public For Internal Use Only @property @@ -369,7 +349,7 @@ async def _retrieve_current_config(self: Self) -> ConfigData: return self._parse_json_fields_inplace_safe(config_data) -class _RefreshableConfig[T: _ConfigWrapperCreator](_GeneralConfig[T]): +class _SaveableConfig[T: _ConfigWrapperCreator](_GeneralConfig[T]): async def refresh(self: Self, strategy: ConfigRefreshStrategy = apply_local_changes) -> Self: remote_config = await retrieve_current_config( parent=self._parent, get_schema=partial(retrieve_schema, parent=self._parent) @@ -386,6 +366,26 @@ async def refresh(self: Self, strategy: ConfigRefreshStrategy = apply_local_chan return self + async def save(self: Self, description: str = "") -> Self: + config_to_save = self._current_config.config + self._serialize_json_fields_inplace_safe(config_to_save) + payload = {"description": description, "config": config_to_save.values, "adcmMeta": config_to_save.attributes} + + try: + response = await self._parent.requester.post(*self._parent.get_own_path(), "configs", data=payload) + except RequesterError: + # config isn't saved, no data update is in play, + # returning "pre-saved" parsed values + self._parse_json_fields_inplace_safe(config_to_save) + + raise + else: + new_config = ConfigData.from_v2_response(data_in_v2_format=response.as_dict()) + self._initial_config = self._parse_json_fields_inplace_safe(new_config) + self.reset() + + return self + class ActionConfig(_GeneralConfig[ObjectConfigWrapper]): _wrapper_class = ObjectConfigWrapper @@ -403,8 +403,14 @@ def __getitem__[ExpectedType: ConfigEntry]( ) -> ConfigEntry: return self._current_config[item] + def _to_payload(self: Self) -> dict: + # don't want complexity of regular config with rollbacks on failure + config_to_save = deepcopy(self._current_config.config) + self._serialize_json_fields_inplace_safe(config_to_save) + return {"config": config_to_save.values, "adcmMeta": config_to_save.attributes} + -class ObjectConfig(_RefreshableConfig[ObjectConfigWrapper]): +class ObjectConfig(_SaveableConfig[ObjectConfigWrapper]): _wrapper_class = ObjectConfigWrapper # todo fix typing copy-paste @@ -422,7 +428,7 @@ def __getitem__[ExpectedType: ConfigEntry]( return self._current_config[item] -class HostGroupConfig(_RefreshableConfig[HostGroupConfigWrapper]): +class HostGroupConfig(_SaveableConfig[HostGroupConfigWrapper]): _wrapper_class = HostGroupConfigWrapper @overload diff --git a/adcm_aio_client/core/errors.py b/adcm_aio_client/core/errors.py index f9fd4aa8..da598ae4 100644 --- a/adcm_aio_client/core/errors.py +++ b/adcm_aio_client/core/errors.py @@ -126,10 +126,13 @@ class ConfigComparisonError(ConfigError): ... class ConfigNoParameterError(ConfigError): ... -# Mapping +# Action -class NoMappingRulesForActionError(ADCMClientError): ... +class NoMappingInActionError(ADCMClientError): ... + + +class NoConfigInActionError(ADCMClientError): ... 
# Filtering diff --git a/adcm_aio_client/core/filters.py b/adcm_aio_client/core/filters.py index eb7c5f2e..178ef301 100644 --- a/adcm_aio_client/core/filters.py +++ b/adcm_aio_client/core/filters.py @@ -24,7 +24,8 @@ COMMON_OPERATIONS = frozenset(("eq", "ne", "in", "exclude")) -ALL_OPERATIONS = frozenset(("contains", "icontains", *COMMON_OPERATIONS, *tuple(f"i{op}" for op in COMMON_OPERATIONS))) +STATUS_OPERATIONS = frozenset((*COMMON_OPERATIONS, *tuple(f"i{op}" for op in COMMON_OPERATIONS))) +ALL_OPERATIONS = frozenset(("contains", "icontains", *STATUS_OPERATIONS)) type FilterSingleValue = str | int | InteractiveObject type FilterValue = FilterSingleValue | Iterable[FilterSingleValue] @@ -158,4 +159,4 @@ def _prepare_query_param_value(self: Self, value: SimplifiedValue) -> str: FilterByName = FilterBy("name", ALL_OPERATIONS, str) FilterByDisplayName = FilterBy("display_name", ALL_OPERATIONS, str) -FilterByStatus = FilterBy("status", COMMON_OPERATIONS, str) +FilterByStatus = FilterBy("status", STATUS_OPERATIONS, str) diff --git a/adcm_aio_client/core/host_groups/_common.py b/adcm_aio_client/core/host_groups/_common.py index 185c7261..7cf8eee7 100644 --- a/adcm_aio_client/core/host_groups/_common.py +++ b/adcm_aio_client/core/host_groups/_common.py @@ -10,7 +10,6 @@ PaginatedChildAccessor, filters_to_inline, ) -from adcm_aio_client.core.objects._base import InteractiveChildObject from adcm_aio_client.core.types import Endpoint, HostID, QueryParameters, Requester, RequesterResponse from adcm_aio_client.core.utils import safe_gather @@ -99,7 +98,7 @@ class HostGroupNode[ ](PaginatedChildAccessor[Parent, Child]): async def create( # TODO: can create HG with subset of `hosts` if adding some of them leads to an error self: Self, name: str, description: str = "", hosts: list["Host"] | None = None - ) -> InteractiveChildObject: + ) -> Child: response = await self._requester.post(*self._path, data={"name": name, "description": description}) host_group = self.class_type(parent=self._parent, data=response.as_dict()) diff --git a/adcm_aio_client/core/host_groups/action_group.py b/adcm_aio_client/core/host_groups/action_group.py index 7ee4d90b..d6e6f17e 100644 --- a/adcm_aio_client/core/host_groups/action_group.py +++ b/adcm_aio_client/core/host_groups/action_group.py @@ -1,18 +1,17 @@ from functools import cached_property from typing import TYPE_CHECKING, Self, Union -from adcm_aio_client.core.actions import ActionsAccessor from adcm_aio_client.core.filters import FilterByName, Filtering from adcm_aio_client.core.host_groups._common import HostGroupNode, HostsInHostGroupNode from adcm_aio_client.core.objects._base import InteractiveChildObject -from adcm_aio_client.core.objects._common import Deletable +from adcm_aio_client.core.objects._common import Deletable, WithActions from adcm_aio_client.core.types import AwareOfOwnPath, WithProtectedRequester if TYPE_CHECKING: from adcm_aio_client.core.objects.cm import Cluster, Component, Service -class ActionHostGroup(InteractiveChildObject, Deletable): +class ActionHostGroup(InteractiveChildObject, WithActions, Deletable): PATH_PREFIX = "action-host-groups" @property @@ -27,10 +26,6 @@ def description(self: Self) -> str: def hosts(self: Self) -> "HostsInActionHostGroupNode": return HostsInActionHostGroupNode(path=(*self.get_own_path(), "hosts"), requester=self._requester) - @cached_property - def actions(self: Self) -> ActionsAccessor: - return ActionsAccessor(parent=self, path=(*self.get_own_path(), "actions"), requester=self._requester) - class 
ActionHostGroupNode(HostGroupNode[Union["Cluster", "Service", "Component"], ActionHostGroup]): class_type = ActionHostGroup diff --git a/adcm_aio_client/core/objects/_base.py b/adcm_aio_client/core/objects/_base.py index 3d3a1566..86df2725 100644 --- a/adcm_aio_client/core/objects/_base.py +++ b/adcm_aio_client/core/objects/_base.py @@ -81,8 +81,6 @@ def _repr(self: Self) -> str: class RootInteractiveObject(InteractiveObject): - PATH_PREFIX: str - def get_own_path(self: Self) -> Endpoint: # change here return self._build_own_path(self.id) @@ -106,6 +104,12 @@ def __init__(self: Self, parent: Parent, data: dict[str, Any]) -> None: def get_own_path(self: Self) -> Endpoint: return *self._parent.get_own_path(), self.PATH_PREFIX, self.id + @classmethod + async def with_id(cls: type[Self], parent: Parent, object_id: int) -> Self: + object_path = (*parent.get_own_path(), cls.PATH_PREFIX, str(object_id)) + response = await parent.requester.get(*object_path) + return cls(parent=parent, data=response.as_dict()) + class MaintenanceMode: def __init__( diff --git a/adcm_aio_client/core/objects/_common.py b/adcm_aio_client/core/objects/_common.py index e51d7b4a..0db16076 100644 --- a/adcm_aio_client/core/objects/_common.py +++ b/adcm_aio_client/core/objects/_common.py @@ -24,10 +24,11 @@ async def get_status(self: Self) -> str: class WithActions(WithProtectedRequester, AwareOfOwnPath): @cached_property def actions(self: Self) -> ActionsAccessor: - return ActionsAccessor(parent=self, path=(*self.get_own_path(), "actions"), requester=self._requester) + # `WithActions` can actually be InteractiveObject, but it isn't required + # based on usages, so for now it's just ignore + return ActionsAccessor(parent=self, path=(*self.get_own_path(), "actions"), requester=self._requester) # type: ignore[reportArgumentType] -# todo whole section lacking implementation (and maybe code move is required) class WithConfig(ConfigOwner): @cached_property async def config(self: Self) -> ObjectConfig: diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index 6c1d6b60..94273cb9 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -2,13 +2,13 @@ from datetime import datetime, timedelta from functools import cached_property from pathlib import Path -from typing import Any, Awaitable, Callable, Iterable, Literal, Self +from typing import Any, AsyncGenerator, Awaitable, Callable, Iterable, Literal, Self import asyncio from asyncstdlib.functools import cached_property as async_cached_property # noqa: N813 from adcm_aio_client.core.actions._objects import Action -from adcm_aio_client.core.errors import NotFoundError +from adcm_aio_client.core.errors import InvalidFilterError, NotFoundError from adcm_aio_client.core.filters import ( ALL_OPERATIONS, COMMON_OPERATIONS, @@ -18,8 +18,10 @@ FilterByName, FilterByStatus, Filtering, + FilterValue, ) from adcm_aio_client.core.host_groups import WithActionHostGroups, WithConfigHostGroups +from adcm_aio_client.core.host_groups.action_group import ActionHostGroup from adcm_aio_client.core.mapping import ClusterMapping from adcm_aio_client.core.objects._accessors import ( PaginatedAccessor, @@ -421,7 +423,7 @@ async def hostprovider(self: Self) -> HostProvider: class HostsAccessor(PaginatedAccessor[Host]): class_type = Host - filtering = Filtering(FilterByName, FilterByStatus) + filtering = Filtering(FilterByName, FilterByStatus, FilterBy("hostprovider", COMMON_OPERATIONS, HostProvider)) class HostsNode(HostsAccessor): @@ 
-471,11 +473,11 @@ async def _get_hosts( return tuple(hosts) -def default_exit_condition(job: "Job") -> bool: - return job.get_status() in DEFAULT_JOB_TERMINAL_STATUSES +async def default_exit_condition(job: "Job") -> bool: + return await job.get_status() in DEFAULT_JOB_TERMINAL_STATUSES -class Job[Object: "InteractiveObject"](WithStatus, WithActions, RootInteractiveObject): +class Job(WithStatus, RootInteractiveObject): PATH_PREFIX = "tasks" @property @@ -483,46 +485,155 @@ def name(self: Self) -> str: return str(self._data["name"]) @property - def start_time(self: Self) -> datetime: - return self._data["startTime"] + def display_name(self: Self) -> str: + return str(self._data["displayName"]) - @property - def finish_time(self: Self) -> datetime: - return self._data["endTime"] + @cached_property + def start_time(self: Self) -> datetime | None: + time = self._data["startTime"] + if time is None: + return time - @property - def object(self: Self) -> Object: - obj_data = self._data["objects"][0] - obj_type = obj_data["type"] + return datetime.fromisoformat(time) + + @cached_property + def finish_time(self: Self) -> datetime | None: + time = self._data["endTime"] + if time is None: + return time - obj_dict = { - "host": Host, - "component": Component, - "provider": HostProvider, - "cluster": Cluster, - "service": Service, - "adcm": ADCM, - } + return datetime.fromisoformat(time) - return self._construct(what=obj_dict[obj_type], from_data=obj_data) + @async_cached_property + async def object(self: Self) -> InteractiveObject: + objects_raw = self._parse_objects() + return await self._retrieve_target(objects_raw) - @property - def action(self: Self) -> Action: - return self._construct(what=Action, from_data=self._data["action"]) + @async_cached_property + async def action(self: Self) -> Action: + target = await self.object + return Action(parent=target, data=self._data["action"]) async def wait( self: Self, timeout: int | None = None, poll_interval: int = 10, - exit_condition: Callable[[Self], bool] = default_exit_condition, + exit_condition: Callable[[Self], Awaitable[bool]] = default_exit_condition, ) -> Self: timeout_condition = datetime.max if timeout is None else (datetime.now() + timedelta(seconds=timeout)) # noqa: DTZ005 + while datetime.now() < timeout_condition: # noqa: DTZ005 - if exit_condition(self): + if await exit_condition(self): return self + await asyncio.sleep(poll_interval) - raise TimeoutError + message = "Failed to meet exit condition for job" + if timeout: + message = f"{message} in {timeout} seconds with {poll_interval} second interval" + + raise TimeoutError(message) async def terminate(self: Self) -> None: await self._requester.post(*self.get_own_path(), "terminate", data={}) + + def _parse_objects(self: Self) -> dict[str, int]: + return {entry["type"]: entry["id"] for entry in self._data["objects"]} + + async def _retrieve_target(self: Self, objects: dict[str, int]) -> InteractiveObject: + match objects: + case {"action_host_group": id_}: + objects.pop("action_host_group") + owner = await self._retrieve_target(objects) + return await ActionHostGroup.with_id(parent=owner, object_id=id_) + + case {"host": id_}: + return await Host.with_id(requester=self._requester, object_id=id_) + + case {"component": id_}: + objects.pop("component") + + owner = await self._retrieve_target(objects) + if not isinstance(owner, Service): + message = f"Incorrect owner for component detected from job data: {owner}" + raise TypeError(message) + + return await Component.with_id(parent=owner, 
object_id=id_) + + case {"service": id_}: + objects.pop("service") + + owner = await self._retrieve_target(objects) + if not isinstance(owner, Cluster): + message = f"Incorrect owner for service detected from job data: {owner}" + raise TypeError(message) + + return await Service.with_id(parent=owner, object_id=id_) + + case {"cluster": id_}: + return await Cluster.with_id(requester=self._requester, object_id=id_) + + case {"provider": id_}: + return await HostProvider.with_id(requester=self._requester, object_id=id_) + case _: + message = f"Failed to detect Job's owner based on {objects}" + raise RuntimeError(message) + + +class JobsNode(PaginatedAccessor[Job]): + class_type = Job + filtering = Filtering( + FilterByName, + FilterByDisplayName, + FilterByStatus, + FilterBy("action", COMMON_OPERATIONS, Action), + # technical filters, don't use them directly + FilterBy("target_id", ("eq",), int), + FilterBy("target_type", ("eq",), str), + ) + + # override accessor methods to allow passing object + + async def get(self: Self, *, object: InteractiveObject | None = None, **filters: FilterValue) -> Job: # noqa: A002 + object_filter = self._prepare_filter_by_object(object) + all_filters = filters | object_filter + return await super().get(**all_filters) + + async def get_or_none(self: Self, *, object: InteractiveObject | None = None, **filters: FilterValue) -> Job | None: # noqa: A002 + object_filter = self._prepare_filter_by_object(object) + all_filters = filters | object_filter + return await super().get_or_none(**all_filters) + + async def filter(self: Self, *, object: InteractiveObject | None = None, **filters: FilterValue) -> list[Job]: # noqa: A002 + object_filter = self._prepare_filter_by_object(object) + all_filters = filters | object_filter + return await super().filter(**all_filters) + + async def iter( + self: Self, + *, + object: InteractiveObject | None = None, # noqa: A002 + **filters: FilterValue, + ) -> AsyncGenerator[Job, None]: + object_filter = self._prepare_filter_by_object(object) + all_filters = filters | object_filter + async for entry in super().iter(**all_filters): + yield entry + + def _prepare_filter_by_object(self: Self, object_: InteractiveObject | None) -> dict: + if object_ is None: + return {} + + object_id = object_.id + + if isinstance(object_, (Cluster, Service, Component, Host)): + object_type = object_.__class__.__name__.lower() + elif isinstance(object_, HostProvider): + object_type = "provider" + elif isinstance(object_, ActionHostGroup): + object_type = "action_host_group" + else: + message = f"Failed to build filter: {object_.__class__.__name__} " "can't be an owner of Job" + raise InvalidFilterError(message) + + return {"target_id__eq": object_id, "target_type__eq": object_type} diff --git a/adcm_aio_client/core/requesters.py b/adcm_aio_client/core/requesters.py index 91e82c60..f02127cb 100644 --- a/adcm_aio_client/core/requesters.py +++ b/adcm_aio_client/core/requesters.py @@ -110,10 +110,13 @@ def retry_request(request_func: RequestFunc) -> RequestFunc: @wraps(request_func) async def wrapper(self: "DefaultRequester", *args: Params.args, **kwargs: Params.kwargs) -> HTTPXRequesterResponse: retries = self._retries + last_error = None + for attempt in range(retries.attempts): try: response = await request_func(self, *args, **kwargs) - except (UnauthorizedError, httpx.NetworkError, httpx.TransportError): + except (UnauthorizedError, httpx.NetworkError, httpx.TransportError) as e: + last_error = e if attempt >= retries.attempts - 1: continue @@ -125,7 +128,11 
@@ async def wrapper(self: "DefaultRequester", *args: Params.args, **kwargs: Params break else: message = f"Request failed in {retries.interval} attempts" - raise RetryRequestError(message) + if last_error is None: + raise RetryRequestError(message) + + message = f"{message}. Last error: {last_error}" + raise RetryRequestError(message) from last_error return response diff --git a/poetry.lock b/poetry.lock index 73fffa47..82be4859 100644 --- a/poetry.lock +++ b/poetry.lock @@ -385,6 +385,20 @@ pytest = ">=8.2,<9" docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] +[[package]] +name = "pytest-timeout" +version = "2.3.1" +description = "pytest plugin to abort hanging tests" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-timeout-2.3.1.tar.gz", hash = "sha256:12397729125c6ecbdaca01035b9e5239d4db97352320af155b3f5de1ba5165d9"}, + {file = "pytest_timeout-2.3.1-py3-none-any.whl", hash = "sha256:68188cb703edfc6a18fad98dc25a3c61e9f24d644b0b70f33af545219fc7813e"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + [[package]] name = "pywin32" version = "308" @@ -701,4 +715,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.12" -content-hash = "87a74f8686fa2e421979ffe2ffd12e46a54176e4852f184fd5782f252d117c1b" +content-hash = "f7bae9e0c1c116fe81eca25b419ce706a5b74821d1910f35632270f51a75727e" diff --git a/pyproject.toml b/pyproject.toml index 8492d355..294c2ee0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,6 +27,7 @@ pytest = "^8.3.3" pytest-asyncio = "^0.24.0" testcontainers = "^4.8.2" pyyaml = "^6.0.2" +pytest-timeout = "^2.3.1" [build-system] requires = ["poetry-core"] @@ -34,6 +35,7 @@ build-backend = "poetry.core.masonry.api" [tool.pytest.ini_options] asyncio_default_fixture_loop_scope = "function" +timeout = 300 [tool.ruff] line-length = 120 diff --git a/tests/integration/bundles/complex_cluster/actions.yaml b/tests/integration/bundles/complex_cluster/actions.yaml new file mode 100644 index 00000000..95d47d24 --- /dev/null +++ b/tests/integration/bundles/complex_cluster/actions.yaml @@ -0,0 +1,16 @@ +- name: letsgo + hosts: localhost + connection: local + gather_facts: no + + tasks: + - name: Success + debug: + msg: "successful step" + tags: [ok] + + - name: Fail + fail: + msg: "failed step" + tags: [fail] + diff --git a/tests/integration/bundles/complex_cluster/config.yaml b/tests/integration/bundles/complex_cluster/config.yaml index 8873389b..041ee101 100644 --- a/tests/integration/bundles/complex_cluster/config.yaml +++ b/tests/integration/bundles/complex_cluster/config.yaml @@ -2,6 +2,41 @@ name: Some Cluster version: 1 + actions: &actions + success: &job + display_name: I will survive + type: job + script_type: ansible + script: ./actions.yaml + allow_to_terminate: true + allow_for_action_host_group: true + params: + ansible_tags: ok + masking: {} + + fail: + <<: *job + display_name: no Way + params: + ansible_tags: fail + + success_task: + display_name: Lots Of me + type: task + masking: {} + allow_to_terminate: true + allow_for_action_host_group: true + scripts: + - &success_job + name: first + script_type: ansible + script: ./actions.yaml + params: + ansible_tags: ok + - <<: *success_job + name: second + display_name: AnothEr + config: - name: string_field type: string @@ -32,6 +67,18 @@ components: *example_c +- type: service + name: with_actions + version: 2.3 + actions: *actions + + components: + c1: + display_name: Awesome + actions: *actions + c2: + actions: *actions + - 
type: service name: complex_config version: 0.3 diff --git a/tests/integration/bundles/simple_hostprovider/actions.yaml b/tests/integration/bundles/simple_hostprovider/actions.yaml new file mode 100644 index 00000000..95d47d24 --- /dev/null +++ b/tests/integration/bundles/simple_hostprovider/actions.yaml @@ -0,0 +1,16 @@ +- name: letsgo + hosts: localhost + connection: local + gather_facts: no + + tasks: + - name: Success + debug: + msg: "successful step" + tags: [ok] + + - name: Fail + fail: + msg: "failed step" + tags: [fail] + diff --git a/tests/integration/bundles/simple_hostprovider/config.yaml b/tests/integration/bundles/simple_hostprovider/config.yaml index 56628762..12e0f7f8 100644 --- a/tests/integration/bundles/simple_hostprovider/config.yaml +++ b/tests/integration/bundles/simple_hostprovider/config.yaml @@ -2,6 +2,19 @@ name: simple_provider version: 4 + actions: &actions + success: &job + display_name: I will survive + type: job + script_type: ansible + script: ./actions.yaml + params: + ansible_tags: ok + masking: {} + + - type: host name: simple_host version: 2 + + actions: *actions diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 767fdc8d..3aa53fc3 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -11,7 +11,9 @@ from adcm_aio_client._session import ADCMSession from adcm_aio_client.core.client import ADCMClient +from adcm_aio_client.core.objects.cm import Bundle from adcm_aio_client.core.types import Credentials +from tests.integration.bundle import pack_bundle from tests.integration.setup_environment import ( DB_USER, ADCMContainer, @@ -49,13 +51,27 @@ def adcm(network: Network, postgres: ADCMPostgresContainer) -> Generator[ADCMCon @pytest_asyncio.fixture(scope="function") -async def adcm_client(adcm: ADCMContainer) -> AsyncGenerator[ADCMClient, None]: +async def adcm_client(request: pytest.FixtureRequest, adcm: ADCMContainer) -> AsyncGenerator[ADCMClient, None]: credentials = Credentials(username="admin", password="admin") # noqa: S106 url = adcm.url - async with ADCMSession(url=url, credentials=credentials, timeout=10, retry_interval=1, retry_attempts=1) as client: + extra_kwargs = getattr(request, "param", {}) + kwargs: dict = {"timeout": 10, "retry_interval": 1, "retry_attempts": 1} | extra_kwargs + async with ADCMSession(url=url, credentials=credentials, **kwargs) as client: yield client +@pytest_asyncio.fixture() +async def complex_cluster_bundle(adcm_client: ADCMClient, tmp_path: Path) -> Bundle: + bundle_path = pack_bundle(from_dir=BUNDLES / "complex_cluster", to=tmp_path) + return await adcm_client.bundles.create(source=bundle_path, accept_license=True) + + +@pytest_asyncio.fixture() +async def simple_hostprovider_bundle(adcm_client: ADCMClient, tmp_path: Path) -> Bundle: + bundle_path = pack_bundle(from_dir=BUNDLES / "simple_hostprovider", to=tmp_path) + return await adcm_client.bundles.create(source=bundle_path, accept_license=True) + + @pytest_asyncio.fixture() async def httpx_client(adcm: ADCMContainer) -> AsyncGenerator[AsyncClient, None]: client = AsyncClient(base_url=urljoin(adcm.url, "api/v2/")) diff --git a/tests/integration/test_jobs.py b/tests/integration/test_jobs.py new file mode 100644 index 00000000..4d1f0916 --- /dev/null +++ b/tests/integration/test_jobs.py @@ -0,0 +1,221 @@ +from datetime import datetime +from itertools import chain +from operator import attrgetter +import asyncio + +import pytest +import pytest_asyncio + +from adcm_aio_client.core.client import ADCMClient +from 
adcm_aio_client.core.filters import Filter, FilterValue +from adcm_aio_client.core.host_groups.action_group import ActionHostGroup +from adcm_aio_client.core.objects._common import WithActions +from adcm_aio_client.core.objects.cm import Bundle, Cluster, Component, Job +from adcm_aio_client.core.types import WithID + +pytestmark = [pytest.mark.asyncio] + + +async def is_running(job: Job) -> bool: + return await job.get_status() == "running" + + +async def run_non_blocking(target: WithActions, **filters: FilterValue) -> Job: + action = await target.actions.get(**filters) + action.blocking = False + return await action.run() + + +async def check_job_object(job: Job, object_: WithID) -> None: + expected_type = object_.__class__ + expected_id = object_.id + + actual_object = await job.object + + assert isinstance(actual_object, expected_type) + assert actual_object.id == expected_id + + +@pytest_asyncio.fixture() +async def prepare_environment( + adcm_client: ADCMClient, + complex_cluster_bundle: Bundle, + simple_hostprovider_bundle: Bundle, +) -> None: + cluster_bundle = complex_cluster_bundle + hostprovider_bundle = simple_hostprovider_bundle + + clusters: list[Cluster] = await asyncio.gather( + *(adcm_client.clusters.create(cluster_bundle, f"wow-{i}") for i in range(5)) + ) + hostproviders = await asyncio.gather( + *(adcm_client.hostproviders.create(hostprovider_bundle, f"yay-{i}") for i in range(5)) + ) + await asyncio.gather( + *(adcm_client.hosts.create(hp, f"host-{hp.name}-{i}") for i in range(5) for hp in hostproviders) + ) + hosts = await adcm_client.hosts.all() + + services = tuple( + chain.from_iterable( + await asyncio.gather( + *(cluster.services.add(Filter(attr="name", op="eq", value="with_actions")) for cluster in clusters) + ) + ) + ) + components = tuple(chain.from_iterable(await asyncio.gather(*(service.components.all() for service in services)))) + + host_groups = await asyncio.gather( + *( + object_.action_host_groups.create(name=f"ahg for {object_.__class__.__name__}") + for object_ in chain(clusters, services, components) + ) + ) + + for object_ in chain(clusters, services, components, hosts, hostproviders): + await run_non_blocking(object_, name__eq="success") + + for group in host_groups: + await run_non_blocking(group, name__in=["fail"]) + + +@pytest.mark.usefixtures("prepare_environment") +@pytest.mark.parametrize("adcm_client", [{"timeout": 60}], ids=["t60"], indirect=True) +async def test_jobs_api(adcm_client: ADCMClient) -> None: + await _test_basic_api(adcm_client) + await _test_job_object(adcm_client) + await _test_collection_fitlering(adcm_client) + + +async def _test_basic_api(adcm_client: ADCMClient) -> None: + cluster = await adcm_client.clusters.get(name__eq="wow-4") + service = await cluster.services.get(name__contains="action") + component = await service.components.get(display_name__icontains="wESo") + + action = await component.actions.get(display_name__ieq="Lots of me") + job = await action.run() + # depending on retrieval time it's "one of" + assert await job.get_status() in ("created", "running") + assert job.start_time is None + assert job.finish_time is None + assert (await job.action).id == action.id + + await job.wait(exit_condition=is_running, timeout=30, poll_interval=1) + assert job.start_time is None + await job.refresh() + assert isinstance(job.start_time, datetime) + assert job.finish_time is None + + target = await job.object + assert isinstance(target, Component) + assert target.id == component.id + assert target.service.id == 
component.service.id + + await job.wait(timeout=30, poll_interval=3) + + assert await job.get_status() == "success" + assert job.finish_time is None + await job.refresh() + assert isinstance(job.finish_time, datetime) + + +async def _test_job_object(adcm_client: ADCMClient) -> None: + cluster, *_ = await adcm_client.clusters.list(query={"limit": 1, "offset": 4}) + service = await cluster.services.get() + component = await service.components.get(name__eq="c2") + hostprovider, *_ = await adcm_client.hostproviders.list(query={"limit": 1, "offset": 2}) + host, *_ = await adcm_client.hosts.list(query={"limit": 1, "offset": 4}) + + host_group_1 = await service.action_host_groups.get() + host_group_2 = await component.action_host_groups.get() + + all_targets = (cluster, service, component, hostprovider, host, host_group_1, host_group_2) + + for target in all_targets: + jobs = await adcm_client.jobs.filter(object=target) + assert len(jobs) == 1, f"Amount of jobs is incorrect for {target}: {len(jobs)}. Expected 1" + job = jobs[0] + await check_job_object(job=job, object_=target) # type: ignore + + +async def _test_collection_fitlering(adcm_client: ADCMClient) -> None: + failed_jobs = 20 + services_amount = 5 + + for job in await adcm_client.jobs.all(): + await job.wait(timeout=60) + + jobs = await adcm_client.jobs.list() + assert len(jobs) == 50 + + jobs = await adcm_client.jobs.all() + total_jobs = len(jobs) + assert total_jobs > 50 + + cases = ( + # status + ("status__eq", "failed", failed_jobs), + ("status__ieq", "faiLed", failed_jobs), + ("status__ne", "success", failed_jobs), + ("status__ine", "succEss", failed_jobs), + ("status__in", ("failed", "success"), total_jobs), + ("status__iin", ("faIled", "sUcceSs"), total_jobs), + ("status__exclude", ("failed", "success"), 0), + ("status__iexclude", ("succesS",), failed_jobs), + # name + ("name__eq", "fail", failed_jobs), + ("name__ieq", "FaIl", failed_jobs), + ("name__ne", "fail", total_jobs - failed_jobs), + ("name__ine", "FaIl", total_jobs - failed_jobs), + ("name__in", ("success", "success_task"), total_jobs - failed_jobs), + ("name__iin", ("sUccEss", "success_Task"), total_jobs - failed_jobs), + ("name__exclude", ("success",), failed_jobs + 1), + ("name__iexclude", ("success",), failed_jobs + 1), + ("name__contains", "il", failed_jobs), + ("name__icontains", "I", failed_jobs), + # display_name + ("display_name__eq", "no Way", failed_jobs), + ("display_name__ieq", "No way", failed_jobs), + ("display_name__ne", "no Way", total_jobs - failed_jobs), + ("display_name__ine", "No way", total_jobs - failed_jobs), + ("display_name__in", ("I will survive", "Lots Of me"), total_jobs - failed_jobs), + ("display_name__iin", ("i will survive", "lots of me"), total_jobs - failed_jobs), + ("display_name__exclude", ("I will survive",), failed_jobs + 1), + ("display_name__iexclude", ("i will survive",), failed_jobs + 1), + ("display_name__contains", "W", failed_jobs), + ("display_name__icontains", "W", total_jobs - 1), + ) + + for inline_filter, value, expected_amount in cases: + filter_ = {inline_filter: value} + result = await adcm_client.jobs.filter(**filter_) # type: ignore + actual_amount = len(result) + assert ( + actual_amount == expected_amount + ), f"Incorrect amount for {filter_=}\nExpected: {expected_amount}\nActual: {actual_amount}" + unique_entries = set(map(attrgetter("id"), result)) + assert len(unique_entries) == expected_amount + + cluster = await adcm_client.clusters.get(name__eq="wow-4") + service = await cluster.services.get() + service_ahg = 
await service.action_host_groups.get() + + fail_action = await service.actions.get(name__eq="fail") + success_action = await service.actions.get(name__eq="success") + + jobs = [job async for job in adcm_client.jobs.iter(action__eq=fail_action)] + assert len(jobs) == 5 + objects = [] + for job in jobs: + objects.append(await job.object) + assert all(isinstance(o, ActionHostGroup) for o in objects) + assert any(o.id == service_ahg.id for o in objects) + + job = await adcm_client.jobs.get_or_none(action__in=(fail_action, success_action), status__eq="notexist") + assert job is None + + jobs = await adcm_client.jobs.filter(action__ne=success_action) + assert len(jobs) == total_jobs - services_amount + + jobs = await adcm_client.jobs.filter(action__exclude=(success_action,)) + assert len(jobs) == total_jobs - services_amount From 688e6d62b2ac77b4c0459102421d3341b449aef4 Mon Sep 17 00:00:00 2001 From: Skrynnik Daniil Date: Tue, 24 Dec 2024 09:52:28 +0300 Subject: [PATCH 45/46] ADCM-6240: rework errors --- adcm_aio_client/core/actions/_objects.py | 18 +++++- adcm_aio_client/core/config/_objects.py | 13 ++++- adcm_aio_client/core/errors.py | 51 ++++++++++++---- adcm_aio_client/core/host_groups/_common.py | 26 ++++++--- adcm_aio_client/core/mapping/_objects.py | 20 ++++++- adcm_aio_client/core/objects/cm.py | 65 ++++++++++++++++----- adcm_aio_client/core/requesters.py | 16 ++--- tests/integration/setup_environment.py | 2 +- tests/unit/test_requesters.py | 4 +- 9 files changed, 164 insertions(+), 51 deletions(-) diff --git a/adcm_aio_client/core/actions/_objects.py b/adcm_aio_client/core/actions/_objects.py index bf0c11f5..2506a2a2 100644 --- a/adcm_aio_client/core/actions/_objects.py +++ b/adcm_aio_client/core/actions/_objects.py @@ -5,7 +5,13 @@ from asyncstdlib import cached_property as async_cached_property -from adcm_aio_client.core.errors import HostNotInClusterError, NoMappingRulesForActionError +from adcm_aio_client.core.errors import ( + ConflictError, + HostNotInClusterError, + NoMappingRulesForActionError, + ObjectBlockedError, +) +from adcm_aio_client.core.filters import FilterByName, Filtering from adcm_aio_client.core.mapping import ActionMapping from adcm_aio_client.core.objects._accessors import NonPaginatedChildAccessor from adcm_aio_client.core.objects._base import InteractiveChildObject, InteractiveObject @@ -30,7 +36,14 @@ def display_name(self: Self) -> str: return self._data["displayName"] async def run(self: Self) -> dict: # TODO: implement Task, return Task - return (await self._requester.post(*self.get_own_path(), "run", data={"isVerbose": self._verbose})).as_dict() + try: + return ( + await self._requester.post(*self.get_own_path(), "run", data={"isVerbose": self._verbose}) + ).as_dict() + except ConflictError as e: + if "has issue" in str(e): + raise ObjectBlockedError(*e.args) from None + raise @async_cached_property async def _mapping_rule(self: Self) -> list[dict] | None: @@ -64,6 +77,7 @@ async def _rich_data(self: Self) -> dict: class ActionsAccessor(NonPaginatedChildAccessor): class_type = Action + filtering = Filtering(FilterByName) class Upgrade(Action): diff --git a/adcm_aio_client/core/config/_objects.py b/adcm_aio_client/core/config/_objects.py index c36773a6..f337eb7f 100644 --- a/adcm_aio_client/core/config/_objects.py +++ b/adcm_aio_client/core/config/_objects.py @@ -15,7 +15,13 @@ LevelNames, LocalConfigs, ) -from adcm_aio_client.core.errors import ConfigComparisonError, ConfigNoParameterError, RequesterError +from adcm_aio_client.core.errors import ( + 
BadRequestError, + ConfigComparisonError, + ConfigNoParameterError, + InvalidConfigError, + RequesterError, +) from adcm_aio_client.core.types import AwareOfOwnPath, WithRequesterProperty @@ -308,11 +314,12 @@ async def save(self: Self, description: str = "") -> Self: try: response = await self._parent.requester.post(*self._parent.get_own_path(), "configs", data=payload) - except RequesterError: + except RequesterError as e: # config isn't saved, no data update is in play, # returning "pre-saved" parsed values self._parse_json_fields_inplace_safe(config_to_save) - + if isinstance(e, BadRequestError): + raise InvalidConfigError(*e.args) from None raise else: new_config = ConfigData.from_v2_response(data_in_v2_format=response.as_dict()) diff --git a/adcm_aio_client/core/errors.py b/adcm_aio_client/core/errors.py index f9fd4aa8..cf7816ea 100644 --- a/adcm_aio_client/core/errors.py +++ b/adcm_aio_client/core/errors.py @@ -15,6 +15,10 @@ class ADCMClientError(Exception): pass +class WaitTimeoutError(ADCMClientError): + pass + + # Session @@ -25,10 +29,6 @@ class ClientInitError(ADCMClientError): # Version -class VersionRetrievalError(ADCMClientError): - pass - - class NotSupportedVersionError(ADCMClientError): pass @@ -44,7 +44,7 @@ class NoCredentialsError(RequesterError): pass -class WrongCredentialsError(RequesterError): +class AuthenticationError(RequesterError): pass @@ -64,31 +64,31 @@ class ResponseDataConversionError(RequesterError): pass -class ResponseError(RequesterError): +class UnknownError(RequesterError): pass -class BadRequestError(ResponseError): +class BadRequestError(UnknownError): pass -class UnauthorizedError(ResponseError): +class UnauthorizedError(UnknownError): pass -class ForbiddenError(ResponseError): +class PermissionDeniedError(UnknownError): pass -class NotFoundError(ResponseError): +class NotFoundError(UnknownError): pass -class ConflictError(ResponseError): +class ConflictError(UnknownError): pass -class ServerError(ResponseError): +class ServerError(UnknownError): pass @@ -107,6 +107,10 @@ class ObjectDoesNotExistError(AccessorError): pass +class ObjectAlreadyExistsError(AccessorError): # TODO: add tests + pass + + class OperationError(AccessorError): pass @@ -139,3 +143,26 @@ class FilterError(ADCMClientError): ... class InvalidFilterError(FilterError): ... 
+ + +# Operation-related + + +class HostConflictError(ADCMClientError): + pass + + +class ObjectBlockedError(ADCMClientError): + pass + + +class InvalidMappingError(ADCMClientError): + pass + + +class InvalidConfigError(ADCMClientError): + pass + + +class ObjectUpdateError(ADCMClientError): + pass diff --git a/adcm_aio_client/core/host_groups/_common.py b/adcm_aio_client/core/host_groups/_common.py index 185c7261..b322b5dd 100644 --- a/adcm_aio_client/core/host_groups/_common.py +++ b/adcm_aio_client/core/host_groups/_common.py @@ -1,6 +1,7 @@ from functools import partial from typing import TYPE_CHECKING, Any, Iterable, Self, Union +from adcm_aio_client.core.errors import ConflictError, HostConflictError, ObjectAlreadyExistsError from adcm_aio_client.core.filters import Filter from adcm_aio_client.core.objects._accessors import ( DefaultQueryParams as AccessorFilter, @@ -55,13 +56,17 @@ async def set(self: Self, host: Union["Host", Iterable["Host"], Filter]) -> None async def _add_hosts_to_group(self: Self, ids: Iterable[HostID]) -> None: add_by_id = partial(self._requester.post, *self._path) - add_coros = map(add_by_id, ({"hostId": id_} for id_ in ids)) - error = await safe_gather( - coros=add_coros, - msg=f"Some hosts can't be added to {self.group_type} host group", - ) - if error is not None: - raise error + try: + if error := await safe_gather( + coros=(add_by_id(data={"hostId": id_}) for id_ in ids), + msg=f"Some hosts can't be added to {self.group_type} host group", + ): + raise error + except* ConflictError as conflict_err_group: + host_conflict_msgs = {"already a member of this group", "already is a member of another group"} + if target_gr := conflict_err_group.subgroup(lambda e: any(msg in str(e) for msg in host_conflict_msgs)): + raise HostConflictError(*target_gr.exceptions[0].args) from None + raise async def _remove_hosts_from_group(self: Self, ids: Iterable[HostID]) -> None: delete_by_id = partial(self._requester.delete, *self._path) @@ -100,7 +105,12 @@ class HostGroupNode[ async def create( # TODO: can create HG with subset of `hosts` if adding some of them leads to an error self: Self, name: str, description: str = "", hosts: list["Host"] | None = None ) -> InteractiveChildObject: - response = await self._requester.post(*self._path, data={"name": name, "description": description}) + try: + response = await self._requester.post(*self._path, data={"name": name, "description": description}) + except ConflictError as e: + if "already exists" in str(e): + raise ObjectAlreadyExistsError(*e.args) from None + raise host_group = self.class_type(parent=self._parent, data=response.as_dict()) if not hosts: diff --git a/adcm_aio_client/core/mapping/_objects.py b/adcm_aio_client/core/mapping/_objects.py index a72b348a..ef110913 100644 --- a/adcm_aio_client/core/mapping/_objects.py +++ b/adcm_aio_client/core/mapping/_objects.py @@ -6,6 +6,7 @@ from typing import TYPE_CHECKING, Any, Callable, Coroutine, Iterable, Self import asyncio +from adcm_aio_client.core.errors import BadRequestError, ConflictError, InvalidMappingError from adcm_aio_client.core.filters import Filter, FilterByDisplayName, FilterByName, FilterByStatus, Filtering from adcm_aio_client.core.mapping.refresh import apply_local_changes, apply_remote_changes from adcm_aio_client.core.mapping.types import LocalMappings, MappingEntry, MappingPair, MappingRefreshStrategy @@ -149,7 +150,24 @@ async def for_cluster(cls: type[Self], owner: Cluster) -> Self: async def save(self: Self) -> Self: data = self._to_payload() - await 
self._requester.post(*self._cluster.get_own_path(), "mapping", data=data) + try: + await self._requester.post(*self._cluster.get_own_path(), "mapping", data=data) + except ConflictError as e: + # TODO: may be incomplete. Add tests for errors + conflict_msgs = { + "has unsatisfied constraint", + "No required service", + "hosts in maintenance mode", + "COMPONENT_CONSTRAINT_ERROR", + } + if any(msg in str(e) for msg in conflict_msgs): + raise InvalidMappingError(*e.args) from None + raise + except BadRequestError as e: + bad_request_msgs = {"Mapping entries duplicates found"} + if any((msg in str(e)) for msg in bad_request_msgs): + raise InvalidMappingError(*e.args) from None + raise self._initial = copy(self._current) diff --git a/adcm_aio_client/core/objects/cm.py b/adcm_aio_client/core/objects/cm.py index c2e6151c..d5ab015f 100644 --- a/adcm_aio_client/core/objects/cm.py +++ b/adcm_aio_client/core/objects/cm.py @@ -8,7 +8,15 @@ from asyncstdlib.functools import cached_property as async_cached_property # noqa: N813 from adcm_aio_client.core.actions._objects import Action -from adcm_aio_client.core.errors import NotFoundError +from adcm_aio_client.core.errors import ( + ConflictError, + HostConflictError, + NotFoundError, + ObjectAlreadyExistsError, + ObjectUpdateError, + UnknownError, + WaitTimeoutError, +) from adcm_aio_client.core.filters import ( ALL_OPERATIONS, COMMON_OPERATIONS, @@ -143,7 +151,12 @@ async def create(self: Self, source: Path | URLStr, *, accept_license: bool = Fa else: file = await self._bundle_retriever.download_external_bundle(source) - response = await self._requester.post_files("bundles", files={"file": file}) + try: + response = await self._requester.post_files("bundles", files={"file": file}) + except ConflictError as e: + if "Bundle already exists" in str(e): + raise ObjectAlreadyExistsError(*e.args) from None + raise bundle = Bundle(requester=self._requester, data=response.as_dict()) @@ -194,9 +207,12 @@ async def bundle(self: Self) -> Bundle: # object-specific methods async def set_ansible_forks(self: Self, value: int) -> Self: - await self._requester.post( - *self.get_own_path(), "ansible-config", data={"config": {"defaults": {"forks": value}}, "adcmMeta": {}} - ) + try: + await self._requester.post( + *self.get_own_path(), "ansible-config", data={"config": {"defaults": {"forks": value}}, "adcmMeta": {}} + ) + except UnknownError as e: + raise ObjectUpdateError(*e.args) from None return self # nodes and managers to access @@ -222,9 +238,14 @@ class ClustersNode(PaginatedAccessor[Cluster]): filtering = Filtering(FilterByName, FilterByBundle, FilterByStatus) async def create(self: Self, bundle: Bundle, name: str, description: str = "") -> Cluster: - response = await self._requester.post( - "clusters", data={"prototypeId": bundle._main_prototype_id, "name": name, "description": description} - ) + try: + response = await self._requester.post( + "clusters", data={"prototypeId": bundle._main_prototype_id, "name": name, "description": description} + ) + except ConflictError as e: + if "already exists" in str(e): + raise ObjectAlreadyExistsError(*e.args) from None + raise return Cluster(requester=self._requester, data=response.as_dict()) @@ -376,9 +397,15 @@ class HostProvidersNode(PaginatedAccessor[HostProvider]): filtering = Filtering(FilterByName, FilterByBundle) async def create(self: Self, bundle: Bundle, name: str, description: str = "") -> HostProvider: - response = await self._requester.post( - "hostproviders", data={"prototypeId": bundle._main_prototype_id, 
"name": name, "description": description} - ) + try: + response = await self._requester.post( + "hostproviders", + data={"prototypeId": bundle._main_prototype_id, "name": name, "description": description}, + ) + except ConflictError as e: + if "duplicate host provider" in str(e): + raise ObjectAlreadyExistsError(*e.args) from None + raise return HostProvider(requester=self._requester, data=response.as_dict()) @@ -418,7 +445,12 @@ async def create( data = {"hostproviderId": hostprovider.id, "name": name, "description": description} if cluster: data["clusterId"] = cluster.id - await self._requester.post(*self._path, data=data) + try: + await self._requester.post(*self._path, data=data) + except ConflictError as e: + if "already exists" in str(e): + raise ObjectAlreadyExistsError(*e.args) from None + raise class HostsInClusterNode(HostsAccessor): @@ -431,7 +463,12 @@ def __init__(self: Self, cluster: Cluster) -> None: async def add(self: Self, host: Host | Iterable[Host] | Filter) -> None: hosts = await self._get_hosts(host=host, filter_func=self._root_host_filter) - await self._requester.post(*self._path, data=[{"hostId": host.id} for host in hosts]) + try: + await self._requester.post(*self._path, data=[{"hostId": host.id} for host in hosts]) + except ConflictError as e: + if "already linked to another cluster" in str(e): + raise HostConflictError(*e.args) from None + raise async def remove(self: Self, host: Host | Iterable[Host] | Filter) -> None: hosts = await self._get_hosts(host=host, filter_func=self.filter) @@ -509,7 +546,7 @@ async def wait( return self await asyncio.sleep(poll_interval) - raise TimeoutError + raise WaitTimeoutError async def terminate(self: Self) -> None: await self._requester.post(*self.get_own_path(), "terminate", data={}) diff --git a/adcm_aio_client/core/requesters.py b/adcm_aio_client/core/requesters.py index 91e82c60..a1837aa7 100644 --- a/adcm_aio_client/core/requesters.py +++ b/adcm_aio_client/core/requesters.py @@ -21,20 +21,20 @@ import httpx from adcm_aio_client.core.errors import ( + AuthenticationError, BadRequestError, ConflictError, - ForbiddenError, LoginError, LogoutError, NoCredentialsError, NotFoundError, OperationError, + PermissionDeniedError, ResponseDataConversionError, - ResponseError, RetryRequestError, ServerError, UnauthorizedError, - WrongCredentialsError, + UnknownError, ) from adcm_aio_client.core.types import Credentials, PathPart, QueryParameters, Requester, RetryPolicy, URLStr @@ -81,7 +81,7 @@ def _get_json_data(self: Self) -> Json: STATUS_ERRORS_MAP = { 400: BadRequestError, 401: UnauthorizedError, - 403: ForbiddenError, + 403: PermissionDeniedError, 404: NotFoundError, 409: ConflictError, 500: ServerError, @@ -93,7 +93,7 @@ def convert_exceptions(func: DoRequestFunc) -> DoRequestFunc: async def wrapper(*arg: Params.args, **kwargs: Params.kwargs) -> httpx.Response: response = await func(*arg, **kwargs) if response.status_code >= 300: - error_cls = STATUS_ERRORS_MAP.get(response.status_code, ResponseError) + error_cls = STATUS_ERRORS_MAP.get(response.status_code, UnknownError) # not safe, because can be not json try: message = response.json() @@ -155,8 +155,8 @@ async def login(self: Self, credentials: Credentials) -> Self: f"Login to ADCM at {self.client.base_url} has failed for " f"user {credentials.username} most likely due to incorrect credentials" ) - raise WrongCredentialsError(message) from e - except ResponseError as e: + raise AuthenticationError(message) from e + except UnknownError as e: message = f"Login to ADCM at 
{self.client.base_url} has failed for user {credentials.username}: {e}" raise LoginError(message) from e @@ -172,7 +172,7 @@ async def logout(self: Self) -> Self: try: request_coro = self.client.post(url=logout_url, data={}) await self._do_request(request_coro) - except ResponseError as e: + except UnknownError as e: message = f"Logout from ADCM at {self.client.base_url} has failed" raise LogoutError(message) from e diff --git a/tests/integration/setup_environment.py b/tests/integration/setup_environment.py index b0a8c63b..1b7d5196 100644 --- a/tests/integration/setup_environment.py +++ b/tests/integration/setup_environment.py @@ -9,7 +9,7 @@ from testcontainers.postgres import DbContainer, PostgresContainer postgres_image_name = "postgres:latest" -adcm_image_name = "hub.adsw.io/adcm/adcm:feature_ADCM-6181" +adcm_image_name = "hub.adsw.io/adcm/adcm:develop" adcm_container_name = "test_adcm" postgres_name = "test_pg_db" diff --git a/tests/unit/test_requesters.py b/tests/unit/test_requesters.py index 0fc1d42e..b24d4749 100644 --- a/tests/unit/test_requesters.py +++ b/tests/unit/test_requesters.py @@ -7,7 +7,7 @@ import pytest import pytest_asyncio -from adcm_aio_client.core.errors import ResponseDataConversionError, ResponseError +from adcm_aio_client.core.errors import ResponseDataConversionError, UnknownError from adcm_aio_client.core.requesters import DefaultRequester, HTTPXRequesterResponse from adcm_aio_client.core.types import RetryPolicy @@ -93,7 +93,7 @@ async def test_raising_client_error_for_status( partial(requester.patch, data={}), requester.delete, ): - with pytest.raises(ResponseError): + with pytest.raises(UnknownError): await method() From 0a746fe49d78e6f3ee31a7d03f64db634a985629 Mon Sep 17 00:00:00 2001 From: Skrynnik Daniil Date: Wed, 25 Dec 2024 15:20:47 +0300 Subject: [PATCH 46/46] ADCM-6240: linters fix after merge --- adcm_aio_client/core/actions/_objects.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/adcm_aio_client/core/actions/_objects.py b/adcm_aio_client/core/actions/_objects.py index 428895da..f595a017 100644 --- a/adcm_aio_client/core/actions/_objects.py +++ b/adcm_aio_client/core/actions/_objects.py @@ -12,7 +12,7 @@ HostNotInClusterError, NoConfigInActionError, NoMappingInActionError, - ObjectBlockedError + ObjectBlockedError, ) from adcm_aio_client.core.filters import FilterByDisplayName, FilterByName, Filtering from adcm_aio_client.core.mapping import ActionMapping
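Usage sketch (illustrative only, not part of the patch series): the snippet below pulls together the Jobs API introduced in patch 44 and the reworked exceptions from ADCM-6240 as a reviewer aid. The URL, credentials, and object names are placeholders borrowed from the test fixtures, and the error names assume the hierarchy after patch 45 is applied.

import asyncio

from adcm_aio_client._session import ADCMSession
from adcm_aio_client.core.errors import ObjectBlockedError, WaitTimeoutError
from adcm_aio_client.core.types import Credentials


async def main() -> None:
    credentials = Credentials(username="admin", password="admin")
    async with ADCMSession(
        url="http://localhost:8000", credentials=credentials, timeout=10, retry_interval=1, retry_attempts=1
    ) as client:
        cluster = await client.clusters.get(name__eq="wow-4")
        service = await cluster.services.get(name__contains="action")
        component = await service.components.get(name__eq="c1")

        # run() converts the "has issue" conflict into ObjectBlockedError (patch 45)
        action = await component.actions.get(display_name__ieq="Lots of me")
        try:
            job = await action.run()
        except ObjectBlockedError:
            return

        # wait() polls get_status(); on timeout it now raises WaitTimeoutError
        try:
            await job.wait(timeout=60, poll_interval=3)
        except WaitTimeoutError:
            await job.terminate()
            return

        # start/finish times are parsed lazily; refresh() re-reads the job data
        await job.refresh()
        print(job.display_name, await job.get_status(), job.finish_time)

        # JobsNode supports filtering by owner object as well as inline filters
        component_jobs = await client.jobs.filter(object=component)
        failed_jobs = await client.jobs.filter(status__eq="failed")
        print(len(component_jobs), len(failed_jobs))


asyncio.run(main())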
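A second hedged sketch, focused on the error rework alone: it shows how calling code is expected to catch the operation-specific exceptions that replace bare ConflictError/BadRequestError handling. The client and bundle arguments are assumed to be obtained as in the previous sketch, and the cluster.hosts accessor name is an assumption not shown in these patches.

from adcm_aio_client.core.errors import (
    HostConflictError,
    InvalidMappingError,
    ObjectAlreadyExistsError,
)
from adcm_aio_client.core.mapping import ClusterMapping


async def error_handling_sketch(client, bundle) -> None:
    # Creating an object with a duplicate name now surfaces as ObjectAlreadyExistsError
    try:
        cluster = await client.clusters.create(bundle, "already-there")
    except ObjectAlreadyExistsError:
        cluster = await client.clusters.get(name__eq="already-there")

    # A mapping that violates constraints is rejected as InvalidMappingError
    mapping = await ClusterMapping.for_cluster(cluster)
    try:
        await mapping.save()
    except InvalidMappingError:
        pass  # e.g. unsatisfied constraint or missing required service

    # Adding a host already linked to another cluster raises HostConflictError
    # (assumes the cluster exposes its hosts accessor as `cluster.hosts`)
    host = await client.hosts.get(name__eq="some-host")
    try:
        await cluster.hosts.add(host)
    except HostConflictError:
        pass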