diff --git a/.env.docker b/.env.docker index 68abe292..1a94d715 100644 --- a/.env.docker +++ b/.env.docker @@ -4,17 +4,34 @@ ENV=LOCAL # Debug SYNCMASTER__SERVER__DEBUG=true -# Logging Backend -SYNCMASTER__SERVER__LOGGING__SETUP=True -SYNCMASTER__SERVER__LOGGING__PRESET=colored +# Logging +SYNCMASTER__LOGGING__SETUP=True +SYNCMASTER__LOGGING__PRESET=colored +SYNCMASTER__LOG_URL_TEMPLATE=https://grafana.example.com?correlation_id={{ correlation_id }}&run_id={{ run.id }} -# Logging Worker -export SYNCMASTER__WORKER__LOGGING__SETUP=True -SYNCMASTER__WORKER__LOGGING__PRESET=json +# Session +SYNCMASTER__SERVER__SESSION__SECRET_KEY=session_secret_key + +# Encrypt / Decrypt credentials data +SYNCMASTER__CRYPTO_KEY=UBgPTioFrtH2unlC4XFDiGf5sYfzbdSf_VgiUSaQc94= # Postgres SYNCMASTER__DATABASE__URL=postgresql+asyncpg://syncmaster:changeme@db:5432/syncmaster +# TODO: add to KeycloakAuthProvider documentation about creating new realms, add users, etc. +# Keycloak Auth (uncomment the PROVIDER line below to use Keycloak instead of Dummy) +SYNCMASTER__AUTH__SERVER_URL=http://keycloak:8080/ +SYNCMASTER__AUTH__REALM_NAME=manually_created +SYNCMASTER__AUTH__CLIENT_ID=manually_created +SYNCMASTER__AUTH__CLIENT_SECRET=generated_by_keycloak +SYNCMASTER__AUTH__REDIRECT_URI=http://localhost:8000/v1/auth/callback +SYNCMASTER__AUTH__SCOPE=email +# SYNCMASTER__AUTH__PROVIDER=syncmaster.backend.providers.auth.keycloak_provider.KeycloakAuthProvider + +# Dummy Auth +SYNCMASTER__AUTH__PROVIDER=syncmaster.backend.providers.auth.dummy_provider.DummyAuthProvider +SYNCMASTER__AUTH__ACCESS_TOKEN__SECRET_KEY=secret + # RabbitMQ SYNCMASTER__BROKER__URL=amqp://guest:guest@rabbitmq:5672/ diff --git a/.env.local b/.env.local index 7b72047e..401a8dd4 100644 --- a/.env.local +++ b/.env.local @@ -5,16 +5,23 @@ export ENV=LOCAL export SYNCMASTER__SERVER__DEBUG=true # Logging -export SYNCMASTER__SERVER__LOGGING__SETUP=True -export SYNCMASTER__SERVER__LOGGING__PRESET=colored +export SYNCMASTER__LOGGING__SETUP=True +export SYNCMASTER__LOGGING__PRESET=colored +export 
SYNCMASTER__LOG_URL_TEMPLATE="https://grafana.example.com?correlation_id={{ correlation_id }}&run_id={{ run.id }}" -# Logging Worker -export SYNCMASTER__WORKER__LOGGING__SETUP=True -export SYNCMASTER__WORKER__LOGGING__PRESET=json +# Session +export SYNCMASTER__SERVER__SESSION__SECRET_KEY=session_secret_key + +# Encrypt / Decrypt credentials data +export SYNCMASTER__CRYPTO_KEY=UBgPTioFrtH2unlC4XFDiGf5sYfzbdSf_VgiUSaQc94= # Postgres export SYNCMASTER__DATABASE__URL=postgresql+asyncpg://syncmaster:changeme@localhost:5432/syncmaster +# Auth +export SYNCMASTER__AUTH__PROVIDER=syncmaster.backend.providers.auth.dummy_provider.DummyAuthProvider +export SYNCMASTER__AUTH__ACCESS_TOKEN__SECRET_KEY=secret + # RabbitMQ export SYNCMASTER__BROKER__URL=amqp://guest:guest@localhost:5672/ diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 6ae7bf1e..6c6d1d3a 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -17,7 +17,7 @@ build: - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH python -m poetry install --no-root --all-extras --with docs --without dev,test - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH python -m poetry show -v - python -m pip list -v - - SYNCMASTER__DATABASE__URL=postgresql+psycopg://fake:fake@127.0.0.1:5432/fake SYNCMASTER__BROKER__URL=amqp://fake:faket@fake:5672/ python -m syncmaster.backend.export_openapi_schema docs/_static/openapi.json + - SYNCMASTER__DATABASE__URL=postgresql+psycopg://fake:fake@127.0.0.1:5432/fake SYNCMASTER__SERVER__SESSION__SECRET_KEY=session_secret_key SYNCMASTER__BROKER__URL=amqp://fake:faket@fake:5672/ SYNCMASTER__CRYPTO_KEY=crypto_key SYNCMASTER__AUTH__ACCESS_TOKEN__SECRET_KEY=fake python -m syncmaster.backend.export_openapi_schema docs/_static/openapi.json sphinx: configuration: docs/conf.py diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 094f3868..7bdce679 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -40,6 +40,7 @@ services: - 8000:8000 volumes: - ./syncmaster:/app/syncmaster + - 
./docs/_static:/app/docs/_static - ./cached_jars:/root/.ivy2 - ./reports:/app/reports - ./tests:/app/tests @@ -156,6 +157,19 @@ services: retries: 3 profiles: [hive, hdfs, all] + keycloak: + image: quay.io/keycloak/keycloak:latest + command: start-dev + restart: unless-stopped + environment: + KEYCLOAK_ADMIN: admin + KEYCLOAK_ADMIN_PASSWORD: admin + ports: + - 8080:8080 + volumes: + - keycloak_data:/opt/keycloak/data + profiles: [keycloak, all] + test-hive: image: mtsrus/hadoop:hadoop2.7.3-hive2.3.9 restart: unless-stopped @@ -184,3 +198,4 @@ services: volumes: postgres_test_data: rabbitmq_test_data: + keycloak_data: diff --git a/docs/backend/auth/custom.rst b/docs/backend/auth/custom.rst new file mode 100644 index 00000000..03490d6b --- /dev/null +++ b/docs/backend/auth/custom.rst @@ -0,0 +1,10 @@ +.. _backend-auth-custom: + +Custom Auth provider +==================== + +You can implement a custom auth provider by inheriting from the class below and implementing the necessary methods. + +.. autoclass:: syncmaster.backend.providers.auth.AuthProvider + :members: + :member-order: bysource diff --git a/docs/backend/auth/dummy.rst b/docs/backend/auth/dummy.rst new file mode 100644 index 00000000..a991c0ac --- /dev/null +++ b/docs/backend/auth/dummy.rst @@ -0,0 +1,82 @@ +.. _backend-auth-dummy: + +Dummy Auth provider +=================== + +Description +----------- + +This auth provider allows signing in with any username and password, and then issues an access token. + +After successful auth, the username is saved to the backend database. It is then used for creating audit records for any object change, see ``changed_by`` field. + +Interaction schema +------------------ + +.. dropdown:: Interaction schema + + .. 
plantuml:: + + @startuml + title DummyAuthProvider + participant "Client" + participant "Backend" + + == POST v1/auth/token == + + activate "Client" + alt Successful case + "Client" -> "Backend" ++ : login + password + "Backend" --> "Backend" : Password is completely ignored + "Backend" --> "Backend" : Check user in internal backend database + "Backend" -> "Backend" : Create user if not exist + "Backend" -[#green]> "Client" -- : Generate and return access_token + + else User is blocked + "Client" -> "Backend" ++ : login + password + "Backend" --> "Backend" : Password is completely ignored + "Backend" --> "Backend" : Check user in internal backend database + "Backend" x-[#red]> "Client" -- : 401 Unauthorized + + else User is deleted + "Client" -> "Backend" ++ : login + password + "Backend" --> "Backend" : Password is completely ignored + "Backend" --> "Backend" : Check user in internal backend database + "Backend" x-[#red]> "Client" -- : 404 Not found + end + + == GET v1/namespaces == + + alt Successful case + "Client" -> "Backend" ++ : access_token + "Backend" --> "Backend" : Validate token + "Backend" --> "Backend" : Check user in internal backend database + "Backend" -> "Backend" : Get data + "Backend" -[#green]> "Client" -- : Return data + + else Token is expired + "Client" -> "Backend" ++ : access_token + "Backend" --> "Backend" : Validate token + "Backend" x-[#red]> "Client" -- : 401 Unauthorized + + else User is blocked + "Client" -> "Backend" ++ : access_token + "Backend" --> "Backend" : Validate token + "Backend" --> "Backend" : Check user in internal backend database + "Backend" x-[#red]> "Client" -- : 401 Unauthorized + + else User is deleted + "Client" -> "Backend" ++ : access_token + "Backend" --> "Backend" : Validate token + "Backend" --> "Backend" : Check user in internal backend database + "Backend" x-[#red]> "Client" -- : 404 Not found + end + + deactivate "Client" + @enduml + +Configuration +------------- + +.. 
autopydantic_model:: syncmaster.backend.settings.auth.dummy.DummyAuthProviderSettings +.. autopydantic_model:: syncmaster.backend.settings.auth.jwt.JWTSettings diff --git a/docs/backend/auth/index.rst b/docs/backend/auth/index.rst new file mode 100644 index 00000000..728b193e --- /dev/null +++ b/docs/backend/auth/index.rst @@ -0,0 +1,21 @@ +.. _backend-auth-providers: + +Auth Providers +============== + +Syncmaster supports different auth provider implementations. You can change the implementation via settings: + +.. autopydantic_model:: syncmaster.backend.settings.auth.AuthSettings + +.. toctree:: + :maxdepth: 2 + :caption: Auth providers + + dummy + keycloak + +.. toctree:: + :maxdepth: 2 + :caption: For developers + + custom diff --git a/docs/backend/auth/keycloak.rst b/docs/backend/auth/keycloak.rst new file mode 100644 index 00000000..6fe7381b --- /dev/null +++ b/docs/backend/auth/keycloak.rst @@ -0,0 +1,81 @@ +.. _backend-auth-keycloak: + +Keycloak Auth provider +====================== + +Description +----------- + +TODO: + +Strategies +---------- + +TODO: + +Interaction schema +------------------ + +.. dropdown:: Interaction schema + + .. 
plantuml:: + + @startuml + title Keycloak Authorization Flow + participant "Client (User from Browser)" as Client + participant "Syncmaster" + participant "Keycloak" + + == Client Authentication at Keycloak == + Client -> Syncmaster : Request endpoint that requires authentication (/v1/users) + + Syncmaster x-[#red]> Client : Redirect to Keycloak login URL (if no access token) + + Client -> Keycloak : Callback redirect to Keycloak login page + + alt Successful login + Client --> Keycloak : Log in with login and password + else Login failed + Keycloak x-[#red]> Client -- : Display error (401 Unauthorized) + end + + Keycloak -> Client : Redirect to Syncmaster to callback endpoint with code + Client -> Syncmaster : Callback request to /v1/auth/callback with code + Syncmaster-> Keycloak : Exchange code for access token + Keycloak --> Syncmaster : Return JWT token + Syncmaster --> Client : Set JWT token in user's browser in cookies and redirect /v1/users + + Client --> Syncmaster : Redirect to /v1/users + Syncmaster -> Syncmaster : Get user info from JWT token and check user in internal backend database + Syncmaster -> Syncmaster : Create user in internal backend database if not exist + Syncmaster -[#green]> Client -- : Return requested data + + + + == GET v1/users == + alt Successful case + Client -> Syncmaster : Request data with JWT token + Syncmaster --> Syncmaster : Get user info from JWT token and check user in internal backend database + Syncmaster -> Syncmaster : Create user in internal backend database if not exist + Syncmaster -[#green]> Client -- : Return requested data + + else Access token is expired + Syncmaster -> Keycloak : Get new JWT token via refresh token + Keycloak --> Syncmaster : Return new JWT token + Syncmaster --> Syncmaster : Get user info from JWT token and check user in internal backend database + Syncmaster -> Syncmaster : Create user in internal backend database if not exist + Syncmaster -[#green]> Client -- : Return requested data and set 
new JWT token in user's browser in cookies + + else Refresh token is expired + Syncmaster x-[#red]> Client -- : Redirect to Keycloak login URL + end + + deactivate Client + @enduml + +Basic configuration +------------------- + +.. autopydantic_model:: syncmaster.backend.settings.auth.keycloak.KeycloakProviderSettings +.. autopydantic_model:: syncmaster.backend.settings.auth.jwt.JWTSettings + diff --git a/docs/backend/configuration/cors.rst b/docs/backend/configuration/cors.rst index 4768d864..6c4bd5cf 100644 --- a/docs/backend/configuration/cors.rst +++ b/docs/backend/configuration/cors.rst @@ -5,4 +5,4 @@ CORS settings These settings used to control `CORS `_ options. -.. autopydantic_model:: syncmaster.settings.server.cors.CORSSettings \ No newline at end of file +.. autopydantic_model:: syncmaster.backend.settings.server.cors.CORSSettings \ No newline at end of file diff --git a/docs/backend/configuration/index.rst b/docs/backend/configuration/index.rst index ba68d1df..90ee6507 100644 --- a/docs/backend/configuration/index.rst +++ b/docs/backend/configuration/index.rst @@ -11,6 +11,7 @@ Configuration database broker logging + session cors debug monitoring diff --git a/docs/backend/configuration/monitoring.rst b/docs/backend/configuration/monitoring.rst index f12b25d6..c2b8273e 100644 --- a/docs/backend/configuration/monitoring.rst +++ b/docs/backend/configuration/monitoring.rst @@ -13,4 +13,4 @@ REST API server provides the following endpoints with Prometheus compatible metr These endpoints are enabled and configured using settings below: -.. autopydantic_model:: syncmaster.settings.server.monitoring.MonitoringSettings +.. 
autopydantic_model:: syncmaster.backend.settings.server.monitoring.MonitoringSettings diff --git a/docs/backend/configuration/openapi.rst b/docs/backend/configuration/openapi.rst index b1a4603a..27620d89 100644 --- a/docs/backend/configuration/openapi.rst +++ b/docs/backend/configuration/openapi.rst @@ -5,8 +5,8 @@ OpenAPI settings These settings used to control exposing OpenAPI.json and SwaggerUI/ReDoc endpoints. -.. autopydantic_model:: syncmaster.settings.server.openapi.OpenAPISettings -.. autopydantic_model:: syncmaster.settings.server.openapi.SwaggerSettings -.. autopydantic_model:: syncmaster.settings.server.openapi.RedocSettings -.. autopydantic_model:: syncmaster.settings.server.openapi.LogoSettings -.. autopydantic_model:: syncmaster.settings.server.openapi.FaviconSettings +.. autopydantic_model:: syncmaster.backend.settings.server.openapi.OpenAPISettings +.. autopydantic_model:: syncmaster.backend.settings.server.openapi.SwaggerSettings +.. autopydantic_model:: syncmaster.backend.settings.server.openapi.RedocSettings +.. autopydantic_model:: syncmaster.backend.settings.server.openapi.LogoSettings +.. autopydantic_model:: syncmaster.backend.settings.server.openapi.FaviconSettings diff --git a/docs/backend/configuration/session.rst b/docs/backend/configuration/session.rst new file mode 100644 index 00000000..893dd76b --- /dev/null +++ b/docs/backend/configuration/session.rst @@ -0,0 +1,8 @@ +.. _backend-configuration-server-session: + +Session settings +================ + +These settings used to control `Session `_ options. + +.. 
autopydantic_model:: syncmaster.backend.settings.server.session.SessionSettings \ No newline at end of file diff --git a/docs/backend/configuration/static_files.rst b/docs/backend/configuration/static_files.rst index dd31ddd5..6628a962 100644 --- a/docs/backend/configuration/static_files.rst +++ b/docs/backend/configuration/static_files.rst @@ -5,4 +5,4 @@ Serving static files These settings used to control serving static files by a server. -.. autopydantic_model:: syncmaster.settings.server.static_files.StaticFilesSettings +.. autopydantic_model:: syncmaster.backend.settings.server.static_files.StaticFilesSettings diff --git a/docs/backend/install.rst b/docs/backend/install.rst index 20dea3fe..dc62e641 100644 --- a/docs/backend/install.rst +++ b/docs/backend/install.rst @@ -123,6 +123,6 @@ To start backend server you need to execute following command: .. code-block:: console - $ python -m horizon.backend --host 0.0.0.0 --port 8000 + $ python -m syncmaster.backend --host 0.0.0.0 --port 8000 After server is started and ready, open http://localhost:8000/docs. diff --git a/docs/changelog/next_release/123.feature.rst b/docs/changelog/next_release/123.feature.rst new file mode 100644 index 00000000..852a4c00 --- /dev/null +++ b/docs/changelog/next_release/123.feature.rst @@ -0,0 +1,13 @@ +- Implemented ``KeycloakAuthProvider`` for Single Sign-On (SSO) authentication. +- Implemented ``DummyAuthProvider`` for development and testing environments. +- Enabled dynamic selection of authentication provider via environment variable SYNCMASTER__AUTH__PROVIDER: + + .. code:: + + # syncmaster.backend.providers.auth.keycloak_provider.KeycloakAuthProvider for Keycloak. + SYNCMASTER__AUTH__PROVIDER=syncmaster.backend.providers.auth.keycloak_provider.KeycloakAuthProvider + + # syncmaster.backend.providers.auth.dummy_provider.DummyAuthProvider for Dummy authentication. 
+ SYNCMASTER__AUTH__PROVIDER=syncmaster.backend.providers.auth.dummy_provider.DummyAuthProvider + +- Updated ``User`` model to include ``email``, ``first_name``, ``middle_name``, and ``last_name`` fields. diff --git a/docs/index.rst b/docs/index.rst index 1e6d5282..295eea91 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -16,6 +16,7 @@ backend/install backend/architecture + backend/auth/index backend/openapi backend/configuration/index diff --git a/poetry.lock b/poetry.lock index d7155c0b..a9203b6c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -178,6 +178,35 @@ starlette = ">=0.18" [package.extras] celery = ["celery"] +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = true +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] + +[[package]] +name = "async-property" +version = "0.2.2" +description = "Python decorator for async properties." 
+optional = true +python-versions = "*" +files = [ + {file = "async_property-0.2.2-py2.py3-none-any.whl", hash = "sha256:8924d792b5843994537f8ed411165700b27b2bd966cefc4daeefc1253442a9d7"}, + {file = "async_property-0.2.2.tar.gz", hash = "sha256:17d9bd6ca67e27915a75d92549df64b5c7174e9dc806b30a3934dc4ff0506380"}, +] + [[package]] name = "asyncpg" version = "0.30.0" @@ -850,6 +879,36 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +optional = true +python-versions = "*" +files = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] + +[package.dependencies] +packaging = "*" + +[[package]] +name = "devtools" +version = "0.12.2" +description = "Python's missing debug print command, and more." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "devtools-0.12.2-py3-none-any.whl", hash = "sha256:c366e3de1df4cdd635f1ad8cbcd3af01a384d7abda71900e68d43b04eb6aaca7"}, + {file = "devtools-0.12.2.tar.gz", hash = "sha256:efceab184cb35e3a11fa8e602cc4fadacaa2e859e920fc6f87bf130b69885507"}, +] + +[package.dependencies] +asttokens = ">=2.0.0,<3.0.0" +executing = ">=1.1.1" +pygments = ">=2.15.0" + [[package]] name = "distlib" version = "0.3.9" @@ -930,6 +989,20 @@ files = [ {file = "evacuator-1.0.4.tar.gz", hash = "sha256:4fac38ee4241e826fced8115ab7cdc8ca2fd18e2e200083405c4a802e836e926"}, ] +[[package]] +name = "executing" +version = "2.1.0" +description = "Get the currently executing AST node of a frame, and other information" +optional = true +python-versions = ">=3.8" +files = [ + {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, + {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + [[package]] name = "faker" version = "30.8.2" @@ -1080,49 +1153,49 @@ sphinx-basic-ng = ">=1.0.0.beta2" [[package]] name = "gevent" -version = "24.10.3" +version = "24.11.1" description = "Coroutine-based network library" optional = false python-versions = ">=3.9" files = [ - {file = "gevent-24.10.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d7a1ad0f2da582f5bd238bca067e1c6c482c30c15a6e4d14aaa3215cbb2232f3"}, - {file = "gevent-24.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4e526fdc279c655c1e809b0c34b45844182c2a6b219802da5e411bd2cf5a8ad"}, - {file = "gevent-24.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57a5c4e0bdac482c5f02f240d0354e61362df73501ef6ebafce8ef635cad7527"}, - {file = 
"gevent-24.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d67daed8383326dc8b5e58d88e148d29b6b52274a489e383530b0969ae7b9cb9"}, - {file = "gevent-24.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e24ffea72e27987979c009536fd0868e52239b44afe6cf7135ce8aafd0f108e"}, - {file = "gevent-24.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c1d80090485da1ea3d99205fe97908b31188c1f4857f08b333ffaf2de2e89d18"}, - {file = "gevent-24.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0c129f81d60cda614acb4b0c5731997ca05b031fb406fcb58ad53a7ade53b13"}, - {file = "gevent-24.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:26ca7a6b42d35129617025ac801135118333cad75856ffc3217b38e707383eba"}, - {file = "gevent-24.10.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:68c3a0d8402755eba7f69022e42e8021192a721ca8341908acc222ea597029b6"}, - {file = "gevent-24.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d850a453d66336272be4f1d3a8126777f3efdaea62d053b4829857f91e09755"}, - {file = "gevent-24.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e58ee3723f1fbe07d66892f1caa7481c306f653a6829b6fd16cb23d618a5915"}, - {file = "gevent-24.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b52382124eca13135a3abe4f65c6bd428656975980a48e51b17aeab68bdb14db"}, - {file = "gevent-24.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ca2266e08f43c0e22c028801dff7d92a0b102ef20e4caeb6a46abfb95f6a328"}, - {file = "gevent-24.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d758f0d4dbf32502ec87bb9b536ca8055090a16f8305f0ada3ce6f34e70f2fd7"}, - {file = "gevent-24.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0de6eb3d55c03138fda567d9bfed28487ce5d0928c5107549767a93efdf2be26"}, - {file = "gevent-24.10.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:385710355eadecdb70428a5ae3e7e5a45dcf888baa1426884588be9d25ac4290"}, - {file = "gevent-24.10.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3ad8fb70aa0ebc935729c9699ac31b210a49b689a7b27b7ac9f91676475f3f53"}, - {file = "gevent-24.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f18689f7a70d2ed0e75bad5036ec3c89690a493d4cfac8d7cdb258ac04b132bd"}, - {file = "gevent-24.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f4f171d4d2018170454d84c934842e1b5f6ce7468ba298f6e7f7cff15000a3"}, - {file = "gevent-24.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7021e26d70189b33c27173d4173f27bf4685d6b6f1c0ea50e5335f8491cb110c"}, - {file = "gevent-24.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34aea15f9c79f27a8faeaa361bc1e72c773a9b54a1996a2ec4eefc8bcd59a824"}, - {file = "gevent-24.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8af65a4d4feaec6042c666d22c322a310fba3b47e841ad52f724b9c3ce5da48e"}, - {file = "gevent-24.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:89c4115e3f5ada55f92b61701a46043fe42f702b5af863b029e4c1a76f6cc2d4"}, - {file = "gevent-24.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:1ce6dab94c0b0d24425ba55712de2f8c9cb21267150ca63f5bb3a0e1f165da99"}, - {file = "gevent-24.10.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:f147e38423fbe96e8731f60a63475b3d2cab2f3d10578d8ee9d10c507c58a2ff"}, - {file = "gevent-24.10.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e6984ec96fc95fd67488555c38ece3015be1f38b1bcceb27b7d6c36b343008"}, - {file = "gevent-24.10.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:051b22e2758accfddb0457728bfc9abf8c3f2ce6bca43f1ff6e07b5ed9e49bf4"}, - {file = "gevent-24.10.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:eb5edb6433764119a664bbb148d2aea9990950aa89cc3498f475c2408d523ea3"}, - {file = "gevent-24.10.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce417bcaaab496bc9c77f75566531e9d93816262037b8b2dbb88b0fdcd66587c"}, - {file = "gevent-24.10.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:1c3a828b033fb02b7c31da4d75014a1f82e6c072fc0523456569a57f8b025861"}, - {file = "gevent-24.10.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f2ae3efbbd120cdf4a68b7abc27a37e61e6f443c5a06ec2c6ad94c37cd8471ec"}, - {file = "gevent-24.10.3-cp313-cp313-win_amd64.whl", hash = "sha256:9e1210334a9bc9f76c3d008e0785ca62214f8a54e1325f6c2ecab3b6a572a015"}, - {file = "gevent-24.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70e9ed7ecb70e0df7dc97c3bc420de9a45a7c76bd5861c6cfec8c549700e681e"}, - {file = "gevent-24.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3ac83b74304487afa211a01909c7dd257e574db0cd429d866c298e21df7aeedf"}, - {file = "gevent-24.10.3-cp39-cp39-win32.whl", hash = "sha256:a9a89d6e396ef6f1e3968521bf56e8c4bee25b193bbf5d428b7782d582410822"}, - {file = "gevent-24.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:40ea3e40e8bb4fdb143c2a8edf2ccfdebd56016c7317c341ce8094c7bee08818"}, - {file = "gevent-24.10.3-pp310-pypy310_pp73-macosx_11_0_universal2.whl", hash = "sha256:e534e6a968d74463b11de6c9c67f4b4bf61775fb00f2e6e0f7fcdd412ceade18"}, - {file = "gevent-24.10.3.tar.gz", hash = "sha256:aa7ee1bd5cabb2b7ef35105f863b386c8d5e332f754b60cfc354148bd70d35d1"}, + {file = "gevent-24.11.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:92fe5dfee4e671c74ffaa431fd7ffd0ebb4b339363d24d0d944de532409b935e"}, + {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7bfcfe08d038e1fa6de458891bca65c1ada6d145474274285822896a858c870"}, + {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:7398c629d43b1b6fd785db8ebd46c0a353880a6fab03d1cf9b6788e7240ee32e"}, + {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7886b63ebfb865178ab28784accd32f287d5349b3ed71094c86e4d3ca738af5"}, + {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9ca80711e6553880974898d99357fb649e062f9058418a92120ca06c18c3c59"}, + {file = "gevent-24.11.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e24181d172f50097ac8fc272c8c5b030149b630df02d1c639ee9f878a470ba2b"}, + {file = "gevent-24.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1d4fadc319b13ef0a3c44d2792f7918cf1bca27cacd4d41431c22e6b46668026"}, + {file = "gevent-24.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:3d882faa24f347f761f934786dde6c73aa6c9187ee710189f12dcc3a63ed4a50"}, + {file = "gevent-24.11.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:351d1c0e4ef2b618ace74c91b9b28b3eaa0dd45141878a964e03c7873af09f62"}, + {file = "gevent-24.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5efe72e99b7243e222ba0c2c2ce9618d7d36644c166d63373af239da1036bab"}, + {file = "gevent-24.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d3b249e4e1f40c598ab8393fc01ae6a3b4d51fc1adae56d9ba5b315f6b2d758"}, + {file = "gevent-24.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81d918e952954675f93fb39001da02113ec4d5f4921bf5a0cc29719af6824e5d"}, + {file = "gevent-24.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9c935b83d40c748b6421625465b7308d87c7b3717275acd587eef2bd1c39546"}, + {file = "gevent-24.11.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff96c5739834c9a594db0e12bf59cb3fa0e5102fc7b893972118a3166733d61c"}, + {file = "gevent-24.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d6c0a065e31ef04658f799215dddae8752d636de2bed61365c358f9c91e7af61"}, + {file = 
"gevent-24.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:97e2f3999a5c0656f42065d02939d64fffaf55861f7d62b0107a08f52c984897"}, + {file = "gevent-24.11.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:a3d75fa387b69c751a3d7c5c3ce7092a171555126e136c1d21ecd8b50c7a6e46"}, + {file = "gevent-24.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:beede1d1cff0c6fafae3ab58a0c470d7526196ef4cd6cc18e7769f207f2ea4eb"}, + {file = "gevent-24.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85329d556aaedced90a993226d7d1186a539c843100d393f2349b28c55131c85"}, + {file = "gevent-24.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:816b3883fa6842c1cf9d2786722014a0fd31b6312cca1f749890b9803000bad6"}, + {file = "gevent-24.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b24d800328c39456534e3bc3e1684a28747729082684634789c2f5a8febe7671"}, + {file = "gevent-24.11.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5f1701ce0f7832f333dd2faf624484cbac99e60656bfbb72504decd42970f0f"}, + {file = "gevent-24.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d740206e69dfdfdcd34510c20adcb9777ce2cc18973b3441ab9767cd8948ca8a"}, + {file = "gevent-24.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:68bee86b6e1c041a187347ef84cf03a792f0b6c7238378bf6ba4118af11feaae"}, + {file = "gevent-24.11.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:d618e118fdb7af1d6c1a96597a5cd6ac84a9f3732b5be8515c6a66e098d498b6"}, + {file = "gevent-24.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2142704c2adce9cd92f6600f371afb2860a446bfd0be5bd86cca5b3e12130766"}, + {file = "gevent-24.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92e0d7759de2450a501effd99374256b26359e801b2d8bf3eedd3751973e87f5"}, + {file = "gevent-24.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:ca845138965c8c56d1550499d6b923eb1a2331acfa9e13b817ad8305dde83d11"}, + {file = "gevent-24.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:356b73d52a227d3313f8f828025b665deada57a43d02b1cf54e5d39028dbcf8d"}, + {file = "gevent-24.11.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:58851f23c4bdb70390f10fc020c973ffcf409eb1664086792c8b1e20f25eef43"}, + {file = "gevent-24.11.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1ea50009ecb7f1327347c37e9eb6561bdbc7de290769ee1404107b9a9cba7cf1"}, + {file = "gevent-24.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:ec68e270543ecd532c4c1d70fca020f90aa5486ad49c4f3b8b2e64a66f5c9274"}, + {file = "gevent-24.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9347690f4e53de2c4af74e62d6fabc940b6d4a6cad555b5a379f61e7d3f2a8e"}, + {file = "gevent-24.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8619d5c888cb7aebf9aec6703e410620ef5ad48cdc2d813dd606f8aa7ace675f"}, + {file = "gevent-24.11.1-cp39-cp39-win32.whl", hash = "sha256:c6b775381f805ff5faf250e3a07c0819529571d19bb2a9d474bee8c3f90d66af"}, + {file = "gevent-24.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c3443b0ed23dcb7c36a748d42587168672953d368f2956b17fad36d43b58836"}, + {file = "gevent-24.11.1-pp310-pypy310_pp73-macosx_11_0_universal2.whl", hash = "sha256:f43f47e702d0c8e1b8b997c00f1601486f9f976f84ab704f8f11536e3fa144c9"}, + {file = "gevent-24.11.1.tar.gz", hash = "sha256:8bd1419114e9e4a3ed33a5bad766afff9a3cf765cb440a582a1b3a9bc80c1aca"}, ] [package.dependencies] @@ -1419,6 +1492,17 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "itsdangerous" +version = "2.2.0" +description = "Safely pass data to untrusted environments and back." 
+optional = true +python-versions = ">=3.8" +files = [ + {file = "itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef"}, + {file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"}, +] + [[package]] name = "jinja2" version = "3.1.4" @@ -1436,6 +1520,21 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jwcrypto" +version = "1.5.6" +description = "Implementation of JOSE Web standards" +optional = true +python-versions = ">= 3.8" +files = [ + {file = "jwcrypto-1.5.6-py3-none-any.whl", hash = "sha256:150d2b0ebbdb8f40b77f543fb44ffd2baeff48788be71f67f03566692fd55789"}, + {file = "jwcrypto-1.5.6.tar.gz", hash = "sha256:771a87762a0c081ae6166958a954f80848820b2ab066937dc8b8379d65b1b039"}, +] + +[package.dependencies] +cryptography = ">=3.4" +typing-extensions = ">=4.5.0" + [[package]] name = "kombu" version = "5.4.2" @@ -1944,99 +2043,98 @@ test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "psycopg" -version = "3.2.3" +version = "3.1.19" description = "PostgreSQL database adapter for Python" optional = true -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "psycopg-3.2.3-py3-none-any.whl", hash = "sha256:644d3973fe26908c73d4be746074f6e5224b03c1101d302d9a53bf565ad64907"}, - {file = "psycopg-3.2.3.tar.gz", hash = "sha256:a5764f67c27bec8bfac85764d23c534af2c27b893550377e37ce59c12aac47a2"}, + {file = "psycopg-3.1.19-py3-none-any.whl", hash = "sha256:dca5e5521c859f6606686432ae1c94e8766d29cc91f2ee595378c510cc5b0731"}, + {file = "psycopg-3.1.19.tar.gz", hash = "sha256:92d7b78ad82426cdcf1a0440678209faa890c6e1721361c2f8901f0dccd62961"}, ] [package.dependencies] -psycopg-binary = {version = "3.2.3", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} -typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} +psycopg-binary = 
{version = "3.1.19", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} +typing-extensions = ">=4.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.2.3)"] -c = ["psycopg-c (==3.2.3)"] -dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.11)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +binary = ["psycopg-binary (==3.1.19)"] +c = ["psycopg-c (==3.1.19)"] +dev = ["black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.4.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] -test = ["anyio (>=4.0)", "mypy (>=1.11)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] +test = ["anyio (>=3.6.2,<4.0)", "mypy (>=1.4.1)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] [[package]] name = "psycopg-binary" -version = "3.2.3" +version = "3.1.19" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = true -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:965455eac8547f32b3181d5ec9ad8b9be500c10fe06193543efaaebe3e4ce70c"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:71adcc8bc80a65b776510bc39992edf942ace35b153ed7a9c6c573a6849ce308"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f73adc05452fb85e7a12ed3f69c81540a8875960739082e6ea5e28c373a30774"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8630943143c6d6ca9aefc88bbe5e76c90553f4e1a3b2dc339e67dc34aa86f7e"}, - {file 
= "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bffb61e198a91f712cc3d7f2d176a697cb05b284b2ad150fb8edb308eba9002"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4fa2240c9fceddaa815a58f29212826fafe43ce80ff666d38c4a03fb036955"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:192a5f8496e6e1243fdd9ac20e117e667c0712f148c5f9343483b84435854c78"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64dc6e9ec64f592f19dc01a784e87267a64a743d34f68488924251253da3c818"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:79498df398970abcee3d326edd1d4655de7d77aa9aecd578154f8af35ce7bbd2"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:949551752930d5e478817e0b49956350d866b26578ced0042a61967e3fcccdea"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:80a2337e2dfb26950894c8301358961430a0304f7bfe729d34cc036474e9c9b1"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:6d8f2144e0d5808c2e2aed40fbebe13869cd00c2ae745aca4b3b16a435edb056"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94253be2b57ef2fea7ffe08996067aabf56a1eb9648342c9e3bad9e10c46e045"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fda0162b0dbfa5eaed6cdc708179fa27e148cb8490c7d62e5cf30713909658ea"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c0419cdad8c70eaeb3116bb28e7b42d546f91baf5179d7556f230d40942dc78"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74fbf5dd3ef09beafd3557631e282f00f8af4e7a78fbfce8ab06d9cd5a789aae"}, - {file = 
"psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d784f614e4d53050cbe8abf2ae9d1aaacf8ed31ce57b42ce3bf2a48a66c3a5c"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4e76ce2475ed4885fe13b8254058be710ec0de74ebd8ef8224cf44a9a3358e5f"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5938b257b04c851c2d1e6cb2f8c18318f06017f35be9a5fe761ee1e2e344dfb7"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:257c4aea6f70a9aef39b2a77d0658a41bf05c243e2bf41895eb02220ac6306f3"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:06b5cc915e57621eebf2393f4173793ed7e3387295f07fed93ed3fb6a6ccf585"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:09baa041856b35598d335b1a74e19a49da8500acedf78164600694c0ba8ce21b"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:48f8ca6ee8939bab760225b2ab82934d54330eec10afe4394a92d3f2a0c37dd6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5361ea13c241d4f0ec3f95e0bf976c15e2e451e9cc7ef2e5ccfc9d170b197a40"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb987f14af7da7c24f803111dbc7392f5070fd350146af3345103f76ea82e339"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0463a11b1cace5a6aeffaf167920707b912b8986a9c7920341c75e3686277920"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b7be9a6c06518967b641fb15032b1ed682fd3b0443f64078899c61034a0bca6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64a607e630d9f4b2797f641884e52b9f8e239d35943f51bef817a384ec1678fe"}, - {file = 
"psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fa33ead69ed133210d96af0c63448b1385df48b9c0247eda735c5896b9e6dbbf"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1f8b0d0e99d8e19923e6e07379fa00570be5182c201a8c0b5aaa9a4d4a4ea20b"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:709447bd7203b0b2debab1acec23123eb80b386f6c29e7604a5d4326a11e5bd6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5e37d5027e297a627da3551a1e962316d0f88ee4ada74c768f6c9234e26346d9"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:261f0031ee6074765096a19b27ed0f75498a8338c3dcd7f4f0d831e38adf12d1"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:41fdec0182efac66b27478ac15ef54c9ebcecf0e26ed467eb7d6f262a913318b"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:07d019a786eb020c0f984691aa1b994cb79430061065a694cf6f94056c603d26"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c57615791a337378fe5381143259a6c432cdcbb1d3e6428bfb7ce59fff3fb5c"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8eb9a4e394926b93ad919cad1b0a918e9b4c846609e8c1cfb6b743683f64da0"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5905729668ef1418bd36fbe876322dcb0f90b46811bba96d505af89e6fbdce2f"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd65774ed7d65101b314808b6893e1a75b7664f680c3ef18d2e5c84d570fa393"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:700679c02f9348a0d0a2adcd33a0275717cd0d0aee9d4482b47d935023629505"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:96334bb64d054e36fed346c50c4190bad9d7c586376204f50bede21a913bf942"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9099e443d4cc24ac6872e6a05f93205ba1a231b1a8917317b07c9ef2b955f1f4"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1985ab05e9abebfbdf3163a16ebb37fbc5d49aff2bf5b3d7375ff0920bbb54cd"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:e90352d7b610b4693fad0feea48549d4315d10f1eba5605421c92bb834e90170"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:69320f05de8cdf4077ecd7fefdec223890eea232af0d58f2530cbda2871244a0"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4926ea5c46da30bec4a85907aa3f7e4ea6313145b2aa9469fdb861798daf1502"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c64c4cd0d50d5b2288ab1bcb26c7126c772bbdebdfadcd77225a77df01c4a57e"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05a1bdce30356e70a05428928717765f4a9229999421013f41338d9680d03a63"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad357e426b0ea5c3043b8ec905546fa44b734bf11d33b3da3959f6e4447d350"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:967b47a0fd237aa17c2748fdb7425015c394a6fb57cdad1562e46a6eb070f96d"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:71db8896b942770ed7ab4efa59b22eee5203be2dfdee3c5258d60e57605d688c"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2773f850a778575dd7158a6dd072f7925b67f3ba305e2003538e8831fec77a1d"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aeddf7b3b3f6e24ccf7d0edfe2d94094ea76b40e831c16eff5230e040ce3b76b"}, - {file = 
"psycopg_binary-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:824c867a38521d61d62b60aca7db7ca013a2b479e428a0db47d25d8ca5067410"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:9994f7db390c17fc2bd4c09dca722fd792ff8a49bb3bdace0c50a83f22f1767d"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1303bf8347d6be7ad26d1362af2c38b3a90b8293e8d56244296488ee8591058e"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:842da42a63ecb32612bb7f5b9e9f8617eab9bc23bd58679a441f4150fcc51c96"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bb342a01c76f38a12432848e6013c57eb630103e7556cf79b705b53814c3949"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd40af959173ea0d087b6b232b855cfeaa6738f47cb2a0fd10a7f4fa8b74293f"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9b60b465773a52c7d4705b0a751f7f1cdccf81dd12aee3b921b31a6e76b07b0e"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fc6d87a1c44df8d493ef44988a3ded751e284e02cdf785f746c2d357e99782a6"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f0b018e37608c3bfc6039a1dc4eb461e89334465a19916be0153c757a78ea426"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a29f5294b0b6360bfda69653697eff70aaf2908f58d1073b0acd6f6ab5b5a4f"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:e56b1fd529e5dde2d1452a7d72907b37ed1b4f07fdced5d8fb1e963acfff6749"}, + {file = "psycopg_binary-3.1.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7204818f05151dd08f8f851defb01972ec9d2cc925608eb0de232563f203f354"}, + {file = "psycopg_binary-3.1.19-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:6d4e67fd86758dbeac85641419a54f84d74495a8683b58ad5dfad08b7fc37a8f"}, + {file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12173e34b176e93ad2da913de30f774d5119c2d4d4640c6858d2d77dfa6c9bf"}, + {file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052f5193304066318853b4b2e248f523c8f52b371fc4e95d4ef63baee3f30955"}, + {file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29008f3f8977f600b8a7fb07c2e041b01645b08121760609cc45e861a0364dc9"}, + {file = "psycopg_binary-3.1.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6a9a651a08d876303ed059c9553df18b3c13c3406584a70a8f37f1a1fe2709"}, + {file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:91a645e6468c4f064b7f4f3b81074bdd68fe5aa2b8c5107de15dcd85ba6141be"}, + {file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5c6956808fd5cf0576de5a602243af8e04594b25b9a28675feddc71c5526410a"}, + {file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:1622ca27d5a7a98f7d8f35e8b146dc7efda4a4b6241d2edf7e076bd6bcecbeb4"}, + {file = "psycopg_binary-3.1.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a100482950a55228f648bd382bb71bfaff520002f29845274fccbbf02e28bd52"}, + {file = "psycopg_binary-3.1.19-cp310-cp310-win_amd64.whl", hash = "sha256:955ca8905c0251fc4af7ce0a20999e824a25652f53a558ab548b60969f1f368e"}, + {file = "psycopg_binary-3.1.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cf49e91dcf699b8a449944ed898ef1466b39b92720613838791a551bc8f587a"}, + {file = "psycopg_binary-3.1.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:964c307e400c5f33fa762ba1e19853e048814fcfbd9679cc923431adb7a2ead2"}, + {file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:3433924e1b14074798331dc2bfae2af452ed7888067f2fc145835704d8981b15"}, + {file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00879d4c6be4b3afc510073f48a5e960f797200e261ab3d9bd9b7746a08c669d"}, + {file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34a6997c80f86d3dd80a4f078bb3b200079c47eeda4fd409d8899b883c90d2ac"}, + {file = "psycopg_binary-3.1.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0106e42b481677c41caa69474fe530f786dcef88b11b70000f0e45a03534bc8f"}, + {file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81efe09ba27533e35709905c3061db4dc9fb814f637360578d065e2061fbb116"}, + {file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d312d6dddc18d9c164e1893706269c293cba1923118349d375962b1188dafb01"}, + {file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:bfd2c734da9950f7afaad5f132088e0e1478f32f042881fca6651bb0c8d14206"}, + {file = "psycopg_binary-3.1.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8a732610a5a6b4f06dadcf9288688a8ff202fd556d971436a123b7adb85596e2"}, + {file = "psycopg_binary-3.1.19-cp311-cp311-win_amd64.whl", hash = "sha256:321814a9a3ad785855a821b842aba08ca1b7de7dfb2979a2f0492dca9ec4ae70"}, + {file = "psycopg_binary-3.1.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4aa0ca13bb8a725bb6d12c13999217fd5bc8b86a12589f28a74b93e076fbb959"}, + {file = "psycopg_binary-3.1.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:469424e354ebcec949aa6aa30e5a9edc352a899d9a68ad7a48f97df83cc914cf"}, + {file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b04f5349313529ae1f1c42fe1aa0443faaf50fdf12d13866c2cc49683bfa53d0"}, + {file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:959feabddc7fffac89b054d6f23f3b3c62d7d3c90cd414a02e3747495597f150"}, + {file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e9da624a6ca4bc5f7fa1f03f8485446b5b81d5787b6beea2b4f8d9dbef878ad7"}, + {file = "psycopg_binary-3.1.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1823221a6b96e38b15686170d4fc5b36073efcb87cce7d3da660440b50077f6"}, + {file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:866db42f986298f0cf15d805225eb8df2228bf19f7997d7f1cb5f388cbfc6a0f"}, + {file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:738c34657305b5973af6dbb6711b07b179dfdd21196d60039ca30a74bafe9648"}, + {file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb9758473200384a04374d0e0cac6f451218ff6945a024f65a1526802c34e56e"}, + {file = "psycopg_binary-3.1.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0e991632777e217953ac960726158987da684086dd813ac85038c595e7382c91"}, + {file = "psycopg_binary-3.1.19-cp312-cp312-win_amd64.whl", hash = "sha256:1d87484dd42c8783c44a30400949efb3d81ef2487eaa7d64d1c54df90cf8b97a"}, + {file = "psycopg_binary-3.1.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d1d1723d7449c12bb61aca7eb6e0c6ab2863cd8dc0019273cc4d4a1982f84bdb"}, + {file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e538a8671005641fa195eab962f85cf0504defbd3b548c4c8fc27102a59f687b"}, + {file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c50592bc8517092f40979e4a5d934f96a1737a77724bb1d121eb78b614b30fc8"}, + {file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95f16ae82bc242b76cd3c3e5156441e2bd85ff9ec3a9869d750aad443e46073c"}, + {file = "psycopg_binary-3.1.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:aebd1e98e865e9a28ce0cb2c25b7dfd752f0d1f0a423165b55cd32a431dcc0f4"}, + {file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:49cd7af7d49e438a39593d1dd8cab106a1912536c2b78a4d814ebdff2786094e"}, + {file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:affebd61aa3b7a8880fd4ac3ee94722940125ff83ff485e1a7c76be9adaabb38"}, + {file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d1bac282f140fa092f2bbb6c36ed82270b4a21a6fc55d4b16748ed9f55e50fdb"}, + {file = "psycopg_binary-3.1.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1285aa54449e362b1d30d92b2dc042ad3ee80f479cc4e323448d0a0a8a1641fa"}, + {file = "psycopg_binary-3.1.19-cp37-cp37m-win_amd64.whl", hash = "sha256:6cff31af8155dc9ee364098a328bab688c887c732c66b8d027e5b03818ca0287"}, + {file = "psycopg_binary-3.1.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d9b689c4a17dd3130791dcbb8c30dbf05602f7c2d56c792e193fb49adc7bf5f8"}, + {file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:017518bd2de4851adc826a224fb105411e148ad845e11355edd6786ba3dfedf5"}, + {file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c35fd811f339a3cbe7f9b54b2d9a5e592e57426c6cc1051632a62c59c4810208"}, + {file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38ed45ec9673709bfa5bc17f140e71dd4cca56d4e58ef7fd50d5a5043a4f55c6"}, + {file = "psycopg_binary-3.1.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:433f1c256108f9e26f480a8cd6ddb0fb37dbc87d7f5a97e4540a9da9b881f23f"}, + {file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ed61e43bf5dc8d0936daf03a19fef3168d64191dbe66483f7ad08c4cea0bc36b"}, + {file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ae8109ff9fdf1fa0cb87ab6645298693fdd2666a7f5f85660df88f6965e0bb7"}, + {file 
= "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a53809ee02e3952fae7977c19b30fd828bd117b8f5edf17a3a94212feb57faaf"}, + {file = "psycopg_binary-3.1.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9d39d5ffc151fb33bcd55b99b0e8957299c0b1b3e5a1a5f4399c1287ef0051a9"}, + {file = "psycopg_binary-3.1.19-cp38-cp38-win_amd64.whl", hash = "sha256:e14bc8250000921fcccd53722f86b3b3d1b57db901e206e49e2ab2afc5919c2d"}, + {file = "psycopg_binary-3.1.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd88c5cea4efe614d5004fb5f5dcdea3d7d59422be796689e779e03363102d24"}, + {file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:621a814e60825162d38760c66351b4df679fd422c848b7c2f86ad399bff27145"}, + {file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46e50c05952b59a214e27d3606f6d510aaa429daed898e16b8a37bfbacc81acc"}, + {file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03354a9db667c27946e70162cb0042c3929154167f3678a30d23cebfe0ad55b5"}, + {file = "psycopg_binary-3.1.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c2f3b79037581afec7baa2bdbcb0a1787f1758744a7662099b0eca2d721cb"}, + {file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6469ebd9e93327e9f5f36dcf8692fb1e7aeaf70087c1c15d4f2c020e0be3a891"}, + {file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:85bca9765c04b6be90cb46e7566ffe0faa2d7480ff5c8d5e055ac427f039fd24"}, + {file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a836610d5c75e9cff98b9fdb3559c007c785c09eaa84a60d5d10ef6f85f671e8"}, + {file = "psycopg_binary-3.1.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ef8de7a1d9fb3518cc6b58e3c80b75a824209ad52b90c542686c912db8553dad"}, + {file = "psycopg_binary-3.1.19-cp39-cp39-win_amd64.whl", hash = 
"sha256:76fcd33342f38e35cd6b5408f1bc117d55ab8b16e5019d99b6d3ce0356c51717"}, ] [[package]] @@ -2471,6 +2569,25 @@ files = [ {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, ] +[[package]] +name = "python-keycloak" +version = "4.7.0" +description = "python-keycloak is a Python package providing access to the Keycloak API." +optional = true +python-versions = "<4.0,>=3.9" +files = [ + {file = "python_keycloak-4.7.0-py3-none-any.whl", hash = "sha256:1b8a3f8d43fc7c9e81abaf80f1fd2c808300b0675f490a5149078c132ce6be09"}, + {file = "python_keycloak-4.7.0.tar.gz", hash = "sha256:9d6c3077906d13056959ce43494a0c333b88487c1659ddd164a06446b92a3445"}, +] + +[package.dependencies] +async-property = ">=0.2.2" +deprecation = ">=2.1.0" +httpx = ">=0.23.2" +jwcrypto = ">=1.5.4" +requests = ">=2.20.0" +requests-toolbelt = ">=0.6.0" + [[package]] name = "python-multipart" version = "0.0.17" @@ -2576,6 +2693,20 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + [[package]] name = "rsa" version = "4.9" @@ -2592,23 +2723,23 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "75.3.0" +version = "75.4.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = 
"setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"}, - {file = "setuptools-75.3.0.tar.gz", hash = "sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"}, + {file = "setuptools-75.4.0-py3-none-any.whl", hash = "sha256:b3c5d862f98500b06ffdf7cc4499b48c46c317d8d56cb30b5c8bce4d88f5c216"}, + {file = "setuptools-75.4.0.tar.gz", hash = "sha256:1dc484f5cf56fd3fe7216d7b8df820802e7246cfb534a1db2aa64f14fcb9cdcb"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib-metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", 
"virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [[package]] name = "six" @@ -3373,11 +3504,11 @@ test = ["coverage[toml]", "zope.event", "zope.testing"] testing = ["coverage[toml]", "zope.event", "zope.testing"] [extras] -backend = ["alembic", "apscheduler", "asgi-correlation-id", "asyncpg", "celery", "coloredlogs", "fastapi", "jinja2", "psycopg", "pydantic-settings", "python-jose", "python-json-logger", "python-multipart", "sqlalchemy", "sqlalchemy-utils", "starlette-exporter", "uuid6", "uvicorn"] +backend = ["alembic", "apscheduler", "asgi-correlation-id", "asyncpg", "celery", "coloredlogs", "devtools", "fastapi", "itsdangerous", "jinja2", "psycopg", "pydantic-settings", "python-jose", "python-json-logger", "python-keycloak", "python-multipart", "sqlalchemy", "sqlalchemy-utils", "starlette-exporter", "uuid6", "uvicorn"] scheduler = ["apscheduler", "asyncpg", "celery", "pydantic-settings", "python-jose", "python-multipart", "sqlalchemy", "sqlalchemy-utils"] -worker = ["asgi-correlation-id", "celery", "coloredlogs", "jinja2", "onetl", "psycopg", "pydantic-settings", "python-json-logger", "sqlalchemy", "sqlalchemy-utils", "uuid6"] +worker = ["asgi-correlation-id", "celery", "coloredlogs", "jinja2", "onetl", "psycopg", "pydantic-settings", "python-json-logger", "python-keycloak", "sqlalchemy", "sqlalchemy-utils", "uuid6"] [metadata] 
lock-version = "2.0" python-versions = "^3.11" -content-hash = "5415877e93dcf62f80fc5779fa8e10f5117d43adecdf1a0249c59d2adc093987" +content-hash = "6756fe9953eb22a50f495add370f31c25e5e2817d7fe04741c816919bd083d8f" diff --git a/pyproject.toml b/pyproject.toml index ce2f9e3d..2554b102 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,13 +61,16 @@ python-multipart = { version = ">=0.0.9,<0.0.18", optional = true } celery = { version = "^5.4.0", optional = true } onetl = { version = "^0.12.0", extras = ["spark"], optional = true } # due to not supporting MacOS 14.x https://www.psycopg.org/psycopg3/docs/news.html#psycopg-3-1-20 -psycopg = { version = ">=3.1.0,<3.2.4", extras = ["binary"], optional = true } +psycopg = { version = ">=3.1.0, <3.1.20", extras = ["binary"], optional = true } uuid6 = "^2024.7.10" coloredlogs = {version = "*", optional = true} python-json-logger = {version = "*", optional = true} asyncpg = { version = ">=0.29,<0.31", optional = true } apscheduler = { version = "^3.10.4", optional = true } starlette-exporter = {version = "^0.23.0", optional = true} +itsdangerous = {version = "*", optional = true} +python-keycloak = {version = "^4.7.0", optional = true} +devtools = {version = "*", optional = true} [tool.poetry.extras] backend = [ @@ -87,6 +90,9 @@ backend = [ "coloredlogs", "python-json-logger", "asyncpg", + "devtools", + "itsdangerous", + "python-keycloak", # migrations only "celery", "apscheduler", @@ -104,6 +110,7 @@ worker = [ "uuid6", "coloredlogs", "python-json-logger", + "python-keycloak", ] scheduler = [ @@ -181,6 +188,10 @@ ignore_missing_imports = true module = "pyarrow.*" ignore_missing_imports = true +[[tool.mypy.overrides]] +module = "keycloak.*" +ignore_missing_imports = true + [[tool.mypy.overrides]] module = "avro.*" ignore_missing_imports = true diff --git a/syncmaster/backend/__init__.py b/syncmaster/backend/__init__.py index 946e645e..8436b837 100644 --- a/syncmaster/backend/__init__.py +++ 
b/syncmaster/backend/__init__.py @@ -1,7 +1,5 @@ # SPDX-FileCopyrightText: 2023-2024 MTS PJSC # SPDX-License-Identifier: Apache-2.0 -import uuid - from fastapi import FastAPI, HTTPException from fastapi.exceptions import RequestValidationError from pydantic import ValidationError @@ -15,10 +13,11 @@ validation_exception_handler, ) from syncmaster.backend.middlewares import apply_middlewares +from syncmaster.backend.providers.auth import AuthProvider from syncmaster.backend.services.unit_of_work import UnitOfWork +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.factory import create_session_factory, get_uow from syncmaster.exceptions import SyncmasterError -from syncmaster.settings import Settings def application_factory(settings: Settings) -> FastAPI: @@ -48,6 +47,9 @@ def application_factory(settings: Settings) -> FastAPI: }, ) + auth_class: type[AuthProvider] = settings.auth.provider # type: ignore[assignment] + auth_class.setup(application) + apply_middlewares(application, settings) return application diff --git a/syncmaster/backend/api/v1/auth.py b/syncmaster/backend/api/v1/auth.py new file mode 100644 index 00000000..3c0a9b96 --- /dev/null +++ b/syncmaster/backend/api/v1/auth.py @@ -0,0 +1,61 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +from typing import Annotated + +from fastapi import APIRouter, Depends, HTTPException, Request +from fastapi.responses import RedirectResponse +from fastapi.security import OAuth2PasswordRequestForm + +from syncmaster.backend.dependencies import Stub +from syncmaster.backend.providers.auth import ( + AuthProvider, + DummyAuthProvider, + KeycloakAuthProvider, +) +from syncmaster.backend.utils.state import validate_state +from syncmaster.errors.registration import get_error_responses +from syncmaster.errors.schemas.invalid_request import InvalidRequestSchema +from syncmaster.errors.schemas.not_authorized import NotAuthorizedSchema +from 
syncmaster.schemas.v1.auth import AuthTokenSchema + +router = APIRouter( + prefix="/auth", + tags=["Auth"], + responses=get_error_responses(include={NotAuthorizedSchema, InvalidRequestSchema}), +) + + +@router.post("/token") +async def token( + auth_provider: Annotated[DummyAuthProvider, Depends(Stub(AuthProvider))], + form_data: OAuth2PasswordRequestForm = Depends(), +) -> AuthTokenSchema: + token = await auth_provider.get_token_password_grant( + grant_type=form_data.grant_type, + login=form_data.username, + password=form_data.password, + scopes=form_data.scopes, + client_id=form_data.client_id, + client_secret=form_data.client_secret, + ) + return AuthTokenSchema.parse_obj(token) + + +@router.get("/callback") +async def auth_callback( + request: Request, + code: str, + state: str, + auth_provider: Annotated[KeycloakAuthProvider, Depends(Stub(AuthProvider))], +): + original_redirect_url = validate_state(state) + if not original_redirect_url: + raise HTTPException(status_code=400, detail="Invalid state parameter") + token = await auth_provider.get_token_authorization_code_grant( + code=code, + redirect_uri=auth_provider.settings.redirect_uri, + ) + request.session["access_token"] = token["access_token"] + request.session["refresh_token"] = token["refresh_token"] + + return RedirectResponse(url=original_redirect_url) diff --git a/syncmaster/backend/api/v1/auth/router.py b/syncmaster/backend/api/v1/auth/router.py deleted file mode 100644 index 3a5dbd6d..00000000 --- a/syncmaster/backend/api/v1/auth/router.py +++ /dev/null @@ -1,41 +0,0 @@ -# SPDX-FileCopyrightText: 2023-2024 MTS PJSC -# SPDX-License-Identifier: Apache-2.0 -from typing import Annotated - -from fastapi import APIRouter, Depends -from fastapi.security import OAuth2PasswordRequestForm - -from syncmaster.backend.api.v1.auth.utils import sign_jwt -from syncmaster.backend.dependencies import Stub -from syncmaster.backend.services import UnitOfWork -from syncmaster.errors.registration import 
get_error_responses -from syncmaster.errors.schemas.invalid_request import InvalidRequestSchema -from syncmaster.errors.schemas.not_authorized import NotAuthorizedSchema -from syncmaster.exceptions import EntityNotFoundError -from syncmaster.schemas.v1.auth import AuthTokenSchema -from syncmaster.settings import Settings - -router = APIRouter( - prefix="/auth", - tags=["Auth"], - responses=get_error_responses(include={NotAuthorizedSchema, InvalidRequestSchema}), -) - - -@router.post("/token") -async def login( - settings: Annotated[Settings, Depends(Stub(Settings))], - form_data: OAuth2PasswordRequestForm = Depends(), - unit_of_work: UnitOfWork = Depends(UnitOfWork), -) -> AuthTokenSchema: - """This is the test auth method!!! Not for production!!!!""" - try: - user = await unit_of_work.user.read_by_username(username=form_data.username) - except EntityNotFoundError: - async with unit_of_work: - user = await unit_of_work.user.create( - username=form_data.username, - is_active=True, - ) - token = sign_jwt(user_id=user.id, settings=settings) - return AuthTokenSchema(access_token=token, refresh_token="refresh_token") diff --git a/syncmaster/backend/api/v1/auth/utils.py b/syncmaster/backend/api/v1/auth/utils.py deleted file mode 100644 index cd7b215a..00000000 --- a/syncmaster/backend/api/v1/auth/utils.py +++ /dev/null @@ -1,26 +0,0 @@ -# SPDX-FileCopyrightText: 2023-2024 MTS PJSC -# SPDX-License-Identifier: Apache-2.0 -import time - -from jose import JWTError, jwt -from pydantic import ValidationError - -from syncmaster.schemas.v1.auth import TokenPayloadSchema -from syncmaster.settings import Settings - - -def sign_jwt(user_id: int, settings: Settings) -> str: - """This method authentication for dev version without keycloak""" - payload = { - "user_id": user_id, - "expires": time.time() + settings.TOKEN_EXPIRED_TIME, - } - return jwt.encode(payload, settings.SECRET_KEY, algorithm=settings.SECURITY_ALGORITHM) - - -def decode_jwt(token: str, settings: Settings) -> 
TokenPayloadSchema | None: - try: - payload = jwt.decode(token, settings.SECRET_KEY, algorithms=[settings.SECURITY_ALGORITHM]) - return TokenPayloadSchema(**payload) - except (JWTError, ValidationError): - return None diff --git a/syncmaster/backend/api/v1/router.py b/syncmaster/backend/api/v1/router.py index c363cb50..1d626b2a 100644 --- a/syncmaster/backend/api/v1/router.py +++ b/syncmaster/backend/api/v1/router.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 from fastapi import APIRouter -from syncmaster.backend.api.v1.auth.router import router as auth_router +from syncmaster.backend.api.v1.auth import router as auth_router from syncmaster.backend.api.v1.connections import router as connection_router from syncmaster.backend.api.v1.groups import router as group_router from syncmaster.backend.api.v1.queue import router as queue_router diff --git a/syncmaster/backend/api/v1/runs.py b/syncmaster/backend/api/v1/runs.py index 1ec7698b..cbcc161b 100644 --- a/syncmaster/backend/api/v1/runs.py +++ b/syncmaster/backend/api/v1/runs.py @@ -2,14 +2,12 @@ # SPDX-License-Identifier: Apache-2.0 import asyncio from datetime import datetime -from typing import Annotated from asgi_correlation_id import correlation_id from fastapi import APIRouter, Depends, Query from jinja2 import Template from kombu.exceptions import KombuError -from syncmaster.backend.dependencies import Stub from syncmaster.backend.services import UnitOfWork, get_user from syncmaster.db.models import RunType, Status, User from syncmaster.db.utils import Permission @@ -23,9 +21,11 @@ ReadRunSchema, RunPageSchema, ) -from syncmaster.settings import Settings from syncmaster.worker.config import celery +# TODO: remove global import of WorkerSettings +from syncmaster.worker.settings import WorkerSettings as Settings + router = APIRouter(tags=["Runs"], responses=get_error_responses()) @@ -83,7 +83,6 @@ async def read_run( @router.post("/runs") async def start_run( create_run_data: CreateRunSchema, - 
settings: Annotated[Settings, Depends(Stub(Settings))], unit_of_work: UnitOfWork = Depends(UnitOfWork), current_user: User = Depends(get_user(is_active=True)), ) -> ReadRunSchema: @@ -120,7 +119,7 @@ async def start_run( type=RunType.MANUAL, ) - log_url = Template(settings.worker.LOG_URL_TEMPLATE).render( + log_url = Template(Settings().LOG_URL_TEMPLATE).render( run=run, correlation_id=correlation_id.get(), ) diff --git a/syncmaster/backend/dependencies/__init__.py b/syncmaster/backend/dependencies/__init__.py index 3d831ff9..48aa914c 100644 --- a/syncmaster/backend/dependencies/__init__.py +++ b/syncmaster/backend/dependencies/__init__.py @@ -1,4 +1,5 @@ # SPDX-FileCopyrightText: 2023-2024 MTS PJSC # SPDX-License-Identifier: Apache-2.0 +from syncmaster.backend.dependencies.get_access_token import get_access_token from syncmaster.backend.dependencies.stub import Stub diff --git a/syncmaster/backend/dependencies/get_access_token.py b/syncmaster/backend/dependencies/get_access_token.py new file mode 100644 index 00000000..a9fe1708 --- /dev/null +++ b/syncmaster/backend/dependencies/get_access_token.py @@ -0,0 +1,12 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +from fastapi import Request +from fastapi.security.utils import get_authorization_scheme_param + + +async def get_access_token(request: Request) -> str | None: + authorization = request.headers.get("Authorization") + scheme, token = get_authorization_scheme_param(authorization) + if not authorization or scheme.lower() != "bearer": + return None + return token diff --git a/syncmaster/backend/export_openapi_schema.py b/syncmaster/backend/export_openapi_schema.py old mode 100644 new mode 100755 index 7b586fe3..fac804b4 --- a/syncmaster/backend/export_openapi_schema.py +++ b/syncmaster/backend/export_openapi_schema.py @@ -7,8 +7,7 @@ from fastapi import FastAPI -from syncmaster.backend import application_factory -from syncmaster.settings import Settings +from 
syncmaster.backend import get_application def get_openapi_schema(app: FastAPI) -> dict: @@ -16,8 +15,7 @@ def get_openapi_schema(app: FastAPI) -> dict: if __name__ == "__main__": - settings = Settings() - app = application_factory(settings) + app = get_application() schema = get_openapi_schema(app) file_path = sys.argv[1] if not file_path: diff --git a/syncmaster/backend/handler.py b/syncmaster/backend/handler.py index 46883000..9ddbdda1 100644 --- a/syncmaster/backend/handler.py +++ b/syncmaster/backend/handler.py @@ -5,11 +5,13 @@ from fastapi import HTTPException, Request, Response, status from fastapi.exceptions import RequestValidationError +from fastapi.responses import RedirectResponse from pydantic import ValidationError from syncmaster.errors.base import APIErrorSchema, BaseErrorSchema from syncmaster.errors.registration import get_response_for_exception from syncmaster.exceptions import ActionNotAllowedError, SyncmasterError +from syncmaster.exceptions.auth import AuthorizationError from syncmaster.exceptions.connection import ( ConnectionDeleteError, ConnectionNotFoundError, @@ -31,6 +33,7 @@ QueueDeleteError, QueueNotFoundError, ) +from syncmaster.exceptions.redirect import RedirectException from syncmaster.exceptions.run import ( CannotConnectToTaskQueueError, CannotStopRunError, @@ -119,6 +122,14 @@ async def syncmsater_exception_handler(request: Request, exc: SyncmasterError): content=content, ) + if isinstance(exc, RedirectException): + return RedirectResponse(url=exc.redirect_url) + + if isinstance(exc, AuthorizationError): + content.code = "unauthorized" + content.message = "Not authenticated" + return exception_json_response(status=status.HTTP_401_UNAUTHORIZED, content=content) + if isinstance(exc, ConnectionDeleteError): content.code = "conflict" return exception_json_response(status=status.HTTP_409_CONFLICT, content=content) diff --git a/syncmaster/backend/middlewares/__init__.py b/syncmaster/backend/middlewares/__init__.py index 
1ecd6c54..e1a13c02 100644 --- a/syncmaster/backend/middlewares/__init__.py +++ b/syncmaster/backend/middlewares/__init__.py @@ -10,8 +10,9 @@ ) from syncmaster.backend.middlewares.openapi import apply_openapi_middleware from syncmaster.backend.middlewares.request_id import apply_request_id_middleware +from syncmaster.backend.middlewares.session import apply_session_middleware from syncmaster.backend.middlewares.static_files import apply_static_files -from syncmaster.settings import Settings +from syncmaster.backend.settings import BackendSettings as Settings def apply_middlewares( @@ -20,13 +21,14 @@ def apply_middlewares( ) -> FastAPI: """Add middlewares to the application.""" - if settings.server.logging.setup: - setup_logging(settings.server.logging.get_log_config_path()) + if settings.logging.setup: + setup_logging(settings.logging.get_log_config_path()) apply_cors_middleware(application, settings.server.cors) apply_monitoring_metrics_middleware(application, settings.server.monitoring) apply_request_id_middleware(application, settings.server.request_id) apply_openapi_middleware(application, settings.server.openapi) apply_static_files(application, settings.server.static_files) + apply_session_middleware(application, settings.server.session) return application diff --git a/syncmaster/backend/middlewares/cors.py b/syncmaster/backend/middlewares/cors.py index 21b3aee8..24dbd5e3 100644 --- a/syncmaster/backend/middlewares/cors.py +++ b/syncmaster/backend/middlewares/cors.py @@ -3,7 +3,7 @@ from fastapi import FastAPI from starlette.middleware.cors import CORSMiddleware -from syncmaster.settings.server import CORSSettings +from syncmaster.backend.settings.server import CORSSettings def apply_cors_middleware(app: FastAPI, settings: CORSSettings) -> FastAPI: diff --git a/syncmaster/backend/middlewares/monitoring/metrics.py b/syncmaster/backend/middlewares/monitoring/metrics.py index abafa7e3..4ce1f455 100644 --- a/syncmaster/backend/middlewares/monitoring/metrics.py 
+++ b/syncmaster/backend/middlewares/monitoring/metrics.py @@ -4,8 +4,8 @@ from starlette.responses import PlainTextResponse from starlette_exporter import PrometheusMiddleware, handle_metrics +from syncmaster.backend.settings.server.monitoring import MonitoringSettings from syncmaster.backend.utils.slug import slugify -from syncmaster.settings.server.monitoring import MonitoringSettings DEFAULT_SKIP_PATHS = { "/monitoring/metrics", diff --git a/syncmaster/backend/middlewares/openapi.py b/syncmaster/backend/middlewares/openapi.py index 4b5abebe..190f401b 100644 --- a/syncmaster/backend/middlewares/openapi.py +++ b/syncmaster/backend/middlewares/openapi.py @@ -12,7 +12,7 @@ from starlette.requests import Request from starlette.responses import JSONResponse -from syncmaster.settings.server.openapi import OpenAPISettings +from syncmaster.backend.settings.server.openapi import OpenAPISettings async def custom_openapi(request: Request) -> JSONResponse: diff --git a/syncmaster/backend/middlewares/request_id.py b/syncmaster/backend/middlewares/request_id.py index 018fd992..24871302 100644 --- a/syncmaster/backend/middlewares/request_id.py +++ b/syncmaster/backend/middlewares/request_id.py @@ -4,7 +4,7 @@ from fastapi import FastAPI from uuid6 import uuid7 -from syncmaster.settings.server import RequestIDSettings +from syncmaster.backend.settings.server import RequestIDSettings def apply_request_id_middleware(app: FastAPI, settings: RequestIDSettings) -> FastAPI: diff --git a/syncmaster/backend/middlewares/session.py b/syncmaster/backend/middlewares/session.py new file mode 100644 index 00000000..d29d30a4 --- /dev/null +++ b/syncmaster/backend/middlewares/session.py @@ -0,0 +1,17 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 + +from fastapi import FastAPI +from starlette.middleware.sessions import SessionMiddleware + +from syncmaster.backend.settings.server.session import SessionSettings + + +def apply_session_middleware(app: 
FastAPI, settings: SessionSettings) -> FastAPI: + """Add SessionMiddleware middleware to the application.""" + + app.add_middleware( + SessionMiddleware, + **settings.dict(), + ) + return app diff --git a/syncmaster/backend/middlewares/static_files.py b/syncmaster/backend/middlewares/static_files.py index 367a086f..0ed68023 100644 --- a/syncmaster/backend/middlewares/static_files.py +++ b/syncmaster/backend/middlewares/static_files.py @@ -1,9 +1,8 @@ # SPDX-FileCopyrightText: 2023-2024 MTS PJSC # SPDX-License-Identifier: Apache-2.0 from fastapi import FastAPI -from fastapi.staticfiles import StaticFiles -from syncmaster.settings.server.static_files import StaticFilesSettings +from syncmaster.backend.settings.server.static_files import StaticFilesSettings def apply_static_files(app: FastAPI, settings: StaticFilesSettings) -> FastAPI: @@ -12,5 +11,5 @@ def apply_static_files(app: FastAPI, settings: StaticFilesSettings) -> FastAPI: return app # https://fastapi.tiangolo.com/how-to/custom-docs-ui-assets/#serve-the-static-files - app.mount("/static", StaticFiles(directory=settings.directory), name="static") + # app.mount("/static", StaticFiles(directory=settings.directory), name="static") return app diff --git a/syncmaster/backend/api/v1/auth/__init__.py b/syncmaster/backend/providers/__init__.py similarity index 100% rename from syncmaster/backend/api/v1/auth/__init__.py rename to syncmaster/backend/providers/__init__.py diff --git a/syncmaster/backend/providers/auth/__init__.py b/syncmaster/backend/providers/auth/__init__.py new file mode 100644 index 00000000..bae07ad5 --- /dev/null +++ b/syncmaster/backend/providers/auth/__init__.py @@ -0,0 +1,5 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +from syncmaster.backend.providers.auth.base_provider import AuthProvider +from syncmaster.backend.providers.auth.dummy_provider import DummyAuthProvider +from syncmaster.backend.providers.auth.keycloak_provider import KeycloakAuthProvider 
diff --git a/syncmaster/backend/providers/auth/base_provider.py b/syncmaster/backend/providers/auth/base_provider.py new file mode 100644 index 00000000..9faed5e3 --- /dev/null +++ b/syncmaster/backend/providers/auth/base_provider.py @@ -0,0 +1,114 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 + +from abc import ABC, abstractmethod +from typing import Any + +from fastapi import FastAPI + +from syncmaster.db.models import User + + +class AuthProvider(ABC): + """Basic class for all Auth providers. + + Constructor is called by FastAPI, and can use Dependency injection mechanism. + See :obj:`~setup` for more details. + """ + + @classmethod + @abstractmethod + def setup(cls, app: FastAPI) -> FastAPI: + """ + This method is called by :obj:`syncmaster.backend.application_factory`. + + Here you should add dependency overrides for auth provider, + and return new ``app`` object. + + Examples + -------- + + .. code-block:: + + from fastapi import FastAPI + from my_awesome_auth_provider.settings import MyAwesomeAuthProviderSettings + from syncmaster.backend.dependencies import Stub + + class MyAwesomeAuthProvider(AuthProvider): + def setup(app): + app.dependency_overrides[AuthProvider] = MyAwesomeAuthProvider + + # `settings_object_factory` returns MyAwesomeAuthProviderSettings object + app.dependency_overrides[MyAwesomeAuthProviderSettings] = settings_object_factory + return app + + def __init__( + self, + settings: Annotated[MyAwesomeAuthProviderSettings, Depends(Stub(MyAwesomeAuthProviderSettings))], + ): + # settings object is set automatically by FastAPI's dependency_overrides + self.settings = settings + """ + ... + + @abstractmethod + async def get_current_user(self, access_token: Any, *args, **kwargs) -> User: + """ + This method should return currently logged in user. + + Parameters + ---------- + access_token : str + JWT token got from ``Authorization: Bearer `` header. 
+ + Returns + ------- + :obj:`syncmaster.backend.db.models.User` + Current user object + """ + ... + + @abstractmethod + async def get_token_password_grant( + self, + grant_type: str | None = None, + login: str | None = None, + password: str | None = None, + scopes: list[str] | None = None, + client_id: str | None = None, + client_secret: str | None = None, + ) -> dict[str, Any]: + """ + This method should perform authentication and return JWT token. + + Parameters + ---------- + See: + * https://auth0.com/docs/get-started/authentication-and-authorization-flow/call-your-api-using-resource-owner-password-flow + * https://connect2id.com/products/server/docs/api/token + + Returns + ------- + Dict: + .. code-block:: python + + { + "access_token": "some.jwt.token", + "token_type": "bearer", + "expires_in": 3600, + } + """ + ... + + @abstractmethod + async def get_token_authorization_code_grant( + self, + code: str, + redirect_uri: str, + scopes: list[str] | None = None, + client_id: str | None = None, + client_secret: str | None = None, + ) -> dict[str, Any]: + """ + Obtain a token using the Authorization Code grant. 
+ """ diff --git a/syncmaster/backend/providers/auth/dummy_provider.py b/syncmaster/backend/providers/auth/dummy_provider.py new file mode 100644 index 00000000..bb5d77b4 --- /dev/null +++ b/syncmaster/backend/providers/auth/dummy_provider.py @@ -0,0 +1,115 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 + +import logging +from time import time +from typing import Annotated, Any + +from devtools import pformat +from fastapi import Depends, FastAPI + +from syncmaster.backend.dependencies import Stub +from syncmaster.backend.providers.auth.base_provider import AuthProvider +from syncmaster.backend.services import UnitOfWork +from syncmaster.backend.settings.auth.dummy import DummyAuthProviderSettings +from syncmaster.backend.utils.jwt import decode_jwt, sign_jwt +from syncmaster.db.models import User +from syncmaster.exceptions import EntityNotFoundError +from syncmaster.exceptions.auth import AuthorizationError + +log = logging.getLogger(__name__) + + +class DummyAuthProvider(AuthProvider): + def __init__( + self, + settings: Annotated[DummyAuthProviderSettings, Depends(Stub(DummyAuthProviderSettings))], + unit_of_work: Annotated[UnitOfWork, Depends()], + ) -> None: + self._settings = settings + self._uow = unit_of_work + + @classmethod + def setup(cls, app: FastAPI) -> FastAPI: + settings = DummyAuthProviderSettings.parse_obj(app.state.settings.auth.dict(exclude={"provider"})) + log.info("Using %s provider with settings:\n%s", cls.__name__, pformat(settings)) + app.dependency_overrides[AuthProvider] = cls + app.dependency_overrides[DummyAuthProviderSettings] = lambda: settings + return app + + async def get_current_user(self, access_token: str, *args, **kwargs) -> User: + if not access_token: + raise AuthorizationError("Missing auth credentials") + + user_id = self._get_user_id_from_token(access_token) + user = await self._uow.user.read_by_id(user_id) + return user + + async def get_token_password_grant( + self, + grant_type: 
str | None = None, + login: str | None = None, + password: str | None = None, + scopes: list[str] | None = None, + client_id: str | None = None, + client_secret: str | None = None, + ) -> dict[str, Any]: + if not login or not password: + raise AuthorizationError("Missing auth credentials") + + log.info("Get/create user %r in database", login) + async with self._uow: + try: + user = await self._uow.user.read_by_username(login) + except EntityNotFoundError: + user = await self._uow.user.create( + username=login, + email=f"{login}@example.com", + is_active=True, + ) + + log.info("User with id %r found", user.id) + if not user.is_active: + raise AuthorizationError(f"User {user.username!r} is disabled") + + log.info("Generate access token for user id %r", user.id) + access_token, expires_at = self._generate_access_token(user_id=user.id) + return { + "access_token": access_token, + "token_type": "bearer", + "expires_at": expires_at, + } + + async def get_token_authorization_code_grant( + self, + code: str, + redirect_uri: str, + scopes: list[str] | None = None, + client_id: str | None = None, + client_secret: str | None = None, + ) -> dict[str, Any]: + raise NotImplementedError("Authorization code grant is not supported by DummyAuthProvider.") + + def _generate_access_token(self, user_id: int) -> tuple[str, float]: + expires_at = time() + self._settings.access_token.expire_seconds + payload = { + "user_id": user_id, + "exp": expires_at, + } + access_token = sign_jwt( + payload, + self._settings.access_token.secret_key.get_secret_value(), + self._settings.access_token.security_algorithm, + ) + return access_token, expires_at + + def _get_user_id_from_token(self, token: str) -> int: + try: + payload = decode_jwt( + token, + self._settings.access_token.secret_key.get_secret_value(), + self._settings.access_token.security_algorithm, + ) + return int(payload["user_id"]) + except (KeyError, TypeError, ValueError) as e: + raise AuthorizationError("Invalid token") from e diff 
--git a/syncmaster/backend/providers/auth/keycloak_provider.py b/syncmaster/backend/providers/auth/keycloak_provider.py new file mode 100644 index 00000000..b9a97710 --- /dev/null +++ b/syncmaster/backend/providers/auth/keycloak_provider.py @@ -0,0 +1,145 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +import logging +from typing import Annotated, Any + +from fastapi import Depends, FastAPI, Request +from keycloak import KeycloakOpenID + +from syncmaster.backend.dependencies import Stub +from syncmaster.backend.providers.auth.base_provider import AuthProvider +from syncmaster.backend.services import UnitOfWork +from syncmaster.backend.settings.auth.keycloak import KeycloakAuthProviderSettings +from syncmaster.backend.utils.state import generate_state +from syncmaster.exceptions import EntityNotFoundError +from syncmaster.exceptions.auth import AuthorizationError +from syncmaster.exceptions.redirect import RedirectException + +log = logging.getLogger(__name__) + + +class KeycloakAuthProvider(AuthProvider): + def __init__( + self, + settings: Annotated[KeycloakAuthProviderSettings, Depends(Stub(KeycloakAuthProviderSettings))], + unit_of_work: Annotated[UnitOfWork, Depends()], + ) -> None: + self.settings = settings + self._uow = unit_of_work + self.keycloak_openid = KeycloakOpenID( + server_url=self.settings.server_url, + client_id=self.settings.client_id, + realm_name=self.settings.realm_name, + client_secret_key=self.settings.client_secret.get_secret_value(), + verify=self.settings.verify_ssl, + ) + + @classmethod + def setup(cls, app: FastAPI) -> FastAPI: + settings = KeycloakAuthProviderSettings.parse_obj(app.state.settings.auth.dict(exclude={"provider"})) + log.info("Using %s provider with settings:\n%s", cls.__name__, settings) + app.dependency_overrides[AuthProvider] = cls + app.dependency_overrides[KeycloakAuthProviderSettings] = lambda: settings + return app + + async def get_token_password_grant( + self, + 
grant_type: str | None = None, + login: str | None = None, + password: str | None = None, + scopes: list[str] | None = None, + client_id: str | None = None, + client_secret: str | None = None, + ) -> dict[str, Any]: + raise NotImplementedError("Password grant is not supported by KeycloakAuthProvider.") + + async def get_token_authorization_code_grant( + self, + code: str, + redirect_uri: str, + scopes: list[str] | None = None, + client_id: str | None = None, + client_secret: str | None = None, + ) -> dict[str, Any]: + try: + redirect_uri = redirect_uri or self.settings.redirect_uri + token = self.keycloak_openid.token( + grant_type="authorization_code", + code=code, + redirect_uri=redirect_uri, + ) + return token + except Exception as e: + raise AuthorizationError("Failed to get token") from e + + async def get_current_user(self, access_token: str, *args, **kwargs) -> Any: + request: Request = kwargs["request"] + refresh_token = request.session.get("refresh_token") + + if not access_token: + log.debug("No access token found in session.") + self.redirect_to_auth(request.url.path) + + try: + token_info = self.keycloak_openid.decode_token(token=access_token) + except Exception as e: + log.info("Access token is invalid or expired: %s", e) + token_info = None + + if not token_info and refresh_token: + log.debug("Access token invalid. 
Attempting to refresh.") + + try: + new_tokens = await self.refresh_access_token(refresh_token) + + new_access_token = new_tokens.get("access_token") + new_refresh_token = new_tokens.get("refresh_token") + request.session["access_token"] = new_access_token + request.session["refresh_token"] = new_refresh_token + + token_info = self.keycloak_openid.decode_token( + token=new_access_token, + ) + log.debug("Access token refreshed and decoded successfully.") + except Exception as e: + log.debug("Failed to refresh access token: %s", e) + self.redirect_to_auth(request.url.path) + + # these names are hardcoded in keycloak: + # https://github.com/keycloak/keycloak/blob/3ca3a4ad349b4d457f6829eaf2ae05f1e01408be/core/src/main/java/org/keycloak/representations/IDToken.java + user_id = token_info.get("sub") + login = token_info.get("preferred_username") + email = token_info.get("email") + first_name = token_info.get("given_name") + middle_name = token_info.get("middle_name") + last_name = token_info.get("family_name") + + if not user_id: + raise AuthorizationError("Invalid token payload") + + async with self._uow: + try: + user = await self._uow.user.read_by_username(login) + except EntityNotFoundError: + user = await self._uow.user.create( + username=login, + email=email, + first_name=first_name, + middle_name=middle_name, + last_name=last_name, + is_active=True, + ) + return user + + async def refresh_access_token(self, refresh_token: str) -> dict[str, Any]: + new_tokens = self.keycloak_openid.refresh_token(refresh_token) + return new_tokens + + def redirect_to_auth(self, path: str) -> None: + state = generate_state(path) + auth_url = self.keycloak_openid.auth_url( + redirect_uri=self.settings.redirect_uri, + scope=self.settings.scope, + state=state, + ) + raise RedirectException(redirect_url=auth_url) diff --git a/syncmaster/backend/services/__init__.py b/syncmaster/backend/services/__init__.py index 903cd387..84b01c58 100644 --- a/syncmaster/backend/services/__init__.py +++ 
b/syncmaster/backend/services/__init__.py @@ -1,4 +1,4 @@ # SPDX-FileCopyrightText: 2023-2024 MTS PJSC # SPDX-License-Identifier: Apache-2.0 -from syncmaster.backend.services.auth import get_user +from syncmaster.backend.services.get_user import get_user from syncmaster.backend.services.unit_of_work import UnitOfWork diff --git a/syncmaster/backend/services/auth.py b/syncmaster/backend/services/auth.py deleted file mode 100644 index becff5ff..00000000 --- a/syncmaster/backend/services/auth.py +++ /dev/null @@ -1,53 +0,0 @@ -# SPDX-FileCopyrightText: 2023-2024 MTS PJSC -# SPDX-License-Identifier: Apache-2.0 -from collections.abc import Awaitable, Callable -from typing import Annotated - -from fastapi import Depends, status -from fastapi.exceptions import HTTPException -from fastapi.security import OAuth2PasswordBearer - -from syncmaster.backend.api.v1.auth.utils import decode_jwt -from syncmaster.backend.dependencies import Stub -from syncmaster.backend.services.unit_of_work import UnitOfWork -from syncmaster.db.models import User -from syncmaster.settings import Settings - -oauth_schema = OAuth2PasswordBearer(tokenUrl="v1/auth/token") - - -def get_user( - is_active: bool = False, - is_superuser: bool = False, -) -> Callable[[Settings, str, UnitOfWork], Awaitable[User]]: - async def wrapper( - settings: Annotated[Settings, Depends(Stub(Settings))], - token: str = Depends(oauth_schema), - unit_of_work: UnitOfWork = Depends(UnitOfWork), - ) -> User: - async with unit_of_work: - token_data = decode_jwt(token, settings=settings) - if token_data is None: - raise HTTPException( - status_code=status.HTTP_401_UNAUTHORIZED, - detail="You are not authorized", - ) - user = await unit_of_work.user.read_by_id(user_id=token_data.user_id) - if user is None: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="User not found", - ) - if is_active and not user.is_active: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="Inactive user", - ) 
- if is_superuser and not user.is_superuser: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="You have no power here", - ) - return user - - return wrapper diff --git a/syncmaster/backend/services/get_user.py b/syncmaster/backend/services/get_user.py new file mode 100644 index 00000000..ac0b0adc --- /dev/null +++ b/syncmaster/backend/services/get_user.py @@ -0,0 +1,41 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +from collections.abc import Callable, Coroutine +from typing import Annotated, Any + +from fastapi import Depends, Request +from fastapi.security import OAuth2PasswordBearer + +from syncmaster.backend.dependencies import Stub +from syncmaster.backend.providers.auth import AuthProvider +from syncmaster.db.models import User +from syncmaster.exceptions import ActionNotAllowedError, EntityNotFoundError + +oauth_schema = OAuth2PasswordBearer(tokenUrl="v1/auth/token", auto_error=False) + + +def get_user( + is_active: bool = False, + is_superuser: bool = False, +) -> Callable[[Request, AuthProvider, str], Coroutine[Any, Any, User]]: + async def wrapper( + request: Request, + auth_provider: Annotated[AuthProvider, Depends(Stub(AuthProvider))], + access_token: Annotated[str | None, Depends(oauth_schema)], + ) -> User: + # keycloak provider patches session and store access_token in cookie, + # when dummy auth stores it in "Authorization" header + access_token = request.session.get("access_token", "") or access_token + user = await auth_provider.get_current_user( + access_token=access_token, + request=request, + ) + if user is None: + raise EntityNotFoundError("User not found") + if is_active and not user.is_active: + raise ActionNotAllowedError("Inactive user") + if is_superuser and not user.is_superuser: + raise ActionNotAllowedError("You have no power here") + return user + + return wrapper diff --git a/syncmaster/backend/services/unit_of_work.py b/syncmaster/backend/services/unit_of_work.py 
index 2c6d008c..b3ffa555 100644 --- a/syncmaster/backend/services/unit_of_work.py +++ b/syncmaster/backend/services/unit_of_work.py @@ -6,6 +6,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from syncmaster.backend.dependencies import Stub +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import AuthData from syncmaster.db.repositories import ( ConnectionRepository, @@ -16,7 +17,6 @@ TransferRepository, UserRepository, ) -from syncmaster.settings import Settings class UnitOfWork: diff --git a/syncmaster/backend/settings/__init__.py b/syncmaster/backend/settings/__init__.py new file mode 100644 index 00000000..a73a845a --- /dev/null +++ b/syncmaster/backend/settings/__init__.py @@ -0,0 +1,55 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +from enum import StrEnum + +from pydantic import Field +from pydantic.types import ImportString + +from syncmaster.backend.settings.auth import AuthSettings +from syncmaster.backend.settings.server import ServerSettings +from syncmaster.settings import SyncmasterSettings + + +class EnvTypes(StrEnum): + LOCAL = "LOCAL" + + +class BackendSettings(SyncmasterSettings): + """Syncmaster backend settings. + + Backend can be configured in 2 ways: + + * By explicitly passing ``settings`` object as an argument to :obj:`application_factory ` + * By setting up environment variables matching a specific key. + + All environment variable names are written in uppercase and should be prefixed with ``SYNCMASTER__``. + Nested items are delimited with ``__``. + + + More details can be found in + `Pydantic documentation `_. + + Examples + -------- + + .. 
code-block:: bash + + # same as settings.database.url = "postgresql+asyncpg://postgres:postgres@localhost:5432/syncmaster" + SYNCMASTER__DATABASE__URL=postgresql+asyncpg://postgres:postgres@localhost:5432/syncmaster + + # same as settings.server.debug = True + SYNCMASTER__SERVER__DEBUG=True + """ + + server: ServerSettings = Field( + default_factory=ServerSettings, + description="Server settings `_ + documentation. + """, + ), + ) + expire_seconds: int = Field( + default=10 * 60 * 60, + description="Token expiration time, in seconds", + ) diff --git a/syncmaster/backend/settings/auth/keycloak.py b/syncmaster/backend/settings/auth/keycloak.py new file mode 100644 index 00000000..b5e43552 --- /dev/null +++ b/syncmaster/backend/settings/auth/keycloak.py @@ -0,0 +1,15 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +from pydantic import BaseModel, Field, SecretStr + + +class KeycloakAuthProviderSettings(BaseModel): + """Settings related to Keycloak interaction.""" + + server_url: str = Field(..., description="Keycloak server URL") + client_id: str = Field(..., description="Keycloak client ID") + realm_name: str = Field(..., description="Keycloak realm name") + client_secret: SecretStr = Field(..., description="Keycloak client secret") + redirect_uri: str = Field(..., description="Redirect URI") + verify_ssl: bool = Field(True, description="Verify SSL certificates") + scope: str = Field("openid", description="Keycloak scope") diff --git a/syncmaster/settings/server/__init__.py b/syncmaster/backend/settings/server/__init__.py similarity index 68% rename from syncmaster/settings/server/__init__.py rename to syncmaster/backend/settings/server/__init__.py index 8c9402e9..c83fbfdb 100644 --- a/syncmaster/settings/server/__init__.py +++ b/syncmaster/backend/settings/server/__init__.py @@ -5,12 +5,12 @@ from pydantic import BaseModel, Field -from syncmaster.settings.log import LoggingSettings -from syncmaster.settings.server.cors import 
CORSSettings -from syncmaster.settings.server.monitoring import MonitoringSettings -from syncmaster.settings.server.openapi import OpenAPISettings -from syncmaster.settings.server.request_id import RequestIDSettings -from syncmaster.settings.server.static_files import StaticFilesSettings +from syncmaster.backend.settings.server.cors import CORSSettings +from syncmaster.backend.settings.server.monitoring import MonitoringSettings +from syncmaster.backend.settings.server.openapi import OpenAPISettings +from syncmaster.backend.settings.server.request_id import RequestIDSettings +from syncmaster.backend.settings.server.session import SessionSettings +from syncmaster.backend.settings.server.static_files import StaticFilesSettings class ServerSettings(BaseModel): @@ -34,13 +34,13 @@ class ServerSettings(BaseModel): """, ), ) - logging: LoggingSettings = Field( - default_factory=LoggingSettings, - description=":ref:`Logging settings `", - ) request_id: RequestIDSettings = Field( default_factory=RequestIDSettings, ) + session: SessionSettings = Field( + default_factory=SessionSettings, # type: ignore[arg-type] + description=":ref:`Session settings `", + ) cors: CORSSettings = Field( default_factory=CORSSettings, description=":ref:`CORS settings `", diff --git a/syncmaster/settings/server/cors.py b/syncmaster/backend/settings/server/cors.py similarity index 100% rename from syncmaster/settings/server/cors.py rename to syncmaster/backend/settings/server/cors.py diff --git a/syncmaster/settings/server/monitoring.py b/syncmaster/backend/settings/server/monitoring.py similarity index 100% rename from syncmaster/settings/server/monitoring.py rename to syncmaster/backend/settings/server/monitoring.py diff --git a/syncmaster/settings/server/openapi.py b/syncmaster/backend/settings/server/openapi.py similarity index 100% rename from syncmaster/settings/server/openapi.py rename to syncmaster/backend/settings/server/openapi.py diff --git a/syncmaster/settings/server/request_id.py 
b/syncmaster/backend/settings/server/request_id.py similarity index 100% rename from syncmaster/settings/server/request_id.py rename to syncmaster/backend/settings/server/request_id.py diff --git a/syncmaster/backend/settings/server/session.py b/syncmaster/backend/settings/server/session.py new file mode 100644 index 00000000..ed2a008a --- /dev/null +++ b/syncmaster/backend/settings/server/session.py @@ -0,0 +1,56 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 + + +from pydantic import BaseModel, Field + + +class SessionSettings(BaseModel): + """Session Middleware Settings. + + See `SessionMiddleware `_ documentation. + + .. note:: + + You can pass here any extra option supported by ``SessionMiddleware``, + even if it is not mentioned in documentation. + + Examples + -------- + + For development environment: + + .. code-block:: bash + + SYNCMASTER__SERVER__SESSION__SECRET_KEY=secret + SYNCMASTER__SERVER__SESSION__SESSION_COOKIE=custom_cookie_name + SYNCMASTER__SERVER__SESSION__MAX_AGE=None # cookie will last as long as the browser session + SYNCMASTER__SERVER__SESSION__SAME_SITE=strict + SYNCMASTER__SERVER__SESSION__HTTPS_ONLY=True + SYNCMASTER__SERVER__SESSION__DOMAIN=example.com + + For production environment: + + .. code-block:: bash + + SYNCMASTER__SERVER__SESSION__SECRET_KEY=secret + SYNCMASTER__SERVER__SESSION__HTTPS_ONLY=True + + """ + + secret_key: str = Field(description="A random string for signing cookies.") + session_cookie: str | None = Field(default="session", description="Name of the session cookie.") + max_age: int | None = Field(default=1209600, description="Session expiry time in seconds. 
Defaults to 2 weeks.") + same_site: str | None = Field( + default="lax", + description="Prevents cookie from being sent with cross-site requests.", + ) + path: str | None = Field(default="/", description="Path to restrict session cookie access.") + https_only: bool = Field(default=False, description="Secure flag for HTTPS-only access.") + domain: str | None = Field( + default=None, + description="Domain for sharing cookies between subdomains or cross-domains.", + ) + + class Config: + extra = "allow" diff --git a/syncmaster/settings/server/static_files.py b/syncmaster/backend/settings/server/static_files.py similarity index 100% rename from syncmaster/settings/server/static_files.py rename to syncmaster/backend/settings/server/static_files.py diff --git a/syncmaster/backend/utils/jwt.py b/syncmaster/backend/utils/jwt.py new file mode 100644 index 00000000..40e8997a --- /dev/null +++ b/syncmaster/backend/utils/jwt.py @@ -0,0 +1,29 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 + +from jose import ExpiredSignatureError, JWTError, jwt + +from syncmaster.exceptions.auth import AuthorizationError + + +def sign_jwt(payload: dict, secret_key: str, security_algorithm: str) -> str: + return jwt.encode( + payload, + secret_key, + algorithm=security_algorithm, + ) + + +def decode_jwt(token: str, secret_key: str, security_algorithm: str) -> dict: + try: + result = jwt.decode( + token, + secret_key, + algorithms=[security_algorithm], + ) + if "exp" not in result: + raise ExpiredSignatureError("Missing expiration time in token") + + return result + except JWTError as e: + raise AuthorizationError("Invalid token") from e diff --git a/syncmaster/backend/utils/slug.py b/syncmaster/backend/utils/slug.py index 248af14d..334d83d7 100644 --- a/syncmaster/backend/utils/slug.py +++ b/syncmaster/backend/utils/slug.py @@ -1,5 +1,7 @@ # SPDX-FileCopyrightText: 2023-2024 MTS PJSC # SPDX-License-Identifier: Apache-2.0 + + def slugify(text: str) -> str: 
"""Convert ``Some value`` to ``some-value``. diff --git a/syncmaster/backend/utils/state.py b/syncmaster/backend/utils/state.py new file mode 100644 index 00000000..9707bae9 --- /dev/null +++ b/syncmaster/backend/utils/state.py @@ -0,0 +1,15 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +import secrets + +state_store: dict[str, str] = {} + + +def generate_state(redirect_url: str) -> str: + state = secrets.token_urlsafe(16) + state_store[state] = redirect_url + return state + + +def validate_state(state: str) -> str | None: + return state_store.pop(state, None) diff --git a/syncmaster/db/factory.py b/syncmaster/db/factory.py index 0404dad5..04061ed0 100644 --- a/syncmaster/db/factory.py +++ b/syncmaster/db/factory.py @@ -11,7 +11,7 @@ ) from syncmaster.backend.services import UnitOfWork -from syncmaster.settings import Settings +from syncmaster.backend.settings import BackendSettings as Settings def create_engine(connection_uri: str, **engine_kwargs: Any) -> AsyncEngine: diff --git a/syncmaster/db/migrations/env.py b/syncmaster/db/migrations/env.py index a790a711..578cd2f2 100644 --- a/syncmaster/db/migrations/env.py +++ b/syncmaster/db/migrations/env.py @@ -11,8 +11,8 @@ from sqlalchemy.engine import Connection from sqlalchemy.ext.asyncio import async_engine_from_config +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import Base -from syncmaster.settings import Settings config = context.config diff --git a/syncmaster/db/migrations/versions/2023-11-23_0001_create_user_table.py b/syncmaster/db/migrations/versions/2023-11-23_0001_create_user_table.py index eb397be7..9603efa7 100644 --- a/syncmaster/db/migrations/versions/2023-11-23_0001_create_user_table.py +++ b/syncmaster/db/migrations/versions/2023-11-23_0001_create_user_table.py @@ -22,12 +22,17 @@ def upgrade(): "user", sa.Column("id", sa.BigInteger(), nullable=False), sa.Column("username", sa.String(length=256), 
nullable=False), + sa.Column("email", sa.String(length=256), nullable=False), + sa.Column("first_name", sa.String(length=256), nullable=True), + sa.Column("last_name", sa.String(length=256), nullable=True), + sa.Column("middle_name", sa.String(length=256), nullable=True), sa.Column("is_superuser", sa.Boolean(), nullable=False), sa.Column("is_active", sa.Boolean(), nullable=False), sa.Column("created_at", sa.DateTime(), server_default=sa.text("now()"), nullable=False), sa.Column("updated_at", sa.DateTime(), server_default=sa.text("now()"), nullable=False), sa.Column("is_deleted", sa.Boolean(), nullable=False), sa.PrimaryKeyConstraint("id", name=op.f("pk__user")), + sa.UniqueConstraint("email", name=op.f("uq__user__email")), ) op.create_index(op.f("ix__user__username"), "user", ["username"], unique=True) diff --git a/syncmaster/db/models/user.py b/syncmaster/db/models/user.py index ad6def59..72163d15 100644 --- a/syncmaster/db/models/user.py +++ b/syncmaster/db/models/user.py @@ -12,6 +12,10 @@ class User(Base, TimestampMixin, DeletableMixin): id: Mapped[int] = mapped_column(BigInteger, primary_key=True) username: Mapped[str] = mapped_column(String(256), nullable=False, unique=True, index=True) + email: Mapped[str] = mapped_column(String(256), nullable=False, unique=True) + first_name: Mapped[str] = mapped_column(String(256), nullable=True) + last_name: Mapped[str] = mapped_column(String(256), nullable=True) + middle_name: Mapped[str] = mapped_column(String(256), nullable=True) is_superuser: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False) is_active: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False) diff --git a/syncmaster/db/repositories/credentials_repository.py b/syncmaster/db/repositories/credentials_repository.py index fce09ae9..a5ea78b4 100644 --- a/syncmaster/db/repositories/credentials_repository.py +++ b/syncmaster/db/repositories/credentials_repository.py @@ -8,12 +8,12 @@ from sqlalchemy.exc import DBAPIError, 
IntegrityError, NoResultFound from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import AuthData from syncmaster.db.repositories.base import Repository from syncmaster.db.repositories.utils import decrypt_auth_data, encrypt_auth_data from syncmaster.exceptions import SyncmasterError from syncmaster.exceptions.credentials import AuthDataNotFoundError -from syncmaster.settings import Settings class CredentialsRepository(Repository[AuthData]): diff --git a/syncmaster/db/repositories/user.py b/syncmaster/db/repositories/user.py index 391b4cb4..72bfabf4 100644 --- a/syncmaster/db/repositories/user.py +++ b/syncmaster/db/repositories/user.py @@ -55,11 +55,24 @@ async def update(self, user_id: int, data: dict) -> User: except IntegrityError as e: self._raise_error(e) - async def create(self, username: str, is_active: bool, is_superuser: bool = False) -> User: + async def create( + self, + username: str, + email: str, + is_active: bool, + first_name: str | None = None, + middle_name: str | None = None, + last_name: str | None = None, + is_superuser: bool = False, + ) -> User: query = ( insert(User) .values( username=username, + email=email, + first_name=first_name, + middle_name=middle_name, + last_name=last_name, is_active=is_active, is_superuser=is_superuser, ) diff --git a/syncmaster/db/repositories/utils.py b/syncmaster/db/repositories/utils.py index 5947724c..a01785db 100644 --- a/syncmaster/db/repositories/utils.py +++ b/syncmaster/db/repositories/utils.py @@ -5,14 +5,14 @@ from cryptography.fernet import Fernet from pydantic import SecretStr -from syncmaster.settings import Settings +from syncmaster.backend.settings import BackendSettings as Settings def decrypt_auth_data( value: str, settings: Settings, ) -> dict: - decryptor = Fernet(settings.CRYPTO_KEY) + decryptor = Fernet(settings.crypto_key) decrypted = decryptor.decrypt(value) return json.loads(decrypted) @@ -26,7 
+26,7 @@ def encrypt_auth_data( value: dict, settings: Settings, ) -> str: - encryptor = Fernet(settings.CRYPTO_KEY) + encryptor = Fernet(settings.crypto_key) serialized = json.dumps( value, ensure_ascii=False, diff --git a/syncmaster/exceptions/redirect.py b/syncmaster/exceptions/redirect.py new file mode 100644 index 00000000..e8afaafa --- /dev/null +++ b/syncmaster/exceptions/redirect.py @@ -0,0 +1,9 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 + +from syncmaster.exceptions.base import SyncmasterError + + +class RedirectException(SyncmasterError): + def __init__(self, redirect_url: str): + self.redirect_url = redirect_url diff --git a/syncmaster/scheduler/__main__.py b/syncmaster/scheduler/__main__.py old mode 100644 new mode 100755 index 74eac548..c22a855c --- a/syncmaster/scheduler/__main__.py +++ b/syncmaster/scheduler/__main__.py @@ -3,9 +3,9 @@ import asyncio import logging +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.scheduler.transfer_fetcher import TransferFetcher from syncmaster.scheduler.transfer_job_manager import TransferJobManager -from syncmaster.settings import Settings logger = logging.getLogger(__name__) diff --git a/syncmaster/scheduler/transfer_fetcher.py b/syncmaster/scheduler/transfer_fetcher.py index 0f7ac524..22689acc 100644 --- a/syncmaster/scheduler/transfer_fetcher.py +++ b/syncmaster/scheduler/transfer_fetcher.py @@ -2,9 +2,9 @@ # SPDX-License-Identifier: Apache-2.0 from sqlalchemy import select +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import Transfer from syncmaster.scheduler.utils import get_async_session -from syncmaster.settings import Settings class TransferFetcher: diff --git a/syncmaster/scheduler/transfer_job_manager.py b/syncmaster/scheduler/transfer_job_manager.py index a735e3ae..a4f06d6c 100644 --- a/syncmaster/scheduler/transfer_job_manager.py +++ 
b/syncmaster/scheduler/transfer_job_manager.py @@ -7,11 +7,11 @@ from kombu.exceptions import KombuError from syncmaster.backend.services.unit_of_work import UnitOfWork +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import RunType, Status, Transfer from syncmaster.exceptions.run import CannotConnectToTaskQueueError from syncmaster.scheduler.utils import get_async_session from syncmaster.schemas.v1.connections.connection import ReadAuthDataSchema -from syncmaster.settings import Settings from syncmaster.worker.config import celery diff --git a/syncmaster/scheduler/utils.py b/syncmaster/scheduler/utils.py index 3899c040..5a76bb28 100644 --- a/syncmaster/scheduler/utils.py +++ b/syncmaster/scheduler/utils.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine -from syncmaster.settings import Settings +from syncmaster.backend.settings import BackendSettings as Settings def get_async_session(settings: Settings) -> AsyncSession: diff --git a/syncmaster/schemas/v1/auth.py b/syncmaster/schemas/v1/auth.py index fab4c9f1..7777035e 100644 --- a/syncmaster/schemas/v1/auth.py +++ b/syncmaster/schemas/v1/auth.py @@ -9,4 +9,5 @@ class TokenPayloadSchema(BaseModel): class AuthTokenSchema(BaseModel): access_token: str - refresh_token: str + token_type: str + expires_at: float diff --git a/syncmaster/settings/__init__.py b/syncmaster/settings/__init__.py index 19aa9995..71ad0596 100644 --- a/syncmaster/settings/__init__.py +++ b/syncmaster/settings/__init__.py @@ -3,21 +3,18 @@ from enum import StrEnum from pydantic import Field -from pydantic.types import ImportString from pydantic_settings import BaseSettings from syncmaster.settings.broker import RabbitMQSettings from syncmaster.settings.database import DatabaseSettings -from syncmaster.settings.server import ServerSettings -from syncmaster.settings.worker import WorkerSettings +from 
syncmaster.settings.log import LoggingSettings class EnvTypes(StrEnum): LOCAL = "LOCAL" -# TODO: split settings into syncmaster/server/settings and syncmaster/worker/settings -class Settings(BaseSettings): +class SyncmasterSettings(BaseSettings): """Syncmaster backend settings. Backend can be configured in 2 ways: @@ -44,27 +41,18 @@ class Settings(BaseSettings): SYNCMASTER__SERVER__DEBUG=True """ - # TODO: move settings to corresponding classes (scheduler also) - SECRET_KEY: str = "secret" - SECURITY_ALGORITHM: str = "HS256" - CRYPTO_KEY: str = "UBgPTioFrtH2unlC4XFDiGf5sYfzbdSf_VgiUSaQc94=" + crypto_key: str + # TODO: move settings to corresponding classes (scheduler also) TZ: str = "UTC" SCHEDULER_TRANSFER_FETCHING_TIMEOUT: int = 180 # seconds SCHEDULER_MISFIRE_GRACE_TIME: int = 300 # seconds - TOKEN_EXPIRED_TIME: int = 60 * 60 * 10 # 10 hours - CREATE_SPARK_SESSION_FUNCTION: ImportString = "syncmaster.worker.spark.get_worker_spark_session" - database: DatabaseSettings = Field(description=":ref:`Database settings `") broker: RabbitMQSettings = Field(description=":ref:`Broker settings `") - server: ServerSettings = Field( - default_factory=ServerSettings, - description="Server settings None: - # TODO: remove settings object creating during import self.settings = Settings() self.engine = create_engine( url=self.settings.database.sync_url, diff --git a/syncmaster/worker/config.py b/syncmaster/worker/config.py index 1871c037..6c3fcc30 100644 --- a/syncmaster/worker/config.py +++ b/syncmaster/worker/config.py @@ -2,16 +2,16 @@ # SPDX-License-Identifier: Apache-2.0 from celery import Celery -from syncmaster.settings import Settings from syncmaster.worker.base import WorkerTask -# TODO: remove settings object creating during import -settings = Settings() +# TODO: remove global import of WorkerSettings +from syncmaster.worker.settings import WorkerSettings as Settings +worker_settings = Settings() celery = Celery( __name__, - broker=settings.broker.url, - backend="db+" + 
settings.database.sync_url, + broker=worker_settings.broker.url, + backend="db+" + worker_settings.database.sync_url, task_cls=WorkerTask, imports=[ "syncmaster.worker.transfer", diff --git a/syncmaster/worker/controller.py b/syncmaster/worker/controller.py index defcb73a..85502bed 100644 --- a/syncmaster/worker/controller.py +++ b/syncmaster/worker/controller.py @@ -19,7 +19,6 @@ S3TransferDTO, ) from syncmaster.exceptions.connection import ConnectionTypeNotRecognizedError -from syncmaster.settings import Settings from syncmaster.worker.handlers.base import Handler from syncmaster.worker.handlers.db.hive import HiveHandler from syncmaster.worker.handlers.db.oracle import OracleHandler @@ -27,6 +26,9 @@ from syncmaster.worker.handlers.file.hdfs import HDFSHandler from syncmaster.worker.handlers.file.s3 import S3Handler +# TODO: remove global import of WorkerSettings +from syncmaster.worker.settings import WorkerSettings as Settings + logger = logging.getLogger(__name__) @@ -70,10 +72,8 @@ def __init__( source_auth_data: dict, target_connection: Connection, target_auth_data: dict, - settings: Settings, ): self.run = run - self.settings = settings self.source_handler = self.get_handler( connection_data=source_connection.data, transfer_params=run.transfer.source_params, @@ -86,8 +86,7 @@ def __init__( ) def perform_transfer(self) -> None: - spark = self.settings.CREATE_SPARK_SESSION_FUNCTION( - settings=self.settings, + spark = Settings().CREATE_SPARK_SESSION_FUNCTION( run=self.run, source=self.source_handler.connection_dto, target=self.target_handler.connection_dto, diff --git a/syncmaster/settings/worker/__init__.py b/syncmaster/worker/settings/__init__.py similarity index 54% rename from syncmaster/settings/worker/__init__.py rename to syncmaster/worker/settings/__init__.py index f5757100..edb761ea 100644 --- a/syncmaster/settings/worker/__init__.py +++ b/syncmaster/worker/settings/__init__.py @@ -1,11 +1,11 @@ # SPDX-FileCopyrightText: 2023-2024 MTS PJSC # 
SPDX-License-Identifier: Apache-2.0 -from pydantic import BaseModel, Field +from pydantic.types import ImportString -from syncmaster.settings.log import LoggingSettings +from syncmaster.settings import SyncmasterSettings -class WorkerSettings(BaseModel): +class WorkerSettings(SyncmasterSettings): """Celery worker settings. Examples @@ -16,9 +16,7 @@ class WorkerSettings(BaseModel): SYNCMASTER__WORKER__LOGGING__PRESET=colored """ - logging: LoggingSettings = Field( - default_factory=LoggingSettings, - description=":ref:`Logging settings `", - ) CORRELATION_CELERY_HEADER_ID: str = "CORRELATION_CELERY_HEADER_ID" LOG_URL_TEMPLATE: str = "" + + CREATE_SPARK_SESSION_FUNCTION: ImportString = "syncmaster.worker.spark.get_worker_spark_session" diff --git a/syncmaster/worker/spark.py b/syncmaster/worker/spark.py index 0a16edf1..9bfc46fd 100644 --- a/syncmaster/worker/spark.py +++ b/syncmaster/worker/spark.py @@ -7,7 +7,6 @@ from syncmaster.db.models import Run from syncmaster.dto.connections import ConnectionDTO -from syncmaster.settings import Settings if TYPE_CHECKING: from pyspark.sql import SparkSession @@ -16,7 +15,6 @@ def get_worker_spark_session( - settings: Settings, run: Run, source: ConnectionDTO, target: ConnectionDTO, diff --git a/syncmaster/worker/transfer.py b/syncmaster/worker/transfer.py index 332296a9..35c53cfe 100644 --- a/syncmaster/worker/transfer.py +++ b/syncmaster/worker/transfer.py @@ -11,18 +11,19 @@ from sqlalchemy.orm import Session, selectinload from syncmaster.backend.middlewares.logging import setup_logging +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import AuthData, Run, Status, Transfer from syncmaster.db.repositories.utils import decrypt_auth_data from syncmaster.exceptions.run import RunNotFoundError -from syncmaster.settings import Settings from syncmaster.worker.base import WorkerTask from syncmaster.worker.config import celery from syncmaster.worker.controller import TransferController 
+from syncmaster.worker.settings import WorkerSettings logger = get_task_logger(__name__) -# TODO: configure correlation id from settings: settings.CORRELATION_CELERY_HEADER_ID -CORRELATION_CELERY_HEADER_ID = "CORRELATION_CELERY_HEADER_ID" +# TODO: remove global import of WorkerSettings +CORRELATION_CELERY_HEADER_ID = WorkerSettings().CORRELATION_CELERY_HEADER_ID @celery.task(name="run_transfer_task", bind=True, track_started=True) @@ -69,7 +70,6 @@ def run_transfer(session: Session, run_id: int, settings: Settings): target_connection=run.transfer.target_connection, source_auth_data=source_auth_data, target_auth_data=target_auth_data, - settings=settings, ) controller.perform_transfer() except Exception: @@ -86,8 +86,7 @@ @after_setup_logger.connect def setup_loggers(*args, **kwargs): - # TODO: remove calling Settings - setup_logging(Settings().worker.logging.get_log_config_path()) + setup_logging(Settings().logging.get_log_config_path()) @before_task_publish.connect() diff --git a/tests/conftest.py b/tests/conftest.py index 8304f8d7..7e213f9c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,7 @@ import asyncio import logging import os +import time from collections.abc import AsyncGenerator from pathlib import Path @@ -16,8 +17,10 @@ ) from syncmaster.backend import application_factory +from syncmaster.backend.settings import BackendSettings as Settings +from syncmaster.backend.settings.auth.jwt import JWTSettings +from syncmaster.backend.utils.jwt import sign_jwt from syncmaster.db.models import Base -from syncmaster.settings import Settings from tests.mocks import UserTestRoles from tests.settings import TestSettings from tests.utils import prepare_new_database, run_async_migrations @@ -36,6 +39,23 @@ ] +@pytest.fixture +def access_token_settings(settings: Settings) -> JWTSettings: + return JWTSettings.parse_obj(settings.auth.access_token) + + +@pytest.fixture 
+def access_token_factory(access_token_settings: JWTSettings): + def _generate_access_token(user_id): + return sign_jwt( + {"user_id": user_id, "exp": time.time() + 1000}, + access_token_settings.secret_key.get_secret_value(), + access_token_settings.security_algorithm, + ) + + return _generate_access_token + + @pytest.fixture(scope="session") def event_loop(): policy = asyncio.get_event_loop_policy() diff --git a/tests/test_integration/test_run_transfer/conftest.py b/tests/test_integration/test_run_transfer/conftest.py index 4d0aa3a2..80e8f242 100644 --- a/tests/test_integration/test_run_transfer/conftest.py +++ b/tests/test_integration/test_run_transfer/conftest.py @@ -24,7 +24,7 @@ from pytest import FixtureRequest from sqlalchemy.ext.asyncio import AsyncSession -from syncmaster.backend.api.v1.auth.utils import sign_jwt +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import Group from syncmaster.dto.connections import ( HDFSConnectionDTO, @@ -33,7 +33,6 @@ PostgresConnectionDTO, S3ConnectionDTO, ) -from syncmaster.settings import Settings from tests.mocks import MockUser, UserTestRoles from tests.resources.file_df_connection.test_data import data from tests.settings import TestSettings @@ -563,6 +562,7 @@ def target_file_format(request: FixtureRequest): async def group_owner( settings: Settings, session: AsyncSession, + access_token_factory, ): user = await create_user( session=session, @@ -570,9 +570,10 @@ async def group_owner( is_active=True, ) + token = access_token_factory(user.id) yield MockUser( user=user, - auth_token=sign_jwt(user.id, settings), + auth_token=token, role=UserTestRoles.Owner, ) diff --git a/tests/test_integration/test_scheduler/scheduler_fixtures/transfer_fixture.py b/tests/test_integration/test_scheduler/scheduler_fixtures/transfer_fixture.py index d88f4fdc..60191ea2 100644 --- a/tests/test_integration/test_scheduler/scheduler_fixtures/transfer_fixture.py +++ 
b/tests/test_integration/test_scheduler/scheduler_fixtures/transfer_fixture.py @@ -3,9 +3,8 @@ import pytest_asyncio from sqlalchemy.ext.asyncio import AsyncSession -from syncmaster.backend.api.v1.auth.utils import sign_jwt +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import ( MockConnection, MockCredentials, @@ -31,6 +30,7 @@ async def group_transfer_integration_mock( settings: Settings, create_connection_data: dict | None, create_transfer_data: dict | None, + access_token_factory, ) -> AsyncGenerator[MockTransfer, None]: group_owner = await create_user( session=session, @@ -60,7 +60,7 @@ async def group_transfer_integration_mock( username=username, group_id=group.id, session=session, - settings=settings, + access_token_factory=access_token_factory, ), ) @@ -69,12 +69,11 @@ async def group_transfer_integration_mock( group=group, owner=MockUser( user=group_owner, - auth_token=sign_jwt(group_owner.id, settings), + auth_token=access_token_factory(group_owner.id), role=UserTestRoles.Owner, ), members=members, ) - source_connection = await create_connection( session=session, name="group_transfer_source_connection", diff --git a/tests/test_integration/test_scheduler/test_scheduler.py b/tests/test_integration/test_scheduler/test_scheduler.py index cbf6ad36..ab18d7ed 100644 --- a/tests/test_integration/test_scheduler/test_scheduler.py +++ b/tests/test_integration/test_scheduler/test_scheduler.py @@ -5,9 +5,9 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import Run, Status from syncmaster.scheduler import TransferFetcher, TransferJobManager -from syncmaster.settings import Settings from tests.mocks import MockTransfer pytestmark = [pytest.mark.asyncio, pytest.mark.worker, 
pytest.mark.scheduler_integration] diff --git a/tests/test_unit/conftest.py b/tests/test_unit/conftest.py index 94bc7b61..1fde63c4 100644 --- a/tests/test_unit/conftest.py +++ b/tests/test_unit/conftest.py @@ -1,13 +1,11 @@ import secrets -from collections.abc import AsyncGenerator import pytest_asyncio from sqlalchemy.ext.asyncio import AsyncSession -from syncmaster.backend.api.v1.auth.utils import sign_jwt +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import Queue, User, UserGroup from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import ( MockConnection, MockCredentials, @@ -31,7 +29,7 @@ async def create_group_member( username: str, group_id: int, session: AsyncSession, - settings: Settings, + access_token_factory, ) -> MockUser: role_name = username.split("_")[-1] @@ -58,9 +56,10 @@ async def create_group_member( ), ) + token = access_token_factory(user.id) return MockUser( user=user, - auth_token=sign_jwt(user.id, settings), + auth_token=token, role=role, ) @@ -82,51 +81,55 @@ async def add_user_to_group( @pytest_asyncio.fixture -async def superuser(session: AsyncSession, settings: Settings): +async def superuser(session: AsyncSession, access_token_factory) -> MockUser: async with create_user_cm(session, username="superuser", is_active=True, is_superuser=True) as user: + token = access_token_factory(user.id) yield MockUser( user=user, - auth_token=sign_jwt(user.id, settings), + auth_token=token, role=UserTestRoles.Superuser, ) @pytest_asyncio.fixture -async def simple_user(session: AsyncSession, settings: Settings): +async def simple_user(session: AsyncSession, access_token_factory) -> MockUser: async with create_user_cm(session, username="simple_user", is_active=True) as user: + token = access_token_factory(user.id) yield MockUser( user=user, - auth_token=sign_jwt(user.id, settings), + auth_token=token, role=UserTestRoles.Developer, ) 
@pytest_asyncio.fixture -async def inactive_user(session: AsyncSession, settings: Settings): +async def inactive_user(session: AsyncSession, access_token_factory) -> MockUser: async with create_user_cm(session, username="inactive_user") as user: + token = access_token_factory(user.id) yield MockUser( user=user, - auth_token=sign_jwt(user.id, settings), + auth_token=token, role=UserTestRoles.Developer, ) @pytest_asyncio.fixture -async def deleted_user(session: AsyncSession, settings: Settings): +async def deleted_user(session: AsyncSession, access_token_factory) -> MockUser: async with create_user_cm( session, username="deleted_user", is_deleted=True, ) as user: + token = access_token_factory(user.id) yield MockUser( user=user, - auth_token=sign_jwt(user.id, settings), + auth_token=token, role=UserTestRoles.Developer, ) @pytest_asyncio.fixture -async def user_with_many_roles(session: AsyncSession, settings: Settings, simple_user: MockUser) -> MockUser: +async def user_with_many_roles(session: AsyncSession, simple_user: MockUser, access_token_factory) -> MockUser: user = await create_user( session=session, username="multi_role_user", @@ -155,9 +158,10 @@ async def user_with_many_roles(session: AsyncSession, settings: Settings, simple await session.commit() + token = access_token_factory(user.id) mock_user = MockUser( user=user, - auth_token=sign_jwt(user.id, settings), + auth_token=token, ) yield mock_user @@ -170,7 +174,7 @@ async def user_with_many_roles(session: AsyncSession, settings: Settings, simple @pytest_asyncio.fixture -async def empty_group(session: AsyncSession, settings) -> AsyncGenerator[MockGroup, None]: +async def empty_group(session: AsyncSession, access_token_factory) -> MockGroup: owner = await create_user( session=session, username="empty_group_owner", @@ -181,11 +185,12 @@ async def empty_group(session: AsyncSession, settings) -> AsyncGenerato name="empty_group", owner_id=owner.id, ) + token = 
access_token_factory(owner.id) yield MockGroup( group=group, owner=MockUser( user=owner, - auth_token=sign_jwt(owner.id, settings), + auth_token=token, role=UserTestRoles.Owner, ), members=[], @@ -196,7 +201,7 @@ async def empty_group(session: AsyncSession, settings) -> AsyncGenerator[MockGro @pytest_asyncio.fixture -async def group(session: AsyncSession, settings: Settings) -> AsyncGenerator[MockGroup, None]: +async def group(session: AsyncSession, access_token_factory) -> MockGroup: owner = await create_user( session=session, username="notempty_group_owner", @@ -215,16 +220,17 @@ async def group(session: AsyncSession, settings: Settings) -> AsyncGenerator[Moc username=username, group_id=group.id, session=session, - settings=settings, + access_token_factory=access_token_factory, ), ) await session.commit() + token = access_token_factory(owner.id) yield MockGroup( group=group, owner=MockUser( user=owner, - auth_token=sign_jwt(owner.id, settings), + auth_token=token, role=UserTestRoles.Owner, ), members=members, @@ -239,8 +245,8 @@ async def group(session: AsyncSession, settings: Settings) -> AsyncGenerator[Moc @pytest_asyncio.fixture async def mock_group( session: AsyncSession, - settings: Settings, -) -> AsyncGenerator[MockGroup, None]: + access_token_factory, +): group_owner = await create_user( session=session, username=f"{secrets.token_hex(5)}_group_connection_owner", @@ -263,7 +269,7 @@ async def mock_group( username=username, group_id=group.id, session=session, - settings=settings, + access_token_factory=access_token_factory, ), ) @@ -273,7 +279,7 @@ async def mock_group( group=group, owner=MockUser( user=group_owner, - auth_token=sign_jwt(group_owner.id, settings), + auth_token=access_token_factory(group_owner.id), role=UserTestRoles.Owner, ), members=members, @@ -288,9 +294,8 @@ async def mock_group( @pytest_asyncio.fixture async def group_queue( session: AsyncSession, - settings: Settings, mock_group: MockGroup, -) -> AsyncGenerator[Queue, None]: +) -> 
Queue: queue = await create_queue( session=session, name=f"{secrets.token_hex(5)}_test_queue", @@ -306,7 +311,6 @@ async def group_queue( @pytest_asyncio.fixture async def mock_queue( session: AsyncSession, - settings: Settings, group: MockGroup, ) -> Queue: queue = await create_queue( diff --git a/tests/test_unit/test_connections/connection_fixtures/group_connection_fixture.py b/tests/test_unit/test_connections/connection_fixtures/group_connection_fixture.py index 73638b60..96fcd088 100644 --- a/tests/test_unit/test_connections/connection_fixtures/group_connection_fixture.py +++ b/tests/test_unit/test_connections/connection_fixtures/group_connection_fixture.py @@ -3,9 +3,8 @@ import pytest_asyncio from sqlalchemy.ext.asyncio import AsyncSession -from syncmaster.backend.api.v1.auth.utils import sign_jwt +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import ( MockConnection, MockCredentials, @@ -28,6 +27,7 @@ async def group_connection( settings: Settings, create_connection_data: dict | None, create_connection_auth_data: dict | None, + access_token_factory, ) -> MockConnection: group_owner = await create_user( session=session, @@ -50,7 +50,7 @@ async def group_connection( username=username, group_id=group.id, session=session, - settings=settings, + access_token_factory=access_token_factory, ), ) @@ -68,7 +68,7 @@ async def group_connection( connection_id=connection.id, auth_data=create_connection_auth_data, ) - + token = access_token_factory(group_owner.id) yield MockConnection( credentials=MockCredentials( value=decrypt_auth_data(credentials.value, settings=settings), @@ -79,7 +79,7 @@ async def group_connection( group=group, owner=MockUser( user=group_owner, - auth_token=sign_jwt(group_owner.id, settings), + auth_token=token, role=UserTestRoles.Owner, ), members=members, diff --git 
a/tests/test_unit/test_connections/test_copy_connection.py b/tests/test_unit/test_connections/test_copy_connection.py index 3d2e98f0..eb46beca 100644 --- a/tests/test_unit/test_connections/test_copy_connection.py +++ b/tests/test_unit/test_connections/test_copy_connection.py @@ -5,9 +5,9 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import AuthData, Connection from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import MockConnection, MockGroup, MockUser, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend] diff --git a/tests/test_unit/test_connections/test_create_connection.py b/tests/test_unit/test_connections/test_create_connection.py index 14cb2f73..3efc280f 100644 --- a/tests/test_unit/test_connections/test_create_connection.py +++ b/tests/test_unit/test_connections/test_create_connection.py @@ -3,9 +3,9 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import AuthData, Connection from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import MockConnection, MockGroup, MockUser, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend] diff --git a/tests/test_unit/test_connections/test_db_connection/test_create_clickhouse_connection.py b/tests/test_unit/test_connections/test_db_connection/test_create_clickhouse_connection.py index 3caba257..f87996df 100644 --- a/tests/test_unit/test_connections/test_db_connection/test_create_clickhouse_connection.py +++ b/tests/test_unit/test_connections/test_db_connection/test_create_clickhouse_connection.py @@ -3,9 +3,9 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from 
syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import AuthData, Connection from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import MockGroup, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend, pytest.mark.clickhouse] diff --git a/tests/test_unit/test_connections/test_db_connection/test_create_hdfs_connection.py b/tests/test_unit/test_connections/test_db_connection/test_create_hdfs_connection.py index 290ff99b..519ba44b 100644 --- a/tests/test_unit/test_connections/test_db_connection/test_create_hdfs_connection.py +++ b/tests/test_unit/test_connections/test_db_connection/test_create_hdfs_connection.py @@ -3,9 +3,9 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import AuthData, Connection from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import MockGroup, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend, pytest.mark.hdfs] diff --git a/tests/test_unit/test_connections/test_db_connection/test_create_hive_connection.py b/tests/test_unit/test_connections/test_db_connection/test_create_hive_connection.py index aee3c3de..9bbaeb02 100644 --- a/tests/test_unit/test_connections/test_db_connection/test_create_hive_connection.py +++ b/tests/test_unit/test_connections/test_db_connection/test_create_hive_connection.py @@ -3,9 +3,9 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import AuthData, Connection from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import MockGroup, UserTestRoles pytestmark = [pytest.mark.asyncio, 
pytest.mark.backend, pytest.mark.hive] diff --git a/tests/test_unit/test_connections/test_db_connection/test_create_mssql_connection.py b/tests/test_unit/test_connections/test_db_connection/test_create_mssql_connection.py index 62d4f3e1..b04ec3f7 100644 --- a/tests/test_unit/test_connections/test_db_connection/test_create_mssql_connection.py +++ b/tests/test_unit/test_connections/test_db_connection/test_create_mssql_connection.py @@ -3,9 +3,9 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import AuthData, Connection from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import MockGroup, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend, pytest.mark.mssql] diff --git a/tests/test_unit/test_connections/test_db_connection/test_create_mysql_connection.py b/tests/test_unit/test_connections/test_db_connection/test_create_mysql_connection.py index 5ce60c8e..a9e5e03d 100644 --- a/tests/test_unit/test_connections/test_db_connection/test_create_mysql_connection.py +++ b/tests/test_unit/test_connections/test_db_connection/test_create_mysql_connection.py @@ -3,9 +3,9 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import AuthData, Connection from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import MockGroup, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend, pytest.mark.mysql] diff --git a/tests/test_unit/test_connections/test_db_connection/test_create_oracle_connection.py b/tests/test_unit/test_connections/test_db_connection/test_create_oracle_connection.py index 6658fa86..2e566bdb 100644 --- 
a/tests/test_unit/test_connections/test_db_connection/test_create_oracle_connection.py +++ b/tests/test_unit/test_connections/test_db_connection/test_create_oracle_connection.py @@ -3,9 +3,9 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import AuthData, Connection from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import MockGroup, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend, pytest.mark.oracle] diff --git a/tests/test_unit/test_connections/test_db_connection/test_create_postgres_connection.py b/tests/test_unit/test_connections/test_db_connection/test_create_postgres_connection.py index a67e7104..583b631b 100644 --- a/tests/test_unit/test_connections/test_db_connection/test_create_postgres_connection.py +++ b/tests/test_unit/test_connections/test_db_connection/test_create_postgres_connection.py @@ -3,9 +3,9 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import AuthData, Connection from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import MockGroup, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend, pytest.mark.postgres] diff --git a/tests/test_unit/test_connections/test_db_connection/test_create_s3_connection.py b/tests/test_unit/test_connections/test_db_connection/test_create_s3_connection.py index c58b69bf..0acc8346 100644 --- a/tests/test_unit/test_connections/test_db_connection/test_create_s3_connection.py +++ b/tests/test_unit/test_connections/test_db_connection/test_create_s3_connection.py @@ -3,9 +3,9 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import 
BackendSettings as Settings from syncmaster.db.models import AuthData, Connection from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import MockGroup, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend, pytest.mark.s3] diff --git a/tests/test_unit/test_connections/test_read_connections.py b/tests/test_unit/test_connections/test_read_connections.py index 54eb6826..9d156d1c 100644 --- a/tests/test_unit/test_connections/test_read_connections.py +++ b/tests/test_unit/test_connections/test_read_connections.py @@ -6,7 +6,7 @@ from httpx import AsyncClient from sqlalchemy.ext.asyncio import AsyncSession -from syncmaster.settings import Settings +from syncmaster.backend.settings import BackendSettings as Settings from tests.mocks import MockConnection, MockGroup, MockUser, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend] diff --git a/tests/test_unit/test_runs/run_fixtures/run_fixture.py b/tests/test_unit/test_runs/run_fixtures/run_fixture.py index b387cc8f..085d96ab 100644 --- a/tests/test_unit/test_runs/run_fixtures/run_fixture.py +++ b/tests/test_unit/test_runs/run_fixtures/run_fixture.py @@ -3,8 +3,6 @@ import pytest_asyncio from sqlalchemy.ext.asyncio import AsyncSession -from syncmaster.backend.api.v1.auth.utils import sign_jwt -from syncmaster.settings import Settings from tests.mocks import ( MockConnection, MockGroup, @@ -25,7 +23,7 @@ @pytest_asyncio.fixture -async def group_run(session: AsyncSession, settings: Settings) -> MockRun: +async def group_run(session: AsyncSession, access_token_factory) -> MockRun: group_owner = await create_user(session=session, username="group_owner_connection", is_active=True) group = await create_group(session=session, name="connection_group", owner_id=group_owner.id) members: list[MockUser] = [] @@ -39,15 +37,16 @@ async def group_run(session: AsyncSession, settings: Settings) -> MockRun: username=username, 
group_id=group.id, session=session, - settings=settings, + access_token_factory=access_token_factory, ), ) await session.commit() + token = access_token_factory(group_owner.id) mock_group = MockGroup( group=group, owner=MockUser( user=group_owner, - auth_token=sign_jwt(group_owner.id, settings), + auth_token=token, role=UserTestRoles.Owner, ), members=members, diff --git a/tests/test_unit/test_runs/test_create_run.py b/tests/test_unit/test_runs/test_create_run.py index 473c6190..70c48ab7 100644 --- a/tests/test_unit/test_runs/test_create_run.py +++ b/tests/test_unit/test_runs/test_create_run.py @@ -6,7 +6,6 @@ from sqlalchemy.ext.asyncio import AsyncSession from syncmaster.db.models import Run, RunType, Status -from syncmaster.settings import Settings from tests.mocks import MockGroup, MockTransfer, MockUser, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend] @@ -137,13 +136,9 @@ async def test_superuser_can_create_run( superuser: MockUser, group_transfer: MockTransfer, session: AsyncSession, - settings: Settings, mocker, ) -> None: # Arrange - settings.worker.LOG_URL_TEMPLATE = ( - "https://grafana.example.com?correlation_id={{ correlation_id }}&run_id={{ run.id }}" - ) mock_send_task = mocker.patch("syncmaster.worker.config.celery.send_task") mock_to_thread = mocker.patch("asyncio.to_thread", new_callable=AsyncMock) diff --git a/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_fetcher_fixture.py b/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_fetcher_fixture.py index 3e0169b7..0d6601ff 100644 --- a/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_fetcher_fixture.py +++ b/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_fetcher_fixture.py @@ -1,7 +1,7 @@ import pytest +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.scheduler.transfer_fetcher import TransferFetcher -from syncmaster.settings import Settings @pytest.fixture diff --git 
a/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_job_manager_fixture.py b/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_job_manager_fixture.py index 3ee2e2f5..be1647bd 100644 --- a/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_job_manager_fixture.py +++ b/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_job_manager_fixture.py @@ -4,8 +4,8 @@ from sqlalchemy import text from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.scheduler.transfer_job_manager import TransferJobManager -from syncmaster.settings import Settings @pytest_asyncio.fixture diff --git a/tests/test_unit/test_transfers/transfer_fixtures/transfer_fixture.py b/tests/test_unit/test_transfers/transfer_fixtures/transfer_fixture.py index d89bf396..bf0912f5 100644 --- a/tests/test_unit/test_transfers/transfer_fixtures/transfer_fixture.py +++ b/tests/test_unit/test_transfers/transfer_fixtures/transfer_fixture.py @@ -4,9 +4,8 @@ import pytest_asyncio from sqlalchemy.ext.asyncio import AsyncSession -from syncmaster.backend.api.v1.auth.utils import sign_jwt +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.repositories.utils import decrypt_auth_data -from syncmaster.settings import Settings from tests.mocks import ( MockConnection, MockCredentials, @@ -32,6 +31,7 @@ async def group_transfer( settings: Settings, create_connection_data: dict | None, create_transfer_data: dict | None, + access_token_factory, ) -> AsyncGenerator[MockTransfer, None]: group_owner = await create_user( session=session, @@ -61,16 +61,17 @@ async def group_transfer( username=username, group_id=group.id, session=session, - settings=settings, + access_token_factory=access_token_factory, ), ) await session.commit() + token = access_token_factory(group_owner.id) mock_group = MockGroup( group=group, owner=MockUser( user=group_owner, - auth_token=sign_jwt(group_owner.id, 
settings), + auth_token=token, role=UserTestRoles.Owner, ), members=members, diff --git a/tests/test_unit/test_transfers/transfer_fixtures/transfer_with_duplicate_fixture.py b/tests/test_unit/test_transfers/transfer_fixtures/transfer_with_duplicate_fixture.py index 5de2e7d6..947c6266 100644 --- a/tests/test_unit/test_transfers/transfer_fixtures/transfer_with_duplicate_fixture.py +++ b/tests/test_unit/test_transfers/transfer_fixtures/transfer_with_duplicate_fixture.py @@ -3,8 +3,8 @@ import pytest_asyncio from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import Queue -from syncmaster.settings import Settings from tests.mocks import MockTransfer from tests.test_unit.utils import create_connection, create_credentials, create_transfer diff --git a/tests/test_unit/test_transfers/transfer_fixtures/transfer_with_user_role_fixtures.py b/tests/test_unit/test_transfers/transfer_fixtures/transfer_with_user_role_fixtures.py index 0a833f0e..c4d4c6b6 100644 --- a/tests/test_unit/test_transfers/transfer_fixtures/transfer_with_user_role_fixtures.py +++ b/tests/test_unit/test_transfers/transfer_fixtures/transfer_with_user_role_fixtures.py @@ -3,8 +3,8 @@ import pytest_asyncio from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import Connection, Queue -from syncmaster.settings import Settings from tests.mocks import MockTransfer, UserTestRoles from tests.test_unit.conftest import add_user_to_group from tests.test_unit.utils import create_connection, create_credentials, create_transfer diff --git a/tests/test_unit/test_transfers/transfer_fixtures/transfers_fixture.py b/tests/test_unit/test_transfers/transfer_fixtures/transfers_fixture.py index 9790372d..e83b9acc 100644 --- a/tests/test_unit/test_transfers/transfer_fixtures/transfers_fixture.py +++ 
b/tests/test_unit/test_transfers/transfer_fixtures/transfers_fixture.py @@ -3,9 +3,9 @@ import pytest_asyncio from sqlalchemy.ext.asyncio import AsyncSession +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.repositories.utils import decrypt_auth_data from syncmaster.schemas.v1.connection_types import ConnectionType -from syncmaster.settings import Settings from tests.mocks import MockConnection, MockCredentials, MockTransfer from tests.test_unit.utils import create_connection, create_credentials, create_transfer diff --git a/tests/test_unit/tests_users/test_read_user.py b/tests/test_unit/tests_users/test_read_user.py index 756faf22..b8c8619f 100644 --- a/tests/test_unit/tests_users/test_read_user.py +++ b/tests/test_unit/tests_users/test_read_user.py @@ -55,7 +55,7 @@ async def test_get_user_inactive(client: AsyncClient, simple_user: MockUser, ina assert response.json() == { "error": { "code": "forbidden", - "message": "Inactive user", + "message": "You have no power here", "details": None, }, } @@ -95,7 +95,7 @@ async def test_get_current_user_inactive(client: AsyncClient, inactive_user: Moc assert response.json() == { "error": { "code": "forbidden", - "message": "Inactive user", + "message": "You have no power here", "details": None, }, } diff --git a/tests/test_unit/tests_users/test_read_users.py b/tests/test_unit/tests_users/test_read_users.py index 3f988d67..c120f0df 100644 --- a/tests/test_unit/tests_users/test_read_users.py +++ b/tests/test_unit/tests_users/test_read_users.py @@ -54,7 +54,7 @@ async def test_get_users_inactive(client: AsyncClient, inactive_user: MockUser): assert response.json() == { "error": { "code": "forbidden", - "message": "Inactive user", + "message": "You have no power here", "details": None, }, } diff --git a/tests/test_unit/utils.py b/tests/test_unit/utils.py index b3f8fde2..78554bd7 100644 --- a/tests/test_unit/utils.py +++ b/tests/test_unit/utils.py @@ -11,6 +11,7 @@ from 
sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import joinedload +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import ( AuthData, Connection, @@ -23,7 +24,6 @@ ) from syncmaster.db.repositories.utils import encrypt_auth_data from syncmaster.schemas.v1.transfers import ReadFullTransferSchema -from syncmaster.settings import Settings @asynccontextmanager @@ -33,9 +33,21 @@ async def create_user_cm( is_active: bool = False, is_superuser: bool = False, is_deleted: bool = False, + email: str | None = None, + first_name: str | None = None, + middle_name: str | None = None, + last_name: str | None = None, ) -> AsyncGenerator[User, None]: + email = email or f"{username}@user.user" + first_name = first_name or f"{username}_first" + middle_name = middle_name or f"{username}_middle" + last_name = last_name or f"{username}_last" u = User( username=username, + email=email, + first_name=first_name, + middle_name=middle_name, + last_name=last_name, is_active=is_active, is_superuser=is_superuser, is_deleted=is_deleted, @@ -54,9 +66,21 @@ async def create_user( is_active: bool = False, is_superuser: bool = False, is_deleted: bool = False, + email: str | None = None, + first_name: str | None = None, + middle_name: str | None = None, + last_name: str | None = None, ) -> User: + email = email or f"{username}@user.user" + first_name = first_name or f"{username}_first" + middle_name = middle_name or f"{username}_middle" + last_name = last_name or f"{username}_last" u = User( username=username, + email=email, + first_name=first_name, + middle_name=middle_name, + last_name=last_name, is_active=is_active, is_superuser=is_superuser, is_deleted=is_deleted, diff --git a/tests/utils.py b/tests/utils.py index 77a06116..cdda8747 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -17,8 +17,8 @@ create_async_engine, ) +from syncmaster.backend.settings import BackendSettings as Settings from syncmaster.db.models import Status -from syncmaster.settings import Settings logger = 
logging.getLogger(__name__)