diff --git a/.github/workflows/scheduler-tests.yml b/.github/workflows/scheduler-tests.yml new file mode 100644 index 00000000..db1e1bc7 --- /dev/null +++ b/.github/workflows/scheduler-tests.yml @@ -0,0 +1,79 @@ +name: Scheduler Tests +on: + workflow_call: + +env: + DEFAULT_PYTHON: '3.12' + +jobs: + tests: + name: Run Scheduler tests + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Cache jars + uses: actions/cache@v4 + with: + path: ./cached_jars + key: ${{ runner.os }}-python-${{ env.DEFAULT_PYTHON }}-test-scheduler + restore-keys: | + ${{ runner.os }}-python-${{ env.DEFAULT_PYTHON }}-test-scheduler + ${{ runner.os }}-python- + + - name: Build Worker Image + uses: docker/build-push-action@v6 + with: + context: . + tags: mtsrus/syncmaster-worker:${{ github.sha }} + target: test + file: docker/Dockerfile.worker + load: true + cache-from: mtsrus/syncmaster-worker:develop + + - name: Docker compose up + run: | + docker compose -f docker-compose.test.yml --profile all down -v --remove-orphans + docker compose -f docker-compose.test.yml --profile worker up -d --wait --wait-timeout 200 + env: + WORKER_IMAGE_TAG: ${{ github.sha }} + + # This is important, as coverage is exported after receiving SIGTERM + - name: Run Scheduler Tests + run: | + docker compose -f ./docker-compose.test.yml --profile worker exec -T worker coverage run -m pytest -vvv -s -m "worker and scheduler_integration" + + - name: Dump worker logs on failure + if: failure() + uses: jwalton/gh-docker-logs@v2 + with: + images: mtsrus/syncmaster-worker + dest: ./logs + + - name: Shutdown + if: always() + run: | + docker compose -f docker-compose.test.yml --profile all down -v --remove-orphans + + - name: Upload worker logs + uses: actions/upload-artifact@v4 + if: failure() + with: + name: worker-logs-scheduler + path: logs/* + + - name: Upload coverage results + uses: actions/upload-artifact@v4 + with: + name: coverage-scheduler + path: reports/* + # https://github.com/actions/upload-artifact/issues/602 + include-hidden-files: true diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 4c66a009..eb978bb6 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -2,7 +2,7 @@ name: Run All Tests on: push: branches: - - develop + - feature/DOP-21350 pull_request: branches-ignore: - master @@ -32,6 +32,10 @@ jobs: name: S3 tests uses: ./.github/workflows/s3-tests.yml + scheduler_tests: + name: Scheduler tests + uses: ./.github/workflows/scheduler-tests.yml + unit_tests: name: Unit tests uses: ./.github/workflows/unit-test.yml diff --git a/.github/workflows/unit-test.yml b/.github/workflows/unit-test.yml index fddec5e4..b662bb9d 100644 --- a/.github/workflows/unit-test.yml +++ b/.github/workflows/unit-test.yml @@ -56,7 +56,7 @@ jobs: run: | source .env.local poetry run python -m syncmaster.db.migrations upgrade head - poetry run coverage run -m pytest -vvv -s -m backend + poetry run coverage run -m pytest -vvv -s -m "backend or scheduler" - name: Shutdown if: always() diff --git a/docker-compose.test.yml b/docker-compose.test.yml index 7ad4250b..1e304566 100644 --- a/docker-compose.test.yml +++ b/docker-compose.test.yml @@ -51,6 +51,25 @@ services: condition: service_healthy profiles: [backend, all] + scheduler: + image: mtsrus/syncmaster-backend:${BACKEND_IMAGE_TAG:-test} + restart: unless-stopped + build: + 
dockerfile: docker/Dockerfile.backend
+      context: .
+      target: test
+    env_file: .env.docker
+    volumes:
+      - ./syncmaster:/app/syncmaster
+      - ./pyproject.toml:/app/pyproject.toml
+    depends_on:
+      db:
+        condition: service_healthy
+      rabbitmq:
+        condition: service_healthy
+    entrypoint: [python, -m, syncmaster.scheduler]
+    profiles: [scheduler, all]
+
   worker:
     image: mtsrus/syncmaster-worker:${WORKER_IMAGE_TAG:-test}
     restart: unless-stopped
@@ -59,6 +78,7 @@
       context: .
       target: test
     command: --loglevel=info -Q test_queue
+    entrypoint: [python, -m, celery, -A, tests.test_integration.celery_test, worker, --max-tasks-per-child=1]
     env_file: .env.docker
     volumes:
       - ./syncmaster:/app/syncmaster
@@ -71,7 +91,7 @@
         condition: service_healthy
       rabbitmq:
         condition: service_healthy
-    profiles: [worker, s3, oracle, hdfs, hive, all]
+    profiles: [worker, scheduler, s3, oracle, hdfs, hive, all]

   test-postgres:
     image: postgres
diff --git a/docs/changelog/next_release/114.feature.rst b/docs/changelog/next_release/114.feature.rst
new file mode 100644
index 00000000..d7e2072f
--- /dev/null
+++ b/docs/changelog/next_release/114.feature.rst
@@ -0,0 +1 @@
+Implement a scheduler that runs Celery tasks on a schedule. Scheduling is configured per transfer via the ``is_scheduled`` and ``schedule`` (cron-like expression) fields of the ``Transfer`` table. The ``Run`` model now has a ``type`` field with options ``MANUAL`` and ``SCHEDULED``.
\ No newline at end of file
diff --git a/poetry.lock b/poetry.lock
index 92ae16ed..96f0f3bc 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -13,13 +13,13 @@ files = [

 [[package]]
 name = "alembic"
-version = "1.13.3"
+version = "1.14.0"
 description = "A database migration tool for SQLAlchemy."
 optional = true
 python-versions = ">=3.8"
 files = [
-    {file = "alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e"},
-    {file = "alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2"},
+    {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"},
+    {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"},
 ]

 [package.dependencies]
@@ -32,13 +32,13 @@ tz = ["backports.zoneinfo"]

 [[package]]
 name = "amqp"
-version = "5.2.0"
+version = "5.3.1"
 description = "Low-level AMQP client for Python (fork of amqplib)."
optional = true python-versions = ">=3.6" files = [ - {file = "amqp-5.2.0-py3-none-any.whl", hash = "sha256:827cb12fb0baa892aad844fd95258143bce4027fdac4fccddbc43330fd281637"}, - {file = "amqp-5.2.0.tar.gz", hash = "sha256:a1ecff425ad063ad42a486c902807d1482311481c8ad95a72694b2975e75f7fd"}, + {file = "amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2"}, + {file = "amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432"}, ] [package.dependencies] @@ -75,6 +75,34 @@ doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] trio = ["trio (>=0.26.1)"] +[[package]] +name = "apscheduler" +version = "3.10.4" +description = "In-process task scheduler with Cron-like capabilities" +optional = true +python-versions = ">=3.6" +files = [ + {file = "APScheduler-3.10.4-py3-none-any.whl", hash = "sha256:fb91e8a768632a4756a585f79ec834e0e27aad5860bac7eaa523d9ccefd87661"}, + {file = "APScheduler-3.10.4.tar.gz", hash = "sha256:e6df071b27d9be898e486bc7940a7be50b4af2e9da7c08f0744a96d4bd4cef4a"}, +] + +[package.dependencies] +pytz = "*" +six = ">=1.4.0" +tzlocal = ">=2.0,<3.dev0 || >=4.dev0" + +[package.extras] +doc = ["sphinx", "sphinx-rtd-theme"] +gevent = ["gevent"] +mongodb = ["pymongo (>=3.0)"] +redis = ["redis (>=3.0)"] +rethinkdb = ["rethinkdb (>=2.4.0)"] +sqlalchemy = ["sqlalchemy (>=1.4)"] +testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"] +tornado = ["tornado (>=4.3)"] +twisted = ["twisted"] +zookeeper = ["kazoo"] + [[package]] name = "argon2-cffi" version = "23.1.0" @@ -874,13 +902,13 @@ gmpy2 = ["gmpy2"] [[package]] name = "etl-entities" -version = "2.3.1" +version = "2.4.0" description = "ETL Entities lib for onETL" optional = false python-versions = ">=3.7" files = [ - {file = "etl_entities-2.3.1-py3-none-any.whl", hash = "sha256:a5513bf4735ec1bf113a22285c04b4b9f42fc7dc4b42507cb72e44ab048b14bb"}, - {file = "etl_entities-2.3.1.tar.gz", hash = "sha256:81ba23b732cdae5b36e5b5a0e287eece8f1b5cf34f1d728f905b9c7838e6e35a"}, + {file = "etl_entities-2.4.0-py3-none-any.whl", hash = "sha256:44fcbeb790003124cc1fa7ddd226fadbd979f737995519d5fc6d5a5d8e634b29"}, + {file = "etl_entities-2.4.0.tar.gz", hash = "sha256:7bbf28a0d2ad2bff4fac954486f2afeda88e3171e37e1e0e7de18e40c797db93"}, ] [package.dependencies] @@ -904,13 +932,13 @@ files = [ [[package]] name = "faker" -version = "30.8.1" +version = "30.10.0" description = "Faker is a Python package that generates fake data for you." 
optional = false python-versions = ">=3.8" files = [ - {file = "Faker-30.8.1-py3-none-any.whl", hash = "sha256:4f7f133560b9d4d2a915581f4ba86f9a6a83421b89e911f36c4c96cff58135a5"}, - {file = "faker-30.8.1.tar.gz", hash = "sha256:93e8b70813f76d05d98951154681180cb795cfbcff3eced7680d963bcc0da2a9"}, + {file = "Faker-30.10.0-py3-none-any.whl", hash = "sha256:5f05ee92ddf0e1736d95dca41b2a16ee06d987b736fa4ddecdb047abf2e9024b"}, + {file = "faker-30.10.0.tar.gz", hash = "sha256:c2e627d3becec67f7a45400d3670018b5abb3f0728b7dfaa06c135b7df1ce3fb"}, ] [package.dependencies] @@ -919,13 +947,13 @@ typing-extensions = "*" [[package]] name = "fastapi" -version = "0.115.4" +version = "0.115.5" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" optional = true python-versions = ">=3.8" files = [ - {file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"}, - {file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"}, + {file = "fastapi-0.115.5-py3-none-any.whl", hash = "sha256:596b95adbe1474da47049e802f9a65ab2ffa9c2b07e7efee70eb8a66c9f2f796"}, + {file = "fastapi-0.115.5.tar.gz", hash = "sha256:0e7a4d0dc0d01c68df21887cce0945e72d3c48b9f4f79dfe7a7d53aa08fbb289"}, ] [package.dependencies] @@ -1052,49 +1080,49 @@ sphinx-basic-ng = ">=1.0.0.beta2" [[package]] name = "gevent" -version = "24.10.3" +version = "24.11.1" description = "Coroutine-based network library" optional = false python-versions = ">=3.9" files = [ - {file = "gevent-24.10.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:d7a1ad0f2da582f5bd238bca067e1c6c482c30c15a6e4d14aaa3215cbb2232f3"}, - {file = "gevent-24.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4e526fdc279c655c1e809b0c34b45844182c2a6b219802da5e411bd2cf5a8ad"}, - {file = "gevent-24.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:57a5c4e0bdac482c5f02f240d0354e61362df73501ef6ebafce8ef635cad7527"}, - {file = "gevent-24.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d67daed8383326dc8b5e58d88e148d29b6b52274a489e383530b0969ae7b9cb9"}, - {file = "gevent-24.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e24ffea72e27987979c009536fd0868e52239b44afe6cf7135ce8aafd0f108e"}, - {file = "gevent-24.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c1d80090485da1ea3d99205fe97908b31188c1f4857f08b333ffaf2de2e89d18"}, - {file = "gevent-24.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0c129f81d60cda614acb4b0c5731997ca05b031fb406fcb58ad53a7ade53b13"}, - {file = "gevent-24.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:26ca7a6b42d35129617025ac801135118333cad75856ffc3217b38e707383eba"}, - {file = "gevent-24.10.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:68c3a0d8402755eba7f69022e42e8021192a721ca8341908acc222ea597029b6"}, - {file = "gevent-24.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d850a453d66336272be4f1d3a8126777f3efdaea62d053b4829857f91e09755"}, - {file = "gevent-24.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e58ee3723f1fbe07d66892f1caa7481c306f653a6829b6fd16cb23d618a5915"}, - {file = "gevent-24.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b52382124eca13135a3abe4f65c6bd428656975980a48e51b17aeab68bdb14db"}, - {file = 
"gevent-24.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ca2266e08f43c0e22c028801dff7d92a0b102ef20e4caeb6a46abfb95f6a328"}, - {file = "gevent-24.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d758f0d4dbf32502ec87bb9b536ca8055090a16f8305f0ada3ce6f34e70f2fd7"}, - {file = "gevent-24.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0de6eb3d55c03138fda567d9bfed28487ce5d0928c5107549767a93efdf2be26"}, - {file = "gevent-24.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:385710355eadecdb70428a5ae3e7e5a45dcf888baa1426884588be9d25ac4290"}, - {file = "gevent-24.10.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3ad8fb70aa0ebc935729c9699ac31b210a49b689a7b27b7ac9f91676475f3f53"}, - {file = "gevent-24.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f18689f7a70d2ed0e75bad5036ec3c89690a493d4cfac8d7cdb258ac04b132bd"}, - {file = "gevent-24.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f4f171d4d2018170454d84c934842e1b5f6ce7468ba298f6e7f7cff15000a3"}, - {file = "gevent-24.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7021e26d70189b33c27173d4173f27bf4685d6b6f1c0ea50e5335f8491cb110c"}, - {file = "gevent-24.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34aea15f9c79f27a8faeaa361bc1e72c773a9b54a1996a2ec4eefc8bcd59a824"}, - {file = "gevent-24.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8af65a4d4feaec6042c666d22c322a310fba3b47e841ad52f724b9c3ce5da48e"}, - {file = "gevent-24.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:89c4115e3f5ada55f92b61701a46043fe42f702b5af863b029e4c1a76f6cc2d4"}, - {file = "gevent-24.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:1ce6dab94c0b0d24425ba55712de2f8c9cb21267150ca63f5bb3a0e1f165da99"}, - {file = "gevent-24.10.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:f147e38423fbe96e8731f60a63475b3d2cab2f3d10578d8ee9d10c507c58a2ff"}, - {file = "gevent-24.10.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e6984ec96fc95fd67488555c38ece3015be1f38b1bcceb27b7d6c36b343008"}, - {file = "gevent-24.10.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:051b22e2758accfddb0457728bfc9abf8c3f2ce6bca43f1ff6e07b5ed9e49bf4"}, - {file = "gevent-24.10.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb5edb6433764119a664bbb148d2aea9990950aa89cc3498f475c2408d523ea3"}, - {file = "gevent-24.10.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce417bcaaab496bc9c77f75566531e9d93816262037b8b2dbb88b0fdcd66587c"}, - {file = "gevent-24.10.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:1c3a828b033fb02b7c31da4d75014a1f82e6c072fc0523456569a57f8b025861"}, - {file = "gevent-24.10.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f2ae3efbbd120cdf4a68b7abc27a37e61e6f443c5a06ec2c6ad94c37cd8471ec"}, - {file = "gevent-24.10.3-cp313-cp313-win_amd64.whl", hash = "sha256:9e1210334a9bc9f76c3d008e0785ca62214f8a54e1325f6c2ecab3b6a572a015"}, - {file = "gevent-24.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70e9ed7ecb70e0df7dc97c3bc420de9a45a7c76bd5861c6cfec8c549700e681e"}, - {file = "gevent-24.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3ac83b74304487afa211a01909c7dd257e574db0cd429d866c298e21df7aeedf"}, - {file = "gevent-24.10.3-cp39-cp39-win32.whl", hash = 
"sha256:a9a89d6e396ef6f1e3968521bf56e8c4bee25b193bbf5d428b7782d582410822"}, - {file = "gevent-24.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:40ea3e40e8bb4fdb143c2a8edf2ccfdebd56016c7317c341ce8094c7bee08818"}, - {file = "gevent-24.10.3-pp310-pypy310_pp73-macosx_11_0_universal2.whl", hash = "sha256:e534e6a968d74463b11de6c9c67f4b4bf61775fb00f2e6e0f7fcdd412ceade18"}, - {file = "gevent-24.10.3.tar.gz", hash = "sha256:aa7ee1bd5cabb2b7ef35105f863b386c8d5e332f754b60cfc354148bd70d35d1"}, + {file = "gevent-24.11.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:92fe5dfee4e671c74ffaa431fd7ffd0ebb4b339363d24d0d944de532409b935e"}, + {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7bfcfe08d038e1fa6de458891bca65c1ada6d145474274285822896a858c870"}, + {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7398c629d43b1b6fd785db8ebd46c0a353880a6fab03d1cf9b6788e7240ee32e"}, + {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7886b63ebfb865178ab28784accd32f287d5349b3ed71094c86e4d3ca738af5"}, + {file = "gevent-24.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9ca80711e6553880974898d99357fb649e062f9058418a92120ca06c18c3c59"}, + {file = "gevent-24.11.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e24181d172f50097ac8fc272c8c5b030149b630df02d1c639ee9f878a470ba2b"}, + {file = "gevent-24.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1d4fadc319b13ef0a3c44d2792f7918cf1bca27cacd4d41431c22e6b46668026"}, + {file = "gevent-24.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:3d882faa24f347f761f934786dde6c73aa6c9187ee710189f12dcc3a63ed4a50"}, + {file = "gevent-24.11.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:351d1c0e4ef2b618ace74c91b9b28b3eaa0dd45141878a964e03c7873af09f62"}, + {file = "gevent-24.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5efe72e99b7243e222ba0c2c2ce9618d7d36644c166d63373af239da1036bab"}, + {file = "gevent-24.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d3b249e4e1f40c598ab8393fc01ae6a3b4d51fc1adae56d9ba5b315f6b2d758"}, + {file = "gevent-24.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81d918e952954675f93fb39001da02113ec4d5f4921bf5a0cc29719af6824e5d"}, + {file = "gevent-24.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9c935b83d40c748b6421625465b7308d87c7b3717275acd587eef2bd1c39546"}, + {file = "gevent-24.11.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff96c5739834c9a594db0e12bf59cb3fa0e5102fc7b893972118a3166733d61c"}, + {file = "gevent-24.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d6c0a065e31ef04658f799215dddae8752d636de2bed61365c358f9c91e7af61"}, + {file = "gevent-24.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:97e2f3999a5c0656f42065d02939d64fffaf55861f7d62b0107a08f52c984897"}, + {file = "gevent-24.11.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:a3d75fa387b69c751a3d7c5c3ce7092a171555126e136c1d21ecd8b50c7a6e46"}, + {file = "gevent-24.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:beede1d1cff0c6fafae3ab58a0c470d7526196ef4cd6cc18e7769f207f2ea4eb"}, + {file = "gevent-24.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85329d556aaedced90a993226d7d1186a539c843100d393f2349b28c55131c85"}, + {file = 
"gevent-24.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:816b3883fa6842c1cf9d2786722014a0fd31b6312cca1f749890b9803000bad6"}, + {file = "gevent-24.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b24d800328c39456534e3bc3e1684a28747729082684634789c2f5a8febe7671"}, + {file = "gevent-24.11.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5f1701ce0f7832f333dd2faf624484cbac99e60656bfbb72504decd42970f0f"}, + {file = "gevent-24.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d740206e69dfdfdcd34510c20adcb9777ce2cc18973b3441ab9767cd8948ca8a"}, + {file = "gevent-24.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:68bee86b6e1c041a187347ef84cf03a792f0b6c7238378bf6ba4118af11feaae"}, + {file = "gevent-24.11.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:d618e118fdb7af1d6c1a96597a5cd6ac84a9f3732b5be8515c6a66e098d498b6"}, + {file = "gevent-24.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2142704c2adce9cd92f6600f371afb2860a446bfd0be5bd86cca5b3e12130766"}, + {file = "gevent-24.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92e0d7759de2450a501effd99374256b26359e801b2d8bf3eedd3751973e87f5"}, + {file = "gevent-24.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca845138965c8c56d1550499d6b923eb1a2331acfa9e13b817ad8305dde83d11"}, + {file = "gevent-24.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:356b73d52a227d3313f8f828025b665deada57a43d02b1cf54e5d39028dbcf8d"}, + {file = "gevent-24.11.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:58851f23c4bdb70390f10fc020c973ffcf409eb1664086792c8b1e20f25eef43"}, + {file = "gevent-24.11.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1ea50009ecb7f1327347c37e9eb6561bdbc7de290769ee1404107b9a9cba7cf1"}, + {file = "gevent-24.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:ec68e270543ecd532c4c1d70fca020f90aa5486ad49c4f3b8b2e64a66f5c9274"}, + {file = "gevent-24.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9347690f4e53de2c4af74e62d6fabc940b6d4a6cad555b5a379f61e7d3f2a8e"}, + {file = "gevent-24.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8619d5c888cb7aebf9aec6703e410620ef5ad48cdc2d813dd606f8aa7ace675f"}, + {file = "gevent-24.11.1-cp39-cp39-win32.whl", hash = "sha256:c6b775381f805ff5faf250e3a07c0819529571d19bb2a9d474bee8c3f90d66af"}, + {file = "gevent-24.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c3443b0ed23dcb7c36a748d42587168672953d368f2956b17fad36d43b58836"}, + {file = "gevent-24.11.1-pp310-pypy310_pp73-macosx_11_0_universal2.whl", hash = "sha256:f43f47e702d0c8e1b8b997c00f1601486f9f976f84ab704f8f11536e3fa144c9"}, + {file = "gevent-24.11.1.tar.gz", hash = "sha256:8bd1419114e9e4a3ed33a5bad766afff9a3cf765cb440a582a1b3a9bc80c1aca"}, ] [package.dependencies] @@ -1303,13 +1331,13 @@ tests = ["freezegun", "pytest", "pytest-cov"] [[package]] name = "identify" -version = "2.6.1" +version = "2.6.2" description = "File identification library for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, - {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, + {file = "identify-2.6.2-py2.py3-none-any.whl", hash = 
"sha256:c097384259f49e372f4ea00a19719d95ae27dd5ff0fd77ad630aa891306b82f3"}, + {file = "identify-2.6.2.tar.gz", hash = "sha256:fab5c716c24d7a789775228823797296a2994b075fb6080ac83a102772a98cbd"}, ] [package.extras] @@ -1635,64 +1663,66 @@ files = [ [[package]] name = "numpy" -version = "2.1.2" +version = "2.1.3" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" files = [ - {file = "numpy-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee"}, - {file = "numpy-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884"}, - {file = "numpy-2.1.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648"}, - {file = "numpy-2.1.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d"}, - {file = "numpy-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86"}, - {file = "numpy-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7"}, - {file = "numpy-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03"}, - {file = "numpy-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466"}, - {file = "numpy-2.1.2-cp310-cp310-win32.whl", hash = "sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb"}, - {file = "numpy-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2"}, - {file = "numpy-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe"}, - {file = "numpy-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1"}, - {file = "numpy-2.1.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f"}, - {file = "numpy-2.1.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4"}, - {file = "numpy-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a"}, - {file = "numpy-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1"}, - {file = "numpy-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2"}, - {file = "numpy-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146"}, - {file = "numpy-2.1.2-cp311-cp311-win32.whl", hash = "sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c"}, - {file = "numpy-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9"}, - {file = "numpy-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b"}, - {file = 
"numpy-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db"}, - {file = "numpy-2.1.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1"}, - {file = "numpy-2.1.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426"}, - {file = "numpy-2.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0"}, - {file = "numpy-2.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df"}, - {file = "numpy-2.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366"}, - {file = "numpy-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142"}, - {file = "numpy-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550"}, - {file = "numpy-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e"}, - {file = "numpy-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d"}, - {file = "numpy-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf"}, - {file = "numpy-2.1.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e"}, - {file = "numpy-2.1.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3"}, - {file = "numpy-2.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8"}, - {file = "numpy-2.1.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a"}, - {file = "numpy-2.1.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98"}, - {file = "numpy-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe"}, - {file = "numpy-2.1.2-cp313-cp313-win32.whl", hash = "sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a"}, - {file = "numpy-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445"}, - {file = "numpy-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5"}, - {file = "numpy-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0"}, - {file = "numpy-2.1.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17"}, - {file = "numpy-2.1.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6"}, - {file = "numpy-2.1.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8"}, - {file = 
"numpy-2.1.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35"}, - {file = "numpy-2.1.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62"}, - {file = "numpy-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a"}, - {file = "numpy-2.1.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952"}, - {file = "numpy-2.1.2-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5"}, - {file = "numpy-2.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7"}, - {file = "numpy-2.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e"}, - {file = "numpy-2.1.2.tar.gz", hash = "sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c"}, + {file = "numpy-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c894b4305373b9c5576d7a12b473702afdf48ce5369c074ba304cc5ad8730dff"}, + {file = "numpy-2.1.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b47fbb433d3260adcd51eb54f92a2ffbc90a4595f8970ee00e064c644ac788f5"}, + {file = "numpy-2.1.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:825656d0743699c529c5943554d223c021ff0494ff1442152ce887ef4f7561a1"}, + {file = "numpy-2.1.3-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:6a4825252fcc430a182ac4dee5a505053d262c807f8a924603d411f6718b88fd"}, + {file = "numpy-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e711e02f49e176a01d0349d82cb5f05ba4db7d5e7e0defd026328e5cfb3226d3"}, + {file = "numpy-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78574ac2d1a4a02421f25da9559850d59457bac82f2b8d7a44fe83a64f770098"}, + {file = "numpy-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c7662f0e3673fe4e832fe07b65c50342ea27d989f92c80355658c7f888fcc83c"}, + {file = "numpy-2.1.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fa2d1337dc61c8dc417fbccf20f6d1e139896a30721b7f1e832b2bb6ef4eb6c4"}, + {file = "numpy-2.1.3-cp310-cp310-win32.whl", hash = "sha256:72dcc4a35a8515d83e76b58fdf8113a5c969ccd505c8a946759b24e3182d1f23"}, + {file = "numpy-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:ecc76a9ba2911d8d37ac01de72834d8849e55473457558e12995f4cd53e778e0"}, + {file = "numpy-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4d1167c53b93f1f5d8a139a742b3c6f4d429b54e74e6b57d0eff40045187b15d"}, + {file = "numpy-2.1.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c80e4a09b3d95b4e1cac08643f1152fa71a0a821a2d4277334c88d54b2219a41"}, + {file = "numpy-2.1.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:576a1c1d25e9e02ed7fa5477f30a127fe56debd53b8d2c89d5578f9857d03ca9"}, + {file = "numpy-2.1.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:973faafebaae4c0aaa1a1ca1ce02434554d67e628b8d805e61f874b84e136b09"}, + {file = "numpy-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:762479be47a4863e261a840e8e01608d124ee1361e48b96916f38b119cfda04a"}, + {file = "numpy-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc6f24b3d1ecc1eebfbf5d6051faa49af40b03be1aaa781ebdadcbc090b4539b"}, + 
{file = "numpy-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:17ee83a1f4fef3c94d16dc1802b998668b5419362c8a4f4e8a491de1b41cc3ee"}, + {file = "numpy-2.1.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:15cb89f39fa6d0bdfb600ea24b250e5f1a3df23f901f51c8debaa6a5d122b2f0"}, + {file = "numpy-2.1.3-cp311-cp311-win32.whl", hash = "sha256:d9beb777a78c331580705326d2367488d5bc473b49a9bc3036c154832520aca9"}, + {file = "numpy-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:d89dd2b6da69c4fff5e39c28a382199ddedc3a5be5390115608345dec660b9e2"}, + {file = "numpy-2.1.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f55ba01150f52b1027829b50d70ef1dafd9821ea82905b63936668403c3b471e"}, + {file = "numpy-2.1.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13138eadd4f4da03074851a698ffa7e405f41a0845a6b1ad135b81596e4e9958"}, + {file = "numpy-2.1.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:a6b46587b14b888e95e4a24d7b13ae91fa22386c199ee7b418f449032b2fa3b8"}, + {file = "numpy-2.1.3-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:0fa14563cc46422e99daef53d725d0c326e99e468a9320a240affffe87852564"}, + {file = "numpy-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8637dcd2caa676e475503d1f8fdb327bc495554e10838019651b76d17b98e512"}, + {file = "numpy-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2312b2aa89e1f43ecea6da6ea9a810d06aae08321609d8dc0d0eda6d946a541b"}, + {file = "numpy-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a38c19106902bb19351b83802531fea19dee18e5b37b36454f27f11ff956f7fc"}, + {file = "numpy-2.1.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:02135ade8b8a84011cbb67dc44e07c58f28575cf9ecf8ab304e51c05528c19f0"}, + {file = "numpy-2.1.3-cp312-cp312-win32.whl", hash = "sha256:e6988e90fcf617da2b5c78902fe8e668361b43b4fe26dbf2d7b0f8034d4cafb9"}, + {file = "numpy-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:0d30c543f02e84e92c4b1f415b7c6b5326cbe45ee7882b6b77db7195fb971e3a"}, + {file = "numpy-2.1.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96fe52fcdb9345b7cd82ecd34547fca4321f7656d500eca497eb7ea5a926692f"}, + {file = "numpy-2.1.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f653490b33e9c3a4c1c01d41bc2aef08f9475af51146e4a7710c450cf9761598"}, + {file = "numpy-2.1.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dc258a761a16daa791081d026f0ed4399b582712e6fc887a95af09df10c5ca57"}, + {file = "numpy-2.1.3-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:016d0f6f5e77b0f0d45d77387ffa4bb89816b57c835580c3ce8e099ef830befe"}, + {file = "numpy-2.1.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c181ba05ce8299c7aa3125c27b9c2167bca4a4445b7ce73d5febc411ca692e43"}, + {file = "numpy-2.1.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5641516794ca9e5f8a4d17bb45446998c6554704d888f86df9b200e66bdcce56"}, + {file = "numpy-2.1.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ea4dedd6e394a9c180b33c2c872b92f7ce0f8e7ad93e9585312b0c5a04777a4a"}, + {file = "numpy-2.1.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0df3635b9c8ef48bd3be5f862cf71b0a4716fa0e702155c45067c6b711ddcef"}, + {file = "numpy-2.1.3-cp313-cp313-win32.whl", hash = "sha256:50ca6aba6e163363f132b5c101ba078b8cbd3fa92c7865fd7d4d62d9779ac29f"}, + {file = "numpy-2.1.3-cp313-cp313-win_amd64.whl", hash = "sha256:747641635d3d44bcb380d950679462fae44f54b131be347d5ec2bce47d3df9ed"}, + {file = "numpy-2.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:996bb9399059c5b82f76b53ff8bb686069c05acc94656bb259b1d63d04a9506f"}, + {file = "numpy-2.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:45966d859916ad02b779706bb43b954281db43e185015df6eb3323120188f9e4"}, + {file = "numpy-2.1.3-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:baed7e8d7481bfe0874b566850cb0b85243e982388b7b23348c6db2ee2b2ae8e"}, + {file = "numpy-2.1.3-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f7f672a3388133335589cfca93ed468509cb7b93ba3105fce780d04a6576a0"}, + {file = "numpy-2.1.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7aac50327da5d208db2eec22eb11e491e3fe13d22653dce51b0f4109101b408"}, + {file = "numpy-2.1.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4394bc0dbd074b7f9b52024832d16e019decebf86caf909d94f6b3f77a8ee3b6"}, + {file = "numpy-2.1.3-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:50d18c4358a0a8a53f12a8ba9d772ab2d460321e6a93d6064fc22443d189853f"}, + {file = "numpy-2.1.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:14e253bd43fc6b37af4921b10f6add6925878a42a0c5fe83daee390bca80bc17"}, + {file = "numpy-2.1.3-cp313-cp313t-win32.whl", hash = "sha256:08788d27a5fd867a663f6fc753fd7c3ad7e92747efc73c53bca2f19f8bc06f48"}, + {file = "numpy-2.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:2564fbdf2b99b3f815f2107c1bbc93e2de8ee655a69c261363a1172a79a257d4"}, + {file = "numpy-2.1.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:4f2015dfe437dfebbfce7c85c7b53d81ba49e71ba7eadbf1df40c915af75979f"}, + {file = "numpy-2.1.3-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:3522b0dfe983a575e6a9ab3a4a4dfe156c3e428468ff08ce582b9bb6bd1d71d4"}, + {file = "numpy-2.1.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c006b607a865b07cd981ccb218a04fc86b600411d83d6fc261357f1c0966755d"}, + {file = "numpy-2.1.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e14e26956e6f1696070788252dcdff11b4aca4c3e8bd166e0df1bb8f315a67cb"}, + {file = "numpy-2.1.3.tar.gz", hash = "sha256:aa08e04e08aaf974d4458def539dece0d28146d866a39da5639596f4921fd761"}, ] [[package]] @@ -1717,17 +1747,17 @@ test = ["matplotlib", "pytest", "pytest-cov"] [[package]] name = "onetl" -version = "0.12.1" +version = "0.12.2" description = "One ETL tool to rule them all" optional = false python-versions = ">=3.7" files = [ - {file = "onetl-0.12.1-py3-none-any.whl", hash = "sha256:815540af6b993ffdd541955aa2092e6ae3a8e70dffacaba76c40a521c3e1a911"}, - {file = "onetl-0.12.1.tar.gz", hash = "sha256:d9deee458c5142465ad166fa949ad40c0cf0605a284b4471aa55567f21f7b327"}, + {file = "onetl-0.12.2-py3-none-any.whl", hash = "sha256:a81bc491c6a50d44b09ef9462993b9b369d8685cf5ca8087cff40b4883b2416b"}, + {file = "onetl-0.12.2.tar.gz", hash = "sha256:ad3ec2a9956f211fa9f6628a993047ed69e2c523bd4e01619ef49aa7c894f366"}, ] [package.dependencies] -etl-entities = ">=2.2,<2.4" +etl-entities = ">=2.2,<2.5" evacuator = ">=1.0,<1.1" frozendict = "*" hdfs = {version = ">2", optional = true, markers = "extra == \"hdfs\""} @@ -1770,13 +1800,13 @@ dev = ["black", "mypy", "pytest"] [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = 
"packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -2205,13 +2235,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pydantic-settings" -version = "2.6.0" +version = "2.6.1" description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_settings-2.6.0-py3-none-any.whl", hash = "sha256:4a819166f119b74d7f8c765196b165f95cc7487ce58ea27dec8a5a26be0970e0"}, - {file = "pydantic_settings-2.6.0.tar.gz", hash = "sha256:44a1804abffac9e6a30372bb45f6cafab945ef5af25e66b1c634c01dd39e0188"}, + {file = "pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87"}, + {file = "pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0"}, ] [package.dependencies] @@ -2437,6 +2467,17 @@ files = [ {file = "python_multipart-0.0.16.tar.gz", hash = "sha256:8dee37b88dab9b59922ca173c35acb627cc12ec74019f5cd4578369c6df36554"}, ] +[[package]] +name = "pytz" +version = "2024.2" +description = "World timezone definitions, modern and historical" +optional = true +python-versions = "*" +files = [ + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, +] + [[package]] name = "pyyaml" version = "6.0.2" @@ -2536,23 +2577,23 @@ pyasn1 = ">=0.1.3" [[package]] name = "setuptools" -version = "75.3.0" +version = "75.5.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd"}, - {file = "setuptools-75.3.0.tar.gz", hash = "sha256:fba5dd4d766e97be1b1681d98712680ae8f2f26d7881245f2ce9e40714f1a686"}, + {file = "setuptools-75.5.0-py3-none-any.whl", hash = "sha256:87cb777c3b96d638ca02031192d40390e0ad97737e27b6b4fa831bea86f2f829"}, + {file = "setuptools-75.5.0.tar.gz", hash = "sha256:5c4ccb41111392671f02bb5f8436dfc5a9a7185e80500531b133f5775c4163ef"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] +core = ["importlib-metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = 
["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.12.*)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] [[package]] name = "six" @@ -3105,6 +3146,23 @@ files = [ {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, ] +[[package]] +name = "tzlocal" +version = "5.2" +description = "tzinfo object for the local timezone" +optional = true +python-versions = ">=3.8" +files = [ + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, +] + +[package.dependencies] +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + [[package]] name = "urllib3" version = "2.2.3" @@ -3195,13 +3253,13 @@ files = [ [[package]] name = "zipp" -version = "3.20.2" +version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, - {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, ] [package.extras] @@ -3285,10 +3343,11 @@ test = ["coverage[toml]", "zope.event", "zope.testing"] testing = ["coverage[toml]", "zope.event", "zope.testing"] [extras] -backend = ["alembic", "asgi-correlation-id", "asyncpg", "celery", "coloredlogs", "fastapi", "jinja2", "psycopg", "pydantic-settings", "python-jose", "python-json-logger", "python-multipart", "sqlalchemy", "sqlalchemy-utils", "uuid6", "uvicorn"] +backend = ["alembic", "apscheduler", "asgi-correlation-id", "asyncpg", "celery", "coloredlogs", "fastapi", "jinja2", "psycopg", "pydantic-settings", "python-jose", "python-json-logger", "python-multipart", "sqlalchemy", "sqlalchemy-utils", "uuid6", "uvicorn"] +scheduler = ["apscheduler", "asyncpg", "celery", "pydantic-settings", "python-jose", "python-multipart", "sqlalchemy", "sqlalchemy-utils"] worker = ["asgi-correlation-id", "celery", 
"coloredlogs", "jinja2", "onetl", "psycopg", "pydantic-settings", "python-json-logger", "sqlalchemy", "sqlalchemy-utils", "uuid6"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "86edb5832161c81ddf76aa541e37bb901adf668e0b75b1cefab0952ed0604922" +content-hash = "48326d22452380fc13087fa6680a5def3aa079789bc6566e3b5bd5c9ced735fd" diff --git a/pyproject.toml b/pyproject.toml index affef9f6..22a16df6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -66,6 +66,7 @@ uuid6 = "^2024.7.10" coloredlogs = {version = "*", optional = true} python-json-logger = {version = "*", optional = true} asyncpg = { version = ">=0.29,<0.31", optional = true } +apscheduler = { version = "^3.10.4", optional = true } [tool.poetry.extras] backend = [ @@ -86,6 +87,7 @@ backend = [ "asyncpg", # migrations only "celery", + "apscheduler", ] worker = [ @@ -101,6 +103,18 @@ worker = [ "coloredlogs", "python-json-logger", ] + +scheduler = [ + "apscheduler", + "pydantic-settings", + "sqlalchemy", + "sqlalchemy-utils", + "asyncpg", + "python-multipart", + "python-jose", + "celery", +] + [tool.poetry.group.test.dependencies] pandas-stubs = "^2.2.2.240909" pytest = "^8.3.3" @@ -204,6 +218,7 @@ ignore_missing_imports = true [tool.pytest.ini_options] markers = [ "backend: tests for backend (require running database)", + "scheduler: tests for scheduler (require running database)", "worker: tests for syncmaster worker", "hive: tests for Hive", "postgres: tests on Postgres", diff --git a/syncmaster/backend/api/v1/runs.py b/syncmaster/backend/api/v1/runs.py index fce8ca41..1ec7698b 100644 --- a/syncmaster/backend/api/v1/runs.py +++ b/syncmaster/backend/api/v1/runs.py @@ -1,5 +1,6 @@ # SPDX-FileCopyrightText: 2023-2024 MTS PJSC # SPDX-License-Identifier: Apache-2.0 +import asyncio from datetime import datetime from typing import Annotated @@ -10,7 +11,7 @@ from syncmaster.backend.dependencies import Stub from syncmaster.backend.services import UnitOfWork, get_user -from syncmaster.db.models import Status, User +from syncmaster.db.models import RunType, Status, User from syncmaster.db.utils import Permission from syncmaster.errors.registration import get_error_responses from syncmaster.exceptions.base import ActionNotAllowedError @@ -116,6 +117,7 @@ async def start_run( # the work of checking fields and removing passwords is delegated to the ReadAuthDataSchema class source_creds=ReadAuthDataSchema(auth_data=credentials_source).dict(), target_creds=ReadAuthDataSchema(auth_data=credentials_target).dict(), + type=RunType.MANUAL, ) log_url = Template(settings.worker.LOG_URL_TEMPLATE).render( @@ -128,7 +130,8 @@ async def start_run( ) try: - celery.send_task( + await asyncio.to_thread( + celery.send_task, "run_transfer_task", kwargs={"run_id": run.id}, queue=transfer.queue.name, diff --git a/syncmaster/backend/api/v1/transfers.py b/syncmaster/backend/api/v1/transfers.py index d0e5ff1d..121b1040 100644 --- a/syncmaster/backend/api/v1/transfers.py +++ b/syncmaster/backend/api/v1/transfers.py @@ -131,6 +131,8 @@ async def create_transfer( target_params=transfer_data.target_params.dict(), strategy_params=transfer_data.strategy_params.dict(), queue_id=transfer_data.queue_id, + is_scheduled=transfer_data.is_scheduled, + schedule=transfer_data.schedule, ) return ReadTransferSchema.from_orm(transfer) diff --git a/syncmaster/db/migrations/versions/2023-11-23_0008_create_run_table.py b/syncmaster/db/migrations/versions/2023-11-23_0008_create_run_table.py index aeae2b8b..3efdcf67 100644 --- 
a/syncmaster/db/migrations/versions/2023-11-23_0008_create_run_table.py +++ b/syncmaster/db/migrations/versions/2023-11-23_0008_create_run_table.py @@ -25,6 +25,7 @@ def upgrade(): sa.Column("started_at", sa.DateTime(), nullable=True), sa.Column("ended_at", sa.DateTime(), nullable=True), sa.Column("status", sa.String(255), nullable=False), + sa.Column("type", sa.String(64), nullable=False), sa.Column("log_url", sa.String(length=512), nullable=True), sa.Column("transfer_dump", sa.JSON(), nullable=False), sa.Column("created_at", sa.DateTime(), server_default=sa.text("now()"), nullable=False), @@ -37,6 +38,7 @@ def upgrade(): ), sa.PrimaryKeyConstraint("id", name=op.f("pk__run")), ) + op.create_index(op.f("ix__run__type"), "run", ["type"], unique=False) op.create_index(op.f("ix__run__status"), "run", ["status"], unique=False) op.create_index(op.f("ix__run__transfer_id"), "run", ["transfer_id"], unique=False) @@ -44,4 +46,5 @@ def upgrade(): def downgrade(): op.drop_index(op.f("ix__run__transfer_id"), table_name="run") op.drop_index(op.f("ix__run__status"), table_name="run") + op.drop_index(op.f("ix__run__type"), table_name="run") op.drop_table("run") diff --git a/syncmaster/db/migrations/versions/2024-11-01_0011_create_apscheduler_table.py b/syncmaster/db/migrations/versions/2024-11-01_0011_create_apscheduler_table.py new file mode 100644 index 00000000..dc3529dc --- /dev/null +++ b/syncmaster/db/migrations/versions/2024-11-01_0011_create_apscheduler_table.py @@ -0,0 +1,44 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +"""Create apscheduler table + +Revision ID: 0011 +Revises: 0010 +Create Date: 2024-11-01 08:37:47.078657 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "0011" +down_revision = "0010" +branch_labels = None +depends_on = None + + +def exists_table(table_name: str) -> bool: + conn = op.get_bind() + inspector = sa.inspect(conn) + tables = inspector.get_table_names() + return table_name in tables + + +def upgrade() -> None: + if exists_table("apscheduler_jobs"): + return + + op.create_table( + "apscheduler_jobs", + sa.Column("id", sa.VARCHAR(length=191), autoincrement=False, nullable=False), + sa.Column("next_run_time", sa.Float(25), autoincrement=False, nullable=True), + sa.Column("job_state", sa.LargeBinary(), autoincrement=False, nullable=False), + sa.PrimaryKeyConstraint("id", name="apscheduler_jobs_pkey"), + ) + + op.create_index("ix_apscheduler_jobs_next_run_time", "apscheduler_jobs", ["next_run_time"], unique=False) + + +def downgrade() -> None: + op.drop_index("ix_apscheduler_jobs_next_run_time", table_name="apscheduler_jobs") + op.drop_table("apscheduler_jobs") diff --git a/syncmaster/db/models/__init__.py b/syncmaster/db/models/__init__.py index af6479bd..2eada95b 100644 --- a/syncmaster/db/models/__init__.py +++ b/syncmaster/db/models/__init__.py @@ -1,10 +1,11 @@ # SPDX-FileCopyrightText: 2023-2024 MTS PJSC # SPDX-License-Identifier: Apache-2.0 +from syncmaster.db.models.apscheduler_job import APSchedulerJob from syncmaster.db.models.auth_data import AuthData from syncmaster.db.models.base import Base from syncmaster.db.models.connection import Connection from syncmaster.db.models.group import Group, GroupMemberRole, UserGroup from syncmaster.db.models.queue import Queue -from syncmaster.db.models.run import Run, Status +from syncmaster.db.models.run import Run, RunType, Status from syncmaster.db.models.transfer import Transfer from syncmaster.db.models.user import User diff --git a/syncmaster/db/models/apscheduler_job.py b/syncmaster/db/models/apscheduler_job.py new file mode 100644 index 00000000..56390d3a --- /dev/null +++ b/syncmaster/db/models/apscheduler_job.py @@ -0,0 +1,16 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +from sqlalchemy import Float, Index, LargeBinary, String +from sqlalchemy.orm import Mapped, mapped_column + +from syncmaster.db.models.base import Base + + +class APSchedulerJob(Base): + __tablename__ = "apscheduler_jobs" + + id: Mapped[str] = mapped_column(String(191), primary_key=True, nullable=False) + next_run_time: Mapped[float | None] = mapped_column(Float(25), nullable=True) + job_state: Mapped[bytes] = mapped_column(LargeBinary, nullable=False) + + __table_args__ = (Index("ix_apscheduler_jobs_next_run_time", "next_run_time"),) diff --git a/syncmaster/db/models/run.py b/syncmaster/db/models/run.py index ae2c4a59..e5ff426f 100644 --- a/syncmaster/db/models/run.py +++ b/syncmaster/db/models/run.py @@ -24,6 +24,11 @@ class Status(enum.StrEnum): FINISHED = "FINISHED" +class RunType(enum.StrEnum): + MANUAL = "MANUAL" + SCHEDULED = "SCHEDULED" + + class Run(Base, TimestampMixin): id: Mapped[int] = mapped_column( BigInteger, @@ -51,6 +56,12 @@ class Run(Base, TimestampMixin): default=Status.CREATED, index=True, ) + type: Mapped[str] = mapped_column( + String(64), + nullable=False, + default=RunType.MANUAL, + index=True, + ) log_url: Mapped[str] = mapped_column(String(512), nullable=True) transfer_dump: Mapped[dict[str, Any]] = mapped_column(JSON, nullable=False, default={}) @@ -61,5 +72,6 @@ def __repr__(self): f"" ) diff --git a/syncmaster/db/repositories/run.py b/syncmaster/db/repositories/run.py index 8f8b9b38..6f031505 100644 --- 
a/syncmaster/db/repositories/run.py +++ b/syncmaster/db/repositories/run.py @@ -8,7 +8,7 @@ from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import selectinload -from syncmaster.db.models import Run, Status, Transfer +from syncmaster.db.models import Run, RunType, Status, Transfer from syncmaster.db.repositories.base import Repository from syncmaster.db.utils import Pagination from syncmaster.exceptions import SyncmasterError @@ -54,10 +54,12 @@ async def create( transfer_id: int, source_creds: dict, target_creds: dict, + type: RunType, ) -> Run: run = Run() run.transfer_id = transfer_id run.transfer_dump = await self.read_full_serialized_transfer(transfer_id, source_creds, target_creds) + run.type = type try: self._session.add(run) await self._session.flush() diff --git a/syncmaster/db/repositories/transfer.py b/syncmaster/db/repositories/transfer.py index 64ce4fc7..5552fe06 100644 --- a/syncmaster/db/repositories/transfer.py +++ b/syncmaster/db/repositories/transfer.py @@ -118,6 +118,8 @@ async def create( target_params: dict[str, Any], strategy_params: dict[str, Any], queue_id: int, + is_scheduled: bool, + schedule: str | None, ) -> Transfer: query = ( insert(Transfer) @@ -131,6 +133,8 @@ async def create( target_params=target_params, strategy_params=strategy_params, queue_id=queue_id, + is_scheduled=is_scheduled, + schedule=schedule or "", ) .returning(Transfer) ) diff --git a/syncmaster/scheduler/__init__.py b/syncmaster/scheduler/__init__.py new file mode 100644 index 00000000..aa0c378f --- /dev/null +++ b/syncmaster/scheduler/__init__.py @@ -0,0 +1,4 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +from syncmaster.scheduler.transfer_fetcher import TransferFetcher +from syncmaster.scheduler.transfer_job_manager import TransferJobManager diff --git a/syncmaster/scheduler/__main__.py b/syncmaster/scheduler/__main__.py new file mode 100644 index 00000000..74eac548 --- /dev/null +++ b/syncmaster/scheduler/__main__.py @@ -0,0 +1,37 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +import asyncio +import logging + +from syncmaster.scheduler.transfer_fetcher import TransferFetcher +from syncmaster.scheduler.transfer_job_manager import TransferJobManager +from syncmaster.settings import Settings + +logger = logging.getLogger(__name__) + + +async def main(): + settings = Settings() + transfer_fetcher = TransferFetcher(settings) + transfer_job_manager = TransferJobManager(settings) + transfer_job_manager.scheduler.start() + + while True: + logger.info("Looking at the transfer table...") + transfers = await transfer_fetcher.fetch_updated_jobs() + + if transfers: + logger.info( + "Found %d updated transfers with ids: %s", + len(transfers), + ", ".join(str(t.id) for t in transfers), + ) + transfer_job_manager.update_jobs(transfers) + transfer_fetcher.last_updated_at = max(t.updated_at for t in transfers) + logger.info("Scheduler state has been updated. 
Last updated at: %s", transfer_fetcher.last_updated_at) + + await asyncio.sleep(settings.SCHEDULER_TRANSFER_FETCHING_TIMEOUT) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/syncmaster/scheduler/transfer_fetcher.py b/syncmaster/scheduler/transfer_fetcher.py new file mode 100644 index 00000000..0f7ac524 --- /dev/null +++ b/syncmaster/scheduler/transfer_fetcher.py @@ -0,0 +1,25 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +from sqlalchemy import select + +from syncmaster.db.models import Transfer +from syncmaster.scheduler.utils import get_async_session +from syncmaster.settings import Settings + + +class TransferFetcher: + def __init__(self, settings: Settings): + self.settings = settings + self.last_updated_at = None + + async def fetch_updated_jobs(self) -> list[Transfer]: + + async with get_async_session(self.settings) as session: + query = select(Transfer) + if self.last_updated_at is not None: + query = query.filter(Transfer.updated_at > self.last_updated_at) + + result = await session.execute(query) + transfers = result.scalars().all() + + return transfers diff --git a/syncmaster/scheduler/transfer_job_manager.py b/syncmaster/scheduler/transfer_job_manager.py new file mode 100644 index 00000000..a735e3ae --- /dev/null +++ b/syncmaster/scheduler/transfer_job_manager.py @@ -0,0 +1,83 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +import asyncio + +from apscheduler.schedulers.asyncio import AsyncIOScheduler +from apscheduler.triggers.cron import CronTrigger +from kombu.exceptions import KombuError + +from syncmaster.backend.services.unit_of_work import UnitOfWork +from syncmaster.db.models import RunType, Status, Transfer +from syncmaster.exceptions.run import CannotConnectToTaskQueueError +from syncmaster.scheduler.utils import get_async_session +from syncmaster.schemas.v1.connections.connection import ReadAuthDataSchema +from syncmaster.settings import Settings +from syncmaster.worker.config import celery + + +class TransferJobManager: + def __init__(self, settings: Settings): + self.scheduler = AsyncIOScheduler(timezone=settings.TZ) + self.scheduler.add_jobstore("sqlalchemy", url=settings.database.sync_url) + self.settings = settings + + def update_jobs(self, transfers: list[Transfer]) -> None: + for transfer in transfers: + job_id = str(transfer.id) + existing_job = self.scheduler.get_job(job_id) + + if not transfer.is_scheduled or transfer.is_deleted: + if existing_job: + self.scheduler.remove_job(job_id) + continue + + if existing_job: + self.scheduler.modify_job( + job_id=job_id, + trigger=CronTrigger.from_crontab(transfer.schedule), + misfire_grace_time=self.settings.SCHEDULER_MISFIRE_GRACE_TIME, + args=(transfer.id,), + ) + else: + self.scheduler.add_job( + func=TransferJobManager.send_job_to_celery, + id=job_id, + trigger=CronTrigger.from_crontab(transfer.schedule), + misfire_grace_time=self.settings.SCHEDULER_MISFIRE_GRACE_TIME, + args=(transfer.id,), + ) + + @staticmethod + async def send_job_to_celery(transfer_id: int) -> None: + """ + Do not pass additional arguments like settings, + otherwise they will be serialized in jobs table. 
+ """ + settings = Settings() + + async with get_async_session(settings) as session: + unit_of_work = UnitOfWork(session=session, settings=settings) + + transfer = await unit_of_work.transfer.read_by_id(transfer_id) + credentials_source = await unit_of_work.credentials.read(transfer.source_connection_id) + credentials_target = await unit_of_work.credentials.read(transfer.target_connection_id) + + async with unit_of_work: + run = await unit_of_work.run.create( + transfer_id=transfer_id, + source_creds=ReadAuthDataSchema(auth_data=credentials_source).dict(), + target_creds=ReadAuthDataSchema(auth_data=credentials_target).dict(), + type=RunType.SCHEDULED, + ) + + try: + await asyncio.to_thread( + celery.send_task, + "run_transfer_task", + kwargs={"run_id": run.id}, + queue=transfer.queue.name, + ) + except KombuError as e: + async with unit_of_work: + await unit_of_work.run.update(run_id=run.id, status=Status.FAILED) + raise CannotConnectToTaskQueueError(run_id=run.id) from e diff --git a/syncmaster/scheduler/utils.py b/syncmaster/scheduler/utils.py new file mode 100644 index 00000000..3899c040 --- /dev/null +++ b/syncmaster/scheduler/utils.py @@ -0,0 +1,11 @@ +# SPDX-FileCopyrightText: 2023-2024 MTS PJSC +# SPDX-License-Identifier: Apache-2.0 +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine + +from syncmaster.settings import Settings + + +def get_async_session(settings: Settings) -> AsyncSession: + engine = create_async_engine(url=settings.database.sync_url) + session_factory = async_sessionmaker(bind=engine, class_=AsyncSession, expire_on_commit=False) + return session_factory() diff --git a/syncmaster/schemas/v1/transfers/run.py b/syncmaster/schemas/v1/transfers/run.py index 920f72cd..7439e599 100644 --- a/syncmaster/schemas/v1/transfers/run.py +++ b/syncmaster/schemas/v1/transfers/run.py @@ -6,7 +6,7 @@ from pydantic import BaseModel -from syncmaster.db.models import Status +from syncmaster.db.models import RunType, Status from syncmaster.schemas.v1.page import PageSchema @@ -17,6 +17,7 @@ class ShortRunSchema(BaseModel): ended_at: datetime | None = None status: Status log_url: str | None = None + type: RunType class Config: from_attributes = True diff --git a/syncmaster/settings/__init__.py b/syncmaster/settings/__init__.py index d5a790e9..19aa9995 100644 --- a/syncmaster/settings/__init__.py +++ b/syncmaster/settings/__init__.py @@ -44,11 +44,15 @@ class Settings(BaseSettings): SYNCMASTER__SERVER__DEBUG=True """ - # TODO: move settings to corresponding classes + # TODO: move settings to corresponding classes (scheduler also) SECRET_KEY: str = "secret" SECURITY_ALGORITHM: str = "HS256" CRYPTO_KEY: str = "UBgPTioFrtH2unlC4XFDiGf5sYfzbdSf_VgiUSaQc94=" + TZ: str = "UTC" + SCHEDULER_TRANSFER_FETCHING_TIMEOUT: int = 180 # seconds + SCHEDULER_MISFIRE_GRACE_TIME: int = 300 # seconds + TOKEN_EXPIRED_TIME: int = 60 * 60 * 10 # 10 hours CREATE_SPARK_SESSION_FUNCTION: ImportString = "syncmaster.worker.spark.get_worker_spark_session" diff --git a/syncmaster/settings/log/plain.yml b/syncmaster/settings/log/plain.yml index 4deaad92..ce05751a 100644 --- a/syncmaster/settings/log/plain.yml +++ b/syncmaster/settings/log/plain.yml @@ -39,7 +39,7 @@ loggers: handlers: [main] level: INFO propagate: false - celery: - level: DEBUG - handlers: [celery] - propagate: false + celery: + level: DEBUG + handlers: [celery] + propagate: false diff --git a/tests/conftest.py b/tests/conftest.py index c6183415..8304f8d7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ 
-31,6 +31,8 @@ "tests.test_unit.test_transfers.transfer_fixtures", "tests.test_unit.test_runs.run_fixtures", "tests.test_unit.test_connections.connection_fixtures", + "tests.test_unit.test_scheduler.scheduler_fixtures", + "tests.test_integration.test_scheduler.scheduler_fixtures", ] diff --git a/tests/test_integration/celery_test.py b/tests/test_integration/celery_test.py new file mode 100644 index 00000000..2991ae27 --- /dev/null +++ b/tests/test_integration/celery_test.py @@ -0,0 +1,3 @@ +from syncmaster.worker.config import celery + +celery.conf.update(imports=list(celery.conf.imports) + ["tests.test_integration.test_scheduler.test_task"]) diff --git a/tests/test_integration/test_scheduler/__init__.py b/tests/test_integration/test_scheduler/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_integration/test_scheduler/scheduler_fixtures/__init__.py b/tests/test_integration/test_scheduler/scheduler_fixtures/__init__.py new file mode 100644 index 00000000..a0781350 --- /dev/null +++ b/tests/test_integration/test_scheduler/scheduler_fixtures/__init__.py @@ -0,0 +1,7 @@ +from tests.test_integration.test_scheduler.scheduler_fixtures.mocker_fixtures import ( + mock_add_job, + mock_send_task_to_tick, +) +from tests.test_integration.test_scheduler.scheduler_fixtures.transfer_fixture import ( + group_transfer_integration_mock, +) diff --git a/tests/test_integration/test_scheduler/scheduler_fixtures/mocker_fixtures.py b/tests/test_integration/test_scheduler/scheduler_fixtures/mocker_fixtures.py new file mode 100644 index 00000000..9380bff3 --- /dev/null +++ b/tests/test_integration/test_scheduler/scheduler_fixtures/mocker_fixtures.py @@ -0,0 +1,33 @@ +import asyncio + +import pytest +from apscheduler.triggers.cron import CronTrigger +from pytest_mock import MockerFixture, MockType + +from syncmaster.scheduler.transfer_job_manager import TransferJobManager +from syncmaster.worker.config import celery + + +@pytest.fixture +def mock_send_task_to_tick(mocker: MockerFixture) -> MockType: + original_to_thread = asyncio.to_thread + return mocker.patch( + "asyncio.to_thread", + new=lambda func, *args, **kwargs: original_to_thread(celery.send_task, "tick", *args[1:], **kwargs), + ) + + +@pytest.fixture +def mock_add_job(mocker: MockerFixture, transfer_job_manager: TransferJobManager) -> MockType: + original_add_job = transfer_job_manager.scheduler.add_job + return mocker.patch.object( + transfer_job_manager.scheduler, + "add_job", + side_effect=lambda func, id, trigger, misfire_grace_time, args: original_add_job( + func=func, + id=id, + trigger=CronTrigger(second="*"), + misfire_grace_time=misfire_grace_time, + args=args, + ), + ) diff --git a/tests/test_integration/test_scheduler/scheduler_fixtures/transfer_fixture.py b/tests/test_integration/test_scheduler/scheduler_fixtures/transfer_fixture.py new file mode 100644 index 00000000..d88f4fdc --- /dev/null +++ b/tests/test_integration/test_scheduler/scheduler_fixtures/transfer_fixture.py @@ -0,0 +1,140 @@ +from collections.abc import AsyncGenerator + +import pytest_asyncio +from sqlalchemy.ext.asyncio import AsyncSession + +from syncmaster.backend.api.v1.auth.utils import sign_jwt +from syncmaster.db.repositories.utils import decrypt_auth_data +from syncmaster.settings import Settings +from tests.mocks import ( + MockConnection, + MockCredentials, + MockGroup, + MockTransfer, + MockUser, + UserTestRoles, +) +from tests.test_unit.conftest import create_group_member +from tests.test_unit.utils import ( + create_connection, + 
create_credentials, + create_group, + create_queue, + create_transfer, + create_user, +) + + +@pytest_asyncio.fixture +async def group_transfer_integration_mock( + session: AsyncSession, + settings: Settings, + create_connection_data: dict | None, + create_transfer_data: dict | None, +) -> AsyncGenerator[MockTransfer, None]: + group_owner = await create_user( + session=session, + username="group_transfer_owner", + is_active=True, + ) + group = await create_group( + session=session, + name="group_for_group_transfer", + owner_id=group_owner.id, + ) + + queue = await create_queue( + session=session, + name="test_queue", + group_id=group.id, + ) + + members: list[MockUser] = [] + for username in ( + "transfer_group_member_maintainer", + "transfer_group_member_developer", + "transfer_group_member_guest", + ): + members.append( + await create_group_member( + username=username, + group_id=group.id, + session=session, + settings=settings, + ), + ) + + await session.commit() + mock_group = MockGroup( + group=group, + owner=MockUser( + user=group_owner, + auth_token=sign_jwt(group_owner.id, settings), + role=UserTestRoles.Owner, + ), + members=members, + ) + + source_connection = await create_connection( + session=session, + name="group_transfer_source_connection", + group_id=group.id, + data=create_connection_data, + ) + source_connection_creds = await create_credentials( + session=session, + settings=settings, + connection_id=source_connection.id, + ) + target_connection = await create_connection( + session=session, + name="group_transfer_target_connection", + group_id=group.id, + data=create_connection_data, + ) + target_connection_creds = await create_credentials( + session=session, + settings=settings, + connection_id=target_connection.id, + ) + + transfer = await create_transfer( + session=session, + name="group_transfer", + group_id=group.id, + source_connection_id=source_connection.id, + target_connection_id=target_connection.id, + queue_id=queue.id, + source_params=create_transfer_data, + target_params=create_transfer_data, + ) + + yield MockTransfer( + transfer=transfer, + source_connection=MockConnection( + connection=source_connection, + owner_group=mock_group, + credentials=MockCredentials( + value=decrypt_auth_data(source_connection_creds.value, settings=settings), + connection_id=source_connection.id, + ), + ), + target_connection=MockConnection( + connection=target_connection, + owner_group=mock_group, + credentials=MockCredentials( + value=decrypt_auth_data(target_connection_creds.value, settings=settings), + connection_id=target_connection.id, + ), + ), + owner_group=mock_group, + ) + await session.delete(transfer) + await session.delete(source_connection) + await session.delete(target_connection) + await session.delete(group) + await session.delete(group_owner) + await session.delete(queue) + for member in members: + await session.delete(member.user) + await session.commit() diff --git a/tests/test_integration/test_scheduler/test_scheduler.py b/tests/test_integration/test_scheduler/test_scheduler.py new file mode 100644 index 00000000..cbf6ad36 --- /dev/null +++ b/tests/test_integration/test_scheduler/test_scheduler.py @@ -0,0 +1,51 @@ +import asyncio + +import pytest +from pytest_mock import MockType +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from syncmaster.db.models import Run, Status +from syncmaster.scheduler import TransferFetcher, TransferJobManager +from syncmaster.settings import Settings +from tests.mocks import MockTransfer + 
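+# End-to-end flow under test: TransferFetcher picks up the fixture's transfer, +# TransferJobManager schedules it with an every-second trigger (mock_add_job), +# the job enqueues the "tick" task instead of "run_transfer_task" +# (mock_send_task_to_tick), and the worker moves the Run to FINISHED.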
+pytestmark = [pytest.mark.asyncio, pytest.mark.worker, pytest.mark.scheduler_integration] + + +async def test_scheduler( + session: AsyncSession, + settings: Settings, + group_transfer_integration_mock: MockTransfer, + transfer_job_manager: TransferJobManager, + mock_send_task_to_tick: MockType, + mock_add_job: MockType, +): + group_transfer = group_transfer_integration_mock + transfer_fetcher = TransferFetcher(settings) + transfers = await transfer_fetcher.fetch_updated_jobs() + assert transfers + assert group_transfer.transfer.id in {t.id for t in transfers} + + transfer_job_manager.update_jobs(transfers) + + job = transfer_job_manager.scheduler.get_job(str(group_transfer.id)) + assert job is not None + + await asyncio.sleep(1.5) # give the every-second cron job (see mock_add_job) time to fire at least once + + run = await session.scalar( + select(Run).filter_by(transfer_id=group_transfer.id).order_by(Run.created_at.desc()), + ) + assert run is not None + assert run.status in [Status.CREATED, Status.STARTED] + + for _ in range(3): + await asyncio.sleep(2) + # refresh() re-reads the row, picking up status changes committed by the worker + await session.refresh(run) + if run.status == Status.FINISHED: + break + + assert run.status == Status.FINISHED + assert run.ended_at is not None diff --git a/tests/test_integration/test_scheduler/test_task.py b/tests/test_integration/test_scheduler/test_task.py new file mode 100644 index 00000000..b4d5d2c7 --- /dev/null +++ b/tests/test_integration/test_scheduler/test_task.py @@ -0,0 +1,28 @@ +import time +from datetime import datetime, timezone + +from sqlalchemy.orm import Session + +from syncmaster.db.models.run import Run, Status +from syncmaster.exceptions.run import RunNotFoundError +from syncmaster.worker.base import WorkerTask +from syncmaster.worker.config import celery + + +@celery.task(name="tick", bind=True, track_started=True) +def tick(self: WorkerTask, run_id: int) -> None: + with Session(self.engine) as session: + run = session.get(Run, run_id) + if run is None: + raise RunNotFoundError + + run.started_at = datetime.now(tz=timezone.utc) + run.status = Status.STARTED + session.add(run) + session.commit() + + time.sleep(2) # keep STARTED visible long enough for the test to observe it + run.status = Status.FINISHED + run.ended_at = datetime.now(tz=timezone.utc) + session.add(run) + session.commit() diff --git a/tests/test_unit/test_runs/test_create_run.py b/tests/test_unit/test_runs/test_create_run.py index d2190623..473c6190 100644 --- a/tests/test_unit/test_runs/test_create_run.py +++ b/tests/test_unit/test_runs/test_create_run.py @@ -1,9 +1,11 @@ +from unittest.mock import AsyncMock + import pytest from httpx import AsyncClient from sqlalchemy import desc, select from sqlalchemy.ext.asyncio import AsyncSession -from syncmaster.db.models import Run, Status +from syncmaster.db.models import Run, RunType, Status from syncmaster.settings import Settings from tests.mocks import MockGroup, MockTransfer, MockUser, UserTestRoles @@ -19,7 +21,8 @@ async def test_developer_plus_can_create_run_of_transfer_his_group( ) -> None: # Arrange user = group_transfer.owner_group.get_member_of_role(role_developer_plus) - mocker.patch("syncmaster.worker.config.celery.send_task") + mock_send_task = mocker.patch("syncmaster.worker.config.celery.send_task") + mock_to_thread = mocker.patch("asyncio.to_thread", new_callable=AsyncMock) run = ( await session.scalars( @@ -51,9 +54,17 @@ async def test_developer_plus_can_create_run_of_transfer_his_group( "started_at": run.started_at, "ended_at": run.ended_at,
"transfer_dump": run.transfer_dump, + "type": RunType.MANUAL, } assert result.status_code == 200 + mock_to_thread.assert_awaited_once_with( + mock_send_task, + "run_transfer_task", + kwargs={"run_id": run.id}, + queue=group_transfer.queue.name, + ) + async def test_groupless_user_cannot_create_run( client: AsyncClient, @@ -64,6 +75,7 @@ async def test_groupless_user_cannot_create_run( ) -> None: # Arrange mocker.patch("syncmaster.worker.config.celery.send_task") + mocker.patch("asyncio.to_thread", new_callable=AsyncMock) # Act result = await client.post( @@ -93,6 +105,7 @@ async def test_group_member_cannot_create_run_of_other_group_transfer( ): # Arrange mocker.patch("syncmaster.worker.config.celery.send_task") + mocker.patch("asyncio.to_thread", new_callable=AsyncMock) user = group.get_member_of_role(role_guest_plus) # Act @@ -131,7 +144,8 @@ async def test_superuser_can_create_run( settings.worker.LOG_URL_TEMPLATE = ( "https://grafana.example.com?correlation_id={{ correlation_id }}&run_id={{ run.id }}" ) - mocker.patch("syncmaster.worker.config.celery.send_task") + mock_send_task = mocker.patch("syncmaster.worker.config.celery.send_task") + mock_to_thread = mocker.patch("asyncio.to_thread", new_callable=AsyncMock) # Act result = await client.post( @@ -155,10 +169,17 @@ async def test_superuser_can_create_run( "started_at": run.started_at, "ended_at": run.ended_at, "transfer_dump": run.transfer_dump, + "type": RunType.MANUAL, } assert result.status_code == 200 assert "correlation_id" in response.get("log_url") assert "run_id" in response.get("log_url") + mock_to_thread.assert_awaited_once_with( + mock_send_task, + "run_transfer_task", + kwargs={"run_id": run.id}, + queue=group_transfer.queue.name, + ) async def test_unauthorized_user_cannot_create_run( @@ -168,6 +189,7 @@ async def test_unauthorized_user_cannot_create_run( ) -> None: # Arrange mocker.patch("syncmaster.worker.config.celery.send_task") + mocker.patch("asyncio.to_thread", new_callable=AsyncMock) # Act result = await client.post( @@ -196,6 +218,7 @@ async def test_group_member_cannot_create_run_of_unknown_transfer_error( # Arrange user = group_transfer.owner_group.get_member_of_role(role_guest_plus) mocker.patch("syncmaster.worker.config.celery.send_task") + mocker.patch("asyncio.to_thread", new_callable=AsyncMock) # Act result = await client.post( @@ -223,6 +246,7 @@ async def test_superuser_cannot_create_run_of_unknown_transfer_error( ) -> None: # Arrange mocker.patch("syncmaster.worker.config.celery.send_task") + mocker.patch("asyncio.to_thread", new_callable=AsyncMock) # Act result = await client.post( diff --git a/tests/test_unit/test_runs/test_read_run.py b/tests/test_unit/test_runs/test_read_run.py index d87d63a0..c68f3791 100644 --- a/tests/test_unit/test_runs/test_read_run.py +++ b/tests/test_unit/test_runs/test_read_run.py @@ -1,6 +1,7 @@ import pytest from httpx import AsyncClient +from syncmaster.db.models import RunType from tests.mocks import MockGroup, MockRun, MockUser, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend] @@ -29,6 +30,7 @@ async def test_developer_plus_can_read_run( "ended_at": group_run.ended_at, "log_url": group_run.log_url, "transfer_dump": group_run.transfer_dump, + "type": RunType.MANUAL, } assert result.status_code == 200 @@ -100,6 +102,7 @@ async def test_superuser_can_read_runs( "ended_at": group_run.ended_at, "log_url": group_run.log_url, "transfer_dump": group_run.transfer_dump, + "type": RunType.MANUAL, } assert result.status_code == 200 diff --git 
a/tests/test_unit/test_runs/test_read_runs.py b/tests/test_unit/test_runs/test_read_runs.py index 0e577209..d50e40ff 100644 --- a/tests/test_unit/test_runs/test_read_runs.py +++ b/tests/test_unit/test_runs/test_read_runs.py @@ -4,7 +4,7 @@ import pytest from httpx import AsyncClient -from syncmaster.db.models import Status +from syncmaster.db.models import RunType, Status from tests.mocks import MockGroup, MockRun, MockTransfer, MockUser, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend] @@ -52,6 +52,7 @@ async def test_developer_plus_can_read_runs_of_the_transfer( "started_at": group_run.started_at, "ended_at": group_run.ended_at, "log_url": group_run.log_url, + "type": RunType.MANUAL, }, ], } @@ -112,6 +113,7 @@ async def test_superuser_can_read_runs( "started_at": group_run.started_at, "ended_at": group_run.ended_at, "log_url": group_run.log_url, + "type": RunType.MANUAL, }, ], } diff --git a/tests/test_unit/test_runs/test_stop_run.py b/tests/test_unit/test_runs/test_stop_run.py index bf50448c..f5e5df4e 100644 --- a/tests/test_unit/test_runs/test_stop_run.py +++ b/tests/test_unit/test_runs/test_stop_run.py @@ -2,7 +2,7 @@ from httpx import AsyncClient from sqlalchemy.ext.asyncio import AsyncSession -from syncmaster.db.models import Status +from syncmaster.db.models import RunType, Status from tests.mocks import MockGroup, MockRun, MockUser, UserTestRoles pytestmark = [pytest.mark.asyncio, pytest.mark.backend] @@ -36,6 +36,7 @@ async def test_developer_plus_can_stop_run_of_transfer_his_group( "started_at": group_run.started_at, "ended_at": group_run.ended_at, "transfer_dump": group_run.transfer_dump, + "type": RunType.MANUAL, } @@ -112,6 +113,7 @@ async def test_superuser_can_stop_run( "started_at": group_run.started_at, "ended_at": group_run.ended_at, "transfer_dump": group_run.transfer_dump, + "type": RunType.MANUAL, } assert result.status_code == 200 diff --git a/tests/test_unit/test_scheduler/__init__.py b/tests/test_unit/test_scheduler/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_unit/test_scheduler/scheduler_fixtures/__init__.py b/tests/test_unit/test_scheduler/scheduler_fixtures/__init__.py new file mode 100644 index 00000000..a4bc3903 --- /dev/null +++ b/tests/test_unit/test_scheduler/scheduler_fixtures/__init__.py @@ -0,0 +1,6 @@ +from tests.test_unit.test_scheduler.scheduler_fixtures.transfer_fetcher_fixture import ( + transfer_fetcher, +) +from tests.test_unit.test_scheduler.scheduler_fixtures.transfer_job_manager_fixture import ( + transfer_job_manager, +) diff --git a/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_fetcher_fixture.py b/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_fetcher_fixture.py new file mode 100644 index 00000000..3e0169b7 --- /dev/null +++ b/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_fetcher_fixture.py @@ -0,0 +1,9 @@ +import pytest + +from syncmaster.scheduler.transfer_fetcher import TransferFetcher +from syncmaster.settings import Settings + + +@pytest.fixture +def transfer_fetcher(settings: Settings) -> TransferFetcher: + return TransferFetcher(settings) diff --git a/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_job_manager_fixture.py b/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_job_manager_fixture.py new file mode 100644 index 00000000..3ee2e2f5 --- /dev/null +++ b/tests/test_unit/test_scheduler/scheduler_fixtures/transfer_job_manager_fixture.py @@ -0,0 +1,20 @@ +from collections.abc import AsyncGenerator + +import 
pytest_asyncio +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession + +from syncmaster.scheduler.transfer_job_manager import TransferJobManager +from syncmaster.settings import Settings + + +@pytest_asyncio.fixture +async def transfer_job_manager(session: AsyncSession, settings: Settings) -> AsyncGenerator[TransferJobManager, None]: + transfer_job_manager = TransferJobManager(settings) + transfer_job_manager.scheduler.start() + + yield transfer_job_manager + + transfer_job_manager.scheduler.shutdown() + await session.execute(text("TRUNCATE TABLE apscheduler_jobs")) + await session.commit() diff --git a/tests/test_unit/test_scheduler/test_transfer_fetcher.py b/tests/test_unit/test_scheduler/test_transfer_fetcher.py new file mode 100644 index 00000000..bfedbda9 --- /dev/null +++ b/tests/test_unit/test_scheduler/test_transfer_fetcher.py @@ -0,0 +1,57 @@ +from datetime import datetime, timedelta + +import pytest +from sqlalchemy.ext.asyncio import AsyncSession + +from syncmaster.scheduler.transfer_fetcher import TransferFetcher +from tests.mocks import MockTransfer + +pytestmark = [pytest.mark.asyncio, pytest.mark.scheduler] + + +async def test_fetch_jobs_without_last_updated_at( + session: AsyncSession, + transfer_fetcher: TransferFetcher, + group_transfers: list[MockTransfer], +): + # Arrange + transfer_fetcher.last_updated_at = None + + # Act + fetched_transfers = await transfer_fetcher.fetch_updated_jobs() + + # Assert + assert len(fetched_transfers) == len(group_transfers) + assert {t.id for t in fetched_transfers} == {t.transfer.id for t in group_transfers} + + +async def test_fetch_jobs_with_outdated_last_updated_at( + session: AsyncSession, + transfer_fetcher: TransferFetcher, + group_transfers: list[MockTransfer], +): + # Arrange + transfer_fetcher.last_updated_at = datetime.now() - timedelta(days=1) + wanted_transfers = [t for t in group_transfers if t.transfer.updated_at > transfer_fetcher.last_updated_at] + + # Act + fetched_transfers = await transfer_fetcher.fetch_updated_jobs() + + # Assert + assert len(fetched_transfers) == len(wanted_transfers) + assert {t.id for t in fetched_transfers} == {t.transfer.id for t in wanted_transfers} + + +async def test_fetch_jobs_with_up_to_date_last_updated_at( + session: AsyncSession, + transfer_fetcher: TransferFetcher, + group_transfers: list[MockTransfer], +): + # Arrange + transfer_fetcher.last_updated_at = datetime.now() + + # Act + fetched_transfers = await transfer_fetcher.fetch_updated_jobs() + + # Assert + assert not fetched_transfers diff --git a/tests/test_unit/test_scheduler/test_transfer_job_manager.py b/tests/test_unit/test_scheduler/test_transfer_job_manager.py new file mode 100644 index 00000000..486db41b --- /dev/null +++ b/tests/test_unit/test_scheduler/test_transfer_job_manager.py @@ -0,0 +1,126 @@ +from unittest.mock import AsyncMock + +import pytest +from apscheduler.triggers.cron import CronTrigger +from kombu.exceptions import KombuError +from pytest_mock import MockerFixture +from sqlalchemy import desc, select +from sqlalchemy.ext.asyncio import AsyncSession + +from syncmaster.db.models import Run, Status +from syncmaster.exceptions.run import CannotConnectToTaskQueueError +from syncmaster.scheduler.transfer_job_manager import TransferJobManager +from tests.mocks import MockTransfer + +pytestmark = [pytest.mark.asyncio, pytest.mark.scheduler] + + +async def test_adding_new_jobs(transfer_job_manager: TransferJobManager, group_transfers: list[MockTransfer]): + # Act + 
transfer_job_manager.update_jobs(group_transfers) + + # Assert + for transfer in group_transfers: + job = transfer_job_manager.scheduler.get_job(str(transfer.transfer.id)) + assert job is not None + assert job.trigger.fields == CronTrigger.from_crontab(transfer.transfer.schedule).fields + + +async def test_modifying_existing_jobs(transfer_job_manager: TransferJobManager, group_transfers: list[MockTransfer]): + # Arrange + transfer_job_manager.update_jobs(group_transfers) + modified_transfer = group_transfers[0] + expected_cron = "0 2 * * *" + modified_transfer.transfer.schedule = expected_cron + + # Act + transfer_job_manager.update_jobs([modified_transfer]) + job = transfer_job_manager.scheduler.get_job(str(modified_transfer.transfer.id)) + + # Assert + assert job is not None + assert job.trigger.fields == CronTrigger.from_crontab(expected_cron).fields + + +@pytest.mark.parametrize( + "transfer_attr, expected_state, is_existing_job", + [ + ("is_deleted", True, True), + ("is_deleted", True, False), + ("is_scheduled", False, True), + ("is_scheduled", False, False), + ], +) +async def test_handling_irrelevant_jobs( + transfer_job_manager: TransferJobManager, + group_transfers: list[MockTransfer], + transfer_attr: str, + expected_state: bool, + is_existing_job: bool, +): + # Arrange + if is_existing_job: + transfer_job_manager.update_jobs(group_transfers) + + test_transfer = group_transfers[0] + setattr(test_transfer.transfer, transfer_attr, expected_state) + + # Act + transfer_job_manager.update_jobs([test_transfer]) + job = transfer_job_manager.scheduler.get_job(str(test_transfer.transfer.id)) + + # Assert + assert job is None + + +async def test_send_job_to_celery_with_success( + mocker: MockerFixture, + session: AsyncSession, + transfer_job_manager: TransferJobManager, + group_transfer: MockTransfer, +): + # Arrange + mock_send_task = mocker.patch("syncmaster.worker.config.celery.send_task") + mock_to_thread = mocker.patch("asyncio.to_thread", new_callable=AsyncMock) + + # Act + await transfer_job_manager.send_job_to_celery(group_transfer.transfer.id) + run = ( + await session.scalars( + select(Run).filter_by(transfer_id=group_transfer.id, status=Status.CREATED).order_by(desc(Run.created_at)), + ) + ).first() + + # Assert + mock_to_thread.assert_awaited_once_with( + mock_send_task, + "run_transfer_task", + kwargs={"run_id": run.id}, + queue=group_transfer.queue.name, + ) + + +async def test_send_job_to_celery_with_failure( + mocker: MockerFixture, + session: AsyncSession, + transfer_job_manager: TransferJobManager, + group_transfer: MockTransfer, +): + # Arrange + mocker.patch("syncmaster.worker.config.celery.send_task") + mocker.patch("asyncio.to_thread", new_callable=AsyncMock, side_effect=KombuError) + + # Act & Assert + with pytest.raises(CannotConnectToTaskQueueError) as exc_info: + await transfer_job_manager.send_job_to_celery(group_transfer.transfer.id) + + assert exc_info.value.run_id is not None + + run = ( + await session.scalars( + select(Run).filter_by(transfer_id=group_transfer.id).order_by(desc(Run.created_at)), + ) + ).first() + + assert run is not None + assert run.status == Status.FAILED diff --git a/tests/test_unit/utils.py b/tests/test_unit/utils.py index e112a32a..2681eb5c 100644 --- a/tests/test_unit/utils.py +++ b/tests/test_unit/utils.py @@ -154,7 +154,7 @@ async def create_transfer( source_params: dict | None = None, target_params: dict | None = None, is_scheduled: bool = True, - schedule: str = "0 0 * * *", + schedule: str = "* * * * *", strategy_params: dict | 
None = None, description: str = "", ) -> Transfer:
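
For reviewers new to APScheduler, the add/modify/remove pattern used by TransferJobManager.update_jobs boils down to a handful of scheduler calls. Below is a minimal, self-contained sketch, assuming APScheduler 3.x; sync_job and tick are illustrative stand-ins, not SyncMaster code:

import asyncio

from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger


async def tick(job_id: str) -> None:
    # Stand-in for send_job_to_celery: a real job would enqueue a Celery task.
    print(f"job {job_id} fired")


def sync_job(scheduler: AsyncIOScheduler, job_id: str, crontab: str, active: bool) -> None:
    # Mirror one row's desired state into the scheduler, keyed by a stable job id.
    existing = scheduler.get_job(job_id)
    if not active:
        # Unscheduled (or deleted) entries lose their job.
        if existing:
            scheduler.remove_job(job_id)
        return
    trigger = CronTrigger.from_crontab(crontab)  # 5-field cron, e.g. "0 2 * * *"
    if existing:
        scheduler.modify_job(job_id=job_id, trigger=trigger, args=(job_id,))
    else:
        scheduler.add_job(func=tick, id=job_id, trigger=trigger, args=(job_id,))


async def main() -> None:
    scheduler = AsyncIOScheduler(timezone="UTC")
    scheduler.start()
    sync_job(scheduler, "42", "* * * * *", active=True)   # create: every minute
    sync_job(scheduler, "42", "0 2 * * *", active=True)   # modify: daily at 02:00
    sync_job(scheduler, "42", "0 2 * * *", active=False)  # remove
    await asyncio.sleep(1)  # triggers fire at most once a minute, so no output is expected here
    scheduler.shutdown()


asyncio.run(main())

Keying jobs by the transfer id is what makes update_jobs idempotent: re-running it against the same rows converges on the same scheduler state, whether or not the jobs already exist.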